forge/cmd/test/mod.rs

use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
use crate::{
    decode::decode_console_logs,
    gas_report::GasReport,
    multi_runner::matches_contract,
    result::{SuiteResult, TestOutcome, TestStatus},
    traces::{
        debug::{ContractSources, DebugTraceIdentifier},
        decode_trace_arena, folded_stack_trace,
        identifier::SignaturesIdentifier,
        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
    },
    MultiContractRunner, MultiContractRunnerBuilder, TestFilter,
};
use alloy_primitives::U256;
use chrono::Utc;
use clap::{Parser, ValueHint};
use eyre::{bail, Context, OptionExt, Result};
use foundry_block_explorers::EtherscanApiVersion;
use foundry_cli::{
    opts::{BuildOpts, GlobalArgs},
    utils::{self, LoadConfig},
};
use foundry_common::{compile::ProjectCompiler, evm::EvmArgs, fs, shell, TestFunctionExt};
use foundry_compilers::{
    artifacts::output_selection::OutputSelection,
    compilers::{
        multi::{MultiCompiler, MultiCompilerLanguage},
        Language,
    },
    utils::source_files_iter,
    ProjectCompileOutput,
};
use foundry_config::{
    figment,
    figment::{
        value::{Dict, Map},
        Metadata, Profile, Provider,
    },
    filter::GlobMatcher,
    Config,
};
use foundry_debugger::Debugger;
use foundry_evm::traces::identifier::TraceIdentifiers;
use regex::Regex;
use std::{
    collections::{BTreeMap, BTreeSet},
    fmt::Write,
    path::PathBuf,
    sync::{mpsc::channel, Arc},
    time::{Duration, Instant},
};
use yansi::Paint;

mod filter;
mod summary;
use crate::{result::TestKind, traces::render_trace_arena_inner};
pub use filter::FilterArgs;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
use summary::{format_invariant_metrics_table, TestSummaryReport};

// Loads the project's figment and merges the build CLI arguments into it.
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
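// Note: as far as I understand the macro, it generates the `figment` conversions for `TestArgs`,
// so the CLI arguments below (including the nested `build` and `evm` options) are layered on top
// of values loaded from the config file when the config is resolved in `execute_tests`.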

/// CLI arguments for `forge test`.
#[derive(Clone, Debug, Parser)]
#[command(next_help_heading = "Test options")]
pub struct TestArgs {
    // Include global options for users of this struct.
    #[command(flatten)]
    pub global: GlobalArgs,

    /// The contract file you want to test; this is a shortcut for `--match-path`.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    /// Run a single test in the debugger.
    ///
    /// The matching test will be opened in the debugger regardless of the outcome of the test.
    ///
    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
    ///
    /// A flame graph is used to visualize which functions or operations within the smart contract
    /// are consuming the most gas overall in a sorted manner.
    #[arg(long)]
    flamegraph: bool,

    /// Generate a flamechart for a single test. Implies `--decode-internal`.
    ///
    /// A flame chart shows the gas usage over time, illustrating when each function is
    /// called (execution order) and how much gas it consumes at each point in the timeline.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long)]
    decode_internal: bool,

    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    /// Print a gas report.
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    /// Check gas snapshots against previous runs.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    /// Enable/disable recording of gas snapshot results.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    /// Exit with code 0 even if a test fails.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    /// Suppress successful test traces and show only traces for failures.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    /// Output test results as JUnit XML report.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    /// Stop running tests after the first failure.
    #[arg(long)]
    pub fail_fast: bool,

    /// The Etherscan (or equivalent) API key.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    /// The Etherscan API version.
    #[arg(long, env = "ETHERSCAN_API_VERSION", value_name = "VERSION")]
    etherscan_api_version: Option<EtherscanApiVersion>,

    /// List tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    /// Set the seed used to generate randomness during your fuzz runs.
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

    /// The number of fuzz runs.
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    /// Timeout for each fuzz run in seconds.
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    /// File to rerun fuzz failures from.
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    /// Show test execution progress.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    /// Re-run recorded test failures from the last run.
    /// If no failures were recorded, a regular test run is performed.
    #[arg(long)]
    pub rerun: bool,

    /// Print test summary table.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    /// Print detailed test summary table.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    #[command(flatten)]
    filter: FilterArgs,

    #[command(flatten)]
    evm: EvmArgs,

    #[command(flatten)]
    pub build: BuildOpts,

    #[command(flatten)]
    pub watch: WatchArgs,
}
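
// A few illustrative invocations combining the options above; test names and paths used here are
// placeholders, and the list is not exhaustive:
//
//     forge test --gas-report
//     forge test --debug --match-test testTransfer
//     forge test --flamegraph --match-test testTransfer
//     forge test --rerun --fail-fast
//     forge test --summary --detailed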

impl TestArgs {
    pub async fn run(self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.execute_tests().await
    }

    /// Returns sources which include any tests to be executed.
    /// If no filters are provided, sources are filtered by the existence of test/invariant methods
    /// in them; if filters are provided, sources are additionally filtered by them.
    pub fn get_sources_to_compile(
        &self,
        config: &Config,
        filter: &ProjectPathsAwareFilter,
    ) -> Result<BTreeSet<PathBuf>> {
        let mut project = config.create_project(true, true)?;
        project.update_output_selection(|selection| {
            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
        });

        let output = project.compile()?;

        if output.has_compiler_errors() {
            sh_println!("{output}")?;
            eyre::bail!("Compilation failed");
        }

        // ABIs of all sources
        let abis = output
            .into_artifacts()
            .filter_map(|(id, artifact)| artifact.abi.map(|abi| (id, abi)))
            .collect::<BTreeMap<_, _>>();

        // Filter sources by their ABIs and contract names.
        let mut test_sources = abis
            .iter()
            .filter(|(id, abi)| matches_contract(id, abi, filter))
            .map(|(id, _)| id.source.clone())
            .collect::<BTreeSet<_>>();

        if test_sources.is_empty() {
            if filter.is_empty() {
                sh_println!(
                    "No tests found in project! \
                        Forge looks for functions that start with `test`."
                )?;
            } else {
                sh_println!("No tests match the provided pattern:")?;
                sh_print!("{filter}")?;

                // Try to suggest a test when there's no match
                if let Some(test_pattern) = &filter.args().test_pattern {
                    let test_name = test_pattern.as_str();
                    let candidates = abis
                        .into_iter()
                        .filter(|(id, _)| {
                            filter.matches_path(&id.source) && filter.matches_contract(&id.name)
                        })
                        .flat_map(|(_, abi)| abi.functions.into_keys())
                        .collect::<Vec<_>>();
                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
                        sh_println!("\nDid you mean `{suggestion}`?")?;
                    }
                }
            }

            eyre::bail!("No tests to run");
        }

        // Always recompile all sources to ensure that `getCode` cheatcode can use any artifact.
        test_sources.extend(source_files_iter(
            &project.paths.sources,
            MultiCompilerLanguage::FILE_EXTENSIONS,
        ));

        Ok(test_sources)
    }

    /// Executes all the tests in the project.
    ///
    /// This will trigger the build process first. On success, all test contracts that match the
    /// configured filter will be executed.
    ///
    /// Returns the test results for all matching tests.
    pub async fn execute_tests(mut self) -> Result<TestOutcome> {
        // Merge all configs.
        let (mut config, mut evm_opts) = self.load_config_and_evm_opts()?;

        // Explicitly enable isolation for gas reports for more accurate gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Install missing dependencies.
        if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings {
            // Need to re-configure here to also catch additional remappings.
            config = self.load_config()?;
        }

        // Set up the project.
        let project = config.project()?;

        let filter = self.filter(&config)?;
        trace!(target: "forge::test", ?filter, "using filter");

        let sources_to_compile = self.get_sources_to_compile(&config, &filter)?;

        let compiler = ProjectCompiler::new()
            .dynamic_test_linking(config.dynamic_test_linking)
            .quiet(shell::is_json() || self.junit)
            .files(sources_to_compile);

        let output = compiler.compile(&project)?;

        // Create test options from general project settings and compiler output.
        let project_root = &project.paths.root;

        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Determine print verbosity and executor verbosity.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Enable internal tracing for a more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Prepare the test builder.
        let config = Arc::new(config);
        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .odyssey(evm_opts.odyssey)
            .build::<MultiCompiler>(project_root, &output, env, evm_opts)?;

        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests(runner, config, verbosity, &filter, &output).await?;

        if should_draw {
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                options.flame_chart = true;
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get the first non-empty suite result. There will be only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(&output, project.root(), Some(&libraries))?;

            // Run the debugger.
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = self.dump {
                debugger.dump_to_file(&dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }

    /// Runs all tests that match the filter predicate from a test runner.
    pub async fn run_tests(
        &self,
        mut runner: MultiContractRunner,
        config: Arc<Config>,
        verbosity: u8,
        filter: &ProjectPathsAwareFilter,
        output: &ProjectCompileOutput,
    ) -> eyre::Result<TestOutcome> {
        if self.list {
            return list(runner, filter);
        }

        trace!(target: "forge::test", "running all tests");

        // If we need to render to a serialized format, we should not print anything else to stdout.
        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();

        let num_filtered = runner.matching_test_functions(filter).count();
        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
            let action = if self.flamegraph {
                "generate a flamegraph"
            } else if self.flamechart {
                "generate a flamechart"
            } else {
                "run the debugger"
            };
            let filter = if filter.is_empty() {
                String::new()
            } else {
                format!("\n\nFilter used:\n{filter}")
            };
            eyre::bail!(
                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
                 Use --match-contract and --match-path to further limit the search.{filter}",
            );
        }

        // If exactly one test matched, we enable full tracing.
        if num_filtered == 1 && self.decode_internal {
            runner.decode_internal = InternalTraceMode::Full;
        }

        // Run tests in a non-streaming fashion and collect results for serialization.
        if !self.gas_report && !self.summary && shell::is_json() {
            let mut results = runner.test_collect(filter)?;
            results.values_mut().for_each(|suite_result| {
                for test_result in suite_result.test_results.values_mut() {
                    if verbosity >= 2 {
                        // Decode logs at level 2 and above.
                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
                    } else {
                        // Empty logs for non-verbose runs.
                        test_result.logs = vec![];
                    }
                }
            });
            sh_println!("{}", serde_json::to_string(&results)?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        if self.junit {
            let results = runner.test_collect(filter)?;
            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        let remote_chain_id = runner.evm_opts.get_remote_chain_id().await;
        let known_contracts = runner.known_contracts.clone();

        let libraries = runner.libraries.clone();

        // Run tests in a streaming fashion.
        let (tx, rx) = channel::<(String, SuiteResult)>();
        let timer = Instant::now();
        let show_progress = config.show_progress;
        let handle = tokio::task::spawn_blocking({
            let filter = filter.clone();
            move || runner.test(&filter, tx, show_progress)
        });
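        // Each completed `SuiteResult` is sent over `tx` from the blocking runner thread and
        // consumed from `rx` in the loop below, so results are printed while later suites are
        // still running.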

        // Set up trace identifiers.
        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);

        // Avoid using Etherscan for the gas report, as we decode more traces and this will be
        // expensive.
        if !self.gas_report {
            identifier = identifier.with_etherscan(&config, remote_chain_id)?;
        }

        // Build the trace decoder.
        let mut builder = CallTraceDecoderBuilder::new()
            .with_known_contracts(&known_contracts)
            .with_verbosity(verbosity);
        // Signatures are of no value for gas reports.
        if !self.gas_report {
            builder =
                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
        }

        if self.decode_internal {
            let sources =
                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
        }
        let mut decoder = builder.build();

        let mut gas_report = self.gas_report.then(|| {
            GasReport::new(
                config.gas_reports.clone(),
                config.gas_reports_ignore.clone(),
                config.gas_reports_include_tests,
            )
        });

        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();

        let mut outcome = TestOutcome::empty(self.allow_failure);

        let mut any_test_failed = false;
        for (contract_name, suite_result) in rx {
            let tests = &suite_result.test_results;

            // Clear the addresses and labels from the previous test.
            decoder.clear_addresses();

            // We identify addresses if we're going to print *any* trace or gas report.
            let identify_addresses = verbosity >= 3 ||
                self.gas_report ||
                self.debug ||
                self.flamegraph ||
                self.flamechart;

            // Print suite header.
            if !silent {
                sh_println!()?;
                for warning in &suite_result.warnings {
                    sh_warn!("{warning}")?;
                }
                if !tests.is_empty() {
                    let len = tests.len();
                    let tests = if len > 1 { "tests" } else { "test" };
                    sh_println!("Ran {len} {tests} for {contract_name}")?;
                }
            }

            // Process individual test results, printing logs and traces when necessary.
            for (name, result) in tests {
                let show_traces =
                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
                if !silent {
                    sh_println!("{}", result.short_result(name))?;

                    // Display invariant metrics if this is an invariant test.
                    if let TestKind::Invariant { metrics, .. } = &result.kind {
                        if !metrics.is_empty() {
                            let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
                        }
                    }

                    // We only display logs at level 2 and above.
                    if verbosity >= 2 && show_traces {
                        // We only decode logs from Hardhat and DS-style console events.
                        let console_logs = decode_console_logs(&result.logs);
                        if !console_logs.is_empty() {
                            sh_println!("Logs:")?;
                            for log in console_logs {
                                sh_println!("  {log}")?;
                            }
                            sh_println!()?;
                        }
                    }
                }

                // We shouldn't break out of the outer loop directly here so that we finish
                // processing the remaining tests and print the suite summary.
                any_test_failed |= result.status == TestStatus::Failure;

                // Clear the addresses and labels from previous runs.
                decoder.clear_addresses();
                decoder
                    .labels
                    .extend(result.labeled_addresses.iter().map(|(k, v)| (*k, v.clone())));

                // Identify addresses and decode traces.
                let mut decoded_traces = Vec::with_capacity(result.traces.len());
                for (kind, arena) in &mut result.traces.clone() {
                    if identify_addresses {
                        decoder.identify(arena, &mut identifier);
                    }

                    // verbosity:
                    // - 0..3: nothing
                    // - 3: only display traces for failed tests
                    // - 4: also display the setup trace for failed tests
                    // - 5..: display all traces for all tests, including storage changes
                    let should_include = match kind {
                        TraceKind::Execution => {
                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
                        }
                        TraceKind::Setup => {
                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
                        }
                        TraceKind::Deployment => false,
                    };

                    if should_include {
                        decode_trace_arena(arena, &decoder).await;
                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
                    }
                }

                if !silent && show_traces && !decoded_traces.is_empty() {
                    sh_println!("Traces:")?;
                    for trace in &decoded_traces {
                        sh_println!("{trace}")?;
                    }
                }

                if let Some(gas_report) = &mut gas_report {
                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;

                    for trace in &result.gas_report_traces {
                        decoder.clear_addresses();

                        // Re-execute setup and deployment traces to collect identities created in
                        // setUp and constructor.
                        for (kind, arena) in &result.traces {
                            if !matches!(kind, TraceKind::Execution) {
                                decoder.identify(arena, &mut identifier);
                            }
                        }

                        for arena in trace {
                            decoder.identify(arena, &mut identifier);
                            gas_report.analyze([arena], &decoder).await;
                        }
                    }
                }

                // Collect and merge gas snapshots.
                for (group, new_snapshots) in &result.gas_snapshots {
                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
                }
            }

            // Write gas snapshots to disk if any were collected.
            if !gas_snapshots.is_empty() {
                // By default `gas_snapshot_check` is set to `false` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
                // - Pass `--gas-snapshot-check=true` as a CLI argument.
                // - Set `gas_snapshot_check = true` in the config.
                //
                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
                // and the environment variable, disabling the check if `false` is passed.
                //
                // Exits early with code 1 if differences are found.
                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
                    let differences_found = gas_snapshots.clone().into_iter().fold(
                        false,
                        |mut found, (group, snapshots)| {
                            // If the snapshot file doesn't exist, we can't compare, so skip this
                            // group without discarding differences found so far.
                            if !&config.snapshots.join(format!("{group}.json")).exists() {
                                return found;
                            }

                            let previous_snapshots: BTreeMap<String, String> =
                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
                                    .expect("Failed to read snapshots from disk");

                            let diff: BTreeMap<_, _> = snapshots
                                .iter()
                                .filter_map(|(k, v)| {
                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
                                        if previous_snapshot != v {
                                            Some((
                                                k.clone(),
                                                (previous_snapshot.clone(), v.clone()),
                                            ))
                                        } else {
                                            None
                                        }
                                    })
                                })
                                .collect();

                            if !diff.is_empty() {
                                let _ = sh_eprintln!(
                                    "{}",
                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
                                );

                                for (key, (previous_snapshot, snapshot)) in &diff {
                                    let _ = sh_eprintln!(
                                        "{}",
                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
                                    );
                                }

                                found = true;
                            }

                            found
                        },
                    );

                    if differences_found {
                        sh_eprintln!()?;
                        eyre::bail!("Snapshots differ from previous run");
                    }
                }

                // By default `gas_snapshot_emit` is set to `true` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
                // - Set `gas_snapshot_emit = false` in the config.
                //
                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
                // and the environment variable, enabling emission if `true` is passed.
                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
                    // Create the `snapshots` directory if it doesn't exist.
                    fs::create_dir_all(&config.snapshots)?;

                    // Write gas snapshots to disk per group.
                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
                        fs::write_pretty_json_file(
                            &config.snapshots.join(format!("{group}.json")),
                            &snapshots,
                        )
                        .expect("Failed to write gas snapshots to disk");
                    });
                }
            }

            // Print suite summary.
            if !silent {
                sh_println!("{}", suite_result.summary())?;
            }

            // Add the suite result to the outcome.
            outcome.results.insert(contract_name, suite_result);

            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
            if self.fail_fast && any_test_failed {
                break;
            }
        }
        outcome.last_run_decoder = Some(decoder);
        let duration = timer.elapsed();

        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");

        if let Some(gas_report) = gas_report {
            let finalized = gas_report.finalize();
            sh_println!("{}", &finalized)?;
            outcome.gas_report = Some(finalized);
        }

        if !self.summary && !shell::is_json() {
            sh_println!("{}", outcome.summary(duration))?;
        }

        if self.summary && !outcome.results.is_empty() {
            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
            sh_println!("{}", &summary_report)?;
        }

        // Reattach the task.
        if let Err(e) = handle.await {
            match e.try_into_panic() {
                Ok(payload) => std::panic::resume_unwind(payload),
                Err(e) => return Err(e.into()),
            }
        }

        // Persist test run failures to enable replaying.
        persist_run_failures(&config, &outcome);

        Ok(outcome)
    }

    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
    ///
    /// If `--rerun` is set, loads and applies the filter persisted from the last test run's
    /// failures.
    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
        let mut filter = self.filter.clone();
        if self.rerun {
            filter.test_pattern = last_run_failures(config);
        }
        if filter.path_pattern.is_some() {
            if self.path.is_some() {
                bail!("Cannot supply both --match-path and |path|");
            }
        } else {
            filter.path_pattern = self.path.clone();
        }
        Ok(filter.merge_with_config(config))
    }

    /// Returns whether `TestArgs` was configured with `--watch`.
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }

    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        self.watch.watchexec_config(|| {
            let config = self.load_config()?;
            Ok([config.src, config.test])
        })
    }
}

impl Provider for TestArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Core Build Args Provider")
    }

    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut dict = Dict::default();

        let mut fuzz_dict = Dict::default();
        if let Some(fuzz_seed) = self.fuzz_seed {
            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
        }
        if let Some(fuzz_runs) = self.fuzz_runs {
            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
        }
        if let Some(fuzz_timeout) = self.fuzz_timeout {
            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
        }
        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
        }
        dict.insert("fuzz".to_string(), fuzz_dict.into());

        if let Some(etherscan_api_key) =
            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
        {
            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
        }

        if let Some(api_version) = &self.etherscan_api_version {
            dict.insert("etherscan_api_version".to_string(), api_version.to_string().into());
        }

        if self.show_progress {
            dict.insert("show_progress".to_string(), true.into());
        }

        Ok(Map::from([(Config::selected_profile(), dict)]))
    }
}

/// Lists all matching tests
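///
/// With the default (non-JSON) output, the listing is grouped per file and contract, roughly as
/// below; the file, contract, and test names here are only placeholders:
///
/// ```text
/// test/Counter.t.sol
///   CounterTest
///     test_Increment
///     testFuzz_SetNumber
/// ```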
fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
    let results = runner.list(filter);

    if shell::is_json() {
        sh_println!("{}", serde_json::to_string(&results)?)?;
    } else {
        for (file, contracts) in &results {
            sh_println!("{file}")?;
            for (contract, tests) in contracts {
                sh_println!("  {contract}")?;
                sh_println!("    {}\n", tests.join("\n    "))?;
            }
        }
    }
    Ok(TestOutcome::empty(false))
}

/// Load persisted filter (with last test run failures) from file.
fn last_run_failures(config: &Config) -> Option<regex::Regex> {
    match fs::read_to_string(&config.test_failures_file) {
        Ok(filter) => Some(Regex::new(&filter).unwrap()),
        Err(_) => None,
    }
}

/// Persist filter with last test run failures (only if there's any failure).
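///
/// The persisted filter is a `|`-separated alternation of the failed test names with their
/// parameter lists stripped, for example `testTransfer|testFuzz_Mint` (names here are only
/// illustrative); `last_run_failures` compiles it back into a [`Regex`] that is used as the test
/// pattern when `--rerun` is passed.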
fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
        let mut filter = String::new();
        let mut failures = outcome.failures().peekable();
        while let Some((test_name, _)) = failures.next() {
            if test_name.is_any_test() {
                if let Some(test_match) = test_name.split("(").next() {
                    filter.push_str(test_match);
                    if failures.peek().is_some() {
                        filter.push('|');
                    }
                }
            }
        }
        let _ = fs::write(&config.test_failures_file, filter);
    }
}

/// Generates a test report in JUnit XML format.
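///
/// The report (rendered via `quick_junit`) roughly contains one `<testsuite>` per test contract
/// and one `<testcase>` per test, carrying the duration, a summary in `system-out`, and, at
/// verbosity 2 and above, the decoded console logs.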
fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
    let mut total_duration = Duration::default();
    let mut junit_report = Report::new("Test run");
    junit_report.set_timestamp(Utc::now());
    for (suite_name, suite_result) in results {
        let mut test_suite = TestSuite::new(suite_name);
        total_duration += suite_result.duration;
        test_suite.set_time(suite_result.duration);
        test_suite.set_system_out(suite_result.summary());
        for (test_name, test_result) in &suite_result.test_results {
            let mut test_status = match test_result.status {
                TestStatus::Success => TestCaseStatus::success(),
                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
                TestStatus::Skipped => TestCaseStatus::skipped(),
            };
            if let Some(reason) = &test_result.reason {
                test_status.set_message(reason);
            }

            let mut test_case = TestCase::new(test_name, test_status);
            test_case.set_time(test_result.duration);

            let mut sys_out = String::new();
            let result_report = test_result.kind.report();
            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
            if verbosity >= 2 && !test_result.logs.is_empty() {
                write!(sys_out, "\\nLogs:\\n").unwrap();
                let console_logs = decode_console_logs(&test_result.logs);
                for log in console_logs {
                    write!(sys_out, "  {log}\\n").unwrap();
                }
            }

            test_case.set_system_out(sys_out);
            test_suite.add_test_case(test_case);
        }
        junit_report.add_test_suite(test_suite);
    }
    junit_report.set_time(total_duration);
    junit_report
}

#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    #[test]
    fn watch_parse() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    #[test]
    fn fuzz_seed() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // <https://github.com/foundry-rs/foundry/issues/5913>
    #[test]
    fn fuzz_seed_exists() {
        let args: TestArgs =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    #[test]
    fn extract_chain() {
        let test = |arg: &str, expected: Chain| {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        };
        test("--chain-id=1", Chain::mainnet());
        test("--chain-id=42", Chain::from_id(42));
    }
}