forge/cmd/test/mod.rs

1use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
2use crate::{
3    MultiContractRunner, MultiContractRunnerBuilder,
4    decode::decode_console_logs,
5    gas_report::GasReport,
6    multi_runner::matches_artifact,
7    result::{SuiteResult, TestOutcome, TestStatus},
8    traces::{
9        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
10        debug::{ContractSources, DebugTraceIdentifier},
11        decode_trace_arena, folded_stack_trace,
12        identifier::SignaturesIdentifier,
13    },
14};
15use alloy_primitives::U256;
16use chrono::Utc;
17use clap::{Parser, ValueHint};
18use eyre::{Context, OptionExt, Result, bail};
19use foundry_cli::{
20    opts::{BuildOpts, EvmArgs, GlobalArgs},
21    utils::{self, LoadConfig},
22};
23use foundry_common::{EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, fs, shell};
24use foundry_compilers::{
25    ProjectCompileOutput,
26    artifacts::{Libraries, output_selection::OutputSelection},
27    compilers::{
28        Language,
29        multi::{MultiCompiler, MultiCompilerLanguage},
30    },
31    utils::source_files_iter,
32};
33use foundry_config::{
34    Config, figment,
35    figment::{
36        Metadata, Profile, Provider,
37        value::{Dict, Map},
38    },
39    filter::GlobMatcher,
40};
41use foundry_debugger::Debugger;
42use foundry_evm::{
43    core::evm::{
44        BlockEnvFor, EthEvmNetwork, FoundryEvmNetwork, OpEvmNetwork, SpecFor, TempoEvmNetwork,
45        TxEnvFor,
46    },
47    opts::EvmOpts,
48    traces::{backtrace::BacktraceBuilder, identifier::TraceIdentifiers, prune_trace_depth},
49};
50use rand::Rng;
51use regex::Regex;
52use revm::context::Transaction;
53use std::{
54    collections::{BTreeMap, BTreeSet},
55    fmt::Write,
56    path::{Path, PathBuf},
57    sync::{Arc, mpsc::channel},
58    time::{Duration, Instant},
59};
60use yansi::Paint;
61
62mod filter;
63mod summary;
64use crate::{result::TestKind, traces::render_trace_arena_inner};
65pub use filter::FilterArgs;
66use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
67use summary::{TestSummaryReport, format_invariant_metrics_table};
68
// Loads the project's figment and merges the `build` and `evm` CLI arguments into it,
// generating the `figment::Provider`/config-conversion impls for `TestArgs`.
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
71
72/// CLI arguments for `forge test`.
73#[derive(Clone, Debug, Parser)]
74#[command(next_help_heading = "Test options")]
75pub struct TestArgs {
76    // Include global options for users of this struct.
77    #[command(flatten)]
78    pub global: GlobalArgs,
79
80    /// The contract file you want to test, it's a shortcut for --match-path.
81    #[arg(value_hint = ValueHint::FilePath)]
82    pub path: Option<GlobMatcher>,
83
84    /// Run a single test in the debugger.
85    ///
86    /// The matching test will be opened in the debugger regardless of the outcome of the test.
87    ///
88    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
89    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
90    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
91    debug: bool,
92
93    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
94    ///
95    /// A flame graph is used to visualize which functions or operations within the smart contract
96    /// are consuming the most gas overall in a sorted manner.
97    #[arg(long)]
98    flamegraph: bool,
99
100    /// Generate a flamechart for a single test. Implies `--decode-internal`.
101    ///
102    /// A flame chart shows the gas usage over time, illustrating when each function is
103    /// called (execution order) and how much gas it consumes at each point in the timeline.
104    #[arg(long, conflicts_with = "flamegraph")]
105    flamechart: bool,
106
107    /// Identify internal functions in traces.
108    ///
109    /// This will trace internal functions and decode stack parameters.
110    ///
111    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
112    /// single function is matched, similarly to `--debug`, for performance reasons.
113    #[arg(long)]
114    decode_internal: bool,
115
116    /// Dumps all debugger steps to file.
117    #[arg(
118        long,
119        requires = "debug",
120        value_hint = ValueHint::FilePath,
121        value_name = "PATH"
122    )]
123    dump: Option<PathBuf>,
124
125    /// Print a gas report.
126    #[arg(long, env = "FORGE_GAS_REPORT")]
127    gas_report: bool,
128
129    /// Check gas snapshots against previous runs.
130    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
131    gas_snapshot_check: Option<bool>,
132
133    /// Enable/disable recording of gas snapshot results.
134    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
135    gas_snapshot_emit: Option<bool>,
136
137    /// Exit with code 0 even if a test fails.
138    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
139    allow_failure: bool,
140
141    /// Suppress successful test traces and show only traces for failures.
142    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
143    suppress_successful_traces: bool,
144
145    /// Defines the depth of a trace
146    #[arg(long)]
147    trace_depth: Option<usize>,
148
149    /// Output test results as JUnit XML report.
150    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
151    pub junit: bool,
152
153    /// Stop running tests after the first failure.
154    #[arg(long)]
155    pub fail_fast: bool,
156
157    /// The Etherscan (or equivalent) API key.
158    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
159    etherscan_api_key: Option<String>,
160
161    /// List tests instead of running them.
162    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
163    list: bool,
164
165    /// Set seed used to generate randomness during your fuzz runs.
166    #[arg(long)]
167    pub fuzz_seed: Option<U256>,
168
169    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
170    pub fuzz_runs: Option<u64>,
171
172    /// Timeout for each fuzz run in seconds.
173    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
174    pub fuzz_timeout: Option<u64>,
175
176    /// File to rerun fuzz failures from.
177    #[arg(long)]
178    pub fuzz_input_file: Option<String>,
179
180    /// Show test execution progress.
181    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
182    pub show_progress: bool,
183
184    /// Re-run recorded test failures from last run.
185    /// If no failure recorded then regular test run is performed.
186    #[arg(long)]
187    pub rerun: bool,
188
189    /// Print test summary table.
190    #[arg(long, help_heading = "Display options")]
191    pub summary: bool,
192
193    /// Print detailed test summary table.
194    #[arg(long, help_heading = "Display options", requires = "summary")]
195    pub detailed: bool,
196
197    /// Disables the labels in the traces.
198    #[arg(long, help_heading = "Display options")]
199    pub disable_labels: bool,
200
201    #[command(flatten)]
202    filter: FilterArgs,
203
204    #[command(flatten)]
205    evm: EvmArgs,
206
207    #[command(flatten)]
208    pub build: BuildOpts,
209
210    #[command(flatten)]
211    pub watch: WatchArgs,
212}
213
214impl TestArgs {
    /// Entry point for `forge test`: compiles the project and executes all matching tests.
    ///
    /// Thin wrapper around [`Self::compile_and_run`] that also emits a trace event.
    pub async fn run(mut self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.compile_and_run().await
    }
219
220    /// Returns a list of files that need to be compiled in order to run all the tests that match
221    /// the given filter.
222    ///
223    /// This means that it will return all sources that are not test contracts or that match the
224    /// filter. We want to compile all non-test sources always because tests might depend on them
225    /// dynamically through cheatcodes.
226    #[instrument(target = "forge::test", skip_all)]
227    pub fn get_sources_to_compile(
228        &self,
229        config: &Config,
230        test_filter: &ProjectPathsAwareFilter,
231    ) -> Result<BTreeSet<PathBuf>> {
232        // An empty filter doesn't filter out anything.
233        // We can still optimize slightly by excluding scripts.
234        if test_filter.is_empty() {
235            return Ok(source_files_iter(&config.src, MultiCompilerLanguage::FILE_EXTENSIONS)
236                .chain(source_files_iter(&config.test, MultiCompilerLanguage::FILE_EXTENSIONS))
237                .collect());
238        }
239
240        let mut project = config.create_project(true, true)?;
241        project.update_output_selection(|selection| {
242            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
243        });
244        let output = project.compile()?;
245        if output.has_compiler_errors() {
246            sh_println!("{output}")?;
247            eyre::bail!("Compilation failed");
248        }
249
250        Ok(output
251            .artifact_ids()
252            .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
253            .filter(|(id, abi)| {
254                id.source.starts_with(&config.src) || matches_artifact(test_filter, id, abi)
255            })
256            .map(|(id, _)| id.source)
257            .collect())
258    }
259
260    /// Executes all the tests in the project.
261    ///
262    /// This will trigger the build process first. On success all test contracts that match the
263    /// configured filter will be executed
264    ///
265    /// Returns the test results for all matching tests.
266    pub async fn compile_and_run(&mut self) -> Result<TestOutcome> {
267        // Merge all configs.
268        let (mut config, evm_opts) = self.load_config_and_evm_opts()?;
269
270        // Install missing dependencies.
271        if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
272        {
273            // need to re-configure here to also catch additional remappings
274            config = self.load_config()?;
275        }
276
277        // Set up the project.
278        let project = config.project()?;
279
280        let filter = self.filter(&config)?;
281        trace!(target: "forge::test", ?filter, "using filter");
282
283        let compiler = ProjectCompiler::new()
284            .dynamic_test_linking(config.dynamic_test_linking)
285            .quiet(shell::is_json() || self.junit)
286            .files(self.get_sources_to_compile(&config, &filter)?);
287        let output = compiler.compile(&project)?;
288
289        self.run_tests(&project.paths.root, config, evm_opts, &output, &filter, false).await
290    }
291
    /// Executes all the tests in the project against an already-compiled `output`.
    ///
    /// Adjusts `config`/`evm_opts` for gas reporting, fuzz seeding and tracing, dispatches to
    /// the network-specific runner, and optionally renders a flamegraph/flamechart or launches
    /// the debugger for the single matched test.
    ///
    /// See [`Self::compile_and_run`] for more details.
    pub async fn run_tests(
        &mut self,
        project_root: &Path,
        mut config: Config,
        mut evm_opts: EvmOpts,
        output: &ProjectCompileOutput,
        filter: &ProjectPathsAwareFilter,
        coverage: bool,
    ) -> Result<TestOutcome> {
        // Explicitly enable isolation for gas reports for more correct gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Generate a random fuzz seed if none provided, for reproducibility.
        config.fuzz.seed = config
            .fuzz
            .seed
            .or_else(|| Some(U256::from_be_bytes(rand::rng().random::<[u8; 32]>())));

        // Create test options from general project settings and compiler output.
        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Raise executor verbosity to 3 so traces are collected: both the gas report analysis
        // and the flame drawings below consume execution traces.
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        // Enable internal tracing for more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Auto-detect network from fork chain ID when not explicitly configured.
        evm_opts.infer_network_from_fork().await;

        // Dispatch based on network type; each arm monomorphizes the runner for its network.
        let (libraries, mut outcome) = if evm_opts.networks.is_tempo() {
            self.build_and_run_tests::<TempoEvmNetwork>(
                config,
                evm_opts,
                output,
                filter,
                coverage,
                should_debug,
                decode_internal,
            )
            .await?
        } else if evm_opts.networks.is_optimism() {
            self.build_and_run_tests::<OpEvmNetwork>(
                config,
                evm_opts,
                output,
                filter,
                coverage,
                should_debug,
                decode_internal,
            )
            .await?
        } else {
            self.build_and_run_tests::<EthEvmNetwork>(
                config,
                evm_opts,
                output,
                filter,
                coverage,
                should_debug,
                decode_internal,
            )
            .await?
        };

        if should_draw {
            // Exactly one test ran here: the inner runner bails when --flamegraph/--flamechart
            // matches more than one test.
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena, self.evm.isolate);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                // Flame charts preserve call order; reverse the folded lines for inferno.
                options.flame_chart = true;
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program; a failure here is non-fatal.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get first non-empty suite result. We will have only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(output, project_root, Some(&libraries))?;

            // Run the debugger on the execution traces only (setup/deployment are excluded).
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = &self.dump {
                // --dump: write all debugger steps to file instead of opening the TUI.
                debugger.dump_to_file(dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }
452
453    /// Build the test runner and execute tests for a specific network type.
454    #[allow(clippy::too_many_arguments)]
455    async fn build_and_run_tests<FEN: FoundryEvmNetwork>(
456        &self,
457        config: Config,
458        evm_opts: EvmOpts,
459        output: &ProjectCompileOutput,
460        filter: &ProjectPathsAwareFilter,
461        coverage: bool,
462        should_debug: bool,
463        decode_internal: InternalTraceMode,
464    ) -> eyre::Result<(Libraries, TestOutcome)> {
465        let verbosity = evm_opts.verbosity;
466        let (evm_env, tx_env, fork_block) =
467            evm_opts.env::<SpecFor<FEN>, BlockEnvFor<FEN>, TxEnvFor<FEN>>().await?;
468
469        let config = Arc::new(config);
470        let runner = MultiContractRunnerBuilder::new(config.clone())
471            .set_debug(should_debug)
472            .set_decode_internal(decode_internal)
473            .initial_balance(evm_opts.initial_balance)
474            .sender(evm_opts.sender)
475            .with_fork(evm_opts.get_fork(&config, evm_env.cfg_env.chain_id, fork_block))
476            .enable_isolation(evm_opts.isolate)
477            .fail_fast(self.fail_fast)
478            .set_coverage(coverage)
479            .build::<FEN, MultiCompiler>(output, evm_env, tx_env, evm_opts)?;
480
481        let libraries = runner.libraries.clone();
482        let outcome = self.run_tests_inner(runner, config, verbosity, filter, output).await?;
483        Ok((libraries, outcome))
484    }
485
486    /// Run all tests that matches the filter predicate from a test runner
487    async fn run_tests_inner<FEN: FoundryEvmNetwork>(
488        &self,
489        mut runner: MultiContractRunner<FEN>,
490        config: Arc<Config>,
491        verbosity: u8,
492        filter: &ProjectPathsAwareFilter,
493        output: &ProjectCompileOutput,
494    ) -> eyre::Result<TestOutcome> {
495        let fuzz_seed = config.fuzz.seed;
496        if self.list {
497            return list(runner, filter);
498        }
499
500        trace!(target: "forge::test", "running all tests");
501
502        // If we need to render to a serialized format, we should not print anything else to stdout.
503        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();
504
505        let num_filtered = runner.matching_test_functions(filter).count();
506
507        if num_filtered == 0 {
508            let total_tests = if filter.is_empty() {
509                num_filtered
510            } else {
511                runner.matching_test_functions(&EmptyTestFilter::default()).count()
512            };
513            if total_tests == 0 {
514                sh_println!(
515                    "No tests found in project! Forge looks for functions that start with `test`"
516                )?;
517            } else {
518                let mut msg = format!("no tests match the provided pattern:\n{filter}");
519                // Try to suggest a test when there's no match.
520                if let Some(test_pattern) = &filter.args().test_pattern {
521                    let test_name = test_pattern.as_str();
522                    // Filter contracts but not test functions.
523                    let candidates = runner.all_test_functions(filter).map(|f| &f.name);
524                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
525                        write!(msg, "\nDid you mean `{suggestion}`?")?;
526                    }
527                }
528                sh_warn!("{msg}")?;
529            }
530            return Ok(TestOutcome::empty(Some(runner.known_contracts.clone()), false));
531        }
532
533        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
534            let action = if self.flamegraph {
535                "generate a flamegraph"
536            } else if self.flamechart {
537                "generate a flamechart"
538            } else {
539                "run the debugger"
540            };
541            let filter = if filter.is_empty() {
542                String::new()
543            } else {
544                format!("\n\nFilter used:\n{filter}")
545            };
546            eyre::bail!(
547                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
548                 Use --match-contract and --match-path to further limit the search.{filter}",
549            );
550        }
551
552        // If exactly one test matched, we enable full tracing.
553        if num_filtered == 1 && self.decode_internal {
554            runner.decode_internal = InternalTraceMode::Full;
555        }
556
557        // Run tests in a non-streaming fashion and collect results for serialization.
558        if !self.gas_report && !self.summary && shell::is_json() {
559            let mut results = runner.test_collect(filter)?;
560            for suite_result in results.values_mut() {
561                for test_result in suite_result.test_results.values_mut() {
562                    if verbosity >= 2 {
563                        // Decode logs at level 2 and above.
564                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
565                    } else {
566                        // Empty logs for non verbose runs.
567                        test_result.logs = vec![];
568                    }
569                }
570            }
571            sh_println!("{}", serde_json::to_string(&results)?)?;
572            let kc = runner.known_contracts.clone();
573            return Ok(TestOutcome::new(Some(kc), results, self.allow_failure, fuzz_seed));
574        }
575
576        if self.junit {
577            let results = runner.test_collect(filter)?;
578            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
579            let kc = runner.known_contracts.clone();
580            return Ok(TestOutcome::new(Some(kc), results, self.allow_failure, fuzz_seed));
581        }
582
583        let remote_chain =
584            if runner.fork.is_some() { runner.tx_env.chain_id().map(Into::into) } else { None };
585        let known_contracts = runner.known_contracts.clone();
586
587        let libraries = runner.libraries.clone();
588
589        // Run tests in a streaming fashion.
590        let (tx, rx) = channel::<(String, SuiteResult)>();
591        let timer = Instant::now();
592        let show_progress = config.show_progress;
593        let handle = tokio::task::spawn_blocking({
594            let filter = filter.clone();
595            move || runner.test(&filter, tx, show_progress).map(|()| runner)
596        });
597
598        // Set up trace identifiers.
599        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);
600
601        // Avoid using external identifiers for gas report as we decode more traces and this will be
602        // expensive. Also skip external identifiers for local tests (no remote chain) to avoid
603        // unnecessary Etherscan API calls that significantly slow down test execution.
604        if !self.gas_report && remote_chain.is_some() {
605            identifier = identifier.with_external(&config, remote_chain)?;
606        }
607
608        // Build the trace decoder.
609        let mut builder = CallTraceDecoderBuilder::new()
610            .with_known_contracts(&known_contracts)
611            .with_label_disabled(self.disable_labels)
612            .with_verbosity(verbosity)
613            .with_chain_id(remote_chain.map(|c| c.id()));
614        // Signatures are of no value for gas reports.
615        if !self.gas_report {
616            builder =
617                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
618        }
619
620        if self.decode_internal {
621            let sources =
622                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
623            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
624        }
625        let mut decoder = builder.build();
626
627        let mut gas_report = self.gas_report.then(|| {
628            GasReport::new(
629                config.gas_reports.clone(),
630                config.gas_reports_ignore.clone(),
631                config.gas_reports_include_tests,
632            )
633        });
634
635        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();
636
637        let mut outcome = TestOutcome::empty(None, self.allow_failure);
638        outcome.fuzz_seed = fuzz_seed;
639
640        let mut any_test_failed = false;
641        let mut backtrace_builder = None;
642        for (contract_name, mut suite_result) in rx {
643            let tests = &mut suite_result.test_results;
644            let has_tests = !tests.is_empty();
645
646            // Clear the addresses and labels from previous test.
647            decoder.clear_addresses();
648
649            // We identify addresses if we're going to print *any* trace or gas report.
650            let identify_addresses = verbosity >= 3
651                || self.gas_report
652                || self.debug
653                || self.flamegraph
654                || self.flamechart;
655
656            // Print suite header.
657            if !silent {
658                sh_println!()?;
659                for warning in &suite_result.warnings {
660                    sh_warn!("{warning}")?;
661                }
662                if has_tests {
663                    let len = tests.len();
664                    let tests = if len > 1 { "tests" } else { "test" };
665                    sh_println!("Ran {len} {tests} for {contract_name}")?;
666                }
667            }
668
669            // Process individual test results, printing logs and traces when necessary.
670            for (name, result) in tests {
671                let show_traces =
672                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
673                if !silent {
674                    sh_println!("{}", result.short_result(name))?;
675
676                    // Display invariant metrics if invariant kind.
677                    if let TestKind::Invariant { metrics, .. } = &result.kind
678                        && !metrics.is_empty()
679                    {
680                        let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
681                    }
682
683                    // We only display logs at level 2 and above
684                    if verbosity >= 2 && show_traces {
685                        // We only decode logs from Hardhat and DS-style console events
686                        let console_logs = decode_console_logs(&result.logs);
687                        if !console_logs.is_empty() {
688                            sh_println!("Logs:")?;
689                            for log in console_logs {
690                                sh_println!("  {log}")?;
691                            }
692                            sh_println!()?;
693                        }
694                    }
695                }
696
697                // We shouldn't break out of the outer loop directly here so that we finish
698                // processing the remaining tests and print the suite summary.
699                any_test_failed |= result.status == TestStatus::Failure;
700
701                // Clear the addresses and labels from previous runs.
702                decoder.clear_addresses();
703                decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));
704
705                // Identify addresses and decode traces.
706                let mut decoded_traces = Vec::with_capacity(result.traces.len());
707                for (kind, arena) in &mut result.traces {
708                    if identify_addresses {
709                        decoder.identify(arena, &mut identifier);
710                    }
711
712                    // verbosity:
713                    // - 0..3: nothing
714                    // - 3: only display traces for failed tests
715                    // - 4: also display the setup trace for failed tests
716                    // - 5..: display all traces for all tests, including storage changes
717                    let should_include = match kind {
718                        TraceKind::Execution => {
719                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
720                        }
721                        TraceKind::Setup => {
722                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
723                        }
724                        TraceKind::Deployment => false,
725                    };
726
727                    if should_include {
728                        decode_trace_arena(arena, &decoder).await;
729
730                        if let Some(trace_depth) = self.trace_depth {
731                            prune_trace_depth(arena, trace_depth);
732                        }
733
734                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
735                    }
736                }
737
738                if !silent && show_traces && !decoded_traces.is_empty() {
739                    sh_println!("Traces:")?;
740                    for trace in &decoded_traces {
741                        sh_println!("{trace}")?;
742                    }
743                }
744
745                // Extract and display backtrace for failed tests when verbosity >= 3.
746                // At verbosity 3-4 backtraces show contract/function names only.
747                // At verbosity 5 backtraces include source file locations.
748                if !silent
749                    && result.status.is_failure()
750                    && verbosity >= 3
751                    && !result.traces.is_empty()
752                    && let Some((_, arena)) =
753                        result.traces.iter().find(|(kind, _)| matches!(kind, TraceKind::Execution))
754                {
755                    // Lazily initialize the backtrace builder on first failure
756                    let builder = backtrace_builder.get_or_insert_with(|| {
757                        BacktraceBuilder::new(
758                            output,
759                            config.root.clone(),
760                            config.parsed_libraries().ok(),
761                            config.via_ir,
762                        )
763                    });
764
765                    let backtrace = builder.from_traces(arena);
766
767                    if !backtrace.is_empty() {
768                        sh_println!("{}", backtrace)?;
769                    }
770                }
771
772                if let Some(gas_report) = &mut gas_report {
773                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;
774
775                    for trace in &result.gas_report_traces {
776                        decoder.clear_addresses();
777
778                        // Re-execute setup and deployment traces to collect identities created in
779                        // setUp and constructor.
780                        for (kind, arena) in &result.traces {
781                            if !matches!(kind, TraceKind::Execution) {
782                                decoder.identify(arena, &mut identifier);
783                            }
784                        }
785
786                        for arena in trace {
787                            decoder.identify(arena, &mut identifier);
788                            gas_report.analyze([arena], &decoder).await;
789                        }
790                    }
791                }
792                // Clear memory.
793                result.gas_report_traces = Default::default();
794
795                // Collect and merge gas snapshots.
796                for (group, new_snapshots) in &result.gas_snapshots {
797                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
798                }
799            }
800
801            // Write gas snapshots to disk if any were collected.
802            if !gas_snapshots.is_empty() {
803                // By default `gas_snapshot_check` is set to `false` in the config.
804                //
805                // The user can either:
806                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
807                // - Pass `--gas-snapshot-check=true` as a CLI argument.
808                // - Set `gas_snapshot_check = true` in the config.
809                //
810                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
811                // and the environment variable, disabling the check if `false` is passed.
812                //
813                // Exiting early with code 1 if differences are found.
814                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
815                    let differences_found = gas_snapshots.clone().into_iter().fold(
816                        false,
817                        |mut found, (group, snapshots)| {
818                            // If the snapshot file doesn't exist, we can't compare so we skip.
819                            if !&config.snapshots.join(format!("{group}.json")).exists() {
820                                return found;
821                            }
822
823                            let previous_snapshots: BTreeMap<String, String> =
824                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
825                                    .expect("Failed to read snapshots from disk");
826
827                            let diff: BTreeMap<_, _> = snapshots
828                                .iter()
829                                .filter_map(|(k, v)| {
830                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
831                                        (previous_snapshot != v).then(|| {
832                                            (k.clone(), (previous_snapshot.clone(), v.clone()))
833                                        })
834                                    })
835                                })
836                                .collect();
837
838                            if !diff.is_empty() {
839                                let _ = sh_eprintln!(
840                                    "{}",
841                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
842                                );
843
844                                for (key, (previous_snapshot, snapshot)) in &diff {
845                                    let _ = sh_eprintln!(
846                                        "{}",
847                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
848                                    );
849                                }
850
851                                found = true;
852                            }
853
854                            found
855                        },
856                    );
857
858                    if differences_found {
859                        sh_eprintln!()?;
860                        eyre::bail!("Snapshots differ from previous run");
861                    }
862                }
863
864                // By default `gas_snapshot_emit` is set to `true` in the config.
865                //
866                // The user can either:
867                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
868                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
869                // - Set `gas_snapshot_emit = false` in the config.
870                //
871                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
872                // and the environment variable, enabling the check if `true` is passed.
873                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
874                    // Create `snapshots` directory if it doesn't exist.
875                    fs::create_dir_all(&config.snapshots)?;
876
877                    // Write gas snapshots to disk per group.
878                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
879                        fs::write_pretty_json_file(
880                            &config.snapshots.join(format!("{group}.json")),
881                            &snapshots,
882                        )
883                        .expect("Failed to write gas snapshots to disk");
884                    });
885                }
886            }
887
888            // Print suite summary.
889            if !silent && has_tests {
890                sh_println!("{}", suite_result.summary())?;
891            }
892
893            // Add the suite result to the outcome.
894            outcome.results.insert(contract_name, suite_result);
895
896            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
897            if self.fail_fast && any_test_failed {
898                break;
899            }
900        }
901        outcome.last_run_decoder = Some(decoder);
902        let duration = timer.elapsed();
903
904        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");
905
906        if let Some(gas_report) = gas_report {
907            let finalized = gas_report.finalize();
908            sh_println!("{}", &finalized)?;
909            outcome.gas_report = Some(finalized);
910        }
911
912        if !self.summary && !shell::is_json() {
913            sh_println!("{}", outcome.summary(duration))?;
914        }
915
916        if self.summary && !outcome.results.is_empty() {
917            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
918            sh_println!("{}", &summary_report)?;
919        }
920
921        // Reattach the task.
922        match handle.await {
923            Ok(result) => {
924                let runner = result?;
925                outcome.known_contracts = Some(runner.known_contracts);
926            }
927            Err(e) => match e.try_into_panic() {
928                Ok(payload) => std::panic::resume_unwind(payload),
929                Err(e) => return Err(e.into()),
930            },
931        }
932
933        // Persist test run failures to enable replaying.
934        persist_run_failures(&config, &outcome);
935
936        Ok(outcome)
937    }
938
939    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
940    /// Loads and applies filter from file if only last test run failures performed.
941    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
942        let mut filter = self.filter.clone();
943        if self.rerun {
944            filter.test_pattern = last_run_failures(config);
945        }
946        if filter.path_pattern.is_some() {
947            if self.path.is_some() {
948                bail!("Can not supply both --match-path and |path|");
949            }
950        } else {
951            filter.path_pattern = self.path.clone();
952        }
953        Ok(filter.merge_with_config(config))
954    }
955
956    /// Returns whether `BuildArgs` was configured with `--watch`
957    pub const fn is_watch(&self) -> bool {
958        self.watch.watch.is_some()
959    }
960
961    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
962    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
963        self.watch.watchexec_config(|| {
964            let config = self.load_config()?;
965            Ok([config.src, config.test])
966        })
967    }
968}
969
970impl Provider for TestArgs {
971    fn metadata(&self) -> Metadata {
972        Metadata::named("Core Build Args Provider")
973    }
974
975    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
976        let mut dict = Dict::default();
977
978        let mut fuzz_dict = Dict::default();
979        if let Some(fuzz_seed) = self.fuzz_seed {
980            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
981        }
982        if let Some(fuzz_runs) = self.fuzz_runs {
983            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
984        }
985        if let Some(fuzz_timeout) = self.fuzz_timeout {
986            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
987        }
988        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
989            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
990        }
991        dict.insert("fuzz".to_string(), fuzz_dict.into());
992
993        if let Some(etherscan_api_key) =
994            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
995        {
996            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.clone().into());
997        }
998
999        if self.show_progress {
1000            dict.insert("show_progress".to_string(), true.into());
1001        }
1002
1003        Ok(Map::from([(Config::selected_profile(), dict)]))
1004    }
1005}
1006
1007/// Lists all matching tests
1008fn list<FEN: FoundryEvmNetwork>(
1009    runner: MultiContractRunner<FEN>,
1010    filter: &ProjectPathsAwareFilter,
1011) -> Result<TestOutcome> {
1012    let results = runner.list(filter);
1013
1014    if shell::is_json() {
1015        sh_println!("{}", serde_json::to_string(&results)?)?;
1016    } else {
1017        for (file, contracts) in &results {
1018            sh_println!("{file}")?;
1019            for (contract, tests) in contracts {
1020                sh_println!("  {contract}")?;
1021                sh_println!("    {}\n", tests.join("\n    "))?;
1022            }
1023        }
1024    }
1025    Ok(TestOutcome::empty(Some(runner.known_contracts), false))
1026}
1027
1028/// Load persisted filter (with last test run failures) from file.
1029fn last_run_failures(config: &Config) -> Option<regex::Regex> {
1030    match fs::read_to_string(&config.test_failures_file) {
1031        Ok(filter) => Regex::new(&filter)
1032            .inspect_err(|e| {
1033                _ = sh_warn!(
1034                    "failed to parse test filter from {:?}: {e}",
1035                    config.test_failures_file
1036                )
1037            })
1038            .ok(),
1039        Err(_) => None,
1040    }
1041}
1042
1043/// Persist filter with last test run failures (only if there's any failure).
1044fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
1045    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
1046        let mut filter = String::new();
1047        let mut failures = outcome.failures().peekable();
1048        while let Some((test_name, _)) = failures.next() {
1049            if test_name.is_any_test()
1050                && let Some(test_match) = test_name.split('(').next()
1051            {
1052                filter.push_str(test_match);
1053                if failures.peek().is_some() {
1054                    filter.push('|');
1055                }
1056            }
1057        }
1058        let _ = fs::write(&config.test_failures_file, filter);
1059    }
1060}
1061
1062/// Generate test report in JUnit XML report format.
1063fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
1064    let mut total_duration = Duration::default();
1065    let mut junit_report = Report::new("Test run");
1066    junit_report.set_timestamp(Utc::now());
1067    for (suite_name, suite_result) in results {
1068        let mut test_suite = TestSuite::new(suite_name);
1069        total_duration += suite_result.duration;
1070        test_suite.set_time(suite_result.duration);
1071        test_suite.set_system_out(suite_result.summary());
1072        for (test_name, test_result) in &suite_result.test_results {
1073            let mut test_status = match test_result.status {
1074                TestStatus::Success => TestCaseStatus::success(),
1075                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
1076                TestStatus::Skipped => TestCaseStatus::skipped(),
1077            };
1078            if let Some(reason) = &test_result.reason {
1079                test_status.set_message(reason);
1080            }
1081
1082            let mut test_case = TestCase::new(test_name, test_status);
1083            test_case.set_time(test_result.duration);
1084
1085            let mut sys_out = String::new();
1086            let result_report = test_result.kind.report();
1087            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
1088            if verbosity >= 2 && !test_result.logs.is_empty() {
1089                write!(sys_out, "\\nLogs:\\n").unwrap();
1090                let console_logs = decode_console_logs(&test_result.logs);
1091                for log in console_logs {
1092                    write!(sys_out, "  {log}\\n").unwrap();
1093                }
1094            }
1095
1096            test_case.set_system_out(sys_out);
1097            test_suite.add_test_case(test_case);
1098        }
1099        junit_report.add_test_suite(test_suite);
1100    }
1101    junit_report.set_time(total_duration);
1102    junit_report
1103}
1104
#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    /// `-w` enables watch mode.
    #[test]
    fn watch_parse() {
        let args = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    /// `--fuzz-seed` is parsed into `fuzz_seed`.
    #[test]
    fn fuzz_seed() {
        let args = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    /// `--trace-depth` is parsed into `trace_depth`.
    #[test]
    fn depth_trace() {
        let args = TestArgs::parse_from(["foundry-cli", "--trace-depth", "2"]);
        assert!(args.trace_depth.is_some());
    }

    // <https://github.com/foundry-rs/foundry/issues/5913>
    #[test]
    fn fuzz_seed_exists() {
        let args =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    /// `--chain-id` propagates into both the loaded config and the EVM opts.
    #[test]
    fn extract_chain() {
        fn check(arg: &str, expected: Chain) {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        }
        check("--chain-id=1", Chain::mainnet());
        check("--chain-id=42", Chain::from_id(42));
    }
}