foundry_evm_traces/
lib.rs

//! # foundry-evm-traces
//!
//! EVM trace identifying and decoding.
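//!
//! A sketch of the typical flow (assuming an already collected [`SparsedTraceArena`] named
//! `traces` and an async context):
//!
//! ```ignore
//! use foundry_evm_traces::{decode_trace_arena, render_trace_arena, CallTraceDecoderBuilder};
//!
//! let decoder = CallTraceDecoderBuilder::new().build();
//! decode_trace_arena(&mut traces.arena, &decoder).await;
//! println!("{}", render_trace_arena(&traces));
//! ```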

#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]

#[macro_use]
extern crate foundry_common;

#[macro_use]
extern crate tracing;

use foundry_common::{
    contracts::{ContractsByAddress, ContractsByArtifact},
    shell,
};
use revm::bytecode::opcode::OpCode;
use revm_inspectors::tracing::{
    OpcodeFilter,
    types::{DecodedTraceStep, TraceMemberOrder},
};
use serde::{Deserialize, Serialize};
use std::{
    borrow::Cow,
    collections::BTreeSet,
    ops::{Deref, DerefMut},
};

use alloy_primitives::map::HashMap;

pub use revm_inspectors::tracing::{
    CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
    TraceWriter, TracingInspector, TracingInspectorConfig,
    types::{
        CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
        DecodedCallTrace,
    },
};

/// Call trace address identifiers.
///
/// Identifiers figure out what ABIs and labels belong to all the addresses of the trace.
pub mod identifier;
use identifier::LocalTraceIdentifier;

mod decoder;
pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};

pub mod debug;
pub use debug::DebugTraceIdentifier;

pub mod folded_stack_trace;

pub mod backtrace;

/// A list of trace arenas and the kind of each.
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;

/// Trace arena keeping track of ignored trace items.
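///
/// # Examples
///
/// A minimal sketch (assuming a default, empty [`CallTraceArena`]); with no ignored ranges the
/// arena resolves to the full trace:
///
/// ```ignore
/// use foundry_evm_traces::{CallTraceArena, SparsedTraceArena};
///
/// let traces = SparsedTraceArena {
///     arena: CallTraceArena::default(),
///     // (start_node, start_step) -> (end_node, end_step) ranges to ignore.
///     ignored: Default::default(),
/// };
/// ```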
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// Full trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace steps to ignore, in the format (start_node, start_step) ->
    /// (end_node, end_step). See `foundry_cheatcodes::utils::IgnoredTraces` for more information.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}

impl SparsedTraceArena {
    /// Goes over the entire trace arena and removes ignored trace items, returning the arena
    /// unchanged (borrowed) if nothing is ignored.
    fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
        if self.ignored.is_empty() {
            Cow::Borrowed(&self.arena)
        } else {
            let mut arena = self.arena.clone();

            fn clear_node(
                nodes: &mut [CallTraceNode],
                node_idx: usize,
                ignored: &HashMap<(usize, usize), (usize, usize)>,
                cur_ignore_end: &mut Option<(usize, usize)>,
            ) {
                // Prepend an additional None item to the ordering to handle the beginning of the
                // trace.
                let items = std::iter::once(None)
                    .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
                    .enumerate();

                let mut internal_calls = Vec::new();
                let mut items_to_remove = BTreeSet::new();
                for (item_idx, item) in items {
                    if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
                        *cur_ignore_end = Some(*end_node);
                    }

                    let mut remove = cur_ignore_end.is_some() && item.is_some();

                    match item {
                        // we only remove calls if they did not start/pause tracing
                        Some(TraceMemberOrder::Call(child_idx)) => {
                            clear_node(
                                nodes,
                                nodes[node_idx].children[child_idx],
                                ignored,
                                cur_ignore_end,
                            );
                            remove &= cur_ignore_end.is_some();
                        }
                        // we only remove decoded internal calls if they did not start/pause tracing
                        Some(TraceMemberOrder::Step(step_idx)) => {
                            // If this is an internal call beginning, track it in `internal_calls`
                            if let Some(decoded) = &nodes[node_idx].trace.steps[step_idx].decoded
                                && let DecodedTraceStep::InternalCall(_, end_step_idx) = &**decoded
                            {
                                internal_calls.push((item_idx, remove, *end_step_idx));
                                // we decide if we should remove it later
                                remove = false;
                            }
                            // Handle ends of internal calls
                            internal_calls.retain(|(start_item_idx, remove_start, end_idx)| {
                                if *end_idx != step_idx {
                                    return true;
                                }
                                // only remove start if end should be removed as well
                                if *remove_start && remove {
                                    items_to_remove.insert(*start_item_idx);
                                } else {
                                    remove = false;
                                }

                                false
                            });
                        }
                        _ => {}
                    }

                    if remove {
                        items_to_remove.insert(item_idx);
                    }

                    if let Some((end_node, end_step_idx)) = cur_ignore_end
                        && node_idx == *end_node
                        && item_idx == *end_step_idx
                    {
                        *cur_ignore_end = None;
                    }
                }

                for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
                    nodes[node_idx].ordering.remove(item_idx - offset - 1);
                }
            }

            clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);

            Cow::Owned(arena)
        }
    }
}

impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}

impl DerefMut for SparsedTraceArena {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}

/// Decode a collection of call traces.
///
/// The traces will be decoded using the given decoder, if possible.
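///
/// A sketch of typical usage (assumes an async context and a decoder built with default
/// settings):
///
/// ```ignore
/// use foundry_evm_traces::{decode_trace_arena, CallTraceDecoderBuilder};
///
/// let decoder = CallTraceDecoderBuilder::new().build();
/// decode_trace_arena(&mut arena, &decoder).await;
/// ```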
pub async fn decode_trace_arena(arena: &mut CallTraceArena, decoder: &CallTraceDecoder) {
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;
}

/// Render a collection of call traces to a string.
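///
/// A sketch (assuming `traces` is a [`SparsedTraceArena`]):
///
/// ```ignore
/// let rendered = render_trace_arena(&traces);
/// println!("{rendered}");
/// ```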
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}

/// Prunes the trace arena to the given depth by clearing the ordering of every node at or
/// beyond that depth.
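///
/// A sketch: with `depth = 1`, only the items ordered directly under the root call remain.
///
/// ```ignore
/// prune_trace_depth(&mut arena, 1);
/// ```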
pub fn prune_trace_depth(arena: &mut CallTraceArena, depth: usize) {
    for node in arena.nodes_mut() {
        if node.trace.depth >= depth {
            node.ordering.clear();
        }
    }
}

/// Render a collection of call traces to a string, optionally including contract creation
/// bytecodes and storage changes. Renders as JSON when the shell is in JSON mode.
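///
/// A sketch (assuming `traces` is a [`SparsedTraceArena`]):
///
/// ```ignore
/// // Include creation bytecodes, but skip storage changes.
/// let rendered = render_trace_arena_inner(&traces, true, false);
/// ```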
pub fn render_trace_arena_inner(
    arena: &SparsedTraceArena,
    with_bytecodes: bool,
    with_storage_changes: bool,
) -> String {
    if shell::is_json() {
        return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces");
    }

    let mut w = TraceWriter::new(Vec::<u8>::new())
        .color_cheatcodes(true)
        .use_colors(convert_color_choice(shell::color_choice()))
        .write_bytecodes(with_bytecodes)
        .with_storage_changes(with_storage_changes);
    w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
    String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
}

fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
    match choice {
        shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
        shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
        shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
    }
}

/// Specifies the kind of trace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    Deployment,
    Setup,
    Execution,
}

impl TraceKind {
    /// Returns `true` if the trace kind is [`Deployment`].
    ///
    /// [`Deployment`]: TraceKind::Deployment
    #[must_use]
    pub fn is_deployment(self) -> bool {
        matches!(self, Self::Deployment)
    }

    /// Returns `true` if the trace kind is [`Setup`].
    ///
    /// [`Setup`]: TraceKind::Setup
    #[must_use]
    pub fn is_setup(self) -> bool {
        matches!(self, Self::Setup)
    }

    /// Returns `true` if the trace kind is [`Execution`].
    ///
    /// [`Execution`]: TraceKind::Execution
    #[must_use]
    pub fn is_execution(self) -> bool {
        matches!(self, Self::Execution)
    }
}

/// Given a list of traces and artifacts, returns a map connecting addresses to contract names
/// and ABIs.
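///
/// A sketch (assuming collected `traces: Traces` and `known_contracts: ContractsByArtifact`):
///
/// ```ignore
/// let contracts = load_contracts(traces.iter().map(|(_, arena)| &arena.arena), &known_contracts);
/// for (address, (name, _abi)) in &contracts {
///     println!("{address}: {name}");
/// }
/// ```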
pub fn load_contracts<'a>(
    traces: impl IntoIterator<Item = &'a CallTraceArena>,
    known_contracts: &ContractsByArtifact,
) -> ContractsByAddress {
    let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
    let decoder = CallTraceDecoder::new();
    let mut contracts = ContractsByAddress::new();
    for trace in traces {
        for address in decoder.identify_addresses(trace, &mut local_identifier) {
            if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
                contracts.insert(address.address, (contract, abi.into_owned()));
            }
        }
    }
    contracts
}

/// Different kinds of internal function tracing.
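///
/// These modes map onto [`TraceMode`] via the `From` impl below:
///
/// ```ignore
/// use foundry_evm_traces::{InternalTraceMode, TraceMode};
///
/// assert_eq!(TraceMode::from(InternalTraceMode::None), TraceMode::None);
/// assert_eq!(TraceMode::from(InternalTraceMode::Simple), TraceMode::JumpSimple);
/// assert_eq!(TraceMode::from(InternalTraceMode::Full), TraceMode::Jump);
/// ```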
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
    /// Disables internal function tracing.
    #[default]
    None,
    /// Traces internal functions without decoding inputs/outputs from memory.
    Simple,
    /// Same as `Simple`, but also tracks memory snapshots.
    Full,
}

impl From<InternalTraceMode> for TraceMode {
    fn from(mode: InternalTraceMode) -> Self {
        match mode {
            InternalTraceMode::None => Self::None,
            InternalTraceMode::Simple => Self::JumpSimple,
            InternalTraceMode::Full => Self::Jump,
        }
    }
}

/// Different kinds of traces used by different foundry components.
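///
/// Modes are ordered by how much information they record; the `with_*` helpers below rely on
/// this ordering to upgrade a mode via `std::cmp::max`:
///
/// ```ignore
/// use foundry_evm_traces::TraceMode;
///
/// assert!(TraceMode::Call < TraceMode::Steps);
/// assert_eq!(TraceMode::None.with_debug(true), TraceMode::Debug);
/// ```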
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Disabled tracing.
    #[default]
    None,
    /// Simple call trace, no steps tracing required.
    Call,
    /// Call trace with steps tracing for JUMP and JUMPDEST opcodes.
    ///
    /// Does not enable tracking memory or stack snapshots.
    Steps,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Used for internal function identification. Does not track memory snapshots.
    JumpSimple,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Same as `JumpSimple`, but tracks memory snapshots as well.
    Jump,
    /// Call trace with complete steps tracing.
    ///
    /// Used by the debugger.
    Debug,
    /// Debug trace with storage changes.
    RecordStateDiff,
}

impl TraceMode {
    pub const fn is_none(self) -> bool {
        matches!(self, Self::None)
    }

    pub const fn is_call(self) -> bool {
        matches!(self, Self::Call)
    }

    pub const fn is_steps(self) -> bool {
        matches!(self, Self::Steps)
    }

    pub const fn is_jump_simple(self) -> bool {
        matches!(self, Self::JumpSimple)
    }

    pub const fn is_jump(self) -> bool {
        matches!(self, Self::Jump)
    }

    pub const fn record_state_diff(self) -> bool {
        matches!(self, Self::RecordStateDiff)
    }

    pub const fn is_debug(self) -> bool {
        matches!(self, Self::Debug)
    }

    pub fn with_debug(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::Debug) } else { self }
    }

    pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
        std::cmp::max(self, mode.into())
    }

    pub fn with_state_changes(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::RecordStateDiff) } else { self }
    }

    /// Upgrades the trace mode based on verbosity: levels 3 and 4 enable call traces, while 5
    /// and higher also enable step recording.
    pub fn with_verbosity(self, verbosity: u8) -> Self {
        match verbosity {
            0..3 => self,
            3..=4 => std::cmp::max(self, Self::Call),
            // Enable step recording for backtraces when verbosity is 5 or higher.
            // We need to ensure we're recording JUMP and JUMPDEST steps.
            _ => std::cmp::max(self, Self::Steps),
        }
    }

    /// Returns the [`TracingInspectorConfig`] corresponding to this mode, or `None` if tracing
    /// is disabled.
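    ///
    /// A sketch of the mapping (field names as in `revm-inspectors`):
    ///
    /// ```ignore
    /// assert!(TraceMode::None.into_config().is_none());
    /// let config = TraceMode::Call.into_config().unwrap();
    /// assert!(!config.record_steps); // steps are recorded only from `Steps` upwards
    /// ```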
    pub fn into_config(self) -> Option<TracingInspectorConfig> {
        if self.is_none() {
            None
        } else {
            TracingInspectorConfig {
                record_steps: self >= Self::Steps,
                record_memory_snapshots: self >= Self::Jump,
                record_stack_snapshots: if self > Self::Steps {
                    StackSnapshotType::Full
                } else {
                    StackSnapshotType::None
                },
                record_logs: true,
                record_state_diff: self.record_state_diff(),
                record_returndata_snapshots: self.is_debug(),
                record_opcodes_filter: (self.is_steps() || self.is_jump() || self.is_jump_simple())
                    .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)),
                exclude_precompile_calls: false,
                record_immediate_bytes: self.is_debug(),
            }
            .into()
        }
    }
}