foundry_evm_traces/
lib.rs

//! # foundry-evm-traces
//!
//! EVM trace identifying and decoding.

#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]

#[macro_use]
extern crate foundry_common;

#[macro_use]
extern crate tracing;

use foundry_common::{
    contracts::{ContractsByAddress, ContractsByArtifact},
    shell,
};
use revm::bytecode::opcode::OpCode;
use revm_inspectors::tracing::{
    OpcodeFilter,
    types::{DecodedTraceStep, TraceMemberOrder},
};
use serde::{Deserialize, Serialize};
use std::{
    borrow::Cow,
    collections::BTreeSet,
    ops::{Deref, DerefMut},
};

use alloy_primitives::map::HashMap;

pub use revm_inspectors::tracing::{
    CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
    TraceWriter, TracingInspector, TracingInspectorConfig,
    types::{
        CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
        DecodedCallTrace,
    },
};

/// Call trace address identifiers.
///
/// Identifiers figure out what ABIs and labels belong to all the addresses of the trace.
pub mod identifier;
use identifier::LocalTraceIdentifier;

mod decoder;
pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};

pub mod debug;
pub use debug::DebugTraceIdentifier;

pub mod folded_stack_trace;

pub mod backtrace;

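/// A list of trace arenas along with the kind of each trace.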
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;

/// Trace arena keeping track of ignored trace items.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// Full trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace steps to ignore, in the format
    /// `(start_node, start_step) -> (end_node, end_step)`.
    /// See `foundry_cheatcodes::utils::IgnoredTraces` for more information.
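    ///
    /// For example, a hypothetical entry `(0, 2) -> (1, 5)` marks everything from item 2
    /// of node 0 up to item 5 of node 1 as ignored when the arena is resolved.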
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}

impl SparsedTraceArena {
    /// Goes over the entire trace arena and removes ignored trace items.
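    ///
    /// Borrows the arena untouched when nothing is ignored; otherwise clones it and
    /// prunes the ignored items from each node's ordering.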
    fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
        if self.ignored.is_empty() {
            Cow::Borrowed(&self.arena)
        } else {
            let mut arena = self.arena.clone();

            fn clear_node(
                nodes: &mut [CallTraceNode],
                node_idx: usize,
                ignored: &HashMap<(usize, usize), (usize, usize)>,
                cur_ignore_end: &mut Option<(usize, usize)>,
            ) {
                // Prepend an additional None item to the ordering to handle the beginning of the
                // trace.
                let items = std::iter::once(None)
                    .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
                    .enumerate();

                let mut internal_calls = Vec::new();
                let mut items_to_remove = BTreeSet::new();
                for (item_idx, item) in items {
                    if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
                        *cur_ignore_end = Some(*end_node);
                    }

                    let mut remove = cur_ignore_end.is_some() && item.is_some();

                    match item {
                        // we only remove calls if they did not start/pause tracing
                        Some(TraceMemberOrder::Call(child_idx)) => {
                            clear_node(
                                nodes,
                                nodes[node_idx].children[child_idx],
                                ignored,
                                cur_ignore_end,
                            );
                            remove &= cur_ignore_end.is_some();
                        }
                        // we only remove decoded internal calls if they did not start/pause tracing
                        Some(TraceMemberOrder::Step(step_idx)) => {
                            // If this is an internal call beginning, track it in `internal_calls`
                            if let Some(decoded) = &nodes[node_idx].trace.steps[step_idx].decoded
                                && let DecodedTraceStep::InternalCall(_, end_step_idx) = &**decoded
                            {
                                internal_calls.push((item_idx, remove, *end_step_idx));
                                // we decide if we should remove it later
                                remove = false;
                            }
                            // Handle ends of internal calls
                            internal_calls.retain(|(start_item_idx, remove_start, end_idx)| {
                                if *end_idx != step_idx {
                                    return true;
                                }
                                // only remove start if end should be removed as well
                                if *remove_start && remove {
                                    items_to_remove.insert(*start_item_idx);
                                } else {
                                    remove = false;
                                }

                                false
                            });
                        }
                        _ => {}
                    }

                    if remove {
                        items_to_remove.insert(item_idx);
                    }

                    if let Some((end_node, end_step_idx)) = cur_ignore_end
                        && node_idx == *end_node
                        && item_idx == *end_step_idx
                    {
                        *cur_ignore_end = None;
                    }
                }

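                // `item_idx` is offset by one for the dummy `None` item prepended to the
                // ordering above, and by `offset` for every item already removed.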
                for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
                    nodes[node_idx].ordering.remove(item_idx - offset - 1);
                }
            }

            clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);

            Cow::Owned(arena)
        }
    }
}

impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}

impl DerefMut for SparsedTraceArena {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}

/// Decode a collection of call traces.
///
/// The traces will be decoded using the given decoder, if possible.
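///
/// # Examples
///
/// A minimal sketch, assuming `arena` is a [`CallTraceArena`]; the decoder
/// construction shown is illustrative:
///
/// ```ignore
/// let decoder = CallTraceDecoderBuilder::new().build();
/// decode_trace_arena(&mut arena, &decoder).await;
/// ```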
pub async fn decode_trace_arena(arena: &mut CallTraceArena, decoder: &CallTraceDecoder) {
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;
}

/// Render a collection of call traces to a string.
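///
/// # Examples
///
/// A minimal sketch, assuming `arena` is a decoded [`SparsedTraceArena`]:
///
/// ```ignore
/// let rendered = render_trace_arena(&arena);
/// println!("{rendered}");
/// ```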
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}

/// Render a collection of call traces to a string, optionally including contract creation
/// bytecodes and storage changes. Renders JSON when the shell is in JSON mode.
pub fn render_trace_arena_inner(
    arena: &SparsedTraceArena,
    with_bytecodes: bool,
    with_storage_changes: bool,
) -> String {
    if shell::is_json() {
        return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces");
    }

    let mut w = TraceWriter::new(Vec::<u8>::new())
        .color_cheatcodes(true)
        .use_colors(convert_color_choice(shell::color_choice()))
        .write_bytecodes(with_bytecodes)
        .with_storage_changes(with_storage_changes);
    w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
    String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
}

fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
    match choice {
        shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
        shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
        shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
    }
}

/// Specifies the kind of trace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    Deployment,
    Setup,
    Execution,
}

impl TraceKind {
    /// Returns `true` if the trace kind is [`Deployment`].
    ///
    /// [`Deployment`]: TraceKind::Deployment
    #[must_use]
    pub fn is_deployment(self) -> bool {
        matches!(self, Self::Deployment)
    }

    /// Returns `true` if the trace kind is [`Setup`].
    ///
    /// [`Setup`]: TraceKind::Setup
    #[must_use]
    pub fn is_setup(self) -> bool {
        matches!(self, Self::Setup)
    }

    /// Returns `true` if the trace kind is [`Execution`].
    ///
    /// [`Execution`]: TraceKind::Execution
    #[must_use]
    pub fn is_execution(self) -> bool {
        matches!(self, Self::Execution)
    }
}

/// Given a list of traces and known artifacts, returns a map connecting each identified
/// address to its contract name and ABI.
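///
/// # Examples
///
/// A minimal sketch, assuming `traces` is a [`Traces`] value and `known_contracts`
/// holds the project artifacts:
///
/// ```ignore
/// let by_address =
///     load_contracts(traces.iter().map(|(_, arena)| &arena.arena), &known_contracts);
/// ```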
pub fn load_contracts<'a>(
    traces: impl IntoIterator<Item = &'a CallTraceArena>,
    known_contracts: &ContractsByArtifact,
) -> ContractsByAddress {
    let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
    let decoder = CallTraceDecoder::new();
    let mut contracts = ContractsByAddress::new();
    for trace in traces {
        for address in decoder.identify_addresses(trace, &mut local_identifier) {
            if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
                contracts.insert(address.address, (contract, abi.into_owned()));
            }
        }
    }
    contracts
}

/// Different modes of internal function tracing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
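    /// Internal functions are not traced.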
    #[default]
    None,
    /// Traces internal functions without decoding inputs/outputs from memory.
    Simple,
    /// Same as `Simple`, but also tracks memory snapshots.
    Full,
}

impl From<InternalTraceMode> for TraceMode {
    fn from(mode: InternalTraceMode) -> Self {
        match mode {
            InternalTraceMode::None => Self::None,
            InternalTraceMode::Simple => Self::JumpSimple,
            InternalTraceMode::Full => Self::Jump,
        }
    }
}

/// Different kinds of traces used by different foundry components.
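///
/// Variants are ordered from least to most detailed; the derived `Ord` lets the
/// `with_*` helpers below upgrade the mode via `max` without ever downgrading it:
///
/// ```ignore
/// assert_eq!(TraceMode::Call.with_verbosity(3), TraceMode::Steps);
/// assert_eq!(TraceMode::Steps.with_debug(true), TraceMode::Debug);
/// ```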
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Disabled tracing.
    #[default]
    None,
    /// Simple call trace, no steps tracing required.
    Call,
    /// Call trace with steps tracing for JUMP and JUMPDEST opcodes.
    ///
    /// Does not enable tracking memory or stack snapshots.
    Steps,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Used for internal function identification. Does not track memory snapshots.
    JumpSimple,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Same as `JumpSimple`, but tracks memory snapshots as well.
    Jump,
    /// Call trace with complete steps tracing.
    ///
    /// Used by the debugger.
    Debug,
    /// Debug trace with storage changes.
    RecordStateDiff,
}

impl TraceMode {
    pub const fn is_none(self) -> bool {
        matches!(self, Self::None)
    }

    pub const fn is_call(self) -> bool {
        matches!(self, Self::Call)
    }

    pub const fn is_steps(self) -> bool {
        matches!(self, Self::Steps)
    }

    pub const fn is_jump_simple(self) -> bool {
        matches!(self, Self::JumpSimple)
    }

    pub const fn is_jump(self) -> bool {
        matches!(self, Self::Jump)
    }

    pub const fn record_state_diff(self) -> bool {
        matches!(self, Self::RecordStateDiff)
    }

    pub const fn is_debug(self) -> bool {
        matches!(self, Self::Debug)
    }

    pub fn with_debug(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::Debug) } else { self }
    }

    pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
        std::cmp::max(self, mode.into())
    }

    pub fn with_state_changes(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::RecordStateDiff) } else { self }
    }

    pub fn with_verbosity(self, verbosity: u8) -> Self {
        // Enable step recording for backtraces when verbosity >= 3; this ensures
        // JUMP and JUMPDEST steps are recorded.
        if verbosity >= 3 { std::cmp::max(self, Self::Steps) } else { self }
    }

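    /// Converts this mode into a [`TracingInspectorConfig`], or `None` if tracing is
    /// disabled.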
    pub fn into_config(self) -> Option<TracingInspectorConfig> {
        if self.is_none() {
            None
        } else {
            TracingInspectorConfig {
                record_steps: self >= Self::Steps,
                record_memory_snapshots: self >= Self::Jump,
                record_stack_snapshots: if self > Self::Steps {
                    StackSnapshotType::Full
                } else {
                    StackSnapshotType::None
                },
                record_logs: true,
                record_state_diff: self.record_state_diff(),
                record_returndata_snapshots: self.is_debug(),
                record_opcodes_filter: (self.is_steps() || self.is_jump() || self.is_jump_simple())
                    .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)),
                exclude_precompile_calls: false,
                record_immediate_bytes: self.is_debug(),
            }
            .into()
        }
    }
}