foundry_evm_traces/
lib.rs

//! # foundry-evm-traces
//!
//! EVM trace identifying and decoding.

#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]

#[macro_use]
extern crate foundry_common;

#[macro_use]
extern crate tracing;

use foundry_common::{
    contracts::{ContractsByAddress, ContractsByArtifact},
    shell,
};
use revm::bytecode::opcode::OpCode;
use revm_inspectors::tracing::{
    OpcodeFilter,
    types::{DecodedTraceStep, TraceMemberOrder},
};
use serde::{Deserialize, Serialize};
use std::{
    borrow::Cow,
    collections::BTreeSet,
    ops::{Deref, DerefMut},
};

use alloy_primitives::map::HashMap;

pub use revm_inspectors::tracing::{
    CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
    TraceWriter, TracingInspector, TracingInspectorConfig,
    types::{
        CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
        DecodedCallTrace,
    },
};

/// Call trace address identifiers.
///
/// Identifiers determine which ABIs and labels belong to the addresses of a trace.
pub mod identifier;
use identifier::LocalTraceIdentifier;

mod decoder;
pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};

pub mod debug;
pub use debug::DebugTraceIdentifier;

pub mod folded_stack_trace;

/// A list of trace arenas, each paired with the kind of trace it was recorded for.
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;

/// Trace arena keeping track of ignored trace items.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// Full trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace steps to ignore, in the format
    /// `(start_node, start_step) -> (end_node, end_step)`.
    /// See `foundry_cheatcodes::utils::IgnoredTraces` for more information.
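    ///
    /// For example, a hypothetical entry `(0, 3) -> (0, 7)` ignores everything between
    /// item 3 and item 7 of node 0's ordering; the start and end may also live in
    /// different nodes.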
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}

impl SparsedTraceArena {
    /// Goes over the entire trace arena and removes ignored trace items.
    fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
        if self.ignored.is_empty() {
            Cow::Borrowed(&self.arena)
        } else {
            let mut arena = self.arena.clone();

            fn clear_node(
                nodes: &mut [CallTraceNode],
                node_idx: usize,
                ignored: &HashMap<(usize, usize), (usize, usize)>,
                cur_ignore_end: &mut Option<(usize, usize)>,
            ) {
                // Prepend an additional None item to the ordering to handle the beginning of the
                // trace.
                let items = std::iter::once(None)
                    .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
                    .enumerate();

                let mut internal_calls = Vec::new();
                let mut items_to_remove = BTreeSet::new();
                for (item_idx, item) in items {
                    if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
                        *cur_ignore_end = Some(*end_node);
                    }

                    let mut remove = cur_ignore_end.is_some() && item.is_some();

                    match item {
                        // Only remove calls if they did not start/pause tracing.
                        Some(TraceMemberOrder::Call(child_idx)) => {
                            clear_node(
                                nodes,
                                nodes[node_idx].children[child_idx],
                                ignored,
                                cur_ignore_end,
                            );
                            remove &= cur_ignore_end.is_some();
                        }
                        // Only remove decoded internal calls if they did not start/pause tracing.
                        Some(TraceMemberOrder::Step(step_idx)) => {
                            // If this is the beginning of an internal call, track it in
                            // `internal_calls`.
                            if let Some(DecodedTraceStep::InternalCall(_, end_step_idx)) =
                                &nodes[node_idx].trace.steps[step_idx].decoded
                            {
                                internal_calls.push((item_idx, remove, *end_step_idx));
                                // We decide whether to remove it later.
                                remove = false;
                            }
                            // Handle ends of internal calls.
                            internal_calls.retain(|(start_item_idx, remove_start, end_idx)| {
                                if *end_idx != step_idx {
                                    return true;
                                }
                                // Only remove the start if the end should be removed as well.
                                if *remove_start && remove {
                                    items_to_remove.insert(*start_item_idx);
                                } else {
                                    remove = false;
                                }

                                false
                            });
                        }
                        _ => {}
                    }

                    if remove {
                        items_to_remove.insert(item_idx);
                    }

                    if let Some((end_node, end_step_idx)) = cur_ignore_end
                        && node_idx == *end_node
                        && item_idx == *end_step_idx
                    {
                        *cur_ignore_end = None;
                    }
                }

                for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
                    nodes[node_idx].ordering.remove(item_idx - offset - 1);
                }
            }

            clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);

            Cow::Owned(arena)
        }
    }
}

impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}

impl DerefMut for SparsedTraceArena {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}

/// Decode a collection of call traces.
///
/// The traces will be decoded using the given decoder, if possible.
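///
/// # Examples
///
/// A minimal sketch (not compiled as a doc-test), assuming `inspector` is a
/// [`TracingInspector`] from a finished run:
///
/// ```ignore
/// let mut arena: CallTraceArena = inspector.into_traces();
/// let decoder = CallTraceDecoderBuilder::new().build();
/// decode_trace_arena(&mut arena, &decoder).await;
/// ```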
pub async fn decode_trace_arena(arena: &mut CallTraceArena, decoder: &CallTraceDecoder) {
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;
}

/// Render a collection of call traces to a string.
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}

/// Render a collection of call traces to a string, optionally including contract creation
/// bytecodes and storage changes. Renders as JSON when the shell is in JSON mode.
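///
/// # Examples
///
/// A minimal sketch, assuming `arena` holds the traces of a finished test run:
///
/// ```ignore
/// let rendered = render_trace_arena_inner(&arena, false, true);
/// println!("{rendered}");
/// ```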
pub fn render_trace_arena_inner(
    arena: &SparsedTraceArena,
    with_bytecodes: bool,
    with_storage_changes: bool,
) -> String {
    if shell::is_json() {
        return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces");
    }

    let mut w = TraceWriter::new(Vec::<u8>::new())
        .color_cheatcodes(true)
        .use_colors(convert_color_choice(shell::color_choice()))
        .write_bytecodes(with_bytecodes)
        .with_storage_changes(with_storage_changes);
    w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
    String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
}

fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
    match choice {
        shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
        shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
        shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
    }
}

/// Specifies the kind of trace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    Deployment,
    Setup,
    Execution,
}

impl TraceKind {
    /// Returns `true` if the trace kind is [`Deployment`].
    ///
    /// [`Deployment`]: TraceKind::Deployment
    #[must_use]
    pub fn is_deployment(self) -> bool {
        matches!(self, Self::Deployment)
    }

    /// Returns `true` if the trace kind is [`Setup`].
    ///
    /// [`Setup`]: TraceKind::Setup
    #[must_use]
    pub fn is_setup(self) -> bool {
        matches!(self, Self::Setup)
    }

    /// Returns `true` if the trace kind is [`Execution`].
    ///
    /// [`Execution`]: TraceKind::Execution
    #[must_use]
    pub fn is_execution(self) -> bool {
        matches!(self, Self::Execution)
    }
}
/// Given a list of traces and artifacts, returns a map connecting addresses to ABIs.
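///
/// # Examples
///
/// A minimal sketch, assuming `traces: Traces` and `known_contracts` come from a prior run:
///
/// ```ignore
/// let arenas = traces.iter().map(|(_, arena)| &arena.arena);
/// let contracts = load_contracts(arenas, &known_contracts);
/// for (address, (name, _abi)) in &contracts {
///     println!("{address}: {name}");
/// }
/// ```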
pub fn load_contracts<'a>(
    traces: impl IntoIterator<Item = &'a CallTraceArena>,
    known_contracts: &ContractsByArtifact,
) -> ContractsByAddress {
    let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
    let decoder = CallTraceDecoder::new();
    let mut contracts = ContractsByAddress::new();
    for trace in traces {
        for address in decoder.identify_addresses(trace, &mut local_identifier) {
            if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
                contracts.insert(address.address, (contract, abi.into_owned()));
            }
        }
    }
    contracts
}

/// Different modes of internal function tracing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
    /// Internal function tracing disabled.
    #[default]
    None,
    /// Traces internal functions without decoding inputs/outputs from memory.
    Simple,
    /// Same as `Simple`, but also tracks memory snapshots.
    Full,
}

impl From<InternalTraceMode> for TraceMode {
    fn from(mode: InternalTraceMode) -> Self {
        match mode {
            InternalTraceMode::None => Self::None,
            InternalTraceMode::Simple => Self::JumpSimple,
            InternalTraceMode::Full => Self::Jump,
        }
    }
}

/// Different kinds of traces used by different Foundry components.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Tracing disabled.
    #[default]
    None,
    /// Simple call trace, no step tracing required.
    Call,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Used for identifying internal functions. Does not track memory snapshots.
    JumpSimple,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Same as `JumpSimple`, but tracks memory snapshots as well.
    Jump,
    /// Call trace with complete step tracing.
    ///
    /// Used by the debugger.
    Debug,
    /// Debug trace with storage changes.
    RecordStateDiff,
}

impl TraceMode {
    pub const fn is_none(self) -> bool {
        matches!(self, Self::None)
    }

    pub const fn is_call(self) -> bool {
        matches!(self, Self::Call)
    }

    pub const fn is_jump_simple(self) -> bool {
        matches!(self, Self::JumpSimple)
    }

    pub const fn is_jump(self) -> bool {
        matches!(self, Self::Jump)
    }

    pub const fn record_state_diff(self) -> bool {
        matches!(self, Self::RecordStateDiff)
    }

    pub const fn is_debug(self) -> bool {
        matches!(self, Self::Debug)
    }

    /// Upgrades to [`Self::Debug`] if `yes` is true.
    pub fn with_debug(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::Debug) } else { self }
    }

    /// Upgrades to the trace mode required by the given internal trace mode.
    pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
        std::cmp::max(self, mode.into())
    }

    /// Upgrades to [`Self::RecordStateDiff`] if `yes` is true.
    pub fn with_state_changes(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::RecordStateDiff) } else { self }
    }

    /// Upgrades to at least [`Self::Call`] when verbosity is 3 or higher.
    pub fn with_verbosity(self, verbosity: u8) -> Self {
        if verbosity >= 3 { std::cmp::max(self, Self::Call) } else { self }
    }

    /// Returns the [`TracingInspectorConfig`] for this mode, or `None` if tracing is disabled.
    pub fn into_config(self) -> Option<TracingInspectorConfig> {
        if self.is_none() {
            None
        } else {
            TracingInspectorConfig {
                record_steps: self >= Self::JumpSimple,
                record_memory_snapshots: self >= Self::Jump,
                record_stack_snapshots: if self >= Self::JumpSimple {
                    StackSnapshotType::Full
                } else {
                    StackSnapshotType::None
                },
                record_logs: true,
                record_state_diff: self.record_state_diff(),
                record_returndata_snapshots: self.is_debug(),
                record_opcodes_filter: (self.is_jump() || self.is_jump_simple())
                    .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)),
                exclude_precompile_calls: false,
                record_immediate_bytes: self.is_debug(),
            }
            .into()
        }
    }
}
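
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of how the `TraceMode` builder-style combinators compose:
    // each `with_*` method upgrades `self` to the strongest mode requested so far
    // (the inputs below are hypothetical).
    #[test]
    fn trace_mode_combinators() {
        let mode = TraceMode::None
            .with_verbosity(3) // verbosity >= 3 enables at least call tracing
            .with_decode_internal(InternalTraceMode::Simple); // upgrades to `JumpSimple`
        assert_eq!(mode, TraceMode::JumpSimple);

        // `None` yields no inspector config; any other mode yields one.
        assert!(TraceMode::None.into_config().is_none());
        assert!(mode.into_config().is_some());
    }
}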