// foundry_evm_traces/lib.rs

1//! # foundry-evm-traces
2//!
3//! EVM trace identifying and decoding.
4
5#![cfg_attr(not(test), warn(unused_crate_dependencies))]
6#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
7
8#[macro_use]
9extern crate foundry_common;
10
11#[macro_use]
12extern crate tracing;
13
14use foundry_common::{
15    contracts::{ContractsByAddress, ContractsByArtifact},
16    shell,
17};
18use revm::interpreter::OpCode;
19use revm_inspectors::tracing::{
20    types::{DecodedTraceStep, TraceMemberOrder},
21    OpcodeFilter,
22};
23use serde::{Deserialize, Serialize};
24use std::{
25    borrow::Cow,
26    collections::BTreeSet,
27    ops::{Deref, DerefMut},
28};
29
30use alloy_primitives::map::HashMap;
31
32pub use revm_inspectors::tracing::{
33    types::{
34        CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
35        DecodedCallTrace,
36    },
37    CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
38    TraceWriter, TracingInspector, TracingInspectorConfig,
39};
40
41/// Call trace address identifiers.
42///
43/// Identifiers figure out what ABIs and labels belong to all the addresses of the trace.
44pub mod identifier;
45use identifier::{LocalTraceIdentifier, TraceIdentifier};
46
47mod decoder;
48pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};
49
50pub mod debug;
51pub use debug::DebugTraceIdentifier;
52
53pub mod folded_stack_trace;
54
/// A collection of traces, each tagged with the [`TraceKind`] that produced it.
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;
56
/// Trace arena keeping track of ignored trace items.
///
/// The ignored ranges are applied lazily: `resolve_arena` produces a pruned
/// copy of `arena` only when `ignored` is non-empty.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// Full trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace steps to ignore in format (start_node, start_step) -> (end_node, end_step).
    /// See `foundry_cheatcodes::utils::IgnoredTraces` for more information.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}
68
69impl SparsedTraceArena {
70    /// Goes over entire trace arena and removes ignored trace items.
71    fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
72        if self.ignored.is_empty() {
73            Cow::Borrowed(&self.arena)
74        } else {
75            let mut arena = self.arena.clone();
76
77            fn clear_node(
78                nodes: &mut [CallTraceNode],
79                node_idx: usize,
80                ignored: &HashMap<(usize, usize), (usize, usize)>,
81                cur_ignore_end: &mut Option<(usize, usize)>,
82            ) {
83                // Prepend an additional None item to the ordering to handle the beginning of the
84                // trace.
85                let items = std::iter::once(None)
86                    .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
87                    .enumerate();
88
89                let mut iternal_calls = Vec::new();
90                let mut items_to_remove = BTreeSet::new();
91                for (item_idx, item) in items {
92                    if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
93                        *cur_ignore_end = Some(*end_node);
94                    }
95
96                    let mut remove = cur_ignore_end.is_some() & item.is_some();
97
98                    match item {
99                        // we only remove calls if they did not start/pause tracing
100                        Some(TraceMemberOrder::Call(child_idx)) => {
101                            clear_node(
102                                nodes,
103                                nodes[node_idx].children[child_idx],
104                                ignored,
105                                cur_ignore_end,
106                            );
107                            remove &= cur_ignore_end.is_some();
108                        }
109                        // we only remove decoded internal calls if they did not start/pause tracing
110                        Some(TraceMemberOrder::Step(step_idx)) => {
111                            // If this is an internal call beginning, track it in `iternal_calls`
112                            if let Some(DecodedTraceStep::InternalCall(_, end_step_idx)) =
113                                &nodes[node_idx].trace.steps[step_idx].decoded
114                            {
115                                iternal_calls.push((item_idx, remove, *end_step_idx));
116                                // we decide if we should remove it later
117                                remove = false;
118                            }
119                            // Handle ends of internal calls
120                            iternal_calls.retain(|(start_item_idx, remove_start, end_step_idx)| {
121                                if *end_step_idx != step_idx {
122                                    return true;
123                                }
124                                // only remove start if end should be removed as well
125                                if *remove_start && remove {
126                                    items_to_remove.insert(*start_item_idx);
127                                } else {
128                                    remove = false;
129                                }
130
131                                false
132                            });
133                        }
134                        _ => {}
135                    }
136
137                    if remove {
138                        items_to_remove.insert(item_idx);
139                    }
140
141                    if let Some((end_node, end_step_idx)) = cur_ignore_end {
142                        if node_idx == *end_node && item_idx == *end_step_idx {
143                            *cur_ignore_end = None;
144                        }
145                    }
146                }
147
148                for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
149                    nodes[node_idx].ordering.remove(item_idx - offset - 1);
150                }
151            }
152
153            clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);
154
155            Cow::Owned(arena)
156        }
157    }
158}
159
impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    // Derefs to the full, unpruned arena; ignored ranges are not applied here.
    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}
167
impl DerefMut for SparsedTraceArena {
    // Mutable access to the full, unpruned arena.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}
173
/// Decode a collection of call traces.
///
/// The traces will be decoded using the given decoder, if possible.
///
/// NOTE(review): the returned `Result` is currently always `Ok(())`; the error
/// type appears to be kept for caller compatibility — confirm before relying on it.
pub async fn decode_trace_arena(
    arena: &mut CallTraceArena,
    decoder: &CallTraceDecoder,
) -> Result<(), std::fmt::Error> {
    // Prefetch signatures for all nodes first, then decode each node in place.
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;

    Ok(())
}
186
/// Render a collection of call traces to a string.
///
/// Shorthand for [`render_trace_arena_inner`] with bytecodes and storage
/// changes both disabled.
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}
191
192/// Render a collection of call traces to a string optionally including contract creation bytecodes
193/// and in JSON format.
194pub fn render_trace_arena_inner(
195    arena: &SparsedTraceArena,
196    with_bytecodes: bool,
197    with_storage_changes: bool,
198) -> String {
199    if shell::is_json() {
200        return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces");
201    }
202
203    let mut w = TraceWriter::new(Vec::<u8>::new())
204        .color_cheatcodes(true)
205        .use_colors(convert_color_choice(shell::color_choice()))
206        .write_bytecodes(with_bytecodes)
207        .with_storage_changes(with_storage_changes);
208    w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
209    String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
210}
211
212fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
213    match choice {
214        shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
215        shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
216        shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
217    }
218}
219
/// Specifies the kind of trace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    /// A contract deployment trace.
    Deployment,
    /// A setup-phase trace.
    Setup,
    /// A main-execution trace.
    Execution,
}
227
228impl TraceKind {
229    /// Returns `true` if the trace kind is [`Deployment`].
230    ///
231    /// [`Deployment`]: TraceKind::Deployment
232    #[must_use]
233    pub fn is_deployment(self) -> bool {
234        matches!(self, Self::Deployment)
235    }
236
237    /// Returns `true` if the trace kind is [`Setup`].
238    ///
239    /// [`Setup`]: TraceKind::Setup
240    #[must_use]
241    pub fn is_setup(self) -> bool {
242        matches!(self, Self::Setup)
243    }
244
245    /// Returns `true` if the trace kind is [`Execution`].
246    ///
247    /// [`Execution`]: TraceKind::Execution
248    #[must_use]
249    pub fn is_execution(self) -> bool {
250        matches!(self, Self::Execution)
251    }
252}
253
254/// Given a list of traces and artifacts, it returns a map connecting address to abi
255pub fn load_contracts<'a>(
256    traces: impl IntoIterator<Item = &'a CallTraceArena>,
257    known_contracts: &ContractsByArtifact,
258) -> ContractsByAddress {
259    let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
260    let decoder = CallTraceDecoder::new();
261    let mut contracts = ContractsByAddress::new();
262    for trace in traces {
263        for address in local_identifier.identify_addresses(decoder.trace_addresses(trace)) {
264            if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
265                contracts.insert(address.address, (contract, abi.into_owned()));
266            }
267        }
268    }
269    contracts
270}
271
/// Different kinds of internal functions tracing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
    /// Internal functions tracing disabled.
    #[default]
    None,
    /// Traces internal functions without decoding inputs/outputs from memory.
    Simple,
    /// Same as `Simple`, but also tracks memory snapshots.
    Full,
}
282
283impl From<InternalTraceMode> for TraceMode {
284    fn from(mode: InternalTraceMode) -> Self {
285        match mode {
286            InternalTraceMode::None => Self::None,
287            InternalTraceMode::Simple => Self::JumpSimple,
288            InternalTraceMode::Full => Self::Jump,
289        }
290    }
291}
292
/// Different kinds of traces used by different foundry components.
///
/// Variant order matters: [`Ord`] is derived and the `with_*` helpers upgrade
/// the mode via `max`, so later variants represent strictly more tracing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Disabled tracing.
    #[default]
    None,
    /// Simple call trace, no steps tracing required.
    Call,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Used for internal functions identification. Does not track memory snapshots.
    JumpSimple,
    /// Call trace with tracing for JUMP and JUMPDEST opcode steps.
    ///
    /// Same as `JumpSimple`, but tracks memory snapshots as well.
    Jump,
    /// Call trace with complete steps tracing.
    ///
    /// Used by debugger.
    Debug,
    /// Debug trace with storage changes.
    RecordStateDiff,
}
316
317impl TraceMode {
318    pub const fn is_none(self) -> bool {
319        matches!(self, Self::None)
320    }
321
322    pub const fn is_call(self) -> bool {
323        matches!(self, Self::Call)
324    }
325
326    pub const fn is_jump_simple(self) -> bool {
327        matches!(self, Self::JumpSimple)
328    }
329
330    pub const fn is_jump(self) -> bool {
331        matches!(self, Self::Jump)
332    }
333
334    pub const fn record_state_diff(self) -> bool {
335        matches!(self, Self::RecordStateDiff)
336    }
337
338    pub const fn is_debug(self) -> bool {
339        matches!(self, Self::Debug)
340    }
341
342    pub fn with_debug(self, yes: bool) -> Self {
343        if yes {
344            std::cmp::max(self, Self::Debug)
345        } else {
346            self
347        }
348    }
349
350    pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
351        std::cmp::max(self, mode.into())
352    }
353
354    pub fn with_state_changes(self, yes: bool) -> Self {
355        if yes {
356            std::cmp::max(self, Self::RecordStateDiff)
357        } else {
358            self
359        }
360    }
361
362    pub fn with_verbosity(self, verbosity: u8) -> Self {
363        if verbosity >= 3 {
364            std::cmp::max(self, Self::Call)
365        } else {
366            self
367        }
368    }
369
370    pub fn into_config(self) -> Option<TracingInspectorConfig> {
371        if self.is_none() {
372            None
373        } else {
374            TracingInspectorConfig {
375                record_steps: self >= Self::JumpSimple,
376                record_memory_snapshots: self >= Self::Jump,
377                record_stack_snapshots: if self >= Self::JumpSimple {
378                    StackSnapshotType::Full
379                } else {
380                    StackSnapshotType::None
381                },
382                record_logs: true,
383                record_state_diff: self.record_state_diff(),
384                record_returndata_snapshots: self.is_debug(),
385                record_opcodes_filter: (self.is_jump() || self.is_jump_simple())
386                    .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)),
387                exclude_precompile_calls: false,
388                record_immediate_bytes: self.is_debug(),
389            }
390            .into()
391        }
392    }
393}