1#![cfg_attr(not(test), warn(unused_crate_dependencies))]
6#![cfg_attr(docsrs, feature(doc_cfg))]
7
8#[macro_use]
9extern crate foundry_common;
10
11#[macro_use]
12extern crate tracing;
13
14use foundry_common::{
15 contracts::{ContractsByAddress, ContractsByArtifact},
16 shell,
17};
18use revm::bytecode::opcode::OpCode;
19use revm_inspectors::tracing::{
20 OpcodeFilter,
21 types::{DecodedTraceStep, TraceMemberOrder},
22};
23use serde::{Deserialize, Serialize};
24use std::{
25 borrow::Cow,
26 collections::BTreeSet,
27 ops::{Deref, DerefMut},
28};
29
30use alloy_primitives::map::HashMap;
31
32pub use revm_inspectors::tracing::{
33 CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
34 TraceWriter, TracingInspector, TracingInspectorConfig,
35 types::{
36 CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
37 DecodedCallTrace,
38 },
39};
40
41pub mod identifier;
45use identifier::LocalTraceIdentifier;
46
47mod decoder;
48pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};
49
50pub mod debug;
51pub use debug::DebugTraceIdentifier;
52
53pub mod folded_stack_trace;
54
55pub mod backtrace;
56
/// A list of trace arenas produced during execution, each tagged with the
/// [`TraceKind`] of the execution phase that produced it.
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;
58
/// A [`CallTraceArena`] in which parts of the recorded trace can be marked as
/// ignored, so they are excluded when the arena is resolved for rendering or
/// serialization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// The full, unfiltered trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace items to ignore, keyed by the `(node index, item index)`
    /// position where an ignored region starts, mapping to the
    /// `(node index, item index)` position where it ends. An item index of 0
    /// refers to the position before a node's first ordering item.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}
70
71impl SparsedTraceArena {
72 fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
74 if self.ignored.is_empty() {
75 Cow::Borrowed(&self.arena)
76 } else {
77 let mut arena = self.arena.clone();
78
79 fn clear_node(
80 nodes: &mut [CallTraceNode],
81 node_idx: usize,
82 ignored: &HashMap<(usize, usize), (usize, usize)>,
83 cur_ignore_end: &mut Option<(usize, usize)>,
84 ) {
85 let items = std::iter::once(None)
88 .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
89 .enumerate();
90
91 let mut internal_calls = Vec::new();
92 let mut items_to_remove = BTreeSet::new();
93 for (item_idx, item) in items {
94 if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
95 *cur_ignore_end = Some(*end_node);
96 }
97
98 let mut remove = cur_ignore_end.is_some() & item.is_some();
99
100 match item {
101 Some(TraceMemberOrder::Call(child_idx)) => {
103 clear_node(
104 nodes,
105 nodes[node_idx].children[child_idx],
106 ignored,
107 cur_ignore_end,
108 );
109 remove &= cur_ignore_end.is_some();
110 }
111 Some(TraceMemberOrder::Step(step_idx)) => {
113 if let Some(decoded) = &nodes[node_idx].trace.steps[step_idx].decoded
115 && let DecodedTraceStep::InternalCall(_, end_step_idx) = &**decoded
116 {
117 internal_calls.push((item_idx, remove, *end_step_idx));
118 remove = false;
120 }
121 internal_calls.retain(|(start_item_idx, remove_start, end_idx)| {
123 if *end_idx != step_idx {
124 return true;
125 }
126 if *remove_start && remove {
128 items_to_remove.insert(*start_item_idx);
129 } else {
130 remove = false;
131 }
132
133 false
134 });
135 }
136 _ => {}
137 }
138
139 if remove {
140 items_to_remove.insert(item_idx);
141 }
142
143 if let Some((end_node, end_step_idx)) = cur_ignore_end
144 && node_idx == *end_node
145 && item_idx == *end_step_idx
146 {
147 *cur_ignore_end = None;
148 }
149 }
150
151 for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
152 nodes[node_idx].ordering.remove(item_idx - offset - 1);
153 }
154 }
155
156 clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);
157
158 Cow::Owned(arena)
159 }
160 }
161}
162
impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    // NOTE: derefs to the full, unfiltered arena; ignored ranges are only
    // applied by `resolve_arena`.
    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}
170
impl DerefMut for SparsedTraceArena {
    // Mutable access to the full, unfiltered arena.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}
176
/// Decodes a trace arena in place using the given decoder.
///
/// Signatures for all nodes are prefetched first, then the decoded data is
/// populated into the arena's traces.
pub async fn decode_trace_arena(arena: &mut CallTraceArena, decoder: &CallTraceDecoder) {
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;
}
184
/// Renders a trace arena to a string, without bytecodes or storage changes.
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}
189
190pub fn prune_trace_depth(arena: &mut CallTraceArena, depth: usize) {
192 for node in arena.nodes_mut() {
193 if node.trace.depth >= depth {
194 node.ordering.clear();
195 }
196 }
197}
198
199pub fn render_trace_arena_inner(
202 arena: &SparsedTraceArena,
203 with_bytecodes: bool,
204 with_storage_changes: bool,
205) -> String {
206 if shell::is_json() {
207 return serde_json::to_string(&arena.resolve_arena()).expect("Failed to serialize traces");
208 }
209
210 let mut w = TraceWriter::new(Vec::<u8>::new())
211 .color_cheatcodes(true)
212 .use_colors(convert_color_choice(shell::color_choice()))
213 .write_bytecodes(with_bytecodes)
214 .with_storage_changes(with_storage_changes);
215 w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
216 String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
217}
218
219fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
220 match choice {
221 shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
222 shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
223 shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
224 }
225}
226
/// The kind of execution phase that produced a trace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    /// Trace from a contract deployment.
    Deployment,
    /// Trace from the setup phase.
    Setup,
    /// Trace from the main execution phase.
    Execution,
}
234
235impl TraceKind {
236 #[must_use]
240 pub fn is_deployment(self) -> bool {
241 matches!(self, Self::Deployment)
242 }
243
244 #[must_use]
248 pub fn is_setup(self) -> bool {
249 matches!(self, Self::Setup)
250 }
251
252 #[must_use]
256 pub fn is_execution(self) -> bool {
257 matches!(self, Self::Execution)
258 }
259}
260
261pub fn load_contracts<'a>(
263 traces: impl IntoIterator<Item = &'a CallTraceArena>,
264 known_contracts: &ContractsByArtifact,
265) -> ContractsByAddress {
266 let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
267 let decoder = CallTraceDecoder::new();
268 let mut contracts = ContractsByAddress::new();
269 for trace in traces {
270 for address in decoder.identify_addresses(trace, &mut local_identifier) {
271 if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
272 contracts.insert(address.address, (contract, abi.into_owned()));
273 }
274 }
275 }
276 contracts
277}
278
/// How to decode internal (non-external) function calls during tracing.
///
/// Ordered from least to most detail; converted into the corresponding
/// [`TraceMode`] via the `From` impl below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
    /// Do not decode internal calls.
    #[default]
    None,
    /// Decode internal calls simply (maps to [`TraceMode::JumpSimple`]).
    Simple,
    /// Decode internal calls fully (maps to [`TraceMode::Jump`]).
    Full,
}
289
290impl From<InternalTraceMode> for TraceMode {
291 fn from(mode: InternalTraceMode) -> Self {
292 match mode {
293 InternalTraceMode::None => Self::None,
294 InternalTraceMode::Simple => Self::JumpSimple,
295 InternalTraceMode::Full => Self::Jump,
296 }
297 }
298}
299
/// What to record while tracing.
///
/// Variants are declared from least to most detail, and the derived `Ord` is
/// used to combine modes (see the `with_*` methods and `std::cmp::max` calls
/// below) — the declaration order is therefore significant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Record no traces.
    #[default]
    None,
    /// Record only calls (no opcode steps).
    Call,
    /// Record calls and opcode steps.
    Steps,
    /// Record the data required for simple internal-call decoding
    /// (JUMP/JUMPDEST opcodes only — see `into_config`).
    JumpSimple,
    /// Like `JumpSimple`, but additionally records memory snapshots.
    Jump,
    /// Record everything needed by the debugger: full stack and memory
    /// snapshots, returndata snapshots, and immediate bytes.
    Debug,
    /// Record state diffs; steps are recorded without memory/stack snapshots
    /// and without an opcode filter.
    RecordStateDiff,
}
330
impl TraceMode {
    /// Returns `true` if tracing is disabled.
    pub const fn is_none(self) -> bool {
        matches!(self, Self::None)
    }

    /// Returns `true` if only calls are recorded.
    pub const fn is_call(self) -> bool {
        matches!(self, Self::Call)
    }

    /// Returns `true` if calls and steps are recorded.
    pub const fn is_steps(self) -> bool {
        matches!(self, Self::Steps)
    }

    /// Returns `true` if simple internal-call decoding is enabled.
    pub const fn is_jump_simple(self) -> bool {
        matches!(self, Self::JumpSimple)
    }

    /// Returns `true` if full internal-call decoding is enabled.
    pub const fn is_jump(self) -> bool {
        matches!(self, Self::Jump)
    }

    /// Returns `true` if state diffs are recorded.
    pub const fn record_state_diff(self) -> bool {
        matches!(self, Self::RecordStateDiff)
    }

    /// Returns `true` if debug tracing is enabled.
    pub const fn is_debug(self) -> bool {
        matches!(self, Self::Debug)
    }

    /// Upgrades the mode to at least [`Self::Debug`] if `yes` is set.
    /// Never downgrades (uses `max` over the variant ordering).
    pub fn with_debug(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::Debug) } else { self }
    }

    /// Upgrades the mode to at least the level required by the given
    /// internal-decoding preference. Never downgrades.
    pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
        std::cmp::max(self, mode.into())
    }

    /// Upgrades the mode to at least [`Self::RecordStateDiff`] if `yes` is set.
    /// Never downgrades.
    pub fn with_state_changes(self, yes: bool) -> Self {
        if yes { std::cmp::max(self, Self::RecordStateDiff) } else { self }
    }

    /// Upgrades the mode based on the verbosity level:
    /// - 0..=2: unchanged
    /// - 3..=4: at least [`Self::Call`]
    /// - 5+: at least [`Self::RecordStateDiff`]
    ///
    /// Never downgrades an already higher mode.
    pub fn with_verbosity(self, verbosity: u8) -> Self {
        match verbosity {
            0..3 => self,
            3..=4 => std::cmp::max(self, Self::Call),
            _ => std::cmp::max(self, Self::RecordStateDiff),
        }
    }

    /// Converts this mode into a [`TracingInspectorConfig`], or `None` when
    /// tracing is disabled entirely.
    pub fn into_config(self) -> Option<TracingInspectorConfig> {
        if self.is_none() {
            None
        } else {
            // State-diff recording only needs plain steps: downgrade the
            // effective detail level so snapshots and filters are chosen as if
            // the mode were `Steps`.
            let effective = if self.record_state_diff() { Self::Steps } else { self };
            TracingInspectorConfig {
                // Steps are recorded for `Steps` and every higher mode,
                // including `RecordStateDiff` (compared via variant ordering).
                record_steps: self >= Self::Steps,
                record_memory_snapshots: effective >= Self::Jump,
                record_stack_snapshots: if effective > Self::Steps {
                    StackSnapshotType::Full
                } else {
                    StackSnapshotType::None
                },
                record_logs: true,
                record_state_diff: self.record_state_diff(),
                record_returndata_snapshots: effective.is_debug(),
                // State diffs need all opcodes (no filter); otherwise restrict
                // step recording to JUMP/JUMPDEST for internal-call decoding.
                record_opcodes_filter: if self.record_state_diff() {
                    None
                } else {
                    (effective.is_steps() || effective.is_jump() || effective.is_jump_simple())
                        .then(|| {
                            OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)
                        })
                },
                exclude_precompile_calls: false,
                record_immediate_bytes: effective.is_debug(),
            }
            .into()
        }
    }
}
418
#[cfg(test)]
mod tests {
    use super::*;

    // Low verbosity must never change the mode.
    #[test]
    fn verbosity_0_through_2_is_noop() {
        for v in 0..=2 {
            assert_eq!(TraceMode::None.with_verbosity(v), TraceMode::None, "v={v}");
            assert_eq!(TraceMode::Call.with_verbosity(v), TraceMode::Call, "v={v}");
            assert_eq!(TraceMode::Debug.with_verbosity(v), TraceMode::Debug, "v={v}");
        }
    }

    // Verbosity 3-4 raises the mode to at least `Call`, never lowering
    // already-higher modes.
    #[test]
    fn verbosity_3_and_4_raises_to_call() {
        for v in 3..=4 {
            assert_eq!(TraceMode::None.with_verbosity(v), TraceMode::Call, "v={v}");
            assert_eq!(TraceMode::Debug.with_verbosity(v), TraceMode::Debug, "v={v}");
            assert_eq!(
                TraceMode::RecordStateDiff.with_verbosity(v),
                TraceMode::RecordStateDiff,
                "v={v}"
            );
        }
    }

    // Verbosity 5+ raises every mode to `RecordStateDiff` (the top variant).
    #[test]
    fn verbosity_5_raises_to_record_state_diff() {
        assert_eq!(TraceMode::None.with_verbosity(5), TraceMode::RecordStateDiff);
        assert_eq!(TraceMode::Call.with_verbosity(5), TraceMode::RecordStateDiff);
        assert_eq!(TraceMode::Steps.with_verbosity(5), TraceMode::RecordStateDiff);
        assert_eq!(TraceMode::Debug.with_verbosity(5), TraceMode::RecordStateDiff);
        assert_eq!(TraceMode::RecordStateDiff.with_verbosity(5), TraceMode::RecordStateDiff);
    }

    // `None` produces no inspector config at all.
    #[test]
    fn config_at_verbosity_0_is_none() {
        let mode = TraceMode::None.with_verbosity(0);
        assert!(mode.into_config().is_none());
    }

    // `Call` mode records calls and logs but no steps or diffs.
    #[test]
    fn config_at_verbosity_3_records_calls_only() {
        let cfg = TraceMode::None.with_verbosity(3).into_config().unwrap();
        assert!(!cfg.record_steps, "verbosity 3 should not record steps");
        assert!(!cfg.record_state_diff, "verbosity 3 should not record state diff");
        assert!(cfg.record_logs, "verbosity 3 should record logs");
    }

    // `RecordStateDiff` records steps and diffs but skips the expensive
    // memory/stack snapshots and the opcode filter.
    #[test]
    fn config_at_verbosity_5_records_steps_and_state_diff() {
        let cfg = TraceMode::None.with_verbosity(5).into_config().unwrap();
        assert!(cfg.record_steps, "verbosity 5 must record steps for backtraces");
        assert!(cfg.record_state_diff, "verbosity 5 must record state diff");
        assert!(cfg.record_logs, "verbosity 5 must record logs");
        assert!(!cfg.record_memory_snapshots, "verbosity 5 should not record memory snapshots");
        assert_eq!(
            cfg.record_stack_snapshots,
            StackSnapshotType::None,
            "verbosity 5 should not record stack snapshots"
        );
        assert!(
            cfg.record_opcodes_filter.is_none(),
            "verbosity 5 needs unfiltered opcodes for state diff"
        );
    }

    // `Debug` keeps the full-detail config: snapshots, returndata, immediates.
    #[test]
    fn config_debug_mode_unchanged() {
        let cfg = TraceMode::Debug.into_config().unwrap();
        assert!(cfg.record_steps);
        assert!(cfg.record_memory_snapshots, "Debug must record memory snapshots");
        assert_eq!(
            cfg.record_stack_snapshots,
            StackSnapshotType::Full,
            "Debug must record full stack snapshots"
        );
        assert!(cfg.record_returndata_snapshots, "Debug must record returndata");
        assert!(cfg.record_immediate_bytes, "Debug must record immediate bytes");
        assert!(cfg.record_opcodes_filter.is_none(), "Debug must record all opcodes (no filter)");
        assert!(!cfg.record_state_diff, "Debug alone should not record state diff");
    }
}