1#![cfg_attr(not(test), warn(unused_crate_dependencies))]
6#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
7
8#[macro_use]
9extern crate foundry_common;
10
11#[macro_use]
12extern crate tracing;
13
14use foundry_common::{
15 contracts::{ContractsByAddress, ContractsByArtifact},
16 shell,
17};
18use revm::bytecode::opcode::OpCode;
19use revm_inspectors::tracing::{
20 types::{DecodedTraceStep, TraceMemberOrder},
21 OpcodeFilter,
22};
23use serde::{Deserialize, Serialize};
24use std::{
25 borrow::Cow,
26 collections::BTreeSet,
27 ops::{Deref, DerefMut},
28};
29
30use alloy_primitives::map::HashMap;
31
32pub use revm_inspectors::tracing::{
33 types::{
34 CallKind, CallLog, CallTrace, CallTraceNode, DecodedCallData, DecodedCallLog,
35 DecodedCallTrace,
36 },
37 CallTraceArena, FourByteInspector, GethTraceBuilder, ParityTraceBuilder, StackSnapshotType,
38 TraceWriter, TracingInspector, TracingInspectorConfig,
39};
40
41pub mod identifier;
45use identifier::LocalTraceIdentifier;
46
47mod decoder;
48pub use decoder::{CallTraceDecoder, CallTraceDecoderBuilder};
49
50pub mod debug;
51pub use debug::DebugTraceIdentifier;
52
53pub mod folded_stack_trace;
54
/// A collection of trace arenas, each tagged with the [`TraceKind`] it was produced by.
pub type Traces = Vec<(TraceKind, SparsedTraceArena)>;
56
/// A call trace arena in which some ranges of trace items may be marked as ignored
/// ("sparsed out") and stripped before rendering or serialization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SparsedTraceArena {
    /// The full, unfiltered trace arena.
    #[serde(flatten)]
    pub arena: CallTraceArena,
    /// Ranges of trace items to hide, keyed by the `(node index, item index)` of the
    /// range start, mapping to the `(node index, item index)` of the range end.
    /// Consumed by `resolve_arena` when producing the pruned arena.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub ignored: HashMap<(usize, usize), (usize, usize)>,
}
68
impl SparsedTraceArena {
    /// Returns the arena with all ignored trace items removed.
    ///
    /// Borrows `self.arena` unchanged when nothing is ignored; otherwise clones it
    /// and prunes the ignored ranges from each node's `ordering`.
    fn resolve_arena(&self) -> Cow<'_, CallTraceArena> {
        if self.ignored.is_empty() {
            Cow::Borrowed(&self.arena)
        } else {
            let mut arena = self.arena.clone();

            // Walks the node tree depth-first, deleting ordering items that fall
            // inside an ignored range. `cur_ignore_end` carries the end marker of
            // the currently-open ignored range (if any) across the recursion.
            fn clear_node(
                nodes: &mut [CallTraceNode],
                node_idx: usize,
                ignored: &HashMap<(usize, usize), (usize, usize)>,
                cur_ignore_end: &mut Option<(usize, usize)>,
            ) {
                // The leading dummy `None` shifts `item_idx` by one relative to
                // `ordering`, so an ignore range can open at index 0, before the
                // node's first item; the removal loop below compensates with `- 1`.
                let items = std::iter::once(None)
                    .chain(nodes[node_idx].ordering.clone().into_iter().map(Some))
                    .enumerate();

                // Internal calls that are still open:
                // (start item_idx, start-was-marked-for-removal, end step idx).
                let mut iternal_calls = Vec::new();
                // Shifted item indices queued for removal from `ordering`.
                let mut items_to_remove = BTreeSet::new();
                for (item_idx, item) in items {
                    // Entering an ignored range: remember where it ends.
                    if let Some(end_node) = ignored.get(&(node_idx, item_idx)) {
                        *cur_ignore_end = Some(*end_node);
                    }

                    // By default, remove any real item while an ignore range is open.
                    let mut remove = cur_ignore_end.is_some() & item.is_some();

                    match item {
                        Some(TraceMemberOrder::Call(child_idx)) => {
                            // Recurse into the child node first: the open ignore
                            // range may close somewhere inside it.
                            clear_node(
                                nodes,
                                nodes[node_idx].children[child_idx],
                                ignored,
                                cur_ignore_end,
                            );
                            // Keep the call item if the range closed inside the child.
                            remove &= cur_ignore_end.is_some();
                        }
                        Some(TraceMemberOrder::Step(step_idx)) => {
                            // A decoded internal call spans steps up to `end_step_idx`;
                            // defer the decision on its start item until its end step.
                            if let Some(DecodedTraceStep::InternalCall(_, end_step_idx)) =
                                &nodes[node_idx].trace.steps[step_idx].decoded
                            {
                                iternal_calls.push((item_idx, remove, *end_step_idx));
                                remove = false;
                            }
                            // Close internal calls ending at this step (retain drops
                            // them): remove start and end together only when both
                            // were marked; otherwise keep both.
                            iternal_calls.retain(|(start_item_idx, remove_start, end_step_idx)| {
                                if *end_step_idx != step_idx {
                                    return true;
                                }
                                if *remove_start && remove {
                                    items_to_remove.insert(*start_item_idx);
                                } else {
                                    remove = false;
                                }

                                false
                            });
                        }
                        _ => {}
                    }

                    if remove {
                        items_to_remove.insert(item_idx);
                    }

                    // Leaving the ignored range once its recorded end is reached.
                    if let Some((end_node, end_step_idx)) = cur_ignore_end {
                        if node_idx == *end_node && item_idx == *end_step_idx {
                            *cur_ignore_end = None;
                        }
                    }
                }

                // Indices are ascending (BTreeSet), so adjust each by the number
                // already removed (`offset`) and the +1 dummy shift.
                for (offset, item_idx) in items_to_remove.into_iter().enumerate() {
                    nodes[node_idx].ordering.remove(item_idx - offset - 1);
                }
            }

            clear_node(arena.nodes_mut(), 0, &self.ignored, &mut None);

            Cow::Owned(arena)
        }
    }
}
159
/// Read access delegates to the full, unfiltered arena; ignored ranges are only
/// applied by `resolve_arena`.
impl Deref for SparsedTraceArena {
    type Target = CallTraceArena;

    fn deref(&self) -> &Self::Target {
        &self.arena
    }
}
167
/// Mutable access likewise delegates to the full, unfiltered arena.
impl DerefMut for SparsedTraceArena {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.arena
    }
}
173
/// Decodes a call trace arena in place.
///
/// First prefetches signatures for all nodes in one pass, then populates the
/// traces with the decoded data.
pub async fn decode_trace_arena(arena: &mut CallTraceArena, decoder: &CallTraceDecoder) {
    decoder.prefetch_signatures(arena.nodes()).await;
    decoder.populate_traces(arena.nodes_mut()).await;
}
181
/// Renders a trace arena to a string, without bytecodes or storage changes.
pub fn render_trace_arena(arena: &SparsedTraceArena) -> String {
    render_trace_arena_inner(arena, false, false)
}
186
187pub fn render_trace_arena_inner(
190 arena: &SparsedTraceArena,
191 with_bytecodes: bool,
192 with_storage_changes: bool,
193) -> String {
194 if shell::is_json() {
195 return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces");
196 }
197
198 let mut w = TraceWriter::new(Vec::<u8>::new())
199 .color_cheatcodes(true)
200 .use_colors(convert_color_choice(shell::color_choice()))
201 .write_bytecodes(with_bytecodes)
202 .with_storage_changes(with_storage_changes);
203 w.write_arena(&arena.resolve_arena()).expect("Failed to write traces");
204 String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8")
205}
206
207fn convert_color_choice(choice: shell::ColorChoice) -> revm_inspectors::ColorChoice {
208 match choice {
209 shell::ColorChoice::Auto => revm_inspectors::ColorChoice::Auto,
210 shell::ColorChoice::Always => revm_inspectors::ColorChoice::Always,
211 shell::ColorChoice::Never => revm_inspectors::ColorChoice::Never,
212 }
213}
214
/// The kind of execution a trace was collected from.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TraceKind {
    /// Trace of a contract deployment.
    Deployment,
    /// Trace of the setup phase.
    Setup,
    /// Trace of the main execution.
    Execution,
}
222
223impl TraceKind {
224 #[must_use]
228 pub fn is_deployment(self) -> bool {
229 matches!(self, Self::Deployment)
230 }
231
232 #[must_use]
236 pub fn is_setup(self) -> bool {
237 matches!(self, Self::Setup)
238 }
239
240 #[must_use]
244 pub fn is_execution(self) -> bool {
245 matches!(self, Self::Execution)
246 }
247}
248
249pub fn load_contracts<'a>(
251 traces: impl IntoIterator<Item = &'a CallTraceArena>,
252 known_contracts: &ContractsByArtifact,
253) -> ContractsByAddress {
254 let mut local_identifier = LocalTraceIdentifier::new(known_contracts);
255 let decoder = CallTraceDecoder::new();
256 let mut contracts = ContractsByAddress::new();
257 for trace in traces {
258 for address in decoder.identify_addresses(trace, &mut local_identifier) {
259 if let (Some(contract), Some(abi)) = (address.contract, address.abi) {
260 contracts.insert(address.address, (contract, abi.into_owned()));
261 }
262 }
263 }
264 contracts
265}
266
/// Requested decoding level for internal (intra-contract) function calls.
///
/// Converted into a [`TraceMode`] via the `From` impl below in this file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum InternalTraceMode {
    /// No internal call decoding.
    #[default]
    None,
    /// Simple decoding (corresponds to [`TraceMode::JumpSimple`]).
    Simple,
    /// Full decoding (corresponds to [`TraceMode::Jump`]).
    Full,
}
277
// Maps each internal decoding level onto the tracing mode that provides enough
// recorded data for it.
impl From<InternalTraceMode> for TraceMode {
    fn from(mode: InternalTraceMode) -> Self {
        match mode {
            InternalTraceMode::None => Self::None,
            InternalTraceMode::Simple => Self::JumpSimple,
            InternalTraceMode::Full => Self::Jump,
        }
    }
}
287
/// How much detail to record while tracing.
///
/// NOTE: variant order is significant. The derived `Ord` is relied upon by the
/// `with_*` builder methods (via `max`) and by `into_config` (via `>=`) to pick
/// the more detailed of two modes, so variants must stay ordered from least to
/// most detailed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TraceMode {
    /// Tracing disabled.
    #[default]
    None,
    /// Record calls, but no steps.
    Call,
    /// Record steps with full stack snapshots and the JUMP/JUMPDEST opcode
    /// filter, but no memory snapshots.
    JumpSimple,
    /// Like `JumpSimple`, plus memory snapshots.
    Jump,
    /// Record steps plus returndata snapshots and immediate bytes.
    Debug,
    /// Record steps and state diffs.
    RecordStateDiff,
}
311
312impl TraceMode {
313 pub const fn is_none(self) -> bool {
314 matches!(self, Self::None)
315 }
316
317 pub const fn is_call(self) -> bool {
318 matches!(self, Self::Call)
319 }
320
321 pub const fn is_jump_simple(self) -> bool {
322 matches!(self, Self::JumpSimple)
323 }
324
325 pub const fn is_jump(self) -> bool {
326 matches!(self, Self::Jump)
327 }
328
329 pub const fn record_state_diff(self) -> bool {
330 matches!(self, Self::RecordStateDiff)
331 }
332
333 pub const fn is_debug(self) -> bool {
334 matches!(self, Self::Debug)
335 }
336
337 pub fn with_debug(self, yes: bool) -> Self {
338 if yes {
339 std::cmp::max(self, Self::Debug)
340 } else {
341 self
342 }
343 }
344
345 pub fn with_decode_internal(self, mode: InternalTraceMode) -> Self {
346 std::cmp::max(self, mode.into())
347 }
348
349 pub fn with_state_changes(self, yes: bool) -> Self {
350 if yes {
351 std::cmp::max(self, Self::RecordStateDiff)
352 } else {
353 self
354 }
355 }
356
357 pub fn with_verbosity(self, verbosity: u8) -> Self {
358 if verbosity >= 3 {
359 std::cmp::max(self, Self::Call)
360 } else {
361 self
362 }
363 }
364
365 pub fn into_config(self) -> Option<TracingInspectorConfig> {
366 if self.is_none() {
367 None
368 } else {
369 TracingInspectorConfig {
370 record_steps: self >= Self::JumpSimple,
371 record_memory_snapshots: self >= Self::Jump,
372 record_stack_snapshots: if self >= Self::JumpSimple {
373 StackSnapshotType::Full
374 } else {
375 StackSnapshotType::None
376 },
377 record_logs: true,
378 record_state_diff: self.record_state_diff(),
379 record_returndata_snapshots: self.is_debug(),
380 record_opcodes_filter: (self.is_jump() || self.is_jump_simple())
381 .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)),
382 exclude_precompile_calls: false,
383 record_immediate_bytes: self.is_debug(),
384 }
385 .into()
386 }
387 }
388}