foundry_cheatcodes/evm/record_debug_step.rs
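//! Helpers for recording debug steps: flattening a recorded `CallTraceArena`
//! into an ordered list of steps and converting each step into a `DebugStep`.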
use alloy_primitives::{Bytes, U256};

use foundry_evm_traces::CallTraceArena;
use revm::interpreter::{InstructionResult, OpCode};

use foundry_evm_core::buffer::{get_buffer_accesses, BufferKind};
use revm_inspectors::tracing::types::{CallTraceStep, RecordedMemory, TraceMemberOrder};
use spec::Vm::DebugStep;

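/// Flattens the call trace arena rooted at `root` into a linear list of steps in
/// execution order, starting from the first node whose index is at least `node_start_idx`.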
pub(crate) fn flatten_call_trace(
    root: usize,
    arena: &CallTraceArena,
    node_start_idx: usize,
) -> Vec<&CallTraceStep> {
    let mut steps = Vec::new();
    let mut record_started = false;

    recursive_flatten_call_trace(root, arena, node_start_idx, &mut record_started, &mut steps);
    steps
}

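/// Depth-first walk over `arena`: once a node with index `>= node_start_idx` is visited,
/// `record_started` is flipped and every step encountered from then on is collected.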
fn recursive_flatten_call_trace<'a>(
    node_idx: usize,
    arena: &'a CallTraceArena,
    node_start_idx: usize,
    record_started: &mut bool,
    flatten_steps: &mut Vec<&'a CallTraceStep>,
) {
    // Start recording once the walk reaches the first node created at or after
    // `node_start_idx`; everything before it predates the recording.
    if !*record_started && node_idx >= node_start_idx {
        *record_started = true;
    }

    let node = &arena.nodes()[node_idx];

    for order in &node.ordering {
        match order {
            TraceMemberOrder::Step(step_idx) => {
                if *record_started {
                    let step = &node.trace.steps[*step_idx];
                    flatten_steps.push(step);
                }
            }
            TraceMemberOrder::Call(call_idx) => {
                let child_node_idx = node.children[*call_idx];
                recursive_flatten_call_trace(
                    child_node_idx,
                    arena,
                    node_start_idx,
                    record_started,
                    flatten_steps,
                );
            }
            _ => {}
        }
    }
}

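/// Converts a recorded `CallTraceStep` into a `DebugStep`, capturing the opcode, its
/// stack inputs, the memory region it reads, and whether the step ran out of gas.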
pub(crate) fn convert_call_trace_to_debug_step(step: &CallTraceStep) -> DebugStep {
    let opcode = step.op.get();
    let stack = get_stack_inputs_for_opcode(opcode, step.stack.as_ref());

    let memory = get_memory_input_for_opcode(opcode, step.stack.as_ref(), step.memory.as_ref());

    // Treat every out-of-gas variant reported by the interpreter as "out of gas".
    let is_out_of_gas = step.status == InstructionResult::OutOfGas ||
        step.status == InstructionResult::MemoryOOG ||
        step.status == InstructionResult::MemoryLimitOOG ||
        step.status == InstructionResult::PrecompileOOG ||
        step.status == InstructionResult::InvalidOperandOOG;

    DebugStep {
        stack,
        memoryInput: memory,
        opcode,
        depth: step.depth,
        isOutOfGas: is_out_of_gas,
        contractAddr: step.contract,
    }
}

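/// Returns the memory region read by `opcode` given the recorded stack and memory,
/// or empty bytes if the opcode does not read memory or nothing was recorded.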
fn get_memory_input_for_opcode(
    opcode: u8,
    stack: Option<&Vec<U256>>,
    memory: Option<&RecordedMemory>,
) -> Bytes {
    let mut memory_input = Bytes::new();
    let Some(stack_data) = stack else { return memory_input };
    let Some(memory_data) = memory else { return memory_input };

    if let Some(accesses) = get_buffer_accesses(opcode, stack_data) {
        if let Some((BufferKind::Memory, access)) = accesses.read {
            memory_input =
                get_slice_from_memory(memory_data.as_bytes(), access.offset, access.len);
        }
    }

    memory_input
}

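/// Returns the stack inputs consumed by `opcode`, ordered from the top of the stack
/// downwards; empty if the opcode is unknown or no stack was recorded.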
fn get_stack_inputs_for_opcode(opcode: u8, stack: Option<&Vec<U256>>) -> Vec<U256> {
    let mut inputs = Vec::new();

    let Some(op) = OpCode::new(opcode) else { return inputs };
    let Some(stack_data) = stack else { return inputs };

    // The opcode's inputs sit on top of the stack: read from the last element downwards.
    let stack_input_size = op.inputs() as usize;
    for i in 0..stack_input_size {
        inputs.push(stack_data[stack_data.len() - 1 - i]);
    }
    inputs
}

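/// Copies `size` bytes from `memory` starting at `start_index`, zero-padding any part
/// of the requested range that lies beyond the end of the recorded memory.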
fn get_slice_from_memory(memory: &Bytes, start_index: usize, size: usize) -> Bytes {
    let memory_len = memory.len();

    let end_bound = start_index + size;

    // Requested range lies fully within the recorded memory.
    if start_index < memory_len && end_bound <= memory_len {
        return memory.slice(start_index..end_bound);
    }

    // Range starts inside memory but runs past its end: copy what exists and zero-pad the rest.
    if start_index < memory_len && end_bound > memory_len {
        let mut result = memory.slice(start_index..memory_len).to_vec();
        result.resize(size, 0u8);
        return Bytes::from(result);
    }

    // Range starts beyond the recorded memory: it is all zeros.
    Bytes::from(vec![0u8; size])
}