
foundry_cheatcodes/evm/record_debug_step.rs

use alloy_primitives::{Bytes, U256};

use foundry_evm_traces::CallTraceArena;
use revm::{bytecode::opcode::OpCode, interpreter::InstructionResult};

use foundry_evm_core::buffer::{BufferKind, get_buffer_accesses};
use revm_inspectors::tracing::types::{
    CallTraceNode, CallTraceStep, RecordedMemory, TraceMemberOrder,
};
use spec::Vm::DebugStep;

// Context for a CallTraceStep: the owning node provides the step's call depth
// and contract address.
pub(crate) struct CallTraceCtx<'a> {
    pub node: &'a CallTraceNode,
    pub step: &'a CallTraceStep,
}

// Does a depth-first traversal of the nodes and their steps, returning the
// steps recorded from the first node whose index is at or after `node_start_idx`.
pub(crate) fn flatten_call_trace<'a>(
    root: usize,
    arena: &'a CallTraceArena,
    node_start_idx: usize,
) -> Vec<CallTraceCtx<'a>> {
    let mut steps = Vec::new();
    let mut record_started = false;

    // Start the recursion from the root node.
    recursive_flatten_call_trace(root, arena, node_start_idx, &mut record_started, &mut steps);
    steps
}

// Inner recursive helper that processes nodes.
// It mutates `record_started` and `flatten_steps` directly, so a recursive call
// can flip the `record_started` flag while a parent's iteration is still in
// progress, and every recorded step is appended to `flatten_steps` to form the
// final result.
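// For example, with a root node 0 whose ordering interleaves its own steps
// with a call into child node 2, and `node_start_idx = 2`: the steps of node 0
// emitted before that call are skipped, the steps of node 2 are recorded, and
// the steps of node 0 after the recursion returns are recorded as well,
// because `record_started` stays true for the rest of the traversal.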
fn recursive_flatten_call_trace<'a>(
    node_idx: usize,
    arena: &'a CallTraceArena,
    node_start_idx: usize,
    record_started: &mut bool,
    flatten_steps: &mut Vec<CallTraceCtx<'a>>,
) {
    // Once node_idx reaches node_start_idx, start recording steps
    // for all subsequent processing.
    if !*record_started && node_idx >= node_start_idx {
        *record_started = true;
    }

    let node = &arena.nodes()[node_idx];

    for order in &node.ordering {
        match order {
            TraceMemberOrder::Step(step_idx) if *record_started => {
                let step = &node.trace.steps[*step_idx];
                flatten_steps.push(CallTraceCtx { node, step });
            }
            TraceMemberOrder::Call(call_idx) => {
                let child_node_idx = node.children[*call_idx];
                recursive_flatten_call_trace(
                    child_node_idx,
                    arena,
                    node_start_idx,
                    record_started,
                    flatten_steps,
                );
            }
            _ => {}
        }
    }
}

// Converts a CallTraceCtx into a DebugStep.
pub(crate) fn convert_call_trace_ctx_to_debug_step(ctx: &CallTraceCtx) -> DebugStep {
    let opcode = ctx.step.op.get();
    let stack = get_stack_inputs_for_opcode(opcode, ctx.step.stack.as_deref());

    let memory =
        get_memory_input_for_opcode(opcode, ctx.step.stack.as_deref(), ctx.step.memory.as_ref());

    let is_out_of_gas = matches!(
        ctx.step.status,
        Some(
            InstructionResult::OutOfGas
                | InstructionResult::MemoryOOG
                | InstructionResult::MemoryLimitOOG
                | InstructionResult::PrecompileOOG
                | InstructionResult::InvalidOperandOOG
        )
    );

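    // The arena's trace depth is 0-based; DebugStep exposes it 1-based.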
    let depth = ctx.node.trace.depth as u64 + 1;
    let contract_addr = ctx.node.execution_address();

    DebugStep {
        stack,
        memoryInput: memory,
        opcode,
        depth,
        isOutOfGas: is_out_of_gas,
        contractAddr: contract_addr,
    }
}

// The expected `stack` here is from the trace stack, where the top of the
// stack is the last value of the vector.
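// As an illustration (assuming `get_buffer_accesses` reports reads as the EVM
// defines them): for `MLOAD`, whose offset operand sits on top of the stack,
// the reported read would cover `offset..offset + 32`, so `memory_input` would
// hold those 32 bytes, zero-padded by `get_slice_from_memory` if the recorded
// memory is shorter.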
fn get_memory_input_for_opcode(
    opcode: u8,
    stack: Option<&[U256]>,
    memory: Option<&RecordedMemory>,
) -> Bytes {
    let mut memory_input = Bytes::new();
    let Some(stack_data) = stack else { return memory_input };
    let Some(memory_data) = memory else { return memory_input };

    if let Some(accesses) = get_buffer_accesses(opcode, stack_data)
        && let Some((BufferKind::Memory, access)) = accesses.read
    {
        memory_input = get_slice_from_memory(memory_data.as_bytes(), access.offset, access.len);
    }

    memory_input
}

// The expected `stack` here is from the trace stack, where the top of the
// stack is the last value of the vector.
fn get_stack_inputs_for_opcode(opcode: u8, stack: Option<&[U256]>) -> Vec<U256> {
    let mut inputs = Vec::new();

    let Some(op) = OpCode::new(opcode) else { return inputs };
    let Some(stack_data) = stack else { return inputs };

    let stack_input_size = op.inputs() as usize;
    for i in 0..stack_input_size {
        inputs.push(stack_data[stack_data.len() - 1 - i]);
    }
    inputs
}

fn get_slice_from_memory(memory: &Bytes, start_index: usize, size: usize) -> Bytes {
    let memory_len = memory.len();

    let end_bound = start_index + size;

    // Return the bytes directly if the requested range is fully in bounds.
    if start_index < memory_len && end_bound <= memory_len {
        return memory.slice(start_index..end_bound);
    }

    // Zero-pad the tail when the load is only partially in bounds.
    if start_index < memory_len && end_bound > memory_len {
        let mut result = memory.slice(start_index..memory_len).to_vec();
        result.resize(size, 0u8);
        return Bytes::from(result);
    }

    // Return `size` zero bytes if the range is entirely out of bounds.
    Bytes::from(vec![0u8; size])
}
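
// A minimal test sketch (not part of the original file) exercising the two
// pure helpers above; the expected values follow directly from their
// definitions.
#[cfg(test)]
mod tests {
    use super::*;
    use revm::bytecode::opcode;

    #[test]
    fn stack_inputs_are_read_from_the_top() {
        // ADD pops two operands; the top of the trace stack is the last
        // element, so the inputs come out top-first.
        let stack = [U256::from(1), U256::from(2), U256::from(3)];
        let inputs = get_stack_inputs_for_opcode(opcode::ADD, Some(&stack));
        assert_eq!(inputs, vec![U256::from(3), U256::from(2)]);
    }

    #[test]
    fn memory_slice_is_zero_padded_when_out_of_range() {
        let memory = Bytes::from(vec![0x11, 0x22, 0x33, 0x44]);
        // Fully in bounds: the raw slice is returned.
        assert_eq!(get_slice_from_memory(&memory, 1, 2), Bytes::from(vec![0x22, 0x33]));
        // Partially out of bounds: the tail is zero-padded to `size`.
        assert_eq!(get_slice_from_memory(&memory, 3, 3), Bytes::from(vec![0x44, 0x00, 0x00]));
        // Entirely out of bounds: `size` zero bytes.
        assert_eq!(get_slice_from_memory(&memory, 10, 2), Bytes::from(vec![0x00, 0x00]));
    }
}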