foundry_cheatcodes/evm/record_debug_step.rs

use alloy_primitives::{Bytes, U256};

use foundry_evm_traces::CallTraceArena;
use revm::{bytecode::opcode::OpCode, interpreter::InstructionResult};

use foundry_evm_core::buffer::{BufferKind, get_buffer_accesses};
use revm_inspectors::tracing::types::{
    CallTraceNode, CallTraceStep, RecordedMemory, TraceMemberOrder,
};
use spec::Vm::DebugStep;

// Context for a `CallTraceStep`: the step itself plus the trace node it belongs
// to, from which the call depth and contract address are derived.
pub(crate) struct CallTraceCtx<'a> {
    pub node: &'a CallTraceNode,
    pub step: &'a CallTraceStep,
}

// Does a depth-first traversal of the arena's nodes and steps, returning the
// steps of every node whose index is at or after `node_start_idx`.
pub(crate) fn flatten_call_trace<'a>(
    root: usize,
    arena: &'a CallTraceArena,
    node_start_idx: usize,
) -> Vec<CallTraceCtx<'a>> {
    let mut steps = Vec::new();
    let mut record_started = false;

    // Start the recursion from the root node.
    recursive_flatten_call_trace(root, arena, node_start_idx, &mut record_started, &mut steps);
    steps
}
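
// Illustrative usage (a hypothetical sketch; the plumbing that records `arena`
// and picks `node_start_idx` lives in the cheatcode implementation, not here):
//
//     let ctxs = flatten_call_trace(0, &arena, node_start_idx);
//     let debug_steps: Vec<DebugStep> =
//         ctxs.iter().map(convert_call_trace_ctx_to_debug_step).collect();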

// Inner recursive function that processes the nodes.
// This implementation mutates `record_started` and `flatten_steps` directly, so
// a recursive call can flip the `record_started` flag while a parent's iteration
// is still in progress, and `flatten_steps` accumulates the final result across
// all calls.
fn recursive_flatten_call_trace<'a>(
    node_idx: usize,
    arena: &'a CallTraceArena,
    node_start_idx: usize,
    record_started: &mut bool,
    flatten_steps: &mut Vec<CallTraceCtx<'a>>,
) {
    // Once node_idx reaches node_start_idx, start recording steps
    // for all subsequent processing, including back in the callers.
    if !*record_started && node_idx >= node_start_idx {
        *record_started = true;
    }

    let node = &arena.nodes()[node_idx];

    for order in &node.ordering {
        match order {
            TraceMemberOrder::Step(step_idx) => {
                if *record_started {
                    let step = &node.trace.steps[*step_idx];
                    flatten_steps.push(CallTraceCtx { node, step });
                }
            }
            TraceMemberOrder::Call(call_idx) => {
                let child_node_idx = node.children[*call_idx];
                recursive_flatten_call_trace(
                    child_node_idx,
                    arena,
                    node_start_idx,
                    record_started,
                    flatten_steps,
                );
            }
            _ => {}
        }
    }
}
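
// Worked example (hypothetical arena): if node 0 has
// `ordering = [Step(0), Call(0), Step(1)]`, its only child is node 1, and
// `node_start_idx` is 1, then node 0's Step(0) is skipped (recording has not
// started yet), the recursion into node 1 sets `record_started` and records
// node 1's steps, and node 0's trailing Step(1) is recorded as well.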

// Converts a `CallTraceCtx` into a `DebugStep`.
pub(crate) fn convert_call_trace_ctx_to_debug_step(ctx: &CallTraceCtx) -> DebugStep {
    let opcode = ctx.step.op.get();
    let stack = get_stack_inputs_for_opcode(opcode, ctx.step.stack.as_deref());

    let memory =
        get_memory_input_for_opcode(opcode, ctx.step.stack.as_deref(), ctx.step.memory.as_ref());

    let is_out_of_gas = matches!(
        ctx.step.status,
        Some(
            InstructionResult::OutOfGas
                | InstructionResult::MemoryOOG
                | InstructionResult::MemoryLimitOOG
                | InstructionResult::PrecompileOOG
                | InstructionResult::InvalidOperandOOG
        )
    );

    // The trace stores a zero-based depth; `DebugStep` reports it one-based.
    let depth = ctx.node.trace.depth as u64 + 1;
    let contract_addr = ctx.node.execution_address();

    DebugStep {
        stack,
        memoryInput: memory,
        opcode,
        depth,
        isOutOfGas: is_out_of_gas,
        contractAddr: contract_addr,
    }
}

// The expected `stack` here is from the trace stack, where the top of the stack
// is the last value of the vector.
fn get_memory_input_for_opcode(
    opcode: u8,
    stack: Option<&[U256]>,
    memory: Option<&RecordedMemory>,
) -> Bytes {
    let mut memory_input = Bytes::new();
    let Some(stack_data) = stack else { return memory_input };
    let Some(memory_data) = memory else { return memory_input };

    if let Some(accesses) = get_buffer_accesses(opcode, stack_data)
        && let Some((BufferKind::Memory, access)) = accesses.read
    {
        memory_input = get_slice_from_memory(memory_data.as_bytes(), access.offset, access.len);
    }

    memory_input
}
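
// For example, assuming `get_buffer_accesses` reports MLOAD as a 32-byte memory
// read at the stack-top offset, an MLOAD step yields the word it is about to
// read; opcodes without a memory read yield empty bytes.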

// The expected `stack` here is from the trace stack, where the top of the stack
// is the last value of the vector.
fn get_stack_inputs_for_opcode(opcode: u8, stack: Option<&[U256]>) -> Vec<U256> {
    let mut inputs = Vec::new();

    let Some(op) = OpCode::new(opcode) else { return inputs };
    let Some(stack_data) = stack else { return inputs };

    let stack_input_size = op.inputs() as usize;
    for i in 0..stack_input_size {
        inputs.push(stack_data[stack_data.len() - 1 - i]);
    }
    inputs
}
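
// For example, MSTORE takes two stack inputs, so the returned vector is
// `[offset, value]`: the top of the stack first, then the item below it.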

fn get_slice_from_memory(memory: &Bytes, start_index: usize, size: usize) -> Bytes {
    let memory_len = memory.len();

    let end_bound = start_index + size;

    // Return the bytes directly if the read is fully within range.
    if start_index < memory_len && end_bound <= memory_len {
        return memory.slice(start_index..end_bound);
    }

    // Zero-pad if the read starts in range but runs past the end of memory.
    if start_index < memory_len && end_bound > memory_len {
        let mut result = memory.slice(start_index..memory_len).to_vec();
        result.resize(size, 0u8);
        return Bytes::from(result);
    }

    // Return zeroed bytes of the requested size if the read is entirely out of range.
    Bytes::from(vec![0u8; size])
}
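
// A minimal sketch of the padding behavior above; the byte values are
// illustrative, not taken from real traces.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn memory_slice_pads_out_of_range_reads() {
        let memory = Bytes::from(vec![0xaa, 0xbb, 0xcc]);
        // Fully in range: a plain slice.
        assert_eq!(get_slice_from_memory(&memory, 0, 2), Bytes::from(vec![0xaa, 0xbb]));
        // Partially in range: zero-padded up to the requested size.
        assert_eq!(get_slice_from_memory(&memory, 2, 4), Bytes::from(vec![0xcc, 0, 0, 0]));
        // Entirely out of range: all zeros of the requested size.
        assert_eq!(get_slice_from_memory(&memory, 8, 2), Bytes::from(vec![0u8; 2]));
    }
}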
161}