forge/cmd/bind_json.rs

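//! The `forge bind-json` command.
//!
//! Generates a `JsonBindings` Solidity library with typed JSON serialization and deserialization
//! helpers for the structs found in the project, built on top of the `parseJsonType*` and
//! `serializeJsonType` cheatcodes.
//!
//! The generated file has roughly the following shape (illustrative sketch only; the actual
//! constants and functions depend on the structs discovered in the project, and `MyStruct` is a
//! placeholder name):
//!
//! ```solidity
//! library JsonBindings {
//!     string constant schema_MyStruct = "...";
//!
//!     function serialize(MyStruct memory value) internal pure returns (string memory);
//!     function deserializeMyStruct(string memory json) public pure returns (MyStruct memory);
//!     function deserializeMyStructArray(string memory json, string memory path) public pure returns (MyStruct[] memory);
//! }
//! ```
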
use super::eip712::Resolver;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{opts::BuildOpts, utils::LoadConfig};
use foundry_common::{compile::with_compilation_reporter, fs};
use foundry_compilers::{
    artifacts::{
        output_selection::OutputSelection, ContractDefinitionPart, Source, SourceUnit,
        SourceUnitPart, Sources,
    },
    multi::{MultiCompilerLanguage, MultiCompilerParsedSource},
    project::ProjectCompiler,
    solc::SolcLanguage,
    Graph, Project,
};
use foundry_config::Config;
use itertools::Itertools;
use rayon::prelude::*;
use solar_parse::{
    ast::{self, interface::source_map::FileName, visit::Visit, Arena, FunctionKind, Span, VarMut},
    interface::Session,
    Parser as SolarParser,
};
use std::{
    collections::{BTreeMap, BTreeSet},
    fmt::{self, Write},
    ops::ControlFlow,
    path::PathBuf,
    sync::Arc,
};

foundry_config::impl_figment_convert!(BindJsonArgs, build);

/// CLI arguments for `forge bind-json`.
#[derive(Clone, Debug, Parser)]
pub struct BindJsonArgs {
    /// The path to write bindings to.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub out: Option<PathBuf>,

    #[command(flatten)]
    build: BuildOpts,
}

impl BindJsonArgs {
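    /// Runs the command: preprocesses the sources, compiles them for AST output, collects struct
    /// definitions, resolves naming conflicts, and writes the bindings library to disk.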
    pub fn run(self) -> Result<()> {
        self.preprocess()?.compile()?.find_structs()?.resolve_imports_and_aliases().write()?;

        Ok(())
    }

    /// When the user moves, renames or deletes structs, compilation will start failing because
    /// the previously generated bindings reference structs or import files that no longer exist.
    ///
    /// Because of that, we need a bit of preprocessing to make sure that the bindings stay valid.
    ///
    /// The strategy is:
    /// 1. Replace the bindings file with an empty one to get rid of potentially invalid imports.
    /// 2. Remove all function bodies to get rid of `serialize`/`deserialize` invocations.
    /// 3. Remove all `immutable` attributes to avoid errors caused by the erased constructors no
    ///    longer initializing them.
    ///
    /// After that we still have enough information to generate bindings, and compilation should
    /// succeed in most cases.
    fn preprocess(self) -> Result<PreprocessedState> {
        let config = self.load_config()?;
        let project = config.ephemeral_project()?;

        let target_path = config.root.join(self.out.as_ref().unwrap_or(&config.bind_json.out));

        let sources = project.paths.read_input_files()?;
        let graph = Graph::<MultiCompilerParsedSource>::resolve_sources(&project.paths, sources)?;

        // We only generate bindings for a single Solidity version to avoid conflicts.
        let mut sources = graph
            // resolve graph into mapping language -> version -> sources
            .into_sources_by_version(&project)?
            .sources
            .into_iter()
            // we are only interested in Solidity sources
            .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity))
            .ok_or_else(|| eyre::eyre!("no Solidity sources"))?
            .1
            .into_iter()
            // For now, we are always picking the latest version.
            .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2))
            .unwrap()
            .1;

        let sess = Session::builder().with_stderr_emitter().build();
        let result = sess.enter_parallel(|| -> solar_parse::interface::Result<()> {
            sources.0.par_iter_mut().try_for_each(|(path, source)| {
                let mut content = Arc::try_unwrap(std::mem::take(&mut source.content)).unwrap();

                let arena = Arena::new();
                let mut parser = SolarParser::from_source_code(
                    &sess,
                    &arena,
                    FileName::Real(path.clone()),
                    content.to_string(),
                )?;
                let ast = parser.parse_file().map_err(|e| e.emit())?;

                let mut visitor = PreprocessorVisitor::new();
                let _ = visitor.visit_source_unit(&ast);
                visitor.update(&sess, &mut content);

                source.content = Arc::new(content);
                Ok(())
            })
        });
        eyre::ensure!(result.is_ok(), "failed parsing");

        // Insert empty bindings file.
        sources.insert(target_path.clone(), Source::new("library JsonBindings {}"));

        Ok(PreprocessedState { sources, target_path, project, config })
    }
}

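/// AST visitor collecting the source patches used by [`BindJsonArgs::preprocess`]: it replaces
/// function bodies and removes `immutable` attributes, recording every edit as a
/// `(span, replacement)` pair.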
struct PreprocessorVisitor {
    updates: Vec<(Span, &'static str)>,
}

impl PreprocessorVisitor {
    fn new() -> Self {
        Self { updates: Vec::new() }
    }

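    /// Applies the collected updates to `content` in place.
    ///
    /// Updates are applied in ascending span order; `shift` tracks the net number of bytes
    /// removed so far, so that later spans, which are expressed in original file offsets, can be
    /// mapped onto the already-edited string.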
    fn update(mut self, sess: &Session, content: &mut String) {
        if self.updates.is_empty() {
            return;
        }

        let sf = sess.source_map().lookup_source_file(self.updates[0].0.lo());
        let base = sf.start_pos.0;

        self.updates.sort_by_key(|(span, _)| span.lo());
        let mut shift = 0_i64;
        for (span, new) in self.updates {
            let lo = span.lo() - base;
            let hi = span.hi() - base;
            let start = ((lo.0 as i64) - shift) as usize;
            let end = ((hi.0 as i64) - shift) as usize;

            content.replace_range(start..end, new);
            shift += (end - start) as i64;
            shift -= new.len() as i64;
        }
    }
}

impl<'ast> Visit<'ast> for PreprocessorVisitor {
    type BreakValue = solar_parse::interface::data_structures::Never;

    fn visit_item_function(
        &mut self,
        func: &'ast ast::ItemFunction<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Replace function bodies with a minimal statement: `_;` for modifiers, `revert();` otherwise.
        if let Some(block) = &func.body {
            if !block.is_empty() {
                let span = block.first().unwrap().span.to(block.last().unwrap().span);
                let new_body = match func.kind {
                    FunctionKind::Modifier => "_;",
                    _ => "revert();",
                };
                self.updates.push((span, new_body));
            }
        }

        self.walk_item_function(func)
    }

    fn visit_variable_definition(
        &mut self,
        var: &'ast ast::VariableDefinition<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Remove `immutable` attributes.
        if let Some(VarMut::Immutable) = var.mutability {
            self.updates.push((var.span, ""));
        }

        self.walk_variable_definition(var)
    }
}

/// A single struct definition for which we need to generate bindings.
#[derive(Debug, Clone)]
struct StructToWrite {
    /// Name of the struct definition.
    name: String,
    /// Name of the contract containing the struct definition. None if the struct is defined at the
    /// file level.
    contract_name: Option<String>,
    /// Import alias for the contract or struct, depending on whether the struct is imported
    /// directly, or via a contract.
    import_alias: Option<String>,
    /// Path to the file containing the struct definition.
    path: PathBuf,
    /// EIP712 schema for the struct.
    schema: String,
    /// Name of the struct definition used in function names and schema_* variables.
    name_in_fns: String,
}

impl StructToWrite {
    /// Returns the name of the imported item. If the struct is defined at the file level, returns
    /// the struct name, otherwise returns the parent contract name.
    fn struct_or_contract_name(&self) -> &str {
        self.contract_name.as_deref().unwrap_or(&self.name)
    }

    /// Same as [StructToWrite::struct_or_contract_name] but with alias applied.
    fn struct_or_contract_name_with_alias(&self) -> &str {
        self.import_alias.as_deref().unwrap_or(self.struct_or_contract_name())
    }

    /// Path which can be used to reference this struct in input/output parameters. Either
    /// `StructName` or `ParentName.StructName`.
    fn full_path(&self) -> String {
        if self.contract_name.is_some() {
            format!("{}.{}", self.struct_or_contract_name_with_alias(), self.name)
        } else {
            self.struct_or_contract_name_with_alias().to_string()
        }
    }

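    /// Returns the item to put into the import statement: the struct or contract name, with the
    /// alias applied if one was assigned (e.g. `Foo as Foo_0`).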
    fn import_item(&self) -> String {
        if let Some(alias) = &self.import_alias {
            format!("{} as {}", self.struct_or_contract_name(), alias)
        } else {
            self.struct_or_contract_name().to_string()
        }
    }
}

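/// Intermediate state after [`BindJsonArgs::preprocess`]: patched sources ready to be compiled
/// with AST output enabled.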
#[derive(Debug)]
struct PreprocessedState {
    sources: Sources,
    target_path: PathBuf,
    project: Project,
    config: Config,
}

impl PreprocessedState {
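    /// Compiles the preprocessed sources with AST-only output selection and converts the emitted
    /// ASTs into strongly typed [`SourceUnit`]s, keyed by path relative to the project root.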
    fn compile(self) -> Result<CompiledState> {
        let Self { sources, target_path, mut project, config } = self;

        project.update_output_selection(|selection| {
            *selection = OutputSelection::ast_output_selection();
        });

        let output = with_compilation_reporter(false, || {
            ProjectCompiler::with_sources(&project, sources)?.compile()
        })?;

        if output.has_compiler_errors() {
            eyre::bail!("{output}");
        }

        // Collect ASTs by getting them from sources and converting into strongly typed
        // `SourceUnit`s. Also strips root from paths.
        let asts = output
            .into_output()
            .sources
            .into_iter()
            .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?)))
            .map(|(path, ast)| {
                Ok((
                    path.strip_prefix(project.root()).unwrap_or(&path).to_path_buf(),
                    serde_json::from_str::<SourceUnit>(&serde_json::to_string(&ast)?)?,
                ))
            })
            .collect::<Result<BTreeMap<_, _>>>()?;

        Ok(CompiledState { asts, target_path, config, project })
    }
}

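/// Intermediate state after compilation: typed ASTs for every compiled source file.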
#[derive(Debug, Clone)]
struct CompiledState {
    asts: BTreeMap<PathBuf, SourceUnit>,
    target_path: PathBuf,
    config: Config,
    project: Project,
}

impl CompiledState {
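    /// Walks the ASTs to collect file-level and contract-level struct definitions, resolves their
    /// EIP712 schemas, and filters them according to the `bind_json.include`/`exclude` config
    /// (library files are excluded by default).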
    fn find_structs(self) -> Result<StructsState> {
        let Self { asts, target_path, config, project } = self;

        // construct mapping (file, id) -> (struct definition, optional parent contract name)
        let structs = asts
            .iter()
            .flat_map(|(path, ast)| {
                let mut structs = Vec::new();
                // we walk the AST directly instead of using visitors because we need to
                // distinguish between file-level and contract-level struct definitions
                for node in &ast.nodes {
                    match node {
                        SourceUnitPart::StructDefinition(def) => {
                            structs.push((def, None));
                        }
                        SourceUnitPart::ContractDefinition(contract) => {
                            for node in &contract.nodes {
                                if let ContractDefinitionPart::StructDefinition(def) = node {
                                    structs.push((def, Some(contract.name.clone())));
                                }
                            }
                        }
                        _ => {}
                    }
                }
                structs.into_iter().map(|(def, parent)| ((path.as_path(), def.id), (def, parent)))
            })
            .collect::<BTreeMap<_, _>>();

        // Resolver for EIP712 schemas
        let resolver = Resolver::new(&asts);

        let mut structs_to_write = Vec::new();

        let include = config.bind_json.include;
        let exclude = config.bind_json.exclude;

        for ((path, id), (def, contract_name)) in structs {
            // For some structs there's no schema (e.g. if they contain a mapping), so we just skip
            // those.
            let Some(schema) = resolver.resolve_struct_eip712(id)? else { continue };

            if !include.is_empty() {
                if !include.iter().any(|matcher| matcher.is_match(path)) {
                    continue;
                }
            } else {
                // Exclude library files by default
                if project.paths.has_library_ancestor(path) {
                    continue;
                }
            }

            if exclude.iter().any(|matcher| matcher.is_match(path)) {
                continue;
            }

            structs_to_write.push(StructToWrite {
                name: def.name.clone(),
                contract_name,
                path: path.to_path_buf(),
                schema,

                // will be filled later
                import_alias: None,
                name_in_fns: String::new(),
            })
        }

        Ok(StructsState { structs_to_write, target_path })
    }
}

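/// Intermediate state after struct discovery: structs selected for binding generation, before
/// import aliases and function names have been resolved.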
#[derive(Debug)]
struct StructsState {
    structs_to_write: Vec<StructToWrite>,
    target_path: PathBuf,
}

impl StructsState {
    /// We manage two namespaces for JSON bindings:
    ///   - Namespace of imported items. This includes imports of contracts containing structs and
    ///     structs defined at the file level.
    ///   - Namespace of struct names used in function names and `schema_*` variables.
    ///
    /// Both of those might contain conflicts, so we need to resolve them.
    fn resolve_imports_and_aliases(self) -> ResolvedState {
        let Self { mut structs_to_write, target_path } = self;

        // first, we resolve conflicts between imported names:
        // construct a mapping name -> paths from which items with that name are imported
        let mut names_to_paths = BTreeMap::new();

        for s in &structs_to_write {
            names_to_paths
                .entry(s.struct_or_contract_name())
                .or_insert_with(BTreeSet::new)
                .insert(s.path.as_path());
        }

        // now resolve aliases for names which need them and construct mapping (name, file) -> alias
        let mut aliases = BTreeMap::new();

        for (name, paths) in names_to_paths {
            if paths.len() <= 1 {
                // no alias needed
                continue
            }

            for (i, path) in paths.into_iter().enumerate() {
                aliases
                    .entry(name.to_string())
                    .or_insert_with(BTreeMap::new)
                    .insert(path.to_path_buf(), format!("{name}_{i}"));
            }
        }

        for s in &mut structs_to_write {
            let name = s.struct_or_contract_name();
            if aliases.contains_key(name) {
                s.import_alias = Some(aliases[name][&s.path].clone());
            }
        }

        // Each struct needs a name by which we reference it in function names (e.g.
        // deserializeFoo). Those might also conflict, so we manage a separate namespace for them.
        let mut name_to_structs_indexes = BTreeMap::new();

        for (idx, s) in structs_to_write.iter().enumerate() {
            name_to_structs_indexes.entry(&s.name).or_insert_with(Vec::new).push(idx);
        }

        // Keeps `Some` for structs that will be referenced by a name other than their definition
        // name.
        let mut fn_names = vec![None; structs_to_write.len()];

        for (name, indexes) in name_to_structs_indexes {
            if indexes.len() > 1 {
                for (i, idx) in indexes.into_iter().enumerate() {
                    fn_names[idx] = Some(format!("{name}_{i}"));
                }
            }
        }

        for (s, fn_name) in structs_to_write.iter_mut().zip(fn_names.into_iter()) {
            s.name_in_fns = fn_name.unwrap_or(s.name.clone());
        }

        ResolvedState { structs_to_write, target_path }
    }
}

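/// Final state: all naming conflicts have been resolved and the bindings file can be generated.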
struct ResolvedState {
    structs_to_write: Vec<StructToWrite>,
    target_path: PathBuf,
}

impl ResolvedState {
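    /// Generates the bindings library, writes it to the target path, and returns the generated
    /// source.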
    fn write(self) -> Result<String> {
        let mut result = String::new();
        self.write_imports(&mut result)?;
        self.write_vm(&mut result);
        self.write_library(&mut result)?;

        if let Some(parent) = self.target_path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&self.target_path, &result)?;

        sh_println!("Bindings written to {}", self.target_path.display())?;

        Ok(result)
    }

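    /// Writes the file header (generated-by comment and pragmas) and the import statements,
    /// grouped per source file.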
    fn write_imports(&self, result: &mut String) -> fmt::Result {
        let mut grouped_imports = BTreeMap::new();

        for struct_to_write in &self.structs_to_write {
            let item = struct_to_write.import_item();
            grouped_imports
                .entry(struct_to_write.path.as_path())
                .or_insert_with(BTreeSet::new)
                .insert(item);
        }

        result.push_str("// Automatically generated by forge bind-json.\n\npragma solidity >=0.6.2 <0.9.0;\npragma experimental ABIEncoderV2;\n\n");

        for (path, names) in grouped_imports {
            writeln!(
                result,
                "import {{{}}} from \"{}\";",
                names.iter().join(", "),
                path.display()
            )?;
        }

        Ok(())
    }

    /// Writes a minimal `Vm` interface so that the bindings don't depend on the forge-std version.
    fn write_vm(&self, result: &mut String) {
        result.push_str(r#"
interface Vm {
    function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
    function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);
    function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
    function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);
    function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);
}
        "#);
    }

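    /// Writes the `JsonBindings` library itself: the schema constants plus the
    /// `serialize`/`deserialize` helpers for every collected struct.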
    fn write_library(&self, result: &mut String) -> fmt::Result {
        result.push_str(
            r#"
library JsonBindings {
    Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));

"#,
        );
        // write schema constants
        for struct_to_write in &self.structs_to_write {
            writeln!(
                result,
                "    string constant schema_{} = \"{}\";",
                struct_to_write.name_in_fns, struct_to_write.schema
            )?;
        }

        // write serialization functions
        for struct_to_write in &self.structs_to_write {
            write!(
                result,
                r#"
    function serialize({path} memory value) internal pure returns (string memory) {{
        return vm.serializeJsonType(schema_{name_in_fns}, abi.encode(value));
    }}

    function serialize({path} memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {{
        return vm.serializeJsonType(objectKey, valueKey, schema_{name_in_fns}, abi.encode(value));
    }}

    function deserialize{name_in_fns}(string memory json) public pure returns ({path} memory) {{
        return abi.decode(vm.parseJsonType(json, schema_{name_in_fns}), ({path}));
    }}

    function deserialize{name_in_fns}(string memory json, string memory path) public pure returns ({path} memory) {{
        return abi.decode(vm.parseJsonType(json, path, schema_{name_in_fns}), ({path}));
    }}

    function deserialize{name_in_fns}Array(string memory json, string memory path) public pure returns ({path}[] memory) {{
        return abi.decode(vm.parseJsonTypeArray(json, path, schema_{name_in_fns}), ({path}[]));
    }}
"#,
                name_in_fns = struct_to_write.name_in_fns,
                path = struct_to_write.full_path()
            )?;
        }

        result.push_str("}\n");

        Ok(())
    }
}