forge/cmd/
bind_json.rs

1use super::eip712::Resolver;
2use clap::{Parser, ValueHint};
3use eyre::Result;
4use foundry_cli::{
5    opts::{BuildOpts, configure_pcx_from_solc},
6    utils::LoadConfig,
7};
8use foundry_common::{TYPE_BINDING_PREFIX, fs};
9use foundry_compilers::{
10    CompilerInput, Graph, Project,
11    artifacts::{Source, Sources},
12    multi::{MultiCompilerLanguage, MultiCompilerParser},
13    solc::{SolcLanguage, SolcVersionedInput},
14};
15use foundry_config::Config;
16use itertools::Itertools;
17use path_slash::PathExt;
18use rayon::prelude::*;
19use semver::Version;
20use solar::parse::{
21    Parser as SolarParser,
22    ast::{self, Arena, FunctionKind, Span, VarMut, interface::source_map::FileName, visit::Visit},
23    interface::Session,
24};
25use std::{
26    collections::{BTreeMap, BTreeSet, HashSet},
27    fmt::Write,
28    ops::ControlFlow,
29    path::{Path, PathBuf},
30    sync::Arc,
31};
32
// Wire `BindJsonArgs` into figment-based config loading via its `build` field.
foundry_config::impl_figment_convert!(BindJsonArgs, build);

/// Stub contents inserted in place of the previously generated bindings file
/// during compilation, so stale bindings can't break the build (see
/// `BindJsonArgs::preprocess_sources`).
const JSON_BINDINGS_PLACEHOLDER: &str = "library JsonBindings {}";
36
/// CLI arguments for `forge bind-json`.
#[derive(Clone, Debug, Parser)]
pub struct BindJsonArgs {
    /// The path to write bindings to.
    // Falls back to `config.bind_json.out` when not provided (see `run`).
    // NOTE: plain `//` comments used on fields so clap help output is unchanged.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub out: Option<PathBuf>,

    // Shared build options; also the figment provider referenced by the
    // `impl_figment_convert!(BindJsonArgs, build)` invocation above.
    #[command(flatten)]
    build: BuildOpts,
}
47
48impl BindJsonArgs {
49    pub fn run(self) -> Result<()> {
50        let config = self.load_config()?;
51        let project = config.ephemeral_project()?;
52        let target_path = config.root.join(self.out.as_ref().unwrap_or(&config.bind_json.out));
53
54        // Step 1: Read and preprocess sources
55        let sources = project.paths.read_input_files()?;
56        let graph = Graph::<MultiCompilerParser>::resolve_sources(&project.paths, sources)?;
57
58        // We only generate bindings for a single Solidity version to avoid conflicts.
59        let (version, mut sources, _) = graph
60            // resolve graph into mapping language -> version -> sources
61            .into_sources_by_version(&project)?
62            .sources
63            .into_iter()
64            // we are only interested in Solidity sources
65            .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity))
66            .ok_or_else(|| eyre::eyre!("no Solidity sources"))?
67            .1
68            .into_iter()
69            // For now, we are always picking the latest version.
70            .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2))
71            .unwrap();
72
73        // Step 2: Preprocess sources to handle potentially invalid bindings
74        self.preprocess_sources(&mut sources)?;
75
76        // Insert empty bindings file.
77        sources.insert(target_path.clone(), Source::new(JSON_BINDINGS_PLACEHOLDER));
78
79        // Step 3: Find structs and generate bindings
80        let structs_to_write =
81            self.find_and_resolve_structs(&config, &project, version, sources, &target_path)?;
82
83        // Step 4: Write bindings
84        self.write_bindings(&structs_to_write, &target_path)?;
85
86        Ok(())
87    }
88
89    /// In cases when user moves/renames/deletes structs, compiler will start failing because
90    /// generated bindings will be referencing non-existing structs or importing non-existing
91    /// files.
92    ///
93    /// Because of that, we need a little bit of preprocessing to make sure that bindings will still
94    /// be valid.
95    ///
96    /// The strategy is:
97    /// 1. Replace bindings file with an empty one to get rid of potentially invalid imports.
98    /// 2. Remove all function bodies to get rid of `serialize`/`deserialize` invocations.
99    /// 3. Remove all `immutable` attributes to avoid errors because of erased constructors
100    ///    initializing them.
101    ///
102    /// After that we'll still have enough information for bindings but compilation should succeed
103    /// in most of the cases.
104    fn preprocess_sources(&self, sources: &mut Sources) -> Result<()> {
105        let sess = Session::builder().with_stderr_emitter().build();
106        let result = sess.enter(|| -> solar::parse::interface::Result<()> {
107            sources.0.par_iter_mut().try_for_each(|(path, source)| {
108                let mut content = Arc::try_unwrap(std::mem::take(&mut source.content)).unwrap();
109
110                let arena = Arena::new();
111                let mut parser = SolarParser::from_source_code(
112                    &sess,
113                    &arena,
114                    FileName::Real(path.clone()),
115                    content.to_string(),
116                )?;
117                let ast = parser.parse_file().map_err(|e| e.emit())?;
118
119                let mut visitor = PreprocessorVisitor::new();
120                let _ = visitor.visit_source_unit(&ast);
121                visitor.update(&sess, &mut content);
122
123                source.content = Arc::new(content);
124                Ok(())
125            })
126        });
127        eyre::ensure!(result.is_ok(), "failed parsing");
128        Ok(())
129    }
130
131    /// Find structs, resolve conflicts, and prepare them for writing
132    fn find_and_resolve_structs(
133        &self,
134        config: &Config,
135        project: &Project,
136        version: Version,
137        sources: Sources,
138        _target_path: &Path,
139    ) -> Result<Vec<StructToWrite>> {
140        let settings = config.solc_settings()?;
141        let include = &config.bind_json.include;
142        let exclude = &config.bind_json.exclude;
143        let root = &config.root;
144
145        let input = SolcVersionedInput::build(sources, settings, SolcLanguage::Solidity, version);
146
147        let mut sess = Session::builder().with_stderr_emitter().build();
148        sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false);
149        let mut compiler = solar::sema::Compiler::new(sess);
150
151        let mut structs_to_write = Vec::new();
152
153        compiler.enter_mut(|compiler| -> Result<()> {
154            // Set up the parsing context with the project paths, without adding the source files
155            let mut pcx = compiler.parse();
156            configure_pcx_from_solc(&mut pcx, project, &input, false);
157
158            let mut target_files = HashSet::new();
159            for (path, source) in &input.input.sources {
160                if !include.is_empty() {
161                    if !include.iter().any(|matcher| matcher.is_match(path)) {
162                        continue;
163                    }
164                } else {
165                    // Exclude library files by default
166                    if project.paths.has_library_ancestor(path) {
167                        continue;
168                    }
169                }
170
171                if exclude.iter().any(|matcher| matcher.is_match(path)) {
172                    continue;
173                }
174
175                if let Ok(src_file) = compiler
176                    .sess()
177                    .source_map()
178                    .new_source_file(path.clone(), source.content.as_str())
179                {
180                    target_files.insert(src_file.stable_id);
181                    pcx.add_file(src_file);
182                }
183            }
184
185            // Parse and resolve
186            pcx.parse();
187            let Ok(ControlFlow::Continue(())) = compiler.lower_asts() else { return Ok(()) };
188            let gcx = compiler.gcx();
189            let hir = &gcx.hir;
190            let resolver = Resolver::new(gcx);
191            for id in resolver.struct_ids() {
192                if let Some(schema) = resolver.resolve_struct_eip712(id) {
193                    let def = hir.strukt(id);
194                    let source = hir.source(def.source);
195
196                    if !target_files.contains(&source.file.stable_id) {
197                        continue;
198                    }
199
200                    if let FileName::Real(path) = &source.file.name {
201                        structs_to_write.push(StructToWrite {
202                            name: def.name.as_str().into(),
203                            contract_name: def
204                                .contract
205                                .map(|id| hir.contract(id).name.as_str().into()),
206                            path: path.strip_prefix(root).unwrap_or(path).to_path_buf(),
207                            schema,
208                            // will be filled later
209                            import_alias: None,
210                            name_in_fns: String::new(),
211                        });
212                    }
213                }
214            }
215            Ok(())
216        })?;
217
218        eyre::ensure!(compiler.sess().dcx.has_errors().is_ok(), "errors occurred");
219
220        // Resolve import aliases and function names
221        self.resolve_conflicts(&mut structs_to_write);
222
223        Ok(structs_to_write)
224    }
225
226    /// We manage 2 namespaces for JSON bindings:
227    ///   - Namespace of imported items. This includes imports of contracts containing structs and
228    ///     structs defined at the file level.
229    ///   - Namespace of struct names used in function names and schema_* variables.
230    ///
231    /// Both of those might contain conflicts, so we need to resolve them.
232    fn resolve_conflicts(&self, structs_to_write: &mut [StructToWrite]) {
233        // firstly, we resolve imported names conflicts
234        // construct mapping name -> paths from which items with such name are imported
235        let mut names_to_paths = BTreeMap::new();
236
237        for s in structs_to_write.iter() {
238            names_to_paths
239                .entry(s.struct_or_contract_name())
240                .or_insert_with(BTreeSet::new)
241                .insert(s.path.as_path());
242        }
243
244        // now resolve aliases for names which need them and construct mapping (name, file) -> alias
245        let mut aliases = BTreeMap::new();
246
247        for (name, paths) in names_to_paths {
248            if paths.len() <= 1 {
249                continue; // no alias needed
250            }
251
252            for (i, path) in paths.into_iter().enumerate() {
253                aliases
254                    .entry(name.to_string())
255                    .or_insert_with(BTreeMap::new)
256                    .insert(path.to_path_buf(), format!("{name}_{i}"));
257            }
258        }
259
260        for s in structs_to_write.iter_mut() {
261            let name = s.struct_or_contract_name();
262            if aliases.contains_key(name) {
263                s.import_alias = Some(aliases[name][&s.path].clone());
264            }
265        }
266
267        // Each struct needs a name by which we are referencing it in function names (e.g.
268        // deserializeFoo) Those might also have conflicts, so we manage a separate
269        // namespace for them
270        let mut name_to_structs_indexes = BTreeMap::new();
271
272        for (idx, s) in structs_to_write.iter().enumerate() {
273            name_to_structs_indexes.entry(&s.name).or_insert_with(Vec::new).push(idx);
274        }
275
276        // Keeps `Some` for structs that will be referenced by name other than their definition
277        // name.
278        let mut fn_names = vec![None; structs_to_write.len()];
279
280        for (name, indexes) in name_to_structs_indexes {
281            if indexes.len() > 1 {
282                for (i, idx) in indexes.into_iter().enumerate() {
283                    fn_names[idx] = Some(format!("{name}_{i}"));
284                }
285            }
286        }
287
288        for (s, fn_name) in structs_to_write.iter_mut().zip(fn_names.into_iter()) {
289            s.name_in_fns = fn_name.unwrap_or(s.name.clone());
290        }
291    }
292
293    /// Write the final bindings file
294    fn write_bindings(
295        &self,
296        structs_to_write: &[StructToWrite],
297        target_path: &PathBuf,
298    ) -> Result<()> {
299        let mut result = String::new();
300
301        // Write imports
302        let mut grouped_imports = BTreeMap::new();
303        for struct_to_write in structs_to_write {
304            let item = struct_to_write.import_item();
305            grouped_imports
306                .entry(struct_to_write.path.as_path())
307                .or_insert_with(BTreeSet::new)
308                .insert(item);
309        }
310
311        result.push_str("// Automatically generated by forge bind-json.\n\npragma solidity >=0.6.2 <0.9.0;\npragma experimental ABIEncoderV2;\n\n");
312
313        for (path, names) in grouped_imports {
314            writeln!(
315                &mut result,
316                "import {{{}}} from \"{}\";",
317                names.iter().join(", "),
318                path.to_slash_lossy()
319            )?;
320        }
321
322        // Write VM interface
323        // Writes minimal VM interface to not depend on forge-std version
324        result.push_str(r#"
325interface Vm {
326    function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
327    function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);
328    function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
329    function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);
330    function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);
331}
332        "#);
333
334        // Write library
335        result.push_str(
336            r#"
337library JsonBindings {
338    Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
339
340"#,
341        );
342
343        // write schema constants
344        for struct_to_write in structs_to_write {
345            writeln!(
346                &mut result,
347                "    {}{} = \"{}\";",
348                TYPE_BINDING_PREFIX, struct_to_write.name_in_fns, struct_to_write.schema
349            )?;
350        }
351
352        // write serialization functions
353        for struct_to_write in structs_to_write {
354            write!(
355                &mut result,
356                r#"
357    function serialize({path} memory value) internal pure returns (string memory) {{
358        return vm.serializeJsonType(schema_{name_in_fns}, abi.encode(value));
359    }}
360
361    function serialize({path} memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {{
362        return vm.serializeJsonType(objectKey, valueKey, schema_{name_in_fns}, abi.encode(value));
363    }}
364
365    function deserialize{name_in_fns}(string memory json) public pure returns ({path} memory) {{
366        return abi.decode(vm.parseJsonType(json, schema_{name_in_fns}), ({path}));
367    }}
368
369    function deserialize{name_in_fns}(string memory json, string memory path) public pure returns ({path} memory) {{
370        return abi.decode(vm.parseJsonType(json, path, schema_{name_in_fns}), ({path}));
371    }}
372
373    function deserialize{name_in_fns}Array(string memory json, string memory path) public pure returns ({path}[] memory) {{
374        return abi.decode(vm.parseJsonTypeArray(json, path, schema_{name_in_fns}), ({path}[]));
375    }}
376"#,
377                name_in_fns = struct_to_write.name_in_fns,
378                path = struct_to_write.full_path()
379            )?;
380        }
381
382        result.push_str("}\n");
383
384        // Write to file
385        if let Some(parent) = target_path.parent() {
386            fs::create_dir_all(parent)?;
387        }
388        fs::write(target_path, &result)?;
389
390        sh_println!("Bindings written to {}", target_path.display())?;
391
392        Ok(())
393    }
394}
395
/// AST visitor that records the text edits needed to neutralize sources before
/// recompilation: erasing function bodies and `immutable` variable definitions
/// (see `BindJsonArgs::preprocess_sources`).
struct PreprocessorVisitor {
    /// Pending edits as (source span, replacement text) pairs; applied to the
    /// source string by `PreprocessorVisitor::update`.
    updates: Vec<(Span, &'static str)>,
}
399
400impl PreprocessorVisitor {
401    fn new() -> Self {
402        Self { updates: Vec::new() }
403    }
404
405    fn update(mut self, sess: &Session, content: &mut String) {
406        if self.updates.is_empty() {
407            return;
408        }
409
410        let sf = sess.source_map().lookup_source_file(self.updates[0].0.lo());
411        let base = sf.start_pos.0;
412
413        self.updates.sort_by_key(|(span, _)| span.lo());
414        let mut shift = 0_i64;
415        for (span, new) in self.updates {
416            let lo = span.lo() - base;
417            let hi = span.hi() - base;
418            let start = ((lo.0 as i64) - shift) as usize;
419            let end = ((hi.0 as i64) - shift) as usize;
420
421            content.replace_range(start..end, new);
422            shift += (end - start) as i64;
423            shift -= new.len() as i64;
424        }
425    }
426}
427
428impl<'ast> Visit<'ast> for PreprocessorVisitor {
429    type BreakValue = solar::parse::interface::data_structures::Never;
430
431    fn visit_item_function(
432        &mut self,
433        func: &'ast ast::ItemFunction<'ast>,
434    ) -> ControlFlow<Self::BreakValue> {
435        // Replace function bodies with a noop statement.
436        if let Some(block) = &func.body
437            && !block.is_empty()
438        {
439            let span = block.first().unwrap().span.to(block.last().unwrap().span);
440            let new_body = match func.kind {
441                FunctionKind::Modifier => "_;",
442                _ => "revert();",
443            };
444            self.updates.push((span, new_body));
445        }
446
447        self.walk_item_function(func)
448    }
449
450    fn visit_variable_definition(
451        &mut self,
452        var: &'ast ast::VariableDefinition<'ast>,
453    ) -> ControlFlow<Self::BreakValue> {
454        // Remove `immutable` attributes.
455        if let Some(VarMut::Immutable) = var.mutability {
456            self.updates.push((var.span, ""));
457        }
458
459        self.walk_variable_definition(var)
460    }
461}
462
/// A single struct definition for which we need to generate bindings.
#[derive(Debug, Clone)]
struct StructToWrite {
    /// Name of the struct definition.
    name: String,
    /// Name of the contract containing the struct definition. None if the struct is defined at the
    /// file level.
    contract_name: Option<String>,
    /// Import alias for the contract or struct, depending on whether the struct is imported
    /// directly, or via a contract. Assigned by `resolve_conflicts` when the same
    /// name is imported from several files; otherwise stays `None`.
    import_alias: Option<String>,
    /// Path to the file containing the struct definition.
    path: PathBuf,
    /// EIP712 schema for the struct.
    schema: String,
    /// Name of the struct definition used in function names and schema_* variables.
    /// Filled in by `resolve_conflicts`; gets a numeric suffix when several
    /// structs share a name.
    name_in_fns: String,
}
481
482impl StructToWrite {
483    /// Returns the name of the imported item. If struct is defined at the file level, returns the
484    /// struct name, otherwise returns the parent contract name.
485    fn struct_or_contract_name(&self) -> &str {
486        self.contract_name.as_deref().unwrap_or(&self.name)
487    }
488
489    /// Same as [StructToWrite::struct_or_contract_name] but with alias applied.
490    fn struct_or_contract_name_with_alias(&self) -> &str {
491        self.import_alias.as_deref().unwrap_or(self.struct_or_contract_name())
492    }
493
494    /// Path which can be used to reference this struct in input/output parameters. Either
495    /// StructName or ParentName.StructName
496    fn full_path(&self) -> String {
497        if self.contract_name.is_some() {
498            format!("{}.{}", self.struct_or_contract_name_with_alias(), self.name)
499        } else {
500            self.struct_or_contract_name_with_alias().to_string()
501        }
502    }
503
504    fn import_item(&self) -> String {
505        if let Some(alias) = &self.import_alias {
506            format!("{} as {}", self.struct_or_contract_name(), alias)
507        } else {
508            self.struct_or_contract_name().to_string()
509        }
510    }
511}