use super::eip712::Resolver;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{
    opts::{solar_pcx_from_solc_project, BuildOpts},
    utils::LoadConfig,
};
use foundry_common::{fs, TYPE_BINDING_PREFIX};
use foundry_compilers::{
    artifacts::{Source, Sources},
    multi::{MultiCompilerLanguage, MultiCompilerParsedSource},
    solc::{SolcLanguage, SolcVersionedInput},
    CompilerInput, Graph, Project,
};
use foundry_config::Config;
use itertools::Itertools;
use path_slash::PathExt;
use rayon::prelude::*;
use semver::Version;
use solar_parse::{
    ast::{self, interface::source_map::FileName, visit::Visit, Arena, FunctionKind, Span, VarMut},
    interface::Session,
    Parser as SolarParser,
};
use solar_sema::thread_local::ThreadLocal;
use std::{
    collections::{BTreeMap, BTreeSet, HashSet},
    fmt::{self, Write},
    ops::ControlFlow,
    path::PathBuf,
    sync::Arc,
};

foundry_config::impl_figment_convert!(BindJsonArgs, build);

const JSON_BINDINGS_PLACEHOLDER: &str = "library JsonBindings {}";

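/// CLI arguments for `forge bind-json`.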
#[derive(Clone, Debug, Parser)]
pub struct BindJsonArgs {
    /// The path to write bindings to.
    ///
    /// Defaults to `bind_json.out` from the config.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub out: Option<PathBuf>,

    #[command(flatten)]
    build: BuildOpts,
}

impl BindJsonArgs {
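    /// Generates bindings in four steps: preprocess the sources, collect the
    /// structs to bind, resolve import aliases and function names, and write
    /// the bindings library.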
    pub fn run(self) -> Result<()> {
        self.preprocess()?.find_structs()?.resolve_imports_and_aliases().write()?;

        Ok(())
    }

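    /// Prepares sources for struct collection.
    ///
    /// Reads the project sources, keeps the Solidity sources for the highest
    /// resolved solc version, and rewrites them so that only type definitions
    /// matter: function bodies are stubbed out and `immutable` variables are
    /// removed (see [`PreprocessorVisitor`]). A placeholder `JsonBindings`
    /// library is inserted at the target path so that existing imports of the
    /// bindings file keep resolving even if the file is outdated or missing.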
    fn preprocess(self) -> Result<PreprocessedState> {
        let config = self.load_config()?;
        let project = config.ephemeral_project()?;

        let target_path = config.root.join(self.out.as_ref().unwrap_or(&config.bind_json.out));

        let sources = project.paths.read_input_files()?;
        let graph = Graph::<MultiCompilerParsedSource>::resolve_sources(&project.paths, sources)?;

        // Only Solidity sources are relevant; pick the set resolved for the highest solc version.
        let (version, mut sources, _) = graph
            .into_sources_by_version(&project)?
            .sources
            .into_iter()
            .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity))
            .ok_or_else(|| eyre::eyre!("no Solidity sources"))?
            .1
            .into_iter()
            .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2))
            .unwrap();

        // Parse each source in parallel and apply the preprocessor rewrites in place.
        let sess = Session::builder().with_stderr_emitter().build();
        let result = sess.enter_parallel(|| -> solar_parse::interface::Result<()> {
            sources.0.par_iter_mut().try_for_each(|(path, source)| {
                let mut content = Arc::try_unwrap(std::mem::take(&mut source.content)).unwrap();

                let arena = Arena::new();
                let mut parser = SolarParser::from_source_code(
                    &sess,
                    &arena,
                    FileName::Real(path.clone()),
                    content.to_string(),
                )?;
                let ast = parser.parse_file().map_err(|e| e.emit())?;

                let mut visitor = PreprocessorVisitor::new();
                let _ = visitor.visit_source_unit(&ast);
                visitor.update(&sess, &mut content);

                source.content = Arc::new(content);
                Ok(())
            })
        });
        eyre::ensure!(result.is_ok(), "failed parsing");

        // Insert a placeholder bindings library so imports of the target file resolve.
        sources.insert(target_path.clone(), Source::new(JSON_BINDINGS_PLACEHOLDER));

        Ok(PreprocessedState { version, sources, target_path, project, config })
    }
}

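/// AST visitor that records source rewrites as `(span, replacement)` pairs.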
struct PreprocessorVisitor {
    updates: Vec<(Span, &'static str)>,
}

impl PreprocessorVisitor {
    fn new() -> Self {
        Self { updates: Vec::new() }
    }

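    /// Applies the collected rewrites to `content`, shifting byte offsets by
    /// the net length change of the replacements already performed.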
    fn update(mut self, sess: &Session, content: &mut String) {
        if self.updates.is_empty() {
            return;
        }

        let sf = sess.source_map().lookup_source_file(self.updates[0].0.lo());
        let base = sf.start_pos.0;

        self.updates.sort_by_key(|(span, _)| span.lo());
        let mut shift = 0_i64;
        for (span, new) in self.updates {
            // Spans are absolute; rebase them onto this file and account for
            // the length delta of the replacements made so far.
            let lo = span.lo() - base;
            let hi = span.hi() - base;
            let start = ((lo.0 as i64) - shift) as usize;
            let end = ((hi.0 as i64) - shift) as usize;

            content.replace_range(start..end, new);
            shift += (end - start) as i64;
            shift -= new.len() as i64;
        }
    }
}

impl<'ast> Visit<'ast> for PreprocessorVisitor {
    type BreakValue = solar_parse::interface::data_structures::Never;

    fn visit_item_function(
        &mut self,
        func: &'ast ast::ItemFunction<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Replace function bodies with a stub: `_;` for modifiers, `revert();` otherwise.
        if let Some(block) = &func.body {
            if !block.is_empty() {
                let span = block.first().unwrap().span.to(block.last().unwrap().span);
                let new_body = match func.kind {
                    FunctionKind::Modifier => "_;",
                    _ => "revert();",
                };
                self.updates.push((span, new_body));
            }
        }

        self.walk_item_function(func)
    }

    fn visit_variable_definition(
        &mut self,
        var: &'ast ast::VariableDefinition<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Drop `immutable` variables; with constructor bodies stubbed out they
        // could no longer be initialized.
        if let Some(VarMut::Immutable) = var.mutability {
            self.updates.push((var.span, ""));
        }

        self.walk_variable_definition(var)
    }
}

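/// A struct for which bindings will be generated, along with the metadata
/// needed to import it and to name its binding functions.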
#[derive(Debug, Clone)]
struct StructToWrite {
    /// Name of the struct definition.
    name: String,
    /// Name of the contract the struct is defined in, if any.
    contract_name: Option<String>,
    /// Import alias applied when another file defines the same name.
    import_alias: Option<String>,
    /// Path to the source file, relative to the project root.
    path: PathBuf,
    /// EIP-712 schema of the struct.
    schema: String,
    /// Name used for this struct in the binding function names, disambiguated
    /// when several structs share the same name.
    name_in_fns: String,
}

impl StructToWrite {
    /// Name of the top-level item to import: the contract name if the struct
    /// is defined inside a contract, otherwise the struct name itself.
    fn struct_or_contract_name(&self) -> &str {
        self.contract_name.as_deref().unwrap_or(&self.name)
    }

    fn struct_or_contract_name_with_alias(&self) -> &str {
        self.import_alias.as_deref().unwrap_or(self.struct_or_contract_name())
    }

    /// Path used to reference the struct in the generated code, e.g. `Contract.Struct`.
    fn full_path(&self) -> String {
        if self.contract_name.is_some() {
            format!("{}.{}", self.struct_or_contract_name_with_alias(), self.name)
        } else {
            self.struct_or_contract_name_with_alias().to_string()
        }
    }

    /// Item to place into the import statement, aliased if needed.
    fn import_item(&self) -> String {
        if let Some(alias) = &self.import_alias {
            format!("{} as {}", self.struct_or_contract_name(), alias)
        } else {
            self.struct_or_contract_name().to_string()
        }
    }
}

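/// Output of the preprocessing phase: the patched sources together with
/// everything needed to build the compiler input for struct collection.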
struct PreprocessedState {
    version: Version,
    sources: Sources,
    target_path: PathBuf,
    project: Project,
    config: Config,
}

impl PreprocessedState {
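    /// Parses and lowers the preprocessed sources with solar and collects every
    /// struct with a resolvable EIP-712 schema, honoring the `bind_json`
    /// include/exclude filters and skipping library dependencies.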
    fn find_structs(self) -> Result<StructsState> {
        let mut structs_to_write = Vec::new();
        let Self { version, sources, target_path, config, project } = self;

        let settings = config.solc_settings()?;
        let include = config.bind_json.include;
        let exclude = config.bind_json.exclude;
        let root = config.root;

        let input = SolcVersionedInput::build(sources, settings, SolcLanguage::Solidity, version);

        let mut sess = Session::builder().with_stderr_emitter().build();
        sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false);

        let result = sess.enter_parallel(|| -> Result<()> {
            let mut parsing_context = solar_pcx_from_solc_project(&sess, &project, &input, false);

            // Collect the files whose structs should be bound, applying the
            // include/exclude filters; without an explicit include list,
            // library dependencies are skipped.
            let mut target_files = HashSet::new();
            for (path, source) in &input.input.sources {
                if !include.is_empty() {
                    if !include.iter().any(|matcher| matcher.is_match(path)) {
                        continue;
                    }
                } else {
                    if project.paths.has_library_ancestor(path) {
                        continue;
                    }
                }

                if exclude.iter().any(|matcher| matcher.is_match(path)) {
                    continue;
                }

                if let Ok(src_file) =
                    sess.source_map().new_source_file(path.clone(), source.content.as_str())
                {
                    target_files.insert(src_file.stable_id);
                    parsing_context.add_file(src_file);
                }
            }

            // Lower to HIR and keep every struct whose EIP-712 schema resolves
            // and which is defined in one of the target files.
            let hir_arena = ThreadLocal::new();
            if let Ok(Some(gcx)) = parsing_context.parse_and_lower(&hir_arena) {
                let hir = &gcx.get().hir;
                let resolver = Resolver::new(gcx);
                for id in &resolver.struct_ids() {
                    if let Some(schema) = resolver.resolve_struct_eip712(*id) {
                        let def = hir.strukt(*id);
                        let source = hir.source(def.source);

                        if !target_files.contains(&source.file.stable_id) {
                            continue;
                        }

                        if let FileName::Real(ref path) = source.file.name {
                            structs_to_write.push(StructToWrite {
                                name: def.name.as_str().into(),
                                contract_name: def
                                    .contract
                                    .map(|id| hir.contract(id).name.as_str().into()),
                                path: path
                                    .strip_prefix(&root)
                                    .unwrap_or_else(|_| path)
                                    .to_path_buf(),
                                schema,

                                // Resolved later on.
                                import_alias: None,
                                name_in_fns: String::new(),
                            });
                        }
                    }
                }
            }
            Ok(())
        });

        eyre::ensure!(result.is_ok() && sess.dcx.has_errors().is_ok(), "failed parsing");

        Ok(StructsState { structs_to_write, target_path })
    }
}

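/// Output of the struct collection phase.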
#[derive(Debug)]
struct StructsState {
    structs_to_write: Vec<StructToWrite>,
    target_path: PathBuf,
}

impl StructsState {
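    /// Resolves naming conflicts across the collected structs.
    ///
    /// When the same struct or contract name is imported from different files,
    /// each import gets a numbered alias (`Name_0`, `Name_1`, ...). When several
    /// structs share a name, the binding function names are disambiguated the
    /// same way via `name_in_fns`.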
    fn resolve_imports_and_aliases(self) -> ResolvedState {
        let Self { mut structs_to_write, target_path } = self;

        // Group source paths by the name under which they would be imported.
        let mut names_to_paths = BTreeMap::new();

        for s in &structs_to_write {
            names_to_paths
                .entry(s.struct_or_contract_name())
                .or_insert_with(BTreeSet::new)
                .insert(s.path.as_path());
        }

        // Assign a numbered alias per (name, path) pair whenever a name is
        // imported from more than one file.
        let mut aliases = BTreeMap::new();

        for (name, paths) in names_to_paths {
            if paths.len() <= 1 {
                continue;
            }

            for (i, path) in paths.into_iter().enumerate() {
                aliases
                    .entry(name.to_string())
                    .or_insert_with(BTreeMap::new)
                    .insert(path.to_path_buf(), format!("{name}_{i}"));
            }
        }

        for s in &mut structs_to_write {
            let name = s.struct_or_contract_name();
            if aliases.contains_key(name) {
                s.import_alias = Some(aliases[name][&s.path].clone());
            }
        }

        // Disambiguate the names used in the generated function names as well.
        let mut name_to_structs_indexes = BTreeMap::new();

        for (idx, s) in structs_to_write.iter().enumerate() {
            name_to_structs_indexes.entry(&s.name).or_insert_with(Vec::new).push(idx);
        }

        let mut fn_names = vec![None; structs_to_write.len()];

        for (name, indexes) in name_to_structs_indexes {
            if indexes.len() > 1 {
                for (i, idx) in indexes.into_iter().enumerate() {
                    fn_names[idx] = Some(format!("{name}_{i}"));
                }
            }
        }

        for (s, fn_name) in structs_to_write.iter_mut().zip(fn_names.into_iter()) {
            s.name_in_fns = fn_name.unwrap_or(s.name.clone());
        }

        ResolvedState { structs_to_write, target_path }
    }
}

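/// Final state: structs with resolved import aliases and function names, ready
/// to be rendered into the bindings file.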
struct ResolvedState {
    structs_to_write: Vec<StructToWrite>,
    target_path: PathBuf,
}

impl ResolvedState {
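    /// Renders the bindings file (imports, `Vm` interface, `JsonBindings`
    /// library), writes it to `target_path`, and returns the generated source.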
    fn write(self) -> Result<String> {
        let mut result = String::new();
        self.write_imports(&mut result)?;
        self.write_vm(&mut result);
        self.write_library(&mut result)?;

        if let Some(parent) = self.target_path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&self.target_path, &result)?;

        sh_println!("Bindings written to {}", self.target_path.display())?;

        Ok(result)
    }

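    /// Writes the file header and one import statement per source file,
    /// grouping all names imported from the same path.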
    fn write_imports(&self, result: &mut String) -> fmt::Result {
        let mut grouped_imports = BTreeMap::new();

        for struct_to_write in &self.structs_to_write {
            let item = struct_to_write.import_item();
            grouped_imports
                .entry(struct_to_write.path.as_path())
                .or_insert_with(BTreeSet::new)
                .insert(item);
        }

        result.push_str(
            "// Automatically generated by forge bind-json.\n\npragma solidity >=0.6.2 <0.9.0;\npragma experimental ABIEncoderV2;\n\n",
        );

        for (path, names) in grouped_imports {
            writeln!(
                result,
                "import {{{}}} from \"{}\";",
                names.iter().join(", "),
                path.to_slash_lossy()
            )?;
        }

        Ok(())
    }

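    /// Writes a minimal `Vm` interface containing only the cheatcodes used by
    /// the generated bindings.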
    fn write_vm(&self, result: &mut String) {
        result.push_str(r#"
interface Vm {
    function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
    function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);
    function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
    function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);
    function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);
}
"#);
    }

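    /// Writes the `JsonBindings` library: one `schema_*` constant per struct
    /// and `serialize`/`deserialize` helpers for each. For a struct named
    /// `Point`, for example, this produces `serialize(Point ...)`,
    /// `deserializePoint(...)` and `deserializePointArray(...)` functions.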
    fn write_library(&self, result: &mut String) -> fmt::Result {
        result.push_str(
            r#"
library JsonBindings {
    Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));

"#,
        );
        for struct_to_write in &self.structs_to_write {
            writeln!(
                result,
                "    {}{} = \"{}\";",
                TYPE_BINDING_PREFIX, struct_to_write.name_in_fns, struct_to_write.schema
            )?;
        }

        for struct_to_write in &self.structs_to_write {
            write!(
                result,
                r#"
    function serialize({path} memory value) internal pure returns (string memory) {{
        return vm.serializeJsonType(schema_{name_in_fns}, abi.encode(value));
    }}

    function serialize({path} memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {{
        return vm.serializeJsonType(objectKey, valueKey, schema_{name_in_fns}, abi.encode(value));
    }}

    function deserialize{name_in_fns}(string memory json) public pure returns ({path} memory) {{
        return abi.decode(vm.parseJsonType(json, schema_{name_in_fns}), ({path}));
    }}

    function deserialize{name_in_fns}(string memory json, string memory path) public pure returns ({path} memory) {{
        return abi.decode(vm.parseJsonType(json, path, schema_{name_in_fns}), ({path}));
    }}

    function deserialize{name_in_fns}Array(string memory json, string memory path) public pure returns ({path}[] memory) {{
        return abi.decode(vm.parseJsonTypeArray(json, path, schema_{name_in_fns}), ({path}[]));
    }}
"#,
                name_in_fns = struct_to_write.name_in_fns,
                path = struct_to_write.full_path()
            )?;
        }

        result.push_str("}\n");

        Ok(())
    }
}