1use crate::{
2 AsDoc, BufWriter, Document, ParseItem, ParseSource, Parser, Preprocessor,
3 document::DocumentContent, helpers::merge_toml_table,
4};
5use alloy_primitives::map::HashMap;
6use eyre::{Context, Result};
7use forge_fmt::{FormatterConfig, Visitable};
8use foundry_compilers::{compilers::solc::SOLC_EXTENSIONS, utils::source_files_iter};
9use foundry_config::{DocConfig, filter::expand_globs};
10use itertools::Itertools;
11use mdbook::MDBook;
12use rayon::prelude::*;
13use std::{
14 cmp::Ordering,
15 fs,
16 path::{Path, PathBuf},
17};
18use toml::value;
19
/// Builds an mdBook documentation site from Solidity sources.
#[derive(Debug)]
pub struct DocBuilder {
    /// Project root; relative paths and the output directory are resolved against it.
    root: PathBuf,
    /// Directory containing the Solidity sources to document.
    sources: PathBuf,
    /// Library directories whose sources may additionally be documented.
    libraries: Vec<PathBuf>,
    /// Whether to run `mdbook build` after the book files are written.
    should_build: bool,
    /// Documentation configuration (output dir, ignore globs, book settings, ...).
    config: DocConfig,
    /// Preprocessors applied to the parsed documents before writing.
    preprocessors: Vec<Box<dyn Preprocessor>>,
    /// Formatter configuration forwarded to the Solidity parser.
    fmt: FormatterConfig,
    /// Whether documents originating from libraries are included in the book.
    include_libraries: bool,
}
42
impl DocBuilder {
    /// Name of the directory inside the output dir that holds the generated pages.
    pub(crate) const SRC: &'static str = "src";
    /// Solidity file extension; used to tell file entries apart from directories.
    const SOL_EXT: &'static str = "sol";
    /// File name used for the book homepage and per-directory index pages.
    const README: &'static str = "README.md";
    /// mdBook table-of-contents file name.
    const SUMMARY: &'static str = "SUMMARY.md";
48
49 pub fn new(
51 root: PathBuf,
52 sources: PathBuf,
53 libraries: Vec<PathBuf>,
54 include_libraries: bool,
55 ) -> Self {
56 Self {
57 root,
58 sources,
59 libraries,
60 include_libraries,
61 should_build: false,
62 config: DocConfig::default(),
63 preprocessors: Default::default(),
64 fmt: Default::default(),
65 }
66 }
67
68 pub fn with_should_build(mut self, should_build: bool) -> Self {
70 self.should_build = should_build;
71 self
72 }
73
74 pub fn with_config(mut self, config: DocConfig) -> Self {
76 self.config = config;
77 self
78 }
79
80 pub fn with_fmt(mut self, fmt: FormatterConfig) -> Self {
82 self.fmt = fmt;
83 self
84 }
85
86 pub fn with_preprocessor<P: Preprocessor + 'static>(mut self, preprocessor: P) -> Self {
88 self.preprocessors.push(Box::new(preprocessor) as Box<dyn Preprocessor>);
89 self
90 }
91
92 pub fn out_dir(&self) -> Result<PathBuf> {
94 Ok(self.root.join(&self.config.out).canonicalize()?)
95 }
96
    /// Parses all sources (project and libraries), runs the registered
    /// preprocessors over the resulting documents and writes them out as an
    /// mdBook. Optionally runs `mdbook build` afterwards (see
    /// [`Self::with_should_build`]).
    pub fn build(self) -> eyre::Result<()> {
        // Ensure the output directory exists up front.
        fs::create_dir_all(self.root.join(&self.config.out))
            .wrap_err("failed to create output directory")?;

        // Expand the configured ignore globs relative to the project root.
        let ignored = expand_globs(&self.root, self.config.ignore.iter())?;

        // Collect the project sources, skipping ignored files.
        let sources = source_files_iter(&self.sources, SOLC_EXTENSIONS)
            .filter(|file| !ignored.contains(file))
            .collect::<Vec<_>>();

        if sources.is_empty() {
            sh_println!("No sources detected at {}", self.sources.display())?;
            return Ok(());
        }

        // Library sources are documented too, but parse failures there are
        // tolerated (see below).
        let library_sources = self
            .libraries
            .iter()
            .flat_map(|lib| source_files_iter(lib, SOLC_EXTENSIONS))
            .collect::<Vec<_>>();

        // Tag every path with whether it originates from a library.
        let combined_sources = sources
            .iter()
            .map(|path| (path, false))
            .chain(library_sources.iter().map(|path| (path, true)))
            .collect::<Vec<_>>();

        let out_dir = self.out_dir()?;
        // Parse all files in parallel; each file may yield multiple documents.
        let documents = combined_sources
            .par_iter()
            .enumerate()
            .map(|(i, (path, from_library))| {
                let path = *path;
                let from_library = *from_library;

                // Read and parse the Solidity source.
                let source = fs::read_to_string(path)?;

                let (mut source_unit, comments) = match solang_parser::parse(&source, i) {
                    Ok(res) => res,
                    Err(err) => {
                        if from_library {
                            // Best effort for libraries: skip files that fail to parse.
                            return Ok(Vec::new());
                        } else {
                            return Err(eyre::eyre!(
                                "Failed to parse Solidity code for {}\nDebug info: {:?}",
                                path.display(),
                                err
                            ));
                        }
                    }
                };

                // Walk the AST, collecting documentable items.
                let mut doc = Parser::new(comments, source).with_fmt(self.fmt.clone());
                source_unit
                    .visit(&mut doc)
                    .map_err(|err| eyre::eyre!("Failed to parse source: {err}"))?;

                // Split off file-level variables (constants); they share one page.
                let (items, consts): (Vec<ParseItem>, Vec<ParseItem>) = doc
                    .items()
                    .into_iter()
                    .partition(|item| !matches!(item.source, ParseSource::Variable(_)));

                // Group functions by identifier so overloads can share one page.
                let mut remaining = Vec::with_capacity(items.len());
                let mut funcs: HashMap<String, Vec<ParseItem>> = HashMap::default();
                for item in items {
                    if matches!(item.source, ParseSource::Function(_)) {
                        funcs.entry(item.source.ident()).or_default().push(item);
                    } else {
                        remaining.push(item);
                    }
                }
                // Functions with a single definition are treated like any other item.
                let (items, overloaded): (
                    HashMap<String, Vec<ParseItem>>,
                    HashMap<String, Vec<ParseItem>>,
                ) = funcs.into_iter().partition(|(_, v)| v.len() == 1);
                remaining.extend(items.into_iter().flat_map(|(_, v)| v));

                // One document per regular item.
                let mut files = remaining
                    .into_iter()
                    .map(|item| {
                        let relative_path = path.strip_prefix(&self.root)?.join(item.filename());

                        let target_path = out_dir.join(Self::SRC).join(relative_path);
                        let ident = item.source.ident();
                        Ok(Document::new(
                            path.clone(),
                            target_path,
                            from_library,
                            self.config.out.clone(),
                        )
                        .with_content(DocumentContent::Single(item), ident))
                    })
                    .collect::<eyre::Result<Vec<_>>>()?;

                // All constants of a file share a single "constants.<stem>.md" page.
                if !consts.is_empty() {
                    let filestem = path.file_stem().and_then(|stem| stem.to_str());

                    let filename = {
                        let mut name = "constants".to_owned();
                        if let Some(stem) = filestem {
                            name.push_str(&format!(".{stem}"));
                        }
                        name.push_str(".md");
                        name
                    };
                    let relative_path = path.strip_prefix(&self.root)?.join(filename);
                    let target_path = out_dir.join(Self::SRC).join(relative_path);

                    // Avoid titles like "Constants constants" for files already
                    // named after their constants.
                    let identity = match filestem {
                        Some(stem) if stem.to_lowercase().contains("constants") => stem.to_owned(),
                        Some(stem) => format!("{stem} constants"),
                        None => "constants".to_owned(),
                    };

                    files.push(
                        Document::new(
                            path.clone(),
                            target_path,
                            from_library,
                            self.config.out.clone(),
                        )
                        .with_content(DocumentContent::Constants(consts), identity),
                    )
                }

                // One shared document per overloaded function name.
                if !overloaded.is_empty() {
                    for (ident, funcs) in overloaded {
                        let filename = funcs.first().expect("no overloaded functions").filename();
                        let relative_path = path.strip_prefix(&self.root)?.join(filename);

                        let target_path = out_dir.join(Self::SRC).join(relative_path);
                        files.push(
                            Document::new(
                                path.clone(),
                                target_path,
                                from_library,
                                self.config.out.clone(),
                            )
                            .with_content(DocumentContent::OverloadedFunctions(funcs), ident),
                        );
                    }
                }

                Ok(files)
            })
            .collect::<eyre::Result<Vec<_>>>()?;

        // Run every registered preprocessor over the flattened document list.
        let documents = self
            .preprocessors
            .iter()
            .try_fold(documents.into_iter().flatten().collect_vec(), |docs, p| {
                p.preprocess(docs)
            })?;

        // Sort by item path for a stable, deterministic book layout.
        let documents = documents.into_iter().sorted_by(|doc1, doc2| {
            doc1.item_path.display().to_string().cmp(&doc2.item_path.display().to_string())
        });

        self.write_mdbook(
            documents.filter(|d| !d.from_library || self.include_libraries).collect_vec(),
        )?;

        // Build the book with mdbook if requested.
        if self.should_build {
            MDBook::load(self.out_dir().wrap_err("failed to construct output directory")?)
                .and_then(|book| book.build())
                .map_err(|err| eyre::eyre!("failed to build book: {err:?}"))?;
        }

        Ok(())
    }
283
284 fn write_mdbook(&self, documents: Vec<Document>) -> eyre::Result<()> {
285 let out_dir = self.out_dir().wrap_err("failed to construct output directory")?;
286 let out_dir_src = out_dir.join(Self::SRC);
287 fs::create_dir_all(&out_dir_src)?;
288
289 let homepage_content = {
291 let homepage_or_src_readme = self
294 .config
295 .homepage
296 .as_ref()
297 .map(|homepage| self.root.join(homepage))
298 .unwrap_or_else(|| self.sources.join(Self::README));
299 let root_readme = self.root.join(Self::README);
301
302 if homepage_or_src_readme.exists() {
305 fs::read_to_string(homepage_or_src_readme)?
306 } else if root_readme.exists() {
307 fs::read_to_string(root_readme)?
308 } else {
309 String::new()
310 }
311 };
312
313 let readme_path = out_dir_src.join(Self::README);
314 fs::write(readme_path, homepage_content)?;
315
316 let mut summary = BufWriter::default();
318 summary.write_title("Summary")?;
319 summary.write_link_list_item("Home", Self::README, 0)?;
320 self.write_summary_section(&mut summary, &documents.iter().collect::<Vec<_>>(), None, 0)?;
321 fs::write(out_dir_src.join(Self::SUMMARY), summary.finish())?;
322
323 fs::write(out_dir.join("solidity.min.js"), include_str!("../static/solidity.min.js"))?;
325
326 fs::write(out_dir.join("book.css"), include_str!("../static/book.css"))?;
328
329 fs::write(out_dir.join("book.toml"), self.book_config()?)?;
331
332 let gitignore = "book/";
334 fs::write(out_dir.join(".gitignore"), gitignore)?;
335
336 for document in documents {
338 fs::create_dir_all(
339 document
340 .target_path
341 .parent()
342 .ok_or_else(|| eyre::format_err!("empty target path; noop"))?,
343 )?;
344 fs::write(&document.target_path, document.as_doc()?)?;
345 }
346
347 Ok(())
348 }
349
350 fn book_config(&self) -> eyre::Result<String> {
351 let mut book: value::Table = toml::from_str(include_str!("../static/book.toml"))?;
353 book["book"]
354 .as_table_mut()
355 .unwrap()
356 .insert(String::from("title"), self.config.title.clone().into());
357 if let Some(ref repo) = self.config.repository {
358 let git_repo_url = if let Some(path) = &self.config.path {
360 format!("{}/{}", repo.trim_end_matches('/'), path.trim_start_matches('/'))
362 } else {
363 repo.clone()
365 };
366
367 book["output"].as_table_mut().unwrap()["html"]
368 .as_table_mut()
369 .unwrap()
370 .insert(String::from("git-repository-url"), git_repo_url.into());
371 }
372
373 let book_path = {
375 if self.config.book.is_file() {
376 Some(self.config.book.clone())
377 } else {
378 let book_path = self.config.book.join("book.toml");
379 if book_path.is_file() { Some(book_path) } else { None }
380 }
381 };
382
383 if let Some(book_path) = book_path {
385 merge_toml_table(&mut book, toml::from_str(&fs::read_to_string(book_path)?)?);
386 }
387
388 Ok(toml::to_string_pretty(&book)?)
389 }
390
    /// Recursively writes one section of `SUMMARY.md` for `files`, grouped by
    /// path component at `depth`, and emits a per-directory `README.md`
    /// listing that directory's contents.
    ///
    /// `base_path` is the directory (relative to the book `src` root) the
    /// current recursion level corresponds to; `None` at the top level.
    fn write_summary_section(
        &self,
        summary: &mut BufWriter,
        files: &[&Document],
        base_path: Option<&Path>,
        depth: usize,
    ) -> eyre::Result<()> {
        if files.is_empty() {
            return Ok(());
        }

        // Write a heading (top level) or an indented directory link for this level.
        if let Some(path) = base_path {
            let title = path.iter().next_back().unwrap().to_string_lossy();
            if depth == 1 {
                summary.write_title(&title)?;
            } else {
                let summary_path = path.join(Self::README);
                summary.write_link_list_item(
                    &format!("❱ {title}"),
                    &summary_path.display().to_string(),
                    depth - 1,
                )?;
            }
        }

        // Group documents by their first `depth + 1` path components below the root.
        let mut grouped = HashMap::new();
        for file in files {
            let path = file.item_path.strip_prefix(&self.root)?;
            let key = path.iter().take(depth + 1).collect::<PathBuf>();
            grouped.entry(key).or_insert_with(Vec::new).push(*file);
        }
        // Directories first (alphabetical), then `.sol` file entries (alphabetical).
        let grouped = grouped.into_iter().sorted_by(|(lhs, _), (rhs, _)| {
            let lhs_at_end = lhs.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default();
            let rhs_at_end = rhs.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default();
            if lhs_at_end == rhs_at_end {
                lhs.cmp(rhs)
            } else if lhs_at_end {
                Ordering::Greater
            } else {
                Ordering::Less
            }
        });

        let out_dir = self.out_dir().wrap_err("failed to construct output directory")?;
        // Accumulates the per-directory README ("Contents") for this level.
        let mut readme = BufWriter::new("\n\n# Contents\n");
        for (path, files) in grouped {
            if path.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default() {
                // Leaf: a Solidity file — link each of its documents directly.
                for file in files {
                    let ident = &file.identity;

                    // Summary links are relative to the book `src` root.
                    let summary_path = &file.target_path.strip_prefix(out_dir.join(Self::SRC))?;
                    summary.write_link_list_item(
                        ident,
                        &summary_path.display().to_string(),
                        depth,
                    )?;

                    // README links are relative to the current directory.
                    let readme_path = base_path
                        .map(|path| summary_path.strip_prefix(path))
                        .transpose()?
                        .unwrap_or(summary_path);
                    readme.write_link_list_item(ident, &readme_path.display().to_string(), 0)?;
                }
            } else {
                // Directory: link its README and recurse one level deeper.
                let name = path.iter().next_back().unwrap().to_string_lossy();
                let readme_path = Path::new("/").join(&path).display().to_string();
                readme.write_link_list_item(&name, &readme_path, 0)?;
                self.write_summary_section(summary, &files, Some(&path), depth + 1)?;
            }
        }
        // Persist the README for this directory (skipped at the top level,
        // where the homepage README is written separately).
        if !readme.is_empty()
            && let Some(path) = base_path
        {
            let path = out_dir.join(Self::SRC).join(path);
            fs::create_dir_all(&path)?;
            fs::write(path.join(Self::README), readme.finish())?;
        }
        Ok(())
    }
}