1use crate::{
2 AsDoc, BufWriter, Document, ParseItem, ParseSource, Parser, Preprocessor,
3 document::DocumentContent, helpers::merge_toml_table,
4};
5use alloy_primitives::map::HashMap;
6use eyre::{Context, Result};
7use foundry_compilers::{compilers::solc::SOLC_EXTENSIONS, utils::source_files_iter};
8use foundry_config::{DocConfig, FormatterConfig, filter::expand_globs};
9use itertools::Itertools;
10use mdbook_driver::MDBook;
11use rayon::prelude::*;
12use std::{
13 cmp::Ordering,
14 fs,
15 path::{Path, PathBuf},
16};
17use toml::value;
18
/// Builds mdbook documentation from Solidity sources.
#[derive(Debug)]
pub struct DocBuilder {
    /// Project root; relative paths (output dir, ignore globs, item paths) are
    /// resolved against it.
    root: PathBuf,
    /// Directory containing the Solidity sources to document.
    sources: PathBuf,
    /// Library directories whose sources are also parsed into documents.
    libraries: Vec<PathBuf>,
    /// Whether `build` should also run the mdbook build step after writing the
    /// book files to disk.
    should_build: bool,
    /// Documentation configuration (output dir, title, ignore globs, homepage,
    /// repository, book path, ...).
    config: DocConfig,
    /// Preprocessors applied in order to the parsed documents before writing.
    preprocessors: Vec<Box<dyn Preprocessor>>,
    /// Formatter configuration; the tab width is passed to the doc parser.
    fmt: FormatterConfig,
    /// Whether documents originating from `libraries` are kept in the output.
    include_libraries: bool,
}
41
impl DocBuilder {
    /// Name of the directory inside the output dir that holds the generated
    /// markdown sources.
    pub(crate) const SRC: &'static str = "src";
    /// Solidity file extension; used to tell file entries from directories when
    /// ordering summary sections.
    const SOL_EXT: &'static str = "sol";
    /// Conventional readme file name; used for the homepage and per-section
    /// index pages.
    const README: &'static str = "README.md";
    /// mdbook summary (navigation) file name.
    const SUMMARY: &'static str = "SUMMARY.md";

48 pub fn new(
50 root: PathBuf,
51 sources: PathBuf,
52 libraries: Vec<PathBuf>,
53 include_libraries: bool,
54 ) -> Self {
55 Self {
56 root,
57 sources,
58 libraries,
59 include_libraries,
60 should_build: false,
61 config: DocConfig::default(),
62 preprocessors: Default::default(),
63 fmt: Default::default(),
64 }
65 }
66
67 pub const fn with_should_build(mut self, should_build: bool) -> Self {
69 self.should_build = should_build;
70 self
71 }
72
73 pub fn with_config(mut self, config: DocConfig) -> Self {
75 self.config = config;
76 self
77 }
78
79 pub fn with_fmt(mut self, fmt: FormatterConfig) -> Self {
81 self.fmt = fmt;
82 self
83 }
84
85 pub fn with_preprocessor<P: Preprocessor + 'static>(mut self, preprocessor: P) -> Self {
87 self.preprocessors.push(Box::new(preprocessor) as Box<dyn Preprocessor>);
88 self
89 }
90
91 pub fn out_dir(&self) -> Result<PathBuf> {
93 Ok(self.root.join(&self.config.out).canonicalize()?)
94 }
95
    /// Parses all sources into documents, runs the preprocessors, and writes the
    /// mdbook to the output directory; optionally builds the book afterwards.
    ///
    /// # Errors
    ///
    /// Returns an error on I/O failures, glob expansion failures, parsing
    /// failures, or (when enabled) mdbook build failures.
    pub fn build(self, compiler: &mut solar::sema::Compiler) -> eyre::Result<()> {
        // Ensure the output directory exists before `out_dir()` canonicalizes it.
        fs::create_dir_all(self.root.join(&self.config.out))
            .wrap_err("failed to create output directory")?;

        // Expand the configured ignore globs relative to the project root.
        let ignored = expand_globs(&self.root, self.config.ignore.iter())?;

        // Collect all Solidity sources that are not ignored.
        let sources = source_files_iter(&self.sources, SOLC_EXTENSIONS)
            .filter(|file| !ignored.contains(file))
            .collect::<Vec<_>>();

        if sources.is_empty() {
            sh_println!("No sources detected at {}", self.sources.display())?;
            return Ok(());
        }

        // Library sources are collected unconditionally; library-derived documents
        // are filtered out later unless `include_libraries` is set.
        let library_sources = self
            .libraries
            .iter()
            .flat_map(|lib| source_files_iter(lib, SOLC_EXTENSIONS))
            .collect::<Vec<_>>();

        // Tag each path with whether it came from a library.
        let combined_sources = sources
            .iter()
            .map(|path| (path, false))
            .chain(library_sources.iter().map(|path| (path, true)))
            .collect::<Vec<_>>();

        let out_dir = self.out_dir()?;
        let out_target_dir = out_dir.clone();
        // Enter the solar compiler context to access parsed ASTs, and process all
        // source files in parallel.
        let documents = compiler.enter_mut(|compiler| -> eyre::Result<Vec<Vec<Document>>> {
            let gcx = compiler.gcx();
            let documents = combined_sources
                .par_iter()
                .map(|(path, from_library)| {
                    let path = *path;
                    let from_library = *from_library;
                    let mut files = vec![];

                    // Only files the compiler has an AST for produce documents;
                    // everything else yields an empty list.
                    if let Some((_, ast_source)) = gcx.get_ast_source(path)
                        && let Some(source_unit) = ast_source.ast.as_ref()
                    {
                        let source = ast_source.file.src.to_string();
                        let file_start = ast_source.file.start_pos.to_usize();

                        let doc = Parser::new(source, file_start, self.fmt.tab_width);
                        let all_items = doc.parse(source_unit);

                        // Split file-level constants (variables) from all other items;
                        // constants are grouped into a single dedicated page below.
                        let (items, consts): (Vec<ParseItem>, Vec<ParseItem>) = all_items
                            .into_iter()
                            .partition(|item| !matches!(item.source, ParseSource::Variable(_)));

                        // Group functions by identifier to detect overloads.
                        let mut remaining = Vec::with_capacity(items.len());
                        let mut funcs: HashMap<String, Vec<ParseItem>> = HashMap::default();
                        for item in items {
                            if matches!(item.source, ParseSource::Function(_)) {
                                funcs.entry(item.source.ident()).or_default().push(item);
                            } else {
                                remaining.push(item);
                            }
                        }
                        // Functions with a unique name go back into the regular item
                        // list; overloaded sets get their own combined page below.
                        let (items, overloaded): (
                            HashMap<String, Vec<ParseItem>>,
                            HashMap<String, Vec<ParseItem>>,
                        ) = funcs.into_iter().partition(|(_, v)| v.len() == 1);
                        remaining.extend(items.into_values().flatten());

                        // One document per non-overloaded item, targeted under
                        // `<out>/src/<path relative to root>/<item filename>`.
                        files = remaining
                            .into_iter()
                            .map(|item| {
                                let relative_path =
                                    path.strip_prefix(&self.root)?.join(item.filename());

                                let target_path = out_dir.join(Self::SRC).join(relative_path);
                                let ident = item.source.ident();
                                Ok(Document::new(
                                    path.clone(),
                                    target_path,
                                    from_library,
                                    out_target_dir.clone(),
                                )
                                .with_content(DocumentContent::Single(item), ident))
                            })
                            .collect::<eyre::Result<Vec<_>>>()?;

                        // All file-level constants share one `constants.<stem>.md` page.
                        if !consts.is_empty() {
                            let filestem = path.file_stem().and_then(|stem| stem.to_str());

                            let filename = {
                                let mut name = "constants".to_owned();
                                if let Some(stem) = filestem {
                                    name.push_str(&format!(".{stem}"));
                                }
                                name.push_str(".md");
                                name
                            };
                            let relative_path = path.strip_prefix(&self.root)?.join(filename);
                            let target_path = out_dir.join(Self::SRC).join(relative_path);

                            // Avoid a redundant "Constants constants"-style title when the
                            // file stem already mentions constants.
                            let identity = match filestem {
                                Some(stem) if stem.to_lowercase().contains("constants") => {
                                    stem.to_owned()
                                }
                                Some(stem) => format!("{stem} constants"),
                                None => "constants".to_owned(),
                            };

                            files.push(
                                Document::new(
                                    path.clone(),
                                    target_path,
                                    from_library,
                                    out_target_dir.clone(),
                                )
                                .with_content(DocumentContent::Constants(consts), identity),
                            )
                        }

                        // Each overloaded function set becomes one combined page.
                        if !overloaded.is_empty() {
                            for (ident, funcs) in overloaded {
                                // Non-empty by construction: only sets with >1 entry
                                // land in `overloaded`.
                                let filename =
                                    funcs.first().expect("no overloaded functions").filename();
                                let relative_path = path.strip_prefix(&self.root)?.join(filename);

                                let target_path = out_dir.join(Self::SRC).join(relative_path);
                                files.push(
                                    Document::new(
                                        path.clone(),
                                        target_path,
                                        from_library,
                                        out_target_dir.clone(),
                                    )
                                    .with_content(
                                        DocumentContent::OverloadedFunctions(funcs),
                                        ident,
                                    ),
                                );
                            }
                        }
                    };

                    Ok(files)
                })
                .collect::<eyre::Result<Vec<_>>>()?;

            Ok(documents)
        })?;

        // Run each preprocessor over the flattened document list, in order.
        let documents = self
            .preprocessors
            .iter()
            .try_fold(documents.into_iter().flatten().collect_vec(), |docs, p| {
                p.preprocess(docs)
            })?;

        // Deterministic ordering by source path, then drop library documents
        // unless they were explicitly requested.
        let documents = documents
            .into_iter()
            .sorted_by(|doc1, doc2| {
                doc1.item_path.display().to_string().cmp(&doc2.item_path.display().to_string())
            })
            .filter(|d| !d.from_library || self.include_libraries)
            .collect_vec();

        self.write_mdbook(documents)?;

        // Optionally render the book with mdbook.
        if self.should_build {
            MDBook::load(self.out_dir().wrap_err("failed to construct output directory")?)
                .and_then(|book| book.build())
                .map_err(|err| eyre::eyre!("failed to build book: {err:?}"))?;
        }

        Ok(())
    }
286
    /// Writes the mdbook skeleton (readme, summary, static assets, config) and
    /// every generated document page to the output directory.
    fn write_mdbook(&self, documents: Vec<Document>) -> eyre::Result<()> {
        let out_dir = self.out_dir().wrap_err("failed to construct output directory")?;
        let out_dir_src = out_dir.join(Self::SRC);
        fs::create_dir_all(&out_dir_src)?;

        // Homepage resolution order: configured homepage (relative to root),
        // then the sources' README, then the root README, else an empty page.
        let homepage_content = {
            let homepage_or_src_readme = self
                .config
                .homepage
                .as_ref()
                .map(|homepage| self.root.join(homepage))
                .unwrap_or_else(|| self.sources.join(Self::README));
            let root_readme = self.root.join(Self::README);

            if homepage_or_src_readme.exists() {
                fs::read_to_string(homepage_or_src_readme)?
            } else if root_readme.exists() {
                fs::read_to_string(root_readme)?
            } else {
                String::new()
            }
        };

        let readme_path = out_dir_src.join(Self::README);
        fs::write(readme_path, homepage_content)?;

        // SUMMARY.md is the book's navigation; "Home" links to the readme above,
        // the rest is built recursively from the document list.
        let mut summary = BufWriter::default();
        summary.write_title("Summary")?;
        summary.write_link_list_item("Home", Self::README, 0)?;
        self.write_summary_section(&mut summary, &documents.iter().collect::<Vec<_>>(), None, 0)?;
        fs::write(out_dir_src.join(Self::SUMMARY), summary.finish())?;

        // Static assets bundled into the binary at compile time.
        fs::write(out_dir.join("solidity.min.js"), include_str!("../static/solidity.min.js"))?;

        fs::write(out_dir.join("book.css"), include_str!("../static/book.css"))?;

        fs::write(out_dir.join("book.toml"), self.book_config()?)?;

        // Keep the rendered book output out of version control.
        let gitignore = "book/";
        fs::write(out_dir.join(".gitignore"), gitignore)?;

        // Write each generated page, creating parent directories as needed.
        for document in documents {
            fs::create_dir_all(
                document
                    .target_path
                    .parent()
                    .ok_or_else(|| eyre::format_err!("empty target path; noop"))?,
            )?;
            fs::write(&document.target_path, document.as_doc()?)?;
        }

        Ok(())
    }
352
    /// Produces the `book.toml` contents: the bundled static template, with the
    /// configured title and repository URL applied, then merged with the user's
    /// own book config file if one exists.
    fn book_config(&self) -> eyre::Result<String> {
        // Start from the template shipped with the binary.
        let mut book: value::Table = toml::from_str(include_str!("../static/book.toml"))?;
        // NOTE(review): the indexing and `unwrap()`s assume the static template
        // always contains `[book]` and `[output.html]` tables — holds as long as
        // ../static/book.toml keeps that shape.
        book["book"]
            .as_table_mut()
            .unwrap()
            .insert(String::from("title"), self.config.title.clone().into());
        if let Some(ref repo) = self.config.repository {
            // Join repo URL and optional subpath without doubling the separator.
            let git_repo_url = if let Some(path) = &self.config.path {
                format!("{}/{}", repo.trim_end_matches('/'), path.trim_start_matches('/'))
            } else {
                repo.clone()
            };

            book["output"].as_table_mut().unwrap()["html"]
                .as_table_mut()
                .unwrap()
                .insert(String::from("git-repository-url"), git_repo_url.into());
        }

        // The configured book path may be a file, or a directory containing
        // `book.toml`; `None` if neither resolves to an existing file.
        let book_path = {
            if self.config.book.is_file() {
                Some(self.config.book.clone())
            } else {
                let book_path = self.config.book.join("book.toml");
                book_path.is_file().then_some(book_path)
            }
        };

        // User-provided settings override/extend the template.
        if let Some(book_path) = book_path {
            merge_toml_table(&mut book, toml::from_str(&fs::read_to_string(book_path)?)?);
        }

        Ok(toml::to_string_pretty(&book)?)
    }
393
    /// Recursively writes one summary section for `files`, grouped by the path
    /// component at `depth`, and emits a per-directory `README.md` index page.
    ///
    /// `base_path` is the directory (relative to the doc source root) this
    /// section covers; `None` at the top level.
    fn write_summary_section(
        &self,
        summary: &mut BufWriter,
        files: &[&Document],
        base_path: Option<&Path>,
        depth: usize,
    ) -> eyre::Result<()> {
        if files.is_empty() {
            return Ok(());
        }

        // Section header: a title at the first nesting level, a linked list item
        // (pointing at the directory's README) deeper down.
        if let Some(path) = base_path {
            let title = path.iter().next_back().unwrap().to_string_lossy();
            if depth == 1 {
                summary.write_title(&title)?;
            } else {
                let summary_path = path.join(Self::README);
                summary.write_link_list_item(
                    &format!("❱ {title}"),
                    &summary_path.display().to_string(),
                    depth - 1,
                )?;
            }
        }

        // Group documents by their first `depth + 1` path components relative to
        // the project root; keys ending in `.sol` are leaf file groups, others
        // are subdirectories to recurse into.
        let mut grouped = HashMap::new();
        for file in files {
            let path = file.item_path.strip_prefix(&self.root)?;
            let key = path.iter().take(depth + 1).collect::<PathBuf>();
            grouped.entry(key).or_insert_with(Vec::new).push(*file);
        }
        // Order: directories first (lexicographic), then `.sol` file groups.
        let grouped = grouped.into_iter().sorted_by(|(lhs, _), (rhs, _)| {
            let lhs_at_end = lhs.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default();
            let rhs_at_end = rhs.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default();
            if lhs_at_end == rhs_at_end {
                lhs.cmp(rhs)
            } else if lhs_at_end {
                Ordering::Greater
            } else {
                Ordering::Less
            }
        });

        let out_dir = self.out_dir().wrap_err("failed to construct output directory")?;
        // Index page content for this directory, written at the end if non-empty.
        let mut readme = BufWriter::new("\n\n# Contents\n");
        for (path, files) in grouped {
            if path.extension().map(|ext| ext == Self::SOL_EXT).unwrap_or_default() {
                // Leaf: link each document of this source file directly.
                for file in files {
                    let ident = &file.identity;

                    // Summary links are relative to the book's src dir.
                    let summary_path = &file.target_path.strip_prefix(out_dir.join(Self::SRC))?;
                    summary.write_link_list_item(
                        ident,
                        &summary_path.display().to_string(),
                        depth,
                    )?;

                    // Readme links are relative to this section's directory when
                    // there is one; otherwise the src-relative path is reused.
                    let readme_path = base_path
                        .map(|path| summary_path.strip_prefix(path))
                        .transpose()?
                        .unwrap_or(summary_path);
                    readme.write_link_list_item(ident, &readme_path.display().to_string(), 0)?;
                }
            } else {
                // Subdirectory: link its index from this readme, then recurse.
                let name = path.iter().next_back().unwrap().to_string_lossy();
                let readme_path = Path::new("/").join(&path).display().to_string();
                readme.write_link_list_item(&name, &readme_path, 0)?;
                self.write_summary_section(summary, &files, Some(&path), depth + 1)?;
            }
        }
        // Persist the index page for this directory (skipped at the top level,
        // where the homepage README is written separately).
        if !readme.is_empty()
            && let Some(path) = base_path
        {
            let path = out_dir.join(Self::SRC).join(path);
            fs::create_dir_all(&path)?;
            fs::write(path.join(Self::README), readme.finish())?;
        }
        Ok(())
    }
475}