refactoring: remove dependency on SimpleFiles, make tree parsing multithreaded

liquidex 2024-11-26 22:58:02 +01:00
parent 505163383f
commit 0713b59063
11 changed files with 283 additions and 177 deletions
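The generator now parses .tree files on a rayon thread pool and keeps only the state-mutating semantic pass sequential (see the load_trees change below). A minimal sketch of that two-phase shape, assuming rayon's prelude and using placeholder data and a stand-in parse function rather than the real treehouse APIs:

use rayon::prelude::*;

/// Stand-in for the CPU-bound, side-effect-free parse step.
fn parse(input: &str) -> usize {
    input.split_whitespace().count()
}

fn main() {
    let sources = vec![
        ("index.tree", "hello world"),
        ("about.tree", "one two three"),
    ];

    // Phase 1: parsing touches no shared state, so every file can be handled
    // independently on the rayon thread pool.
    let parsed: Vec<(&str, usize)> = sources
        .par_iter()
        .map(|&(path, input)| (path, parse(input)))
        .collect();

    // Phase 2: anything that needs `&mut` access to shared state (semantic
    // analysis, diagnostics) runs afterwards in a plain sequential loop.
    for (path, words) in parsed {
        println!("{path}: {words} words");
    }
}

The committed load_trees follows the same split: the parallel phase produces per-file parse results, and SemaRoots::from_roots then runs over them on one thread because it needs &mut Treehouse.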

View file

@@ -108,7 +108,8 @@ pub fn fix_file(
diagnostics: &mut Vec<Diagnostic<FileId>>,
file_id: FileId,
) -> Result<String, parse::ErrorsEmitted> {
parse_tree_with_diagnostics(treehouse, file_id)
let source = treehouse.source(file_id).input();
parse_tree_with_diagnostics(file_id, source)
.map(|roots| {
let mut source = treehouse.source(file_id).input().to_owned();
let mut state = State::default();
@@ -146,7 +147,7 @@ pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
let mut treehouse = Treehouse::new();
let mut diagnostics = vec![];
let file_id = treehouse.add_file(fix_args.file.as_str().to_owned(), Source::Other(file));
let file_id = treehouse.add_file(fix_args.file.clone(), Source::Other(file));
let edit_path = root.edit_path(&fix_args.file).ok_or_else(|| {
anyhow!(
"{} is not an editable file (perhaps it is not in a persistent path?)",
@@ -178,7 +179,7 @@ pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
Edit::NoOp
}
} else {
report_diagnostics(&treehouse.files, &diagnostics)?;
report_diagnostics(&treehouse, &diagnostics)?;
Edit::NoOp
},
)
@@ -196,7 +197,7 @@ pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Ed
let mut treehouse = Treehouse::new();
let mut diagnostics = vec![];
let file_id = treehouse.add_file(path.as_str().to_string(), Source::Other(content));
let file_id = treehouse.add_file(path.to_owned(), Source::Other(content));
let edit_path = dir.edit_path(path).context("path is not editable")?;
if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
@@ -204,7 +205,7 @@ pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Ed
return Ok(Edit::Write(edit_path, fixed));
}
} else {
report_diagnostics(&treehouse.files, &diagnostics)?;
report_diagnostics(&treehouse, &diagnostics)?;
}
}

View file

@@ -144,13 +144,12 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
});
if let Some(branch_id) = branch_id {
let branch = state.sources.treehouse.tree.branch(branch_id);
if let Source::Tree {
input, target_path, ..
} = state.sources.treehouse.source(branch.file_id)
if let Source::Tree { input, tree_path } =
state.sources.treehouse.source(branch.file_id)
{
if let Some(content) = state
.target
.content(target_path)
.content(tree_path)
.await
.and_then(|s| String::from_utf8(s).ok())
{
@@ -172,7 +171,7 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
} else {
return (
StatusCode::INTERNAL_SERVER_ERROR,
format!("500 Internal Server Error: branch metadata points to entry {target_path} which does not have readable content")
format!("500 Internal Server Error: branch metadata points to entry {tree_path} which does not have readable content")
)
.into_response();
}

View file

@@ -47,16 +47,16 @@ pub fn wc_cli(content_dir: &dyn Dir, mut wc_args: WcArgs) -> anyhow::Result<()>
.content(path)
.and_then(|b| String::from_utf8(b).ok())
{
let file_id = treehouse.add_file(path.to_string(), Source::Other(content));
match parse_tree_with_diagnostics(&mut treehouse, file_id) {
let file_id = treehouse.add_file(path.clone(), Source::Other(content.clone()));
match parse_tree_with_diagnostics(file_id, &content) {
Ok(parsed) => {
let source = treehouse.source(file_id);
let word_count = wc_roots(source.input(), &parsed);
println!("{word_count:>8} {}", treehouse.filename(file_id));
println!("{word_count:>8} {}", treehouse.path(file_id));
total += word_count;
}
Err(diagnostics) => {
report_diagnostics(&treehouse.files, &diagnostics)?;
report_diagnostics(&treehouse, &diagnostics)?;
}
}
}

View file

@@ -4,10 +4,10 @@ mod include_static_helper;
use std::{collections::HashMap, fmt, ops::ControlFlow, sync::Arc};
use anyhow::{anyhow, ensure, Context};
use codespan_reporting::diagnostic::Diagnostic;
use dir_helper::DirHelper;
use handlebars::{handlebars_helper, Handlebars};
use include_static_helper::IncludeStaticHelper;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use serde::Serialize;
use tracing::{error, info_span, instrument};
@@ -18,7 +18,7 @@ use crate::{
html::{breadcrumbs::breadcrumbs_to_html, navmap::NavigationMap, tree::branches_to_html},
import_map::ImportMap,
parse::parse_tree_with_diagnostics,
state::{report_diagnostics, Source},
state::{report_diagnostics, FileId, Source},
tree::SemaRoots,
vfs::{
self, Cd, ContentCache, Dir, DirEntry, DynDir, EditPath, ImageSize, MemDir, Overlay,
@@ -26,13 +26,7 @@ use crate::{
},
};
use crate::state::{FileId, Treehouse};
#[derive(Debug, Clone)]
pub struct ParsedTree {
root_key: String,
file_id: FileId,
}
use crate::state::Treehouse;
#[derive(Serialize)]
struct Page {
@@ -96,84 +90,81 @@ fn load_templates(handlebars: &mut Handlebars, dir: &dyn Dir) {
});
}
#[instrument(skip(treehouse, config, source, target_path, tree_path))]
fn parse_tree(
treehouse: &mut Treehouse,
config: &Config,
source: String,
source_path: VPathBuf,
target_path: VPathBuf,
tree_path: String,
) -> anyhow::Result<(Option<ParsedTree>, Vec<Diagnostic<FileId>>)> {
let file_id = treehouse.add_file(
source_path.as_str().to_owned(),
Source::Tree {
input: source,
target_path: target_path.clone(),
tree_path: tree_path.clone(),
},
);
match parse_tree_with_diagnostics(treehouse, file_id) {
Ok(roots) => {
let mut diagnostics = vec![];
let roots = SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
let root_key = tree_path.clone();
treehouse.roots.insert(root_key.clone(), roots);
Ok((Some(ParsedTree { root_key, file_id }), diagnostics))
}
Err(diagnostics) => Ok((None, diagnostics)),
}
}
#[instrument(skip(config, dirs))]
fn parse_trees(
config: &Config,
dirs: &Dirs,
) -> anyhow::Result<(Treehouse, HashMap<VPathBuf, ParsedTree>)> {
fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
let mut treehouse = Treehouse::new();
let mut diagnostics = vec![];
let mut parsed_trees = HashMap::new();
let mut paths = vec![];
vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
if path.extension() == Some("tree") {
if let Some(source) = dirs
.content
.content(path)
.and_then(|b| String::from_utf8(b).ok())
{
let tree_path = path.with_extension("");
let target_path = path.with_extension("html");
match parse_tree(
&mut treehouse,
config,
source,
path.to_owned(),
target_path,
tree_path.as_str().to_owned(),
) {
Ok((parsed_tree, mut parse_diagnostics)) => {
diagnostics.append(&mut parse_diagnostics);
if let Some(parsed_tree) = parsed_tree {
parsed_trees.insert(tree_path, parsed_tree);
paths.push(path.to_owned());
}
}
Err(err) => {
error!("failed to parse tree {path}: {err:?}")
}
}
}
}
ControlFlow::Continue(())
});
report_diagnostics(&treehouse.files, &diagnostics)?;
// NOTE: Sources are filled in later; they can be left out until a call to report_diagnostics.
let file_ids: Vec<_> = paths
.iter()
.map(|path| treehouse.add_file(path.clone(), Source::Other(String::new())))
.collect();
Ok((treehouse, parsed_trees))
let parse_results: Vec<_> = {
let _span = info_span!("load_trees::parse").entered();
paths
.into_par_iter()
.zip(&file_ids)
.flat_map(|(path, &file_id)| {
dirs.content
.content(&path)
.and_then(|b| String::from_utf8(b).ok())
.map(|input| {
let parse_result = parse_tree_with_diagnostics(file_id, &input);
(path, file_id, input, parse_result)
})
})
.collect()
};
for (path, file_id, input, _) in &parse_results {
let tree_path = path.with_extension("");
treehouse
.files_by_tree_path
.insert(tree_path.clone(), *file_id);
treehouse.set_source(
*file_id,
Source::Tree {
input: input.clone(),
tree_path,
},
);
}
{
let _span = info_span!("load_trees::sema").entered();
for (path, file_id, _, result) in parse_results {
match result {
Ok(roots) => {
let roots = SemaRoots::from_roots(
&mut treehouse,
&mut diagnostics,
config,
file_id,
roots,
);
treehouse.roots.insert(file_id, roots);
parsed_trees.insert(path, file_id);
}
Err(mut parse_diagnostics) => diagnostics.append(&mut parse_diagnostics),
}
}
}
report_diagnostics(&treehouse, &diagnostics)?;
Ok(treehouse)
}
#[instrument(skip(sources, handlebars))]
@@ -205,23 +196,19 @@ fn generate_simple_template_or_error(
}
}
#[instrument(skip(sources, dirs, handlebars, parsed_tree), fields(root_key = parsed_tree.root_key))]
#[instrument(skip(sources, dirs, handlebars))]
fn generate_tree(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
parsed_tree: &ParsedTree,
file_id: FileId,
) -> anyhow::Result<String> {
let breadcrumbs = breadcrumbs_to_html(
&sources.config,
&sources.navigation_map,
&parsed_tree.root_key,
);
let breadcrumbs = breadcrumbs_to_html(&sources.config, &sources.navigation_map, file_id);
let roots = sources
.treehouse
.roots
.get(&parsed_tree.root_key)
.get(&file_id)
.expect("tree should have been added to the treehouse");
let tree = {
@@ -232,7 +219,7 @@ fn generate_tree(
&sources.treehouse,
&sources.config,
dirs,
parsed_tree.file_id,
file_id,
&roots.branches,
);
tree
@@ -261,10 +248,7 @@ fn generate_tree(
scripts: roots.attributes.scripts.clone(),
styles: roots.attributes.styles.clone(),
breadcrumbs,
tree_path: sources
.treehouse
.tree_path(parsed_tree.file_id)
.map(|s| s.to_owned()),
tree_path: sources.treehouse.tree_path(file_id).map(|s| s.to_string()),
tree,
},
};
@@ -289,9 +273,9 @@ fn generate_tree_or_error(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
parsed_tree: &ParsedTree,
file_id: FileId,
) -> String {
match generate_tree(sources, dirs, handlebars, parsed_tree) {
match generate_tree(sources, dirs, handlebars, file_id) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
@@ -300,7 +284,6 @@ fn generate_tree_or_error(
pub struct Sources {
pub config: Config,
pub treehouse: Treehouse,
pub parsed_trees: HashMap<VPathBuf, ParsedTree>,
pub navigation_map: NavigationMap,
pub import_map: ImportMap,
}
@@ -324,8 +307,11 @@ impl Sources {
config
};
let (treehouse, parsed_trees) = parse_trees(&config, dirs)?;
let navigation_map = NavigationMap::build(&treehouse, "index");
let treehouse = load_trees(&config, dirs)?;
let navigation_map = NavigationMap::build(
&treehouse,
treehouse.files_by_tree_path[VPath::new("index")],
);
let import_map = ImportMap::generate(
&config.site,
&Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
@@ -335,7 +321,6 @@ impl Sources {
Ok(Sources {
config,
treehouse,
parsed_trees,
navigation_map,
import_map,
})
@@ -429,11 +414,11 @@ impl Dir for TreehouseDir {
};
self.sources
.parsed_trees
.treehouse
.files_by_tree_path
.get(path)
.map(|parsed_tree| {
generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, parsed_tree)
.into()
.map(|&file_id| {
generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, file_id).into()
})
.or_else(|| {
if path.file_name().is_some_and(|s| !s.starts_with('_')) {
@@ -525,7 +510,7 @@ pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
Cd::new(dirs.static_.clone(), VPathBuf::new("robots.txt")).to_dyn(),
);
let dir_index = DirIndex::new(sources.parsed_trees.keys().map(|x| &**x));
let dir_index = DirIndex::new(sources.treehouse.files_by_tree_path.keys().map(|x| &**x));
let tree_view = TreehouseDir::new(dirs, sources, dir_index);
let tree_view = ContentCache::new(tree_view);

View file

@@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt::Write};
use tracing::instrument;
use crate::config::Config;
use crate::{config::Config, state::FileId, vfs::VPath};
use super::{navmap::NavigationMap, EscapeAttribute};
@@ -10,26 +10,31 @@ use super::{navmap::NavigationMap, EscapeAttribute};
pub fn breadcrumbs_to_html(
config: &Config,
navigation_map: &NavigationMap,
tree_path: &str,
file_id: FileId,
) -> String {
let mut s = String::new();
if let Some(path) = navigation_map.paths.get(tree_path) {
if let Some(path) = navigation_map.paths.get(&file_id) {
for (i, element) in path.iter().enumerate() {
// Skip the index because it's implied by the logo on the left.
if element != "index" {
if &**element != VPath::new_const("index") {
s.push_str("<li class=\"breadcrumb\">");
{
let short_element = path
.get(i - 1)
.map(|p| format!("{p}/"))
.and_then(|prefix| element.strip_prefix(prefix.as_str()).map(Cow::Borrowed))
.and_then(|prefix| {
element
.as_str()
.strip_prefix(prefix.as_str())
.map(Cow::Borrowed)
})
.unwrap_or_else(|| Cow::Owned(format!("/{element}")));
write!(
s,
"<a href=\"{site}/{element}\">{short_element}</a>",
site = EscapeAttribute(&config.site),
element = EscapeAttribute(element)
element = EscapeAttribute(element.as_str())
)
.unwrap();
}

View file

@@ -3,20 +3,23 @@ use std::collections::HashMap;
use tracing::instrument;
use crate::{
state::Treehouse,
state::{FileId, Treehouse},
tree::{attributes::Content, SemaBranchId},
vfs::VPathBuf,
};
#[derive(Debug, Clone, Default)]
struct NavigationMapBuilder {
stack: Vec<String>,
stack: Vec<VPathBuf>,
navigation_map: NavigationMap,
}
impl NavigationMapBuilder {
fn enter_tree(&mut self, tree: String) {
self.stack.push(tree.clone());
self.navigation_map.paths.insert(tree, self.stack.clone());
fn enter_tree(&mut self, file_id: FileId, tree_path: VPathBuf) {
self.stack.push(tree_path.clone());
self.navigation_map
.paths
.insert(file_id, self.stack.clone());
}
fn exit_tree(&mut self) {
@@ -31,12 +34,12 @@ impl NavigationMapBuilder {
#[derive(Debug, Clone, Default)]
pub struct NavigationMap {
/// Tells you which pages need to be opened to get to the key.
pub paths: HashMap<String, Vec<String>>,
pub paths: HashMap<FileId, Vec<VPathBuf>>,
}
impl NavigationMap {
#[instrument(name = "NavigationMap::build", skip(treehouse))]
pub fn build(treehouse: &Treehouse, root_tree_path: &str) -> Self {
pub fn build(treehouse: &Treehouse, root_file_id: FileId) -> Self {
let mut builder = NavigationMapBuilder::default();
fn rec_branch(
@@ -45,8 +48,8 @@ impl NavigationMap {
branch_id: SemaBranchId,
) {
let branch = treehouse.tree.branch(branch_id);
if let Content::Link(linked) = &branch.attributes.content {
rec_tree(treehouse, builder, linked);
if let Content::ResolvedLink(linked) = &branch.attributes.content {
rec_tree(treehouse, builder, *linked);
} else {
for &child_id in &branch.children {
rec_branch(treehouse, builder, child_id);
@@ -54,12 +57,18 @@
}
}
fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, tree_path: &str) {
if let Some(roots) = treehouse.roots.get(tree_path) {
fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, file_id: FileId) {
if let Some(roots) = treehouse.roots.get(&file_id) {
// Pages can link to each other causing infinite recursion, so we need to handle that
// case by skipping pages that already have been analyzed.
if !builder.navigation_map.paths.contains_key(tree_path) {
builder.enter_tree(tree_path.to_owned());
if !builder.navigation_map.paths.contains_key(&file_id) {
builder.enter_tree(
file_id,
treehouse
.tree_path(file_id)
.expect("tree files may only link to other tree files")
.to_owned(),
);
for &branch_id in &roots.branches {
rec_branch(treehouse, builder, branch_id);
}
@@ -68,7 +77,7 @@
}
}
rec_tree(treehouse, &mut builder, root_tree_path);
rec_tree(treehouse, &mut builder, root_file_id);
builder.finish()
}

View file

@@ -30,8 +30,8 @@ pub fn branch_to_html(
return;
}
let has_children =
!branch.children.is_empty() || matches!(branch.attributes.content, Content::Link(_));
let has_children = !branch.children.is_empty()
|| matches!(branch.attributes.content, Content::ResolvedLink(_));
let class = if has_children { "branch" } else { "leaf" };
let mut class = String::from(class);
@@ -44,7 +44,7 @@ pub fn branch_to_html(
class.push_str(" draft");
}
let component = if let Content::Link(_) = branch.attributes.content {
let component = if let Content::ResolvedLink(_) = branch.attributes.content {
"b-linked"
} else {
"b"
@@ -55,8 +55,9 @@ pub fn branch_to_html(
Cow::Borrowed(component)
};
let linked_branch = if let Content::Link(link) = &branch.attributes.content {
format!(" data-th-link=\"{}\"", EscapeHtml(link))
let linked_branch = if let Content::ResolvedLink(file_id) = &branch.attributes.content {
let path = treehouse.tree_path(*file_id).expect(".tree file expected");
format!(" data-th-link=\"{}\"", EscapeHtml(path.as_str()))
} else {
String::new()
};
@@ -126,7 +127,7 @@ pub fn branch_to_html(
page_id: treehouse
.tree_path(file_id)
.expect(".tree file expected")
.to_owned(),
.to_string(),
config,
dirs,
@@ -137,13 +138,14 @@ pub fn branch_to_html(
.render(&events, s);
let branch = treehouse.tree.branch(branch_id);
if let Content::Link(link) = &branch.attributes.content {
if let Content::ResolvedLink(file_id) = &branch.attributes.content {
let path = treehouse.tree_path(*file_id).expect(".tree file expected");
write!(
s,
"<noscript><a class=\"navigate icon-go\" href=\"{}/{}\">Go to linked tree: <code>{}</code></a></noscript>",
EscapeAttribute(&config.site),
EscapeAttribute(link),
EscapeHtml(link),
EscapeAttribute(path.as_str()),
EscapeHtml(path.as_str()),
)
.unwrap();
}
@@ -151,12 +153,13 @@ pub fn branch_to_html(
s.push_str("<th-bb>");
{
if let Content::Link(link) = &branch.attributes.content {
if let Content::ResolvedLink(file_id) = &branch.attributes.content {
let path = treehouse.tree_path(*file_id).expect(".tree file expected");
write!(
s,
"<a class=\"icon icon-go\" href=\"{}/{}\" title=\"linked tree\"></a>",
EscapeAttribute(&config.site),
EscapeAttribute(link),
EscapeAttribute(path.as_str()),
)
.unwrap();
} else {

View file

@@ -8,12 +8,11 @@ use crate::state::{toml_error_to_diagnostic, FileId, TomlError, Treehouse};
pub struct ErrorsEmitted;
#[instrument(skip(treehouse))]
#[instrument(skip(input))]
pub fn parse_tree_with_diagnostics(
treehouse: &mut Treehouse,
file_id: FileId,
input: &str,
) -> Result<Roots, Vec<Diagnostic<FileId>>> {
let input = &treehouse.source(file_id).input();
Roots::parse(&mut treehouse_format::pull::Parser { input, position: 0 }).map_err(|error| {
vec![Diagnostic {
severity: Severity::Error,

View file

@@ -3,23 +3,19 @@ use std::{collections::HashMap, ops::Range};
use anyhow::Context;
use codespan_reporting::{
diagnostic::{Diagnostic, Label, LabelStyle, Severity},
files::SimpleFiles,
term::termcolor::{ColorChoice, StandardStream},
};
use tracing::instrument;
use ulid::Ulid;
use crate::{
tree::{SemaBranchId, SemaRoots, SemaTree},
vfs::VPathBuf,
vfs::{VPath, VPathBuf},
};
#[derive(Debug, Clone)]
pub enum Source {
Tree {
input: String,
tree_path: String,
target_path: VPathBuf,
},
Tree { input: String, tree_path: VPathBuf },
Other(String),
}
@@ -38,26 +34,54 @@ impl AsRef<str> for Source {
}
}
pub type Files = SimpleFiles<String, Source>;
pub type FileId = <Files as codespan_reporting::files::Files<'static>>::FileId;
#[derive(Debug, Clone)]
pub struct File {
pub path: VPathBuf,
pub source: Source,
pub line_starts: Vec<usize>,
}
impl File {
fn line_start(&self, line_index: usize) -> Result<usize, codespan_reporting::files::Error> {
use std::cmp::Ordering;
match line_index.cmp(&self.line_starts.len()) {
Ordering::Less => Ok(self
.line_starts
.get(line_index)
.cloned()
.expect("failed despite previous check")),
Ordering::Equal => Ok(self.source.as_ref().len()),
Ordering::Greater => Err(codespan_reporting::files::Error::LineTooLarge {
given: line_index,
max: self.line_starts.len() - 1,
}),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FileId(usize);
/// Treehouse compilation context.
pub struct Treehouse {
pub files: Files,
pub files: Vec<File>,
pub files_by_tree_path: HashMap<VPathBuf, FileId>,
pub tree: SemaTree,
pub branches_by_named_id: HashMap<String, SemaBranchId>,
pub roots: HashMap<String, SemaRoots>,
pub roots: HashMap<FileId, SemaRoots>,
pub branch_redirects: HashMap<String, SemaBranchId>,
missingno_generator: ulid::Generator,
pub missingno_generator: ulid::Generator,
}
impl Treehouse {
pub fn new() -> Self {
Self {
files: Files::new(),
files: vec![],
files_by_tree_path: HashMap::new(),
tree: SemaTree::default(),
branches_by_named_id: HashMap::new(),
@@ -69,27 +93,34 @@ impl Treehouse {
}
}
pub fn add_file(&mut self, filename: String, source: Source) -> FileId {
self.files.add(filename, source)
pub fn add_file(&mut self, path: VPathBuf, source: Source) -> FileId {
let id = FileId(self.files.len());
self.files.push(File {
line_starts: codespan_reporting::files::line_starts(source.input()).collect(),
path,
source,
});
id
}
/// Get the name of a file, assuming it was previously registered.
pub fn path(&self, file_id: FileId) -> &VPath {
&self.files[file_id.0].path
}
/// Get the source code of a file, assuming it was previously registered.
pub fn source(&self, file_id: FileId) -> &Source {
self.files
.get(file_id)
.expect("file should have been registered previously")
.source()
&self.files[file_id.0].source
}
/// Get the name of a file, assuming it was previously registered.
pub fn filename(&self, file_id: FileId) -> &str {
self.files
.get(file_id)
.expect("file should have been registered previously")
.name()
pub fn set_source(&mut self, file_id: FileId, source: Source) {
self.files[file_id.0].line_starts =
codespan_reporting::files::line_starts(source.input()).collect();
self.files[file_id.0].source = source;
}
pub fn tree_path(&self, file_id: FileId) -> Option<&str> {
pub fn tree_path(&self, file_id: FileId) -> Option<&VPath> {
match self.source(file_id) {
Source::Tree { tree_path, .. } => Some(tree_path),
Source::Other(_) => None,
@@ -109,6 +140,49 @@ impl Default for Treehouse {
}
}
impl<'a> codespan_reporting::files::Files<'a> for Treehouse {
type FileId = FileId;
type Name = &'a VPath;
type Source = &'a str;
fn name(&'a self, id: Self::FileId) -> Result<Self::Name, codespan_reporting::files::Error> {
Ok(self.path(id))
}
fn source(
&'a self,
id: Self::FileId,
) -> Result<Self::Source, codespan_reporting::files::Error> {
Ok(self.source(id).input())
}
fn line_index(
&'a self,
id: Self::FileId,
byte_index: usize,
) -> Result<usize, codespan_reporting::files::Error> {
let file = &self.files[id.0];
Ok(file
.line_starts
.binary_search(&byte_index)
.unwrap_or_else(|next_line| next_line - 1))
}
fn line_range(
&'a self,
id: Self::FileId,
line_index: usize,
) -> Result<Range<usize>, codespan_reporting::files::Error> {
let file = &self.files[id.0];
let line_start = file.line_start(line_index)?;
let next_line_start = file.line_start(line_index + 1)?;
Ok(line_start..next_line_start)
}
}
pub struct TomlError {
pub message: String,
pub span: Option<Range<usize>>,
@@ -135,7 +209,11 @@ pub fn toml_error_to_diagnostic(error: TomlError) -> Diagnostic<FileId> {
}
}
pub fn report_diagnostics(files: &Files, diagnostics: &[Diagnostic<FileId>]) -> anyhow::Result<()> {
#[instrument(skip(files, diagnostics))]
pub fn report_diagnostics(
files: &Treehouse,
diagnostics: &[Diagnostic<FileId>],
) -> anyhow::Result<()> {
let writer = StandardStream::stderr(ColorChoice::Auto);
let config = codespan_reporting::term::Config::default();
for diagnostic in diagnostics {

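With SimpleFiles gone, diagnostics are rendered against Treehouse itself through the Files implementation above. A hedged sketch of the consuming side, assuming codespan-reporting's term::emit API; emit_all is a hypothetical helper, not the committed report_diagnostics body:

use codespan_reporting::{
    diagnostic::Diagnostic,
    term::termcolor::{ColorChoice, StandardStream},
};

// Hypothetical helper: works for any `Files` provider, including the new
// `impl Files for Treehouse`, whose `FileId` is the custom index type above.
fn emit_all<'f, F>(files: &'f F, diagnostics: &[Diagnostic<F::FileId>]) -> anyhow::Result<()>
where
    F: codespan_reporting::files::Files<'f>,
{
    let writer = StandardStream::stderr(ColorChoice::Auto);
    let config = codespan_reporting::term::Config::default();
    for diagnostic in diagnostics {
        codespan_reporting::term::emit(&mut writer.lock(), &config, files, diagnostic)?;
    }
    Ok(())
}

Because Treehouse::add_file now hands out the custom FileId, every Diagnostic<FileId> built during parsing and sema can be emitted this way without an intermediate SimpleFiles database.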
View file

@@ -96,7 +96,7 @@ impl SemaRoots {
if successfully_parsed && attributes.title.is_empty() {
attributes.title = match treehouse.source(file_id) {
Source::Tree { tree_path, .. } => tree_path.clone(),
Source::Tree { tree_path, .. } => tree_path.to_string(),
_ => panic!("parse_attributes called for a non-.tree file"),
}
}
@@ -310,7 +310,7 @@ impl SemaBranch {
"note: a generated id `{}` will be used, but this id is unstable and will not persist across generations",
attributes.id
),
format!("help: run `treehouse fix {}` to add missing ids to branches", treehouse.filename(file_id)),
format!("help: run `treehouse fix {}` to add missing ids to branches", treehouse.path(file_id)),
],
});
}
@@ -334,6 +334,26 @@ impl SemaBranch {
});
}
}
// Resolve content.links.
if let Content::Link(tree_path) = &attributes.content {
if let Some(file_id) = treehouse.files_by_tree_path.get(tree_path) {
attributes.content = Content::ResolvedLink(*file_id);
} else {
diagnostics.push(Diagnostic {
severity: Severity::Error,
code: Some("attr".into()),
message: format!("linked tree `{tree_path}` does not exist"),
labels: vec![Label {
style: LabelStyle::Primary,
file_id,
range: attribute_warning_span.clone(),
message: "".into(),
}],
notes: vec![],
})
}
}
}
attributes
}

View file

@@ -2,6 +2,8 @@ use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use crate::{state::FileId, vfs::VPathBuf};
/// Top-level `%%` root attributes.
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct RootAttributes {
@@ -118,7 +120,12 @@ pub enum Content {
///
/// Note that `Link` branches must not contain any children. If a `Link` branch does contain
/// children, an `attribute`-type error is raised.
Link(String),
Link(VPathBuf),
/// Valid link to another tree.
/// This replaces `Content::Link` during semantic analysis.
#[serde(skip)]
ResolvedLink(FileId),
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize)]