Compare commits
No commits in common. "0713b59063eb2dacf977df7bfd26d2fdacbb77a9" and "a8773aac63c1260602e3d264aca0c7fb3e476c82" have entirely different histories.
0713b59063...a8773aac63
@@ -6,6 +6,7 @@ this page used to host all my music reviews, but those have since been moved to
 + I haven't had time to review the following albums, but I'm including them for permalinking purposes.

 % id = "music/album/aphex-twin/drukqs"
+content.link = "music/reviews/aphex-twin/drukqs"
 + ### :page: album: Aphex Twin - drukQs

 % id = "01H9JB094GTG1TJ029CQ4PNMS0"
@@ -108,8 +108,7 @@ pub fn fix_file(
 diagnostics: &mut Vec<Diagnostic<FileId>>,
 file_id: FileId,
 ) -> Result<String, parse::ErrorsEmitted> {
-let source = treehouse.source(file_id).input();
-parse_tree_with_diagnostics(file_id, source)
+parse_tree_with_diagnostics(treehouse, file_id)
 .map(|roots| {
 let mut source = treehouse.source(file_id).input().to_owned();
 let mut state = State::default();
@@ -147,7 +146,7 @@ pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {

 let mut treehouse = Treehouse::new();
 let mut diagnostics = vec![];
-let file_id = treehouse.add_file(fix_args.file.clone(), Source::Other(file));
+let file_id = treehouse.add_file(fix_args.file.as_str().to_owned(), Source::Other(file));
 let edit_path = root.edit_path(&fix_args.file).ok_or_else(|| {
 anyhow!(
 "{} is not an editable file (perhaps it is not in a persistent path?)",
@@ -179,7 +178,7 @@ pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
 Edit::NoOp
 }
 } else {
-report_diagnostics(&treehouse, &diagnostics)?;
+report_diagnostics(&treehouse.files, &diagnostics)?;
 Edit::NoOp
 },
 )
@@ -197,7 +196,7 @@ pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Ed

 let mut treehouse = Treehouse::new();
 let mut diagnostics = vec![];
-let file_id = treehouse.add_file(path.to_owned(), Source::Other(content));
+let file_id = treehouse.add_file(path.as_str().to_string(), Source::Other(content));
 let edit_path = dir.edit_path(path).context("path is not editable")?;

 if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
@@ -205,7 +204,7 @@ pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Ed
 return Ok(Edit::Write(edit_path, fixed));
 }
 } else {
-report_diagnostics(&treehouse, &diagnostics)?;
+report_diagnostics(&treehouse.files, &diagnostics)?;
 }
 }

@@ -144,12 +144,13 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
 });
 if let Some(branch_id) = branch_id {
 let branch = state.sources.treehouse.tree.branch(branch_id);
-if let Source::Tree { input, tree_path } =
-state.sources.treehouse.source(branch.file_id)
+if let Source::Tree {
+input, target_path, ..
+} = state.sources.treehouse.source(branch.file_id)
 {
 if let Some(content) = state
 .target
-.content(tree_path)
+.content(target_path)
 .await
 .and_then(|s| String::from_utf8(s).ok())
 {
@@ -171,7 +172,7 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
 } else {
 return (
 StatusCode::INTERNAL_SERVER_ERROR,
-format!("500 Internal Server Error: branch metadata points to entry {tree_path} which does not have readable content")
+format!("500 Internal Server Error: branch metadata points to entry {target_path} which does not have readable content")
 )
 .into_response();
 }
@@ -47,16 +47,16 @@ pub fn wc_cli(content_dir: &dyn Dir, mut wc_args: WcArgs) -> anyhow::Result<()>
 .content(path)
 .and_then(|b| String::from_utf8(b).ok())
 {
-let file_id = treehouse.add_file(path.clone(), Source::Other(content.clone()));
-match parse_tree_with_diagnostics(file_id, &content) {
+let file_id = treehouse.add_file(path.to_string(), Source::Other(content));
+match parse_tree_with_diagnostics(&mut treehouse, file_id) {
 Ok(parsed) => {
 let source = treehouse.source(file_id);
 let word_count = wc_roots(source.input(), &parsed);
-println!("{word_count:>8} {}", treehouse.path(file_id));
+println!("{word_count:>8} {}", treehouse.filename(file_id));
 total += word_count;
 }
 Err(diagnostics) => {
-report_diagnostics(&treehouse, &diagnostics)?;
+report_diagnostics(&treehouse.files, &diagnostics)?;
 }
 }
 }
 }
@@ -1,7 +1,6 @@
 use std::{collections::HashMap, ops::ControlFlow};

 use anyhow::{anyhow, Context};
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
 use tracing::{error, info_span, instrument};

@@ -11,7 +10,7 @@ use crate::{
 Syntax,
 },
 import_map::ImportRoot,
-vfs::{self, Dir, DynDir, ImageSize, VPath, VPathBuf},
+vfs::{self, Dir, ImageSize, VPath, VPathBuf},
 };

 #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -160,18 +159,9 @@ impl Config {

 /// Loads all syntax definition files.
 #[instrument(name = "Config::load_syntaxes", skip(self))]
-pub fn load_syntaxes(&mut self, dir: DynDir) -> anyhow::Result<()> {
-let mut paths = vec![];
-vfs::walk_dir_rec(&dir, VPath::ROOT, &mut |path| {
+pub fn load_syntaxes(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
+vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
 if path.extension() == Some("json") {
-paths.push(path.to_owned());
-}
-ControlFlow::Continue(())
-});
-
-let syntaxes: Vec<_> = paths
-.par_iter()
-.flat_map(|path| {
 let name = path
 .file_stem()
 .expect("syntax file name should have a stem due to the .json extension");
@@ -190,19 +180,14 @@ impl Config {
 Ok(syntax) => {
 let _span = info_span!("Config::load_syntaxes::compile", ?name).entered();
 let compiled = compile_syntax(&syntax);
-Some((name.to_owned(), compiled))
-}
-Err(err) => {
-error!("error while loading syntax file `{path}`: {err}");
-None
+self.syntaxes.insert(name.to_owned(), compiled);
 }
+Err(err) => error!("error while loading syntax file `{path}`: {err}"),
 }
-})
-.collect();
+}

-for (name, compiled) in syntaxes {
-self.syntaxes.insert(name, compiled);
-}
+ControlFlow::Continue(())
+});

 Ok(())
 }
@@ -4,10 +4,10 @@ mod include_static_helper;
 use std::{collections::HashMap, fmt, ops::ControlFlow, sync::Arc};

 use anyhow::{anyhow, ensure, Context};
+use codespan_reporting::diagnostic::Diagnostic;
 use dir_helper::DirHelper;
 use handlebars::{handlebars_helper, Handlebars};
 use include_static_helper::IncludeStaticHelper;
-use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
 use serde::Serialize;
 use tracing::{error, info_span, instrument};

@@ -18,7 +18,7 @@ use crate::{
 html::{breadcrumbs::breadcrumbs_to_html, navmap::NavigationMap, tree::branches_to_html},
 import_map::ImportMap,
 parse::parse_tree_with_diagnostics,
-state::{report_diagnostics, FileId, Source},
+state::{report_diagnostics, Source},
 tree::SemaRoots,
 vfs::{
 self, Cd, ContentCache, Dir, DirEntry, DynDir, EditPath, ImageSize, MemDir, Overlay,
@@ -26,7 +26,13 @@ use crate::{
 },
 };

-use crate::state::Treehouse;
+use crate::state::{FileId, Treehouse};

+#[derive(Debug, Clone)]
+pub struct ParsedTree {
+root_key: String,
+file_id: FileId,
+}
+
 #[derive(Serialize)]
 struct Page {
@@ -90,81 +96,84 @@ fn load_templates(handlebars: &mut Handlebars, dir: &dyn Dir) {
 });
 }

+#[instrument(skip(treehouse, config, source, target_path, tree_path))]
+fn parse_tree(
+treehouse: &mut Treehouse,
+config: &Config,
+source: String,
+source_path: VPathBuf,
+target_path: VPathBuf,
+tree_path: String,
+) -> anyhow::Result<(Option<ParsedTree>, Vec<Diagnostic<FileId>>)> {
+let file_id = treehouse.add_file(
+source_path.as_str().to_owned(),
+Source::Tree {
+input: source,
+target_path: target_path.clone(),
+tree_path: tree_path.clone(),
+},
+);
+
+match parse_tree_with_diagnostics(treehouse, file_id) {
+Ok(roots) => {
+let mut diagnostics = vec![];
+let roots = SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
+
+let root_key = tree_path.clone();
+treehouse.roots.insert(root_key.clone(), roots);
+
+Ok((Some(ParsedTree { root_key, file_id }), diagnostics))
+}
+Err(diagnostics) => Ok((None, diagnostics)),
+}
+}
+
 #[instrument(skip(config, dirs))]
-fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
+fn parse_trees(
+config: &Config,
+dirs: &Dirs,
+) -> anyhow::Result<(Treehouse, HashMap<VPathBuf, ParsedTree>)> {
 let mut treehouse = Treehouse::new();
 let mut diagnostics = vec![];
 let mut parsed_trees = HashMap::new();
-let mut paths = vec![];

 vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
 if path.extension() == Some("tree") {
-paths.push(path.to_owned());
+if let Some(source) = dirs
+.content
+.content(path)
+.and_then(|b| String::from_utf8(b).ok())
+{
+let tree_path = path.with_extension("");
+let target_path = path.with_extension("html");
+
+match parse_tree(
+&mut treehouse,
+config,
+source,
+path.to_owned(),
+target_path,
+tree_path.as_str().to_owned(),
+) {
+Ok((parsed_tree, mut parse_diagnostics)) => {
+diagnostics.append(&mut parse_diagnostics);
+if let Some(parsed_tree) = parsed_tree {
+parsed_trees.insert(tree_path, parsed_tree);
+}
+}
+Err(err) => {
+error!("failed to parse tree {path}: {err:?}")
+}
+}
+}
 }

 ControlFlow::Continue(())
 });

-// NOTE: Sources are filled in later; they can be left out until a call to report_diagnostics.
-let file_ids: Vec<_> = paths
-.iter()
-.map(|path| treehouse.add_file(path.clone(), Source::Other(String::new())))
-.collect();
-
-let parse_results: Vec<_> = {
-let _span = info_span!("load_trees::parse").entered();
-paths
-.into_par_iter()
-.zip(&file_ids)
-.flat_map(|(path, &file_id)| {
-dirs.content
-.content(&path)
-.and_then(|b| String::from_utf8(b).ok())
-.map(|input| {
-let parse_result = parse_tree_with_diagnostics(file_id, &input);
-(path, file_id, input, parse_result)
-})
-})
-.collect()
-};
-
-for (path, file_id, input, _) in &parse_results {
-let tree_path = path.with_extension("");
-treehouse
-.files_by_tree_path
-.insert(tree_path.clone(), *file_id);
-treehouse.set_source(
-*file_id,
-Source::Tree {
-input: input.clone(),
-tree_path,
-},
-);
-}
-
-{
-let _span = info_span!("load_trees::sema").entered();
-for (path, file_id, _, result) in parse_results {
-match result {
-Ok(roots) => {
-let roots = SemaRoots::from_roots(
-&mut treehouse,
-&mut diagnostics,
-config,
-file_id,
-roots,
-);
-treehouse.roots.insert(file_id, roots);
-parsed_trees.insert(path, file_id);
-}
-Err(mut parse_diagnostics) => diagnostics.append(&mut parse_diagnostics),
-}
-}
-}
-
-report_diagnostics(&treehouse, &diagnostics)?;
-
-Ok(treehouse)
+report_diagnostics(&treehouse.files, &diagnostics)?;
+
+Ok((treehouse, parsed_trees))
 }

 #[instrument(skip(sources, handlebars))]
@@ -196,19 +205,23 @@ fn generate_simple_template_or_error(
 }
 }

-#[instrument(skip(sources, dirs, handlebars))]
+#[instrument(skip(sources, dirs, handlebars, parsed_tree), fields(root_key = parsed_tree.root_key))]
 fn generate_tree(
 sources: &Sources,
 dirs: &Dirs,
 handlebars: &Handlebars,
-file_id: FileId,
+parsed_tree: &ParsedTree,
 ) -> anyhow::Result<String> {
-let breadcrumbs = breadcrumbs_to_html(&sources.config, &sources.navigation_map, file_id);
+let breadcrumbs = breadcrumbs_to_html(
+&sources.config,
+&sources.navigation_map,
+&parsed_tree.root_key,
+);

 let roots = sources
 .treehouse
 .roots
-.get(&file_id)
+.get(&parsed_tree.root_key)
 .expect("tree should have been added to the treehouse");

 let tree = {
@@ -219,7 +232,7 @@ fn generate_tree(
 &sources.treehouse,
 &sources.config,
 dirs,
-file_id,
+parsed_tree.file_id,
 &roots.branches,
 );
 tree
@@ -248,7 +261,10 @@ fn generate_tree(
 scripts: roots.attributes.scripts.clone(),
 styles: roots.attributes.styles.clone(),
 breadcrumbs,
-tree_path: sources.treehouse.tree_path(file_id).map(|s| s.to_string()),
+tree_path: sources
+.treehouse
+.tree_path(parsed_tree.file_id)
+.map(|s| s.to_owned()),
 tree,
 },
 };
@@ -273,9 +289,9 @@ fn generate_tree_or_error(
 sources: &Sources,
 dirs: &Dirs,
 handlebars: &Handlebars,
-file_id: FileId,
+parsed_tree: &ParsedTree,
 ) -> String {
-match generate_tree(sources, dirs, handlebars, file_id) {
+match generate_tree(sources, dirs, handlebars, parsed_tree) {
 Ok(html) => html,
 Err(error) => format!("error: {error:?}"),
 }
@@ -284,6 +300,7 @@ fn generate_tree_or_error(
 pub struct Sources {
 pub config: Config,
 pub treehouse: Treehouse,
+pub parsed_trees: HashMap<VPathBuf, ParsedTree>,
 pub navigation_map: NavigationMap,
 pub import_map: ImportMap,
 }
@@ -303,15 +320,12 @@ impl Sources {
 config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
 config.autopopulate_emoji(&*dirs.emoji)?;
 config.autopopulate_pics(&*dirs.pic)?;
-config.load_syntaxes(dirs.syntax.clone())?;
+config.load_syntaxes(&*dirs.syntax)?;
 config
 };

-let treehouse = load_trees(&config, dirs)?;
-let navigation_map = NavigationMap::build(
-&treehouse,
-treehouse.files_by_tree_path[VPath::new("index")],
-);
+let (treehouse, parsed_trees) = parse_trees(&config, dirs)?;
+let navigation_map = NavigationMap::build(&treehouse, "index");
 let import_map = ImportMap::generate(
 &config.site,
 &Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
@@ -321,6 +335,7 @@ impl Sources {
 Ok(Sources {
 config,
 treehouse,
+parsed_trees,
 navigation_map,
 import_map,
 })
@@ -414,11 +429,11 @@ impl Dir for TreehouseDir {
 };

 self.sources
-.treehouse
-.files_by_tree_path
+.parsed_trees
 .get(path)
-.map(|&file_id| {
-generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, file_id).into()
+.map(|parsed_tree| {
+generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, parsed_tree)
+.into()
 })
 .or_else(|| {
 if path.file_name().is_some_and(|s| !s.starts_with('_')) {
@@ -510,7 +525,7 @@ pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
 Cd::new(dirs.static_.clone(), VPathBuf::new("robots.txt")).to_dyn(),
 );

-let dir_index = DirIndex::new(sources.treehouse.files_by_tree_path.keys().map(|x| &**x));
+let dir_index = DirIndex::new(sources.parsed_trees.keys().map(|x| &**x));
 let tree_view = TreehouseDir::new(dirs, sources, dir_index);

 let tree_view = ContentCache::new(tree_view);
@@ -2,7 +2,7 @@ use std::{borrow::Cow, fmt::Write};

 use tracing::instrument;

-use crate::{config::Config, state::FileId, vfs::VPath};
+use crate::config::Config;

 use super::{navmap::NavigationMap, EscapeAttribute};

@@ -10,31 +10,26 @@ use super::{navmap::NavigationMap, EscapeAttribute};
 pub fn breadcrumbs_to_html(
 config: &Config,
 navigation_map: &NavigationMap,
-file_id: FileId,
+tree_path: &str,
 ) -> String {
 let mut s = String::new();

-if let Some(path) = navigation_map.paths.get(&file_id) {
+if let Some(path) = navigation_map.paths.get(tree_path) {
 for (i, element) in path.iter().enumerate() {
 // Skip the index because it's implied by the logo on the left.
-if &**element != VPath::new_const("index") {
+if element != "index" {
 s.push_str("<li class=\"breadcrumb\">");
 {
 let short_element = path
 .get(i - 1)
 .map(|p| format!("{p}/"))
-.and_then(|prefix| {
-element
-.as_str()
-.strip_prefix(prefix.as_str())
-.map(Cow::Borrowed)
-})
+.and_then(|prefix| element.strip_prefix(prefix.as_str()).map(Cow::Borrowed))
 .unwrap_or_else(|| Cow::Owned(format!("/{element}")));
 write!(
 s,
 "<a href=\"{site}/{element}\">{short_element}</a>",
 site = EscapeAttribute(&config.site),
-element = EscapeAttribute(element.as_str())
+element = EscapeAttribute(element)
 )
 .unwrap();
 }
@@ -3,23 +3,20 @@ use std::collections::HashMap;
 use tracing::instrument;

 use crate::{
-state::{FileId, Treehouse},
+state::Treehouse,
 tree::{attributes::Content, SemaBranchId},
-vfs::VPathBuf,
 };

 #[derive(Debug, Clone, Default)]
 struct NavigationMapBuilder {
-stack: Vec<VPathBuf>,
+stack: Vec<String>,
 navigation_map: NavigationMap,
 }

 impl NavigationMapBuilder {
-fn enter_tree(&mut self, file_id: FileId, tree_path: VPathBuf) {
-self.stack.push(tree_path.clone());
-self.navigation_map
-.paths
-.insert(file_id, self.stack.clone());
+fn enter_tree(&mut self, tree: String) {
+self.stack.push(tree.clone());
+self.navigation_map.paths.insert(tree, self.stack.clone());
 }

 fn exit_tree(&mut self) {
@@ -34,12 +31,12 @@ impl NavigationMapBuilder {
 #[derive(Debug, Clone, Default)]
 pub struct NavigationMap {
 /// Tells you which pages need to be opened to get to the key.
-pub paths: HashMap<FileId, Vec<VPathBuf>>,
+pub paths: HashMap<String, Vec<String>>,
 }

 impl NavigationMap {
 #[instrument(name = "NavigationMap::build", skip(treehouse))]
-pub fn build(treehouse: &Treehouse, root_file_id: FileId) -> Self {
+pub fn build(treehouse: &Treehouse, root_tree_path: &str) -> Self {
 let mut builder = NavigationMapBuilder::default();

 fn rec_branch(
@@ -48,8 +45,8 @@ impl NavigationMap {
 branch_id: SemaBranchId,
 ) {
 let branch = treehouse.tree.branch(branch_id);
-if let Content::ResolvedLink(linked) = &branch.attributes.content {
-rec_tree(treehouse, builder, *linked);
+if let Content::Link(linked) = &branch.attributes.content {
+rec_tree(treehouse, builder, linked);
 } else {
 for &child_id in &branch.children {
 rec_branch(treehouse, builder, child_id);
@@ -57,18 +54,12 @@ impl NavigationMap {
 }
 }

-fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, file_id: FileId) {
-if let Some(roots) = treehouse.roots.get(&file_id) {
+fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, tree_path: &str) {
+if let Some(roots) = treehouse.roots.get(tree_path) {
 // Pages can link to each other causing infinite recursion, so we need to handle that
 // case by skipping pages that already have been analyzed.
-if !builder.navigation_map.paths.contains_key(&file_id) {
-builder.enter_tree(
-file_id,
-treehouse
-.tree_path(file_id)
-.expect("tree files may only link to other tree files")
-.to_owned(),
-);
+if !builder.navigation_map.paths.contains_key(tree_path) {
+builder.enter_tree(tree_path.to_owned());
 for &branch_id in &roots.branches {
 rec_branch(treehouse, builder, branch_id);
 }
@@ -77,7 +68,7 @@ impl NavigationMap {
 }
 }

-rec_tree(treehouse, &mut builder, root_file_id);
+rec_tree(treehouse, &mut builder, root_tree_path);

 builder.finish()
 }
@@ -30,8 +30,8 @@ pub fn branch_to_html(
 return;
 }

-let has_children = !branch.children.is_empty()
-|| matches!(branch.attributes.content, Content::ResolvedLink(_));
+let has_children =
+!branch.children.is_empty() || matches!(branch.attributes.content, Content::Link(_));

 let class = if has_children { "branch" } else { "leaf" };
 let mut class = String::from(class);
@@ -44,7 +44,7 @@ pub fn branch_to_html(
 class.push_str(" draft");
 }

-let component = if let Content::ResolvedLink(_) = branch.attributes.content {
+let component = if let Content::Link(_) = branch.attributes.content {
 "b-linked"
 } else {
 "b"
@@ -55,9 +55,8 @@ pub fn branch_to_html(
 Cow::Borrowed(component)
 };

-let linked_branch = if let Content::ResolvedLink(file_id) = &branch.attributes.content {
-let path = treehouse.tree_path(*file_id).expect(".tree file expected");
-format!(" data-th-link=\"{}\"", EscapeHtml(path.as_str()))
+let linked_branch = if let Content::Link(link) = &branch.attributes.content {
+format!(" data-th-link=\"{}\"", EscapeHtml(link))
 } else {
 String::new()
 };
@@ -127,7 +126,7 @@ pub fn branch_to_html(
 page_id: treehouse
 .tree_path(file_id)
 .expect(".tree file expected")
-.to_string(),
+.to_owned(),

 config,
 dirs,
@@ -138,14 +137,13 @@ pub fn branch_to_html(
 .render(&events, s);

 let branch = treehouse.tree.branch(branch_id);
-if let Content::ResolvedLink(file_id) = &branch.attributes.content {
-let path = treehouse.tree_path(*file_id).expect(".tree file expected");
+if let Content::Link(link) = &branch.attributes.content {
 write!(
 s,
 "<noscript><a class=\"navigate icon-go\" href=\"{}/{}\">Go to linked tree: <code>{}</code></a></noscript>",
 EscapeAttribute(&config.site),
-EscapeAttribute(path.as_str()),
-EscapeHtml(path.as_str()),
+EscapeAttribute(link),
+EscapeHtml(link),
 )
 .unwrap();
 }
@@ -153,13 +151,12 @@ pub fn branch_to_html(

 s.push_str("<th-bb>");
 {
-if let Content::ResolvedLink(file_id) = &branch.attributes.content {
-let path = treehouse.tree_path(*file_id).expect(".tree file expected");
+if let Content::Link(link) = &branch.attributes.content {
 write!(
 s,
 "<a class=\"icon icon-go\" href=\"{}/{}\" title=\"linked tree\"></a>",
 EscapeAttribute(&config.site),
-EscapeAttribute(path.as_str()),
+EscapeAttribute(link),
 )
 .unwrap();
 } else {
@@ -8,11 +8,12 @@ use crate::state::{toml_error_to_diagnostic, FileId, TomlError, Treehouse};

 pub struct ErrorsEmitted;

-#[instrument(skip(input))]
+#[instrument(skip(treehouse))]
 pub fn parse_tree_with_diagnostics(
+treehouse: &mut Treehouse,
 file_id: FileId,
-input: &str,
 ) -> Result<Roots, Vec<Diagnostic<FileId>>> {
+let input = &treehouse.source(file_id).input();
 Roots::parse(&mut treehouse_format::pull::Parser { input, position: 0 }).map_err(|error| {
 vec![Diagnostic {
 severity: Severity::Error,
@@ -3,19 +3,23 @@ use std::{collections::HashMap, ops::Range};
 use anyhow::Context;
 use codespan_reporting::{
 diagnostic::{Diagnostic, Label, LabelStyle, Severity},
+files::SimpleFiles,
 term::termcolor::{ColorChoice, StandardStream},
 };
-use tracing::instrument;
 use ulid::Ulid;

 use crate::{
 tree::{SemaBranchId, SemaRoots, SemaTree},
-vfs::{VPath, VPathBuf},
+vfs::VPathBuf,
 };

 #[derive(Debug, Clone)]
 pub enum Source {
-Tree { input: String, tree_path: VPathBuf },
+Tree {
+input: String,
+tree_path: String,
+target_path: VPathBuf,
+},
 Other(String),
 }

@@ -34,54 +38,26 @@ impl AsRef<str> for Source {
 }
 }

-#[derive(Debug, Clone)]
-pub struct File {
-pub path: VPathBuf,
-pub source: Source,
-pub line_starts: Vec<usize>,
-}
-
-impl File {
-fn line_start(&self, line_index: usize) -> Result<usize, codespan_reporting::files::Error> {
-use std::cmp::Ordering;
-
-match line_index.cmp(&self.line_starts.len()) {
-Ordering::Less => Ok(self
-.line_starts
-.get(line_index)
-.cloned()
-.expect("failed despite previous check")),
-Ordering::Equal => Ok(self.source.as_ref().len()),
-Ordering::Greater => Err(codespan_reporting::files::Error::LineTooLarge {
-given: line_index,
-max: self.line_starts.len() - 1,
-}),
-}
-}
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct FileId(usize);
+pub type Files = SimpleFiles<String, Source>;
+pub type FileId = <Files as codespan_reporting::files::Files<'static>>::FileId;

 /// Treehouse compilation context.
 pub struct Treehouse {
-pub files: Vec<File>,
-pub files_by_tree_path: HashMap<VPathBuf, FileId>,
+pub files: Files,

 pub tree: SemaTree,
 pub branches_by_named_id: HashMap<String, SemaBranchId>,
-pub roots: HashMap<FileId, SemaRoots>,
+pub roots: HashMap<String, SemaRoots>,

 pub branch_redirects: HashMap<String, SemaBranchId>,

-pub missingno_generator: ulid::Generator,
+missingno_generator: ulid::Generator,
 }

 impl Treehouse {
 pub fn new() -> Self {
 Self {
-files: vec![],
-files_by_tree_path: HashMap::new(),
+files: Files::new(),

 tree: SemaTree::default(),
 branches_by_named_id: HashMap::new(),
@@ -93,34 +69,27 @@ impl Treehouse {
 }
 }

-pub fn add_file(&mut self, path: VPathBuf, source: Source) -> FileId {
-let id = FileId(self.files.len());
-self.files.push(File {
-line_starts: codespan_reporting::files::line_starts(source.input()).collect(),
-
-path,
-source,
-});
-id
-}
-
-/// Get the name of a file, assuming it was previously registered.
-pub fn path(&self, file_id: FileId) -> &VPath {
-&self.files[file_id.0].path
+pub fn add_file(&mut self, filename: String, source: Source) -> FileId {
+self.files.add(filename, source)
 }

 /// Get the source code of a file, assuming it was previously registered.
 pub fn source(&self, file_id: FileId) -> &Source {
-&self.files[file_id.0].source
+self.files
+.get(file_id)
+.expect("file should have been registered previously")
+.source()
 }

-pub fn set_source(&mut self, file_id: FileId, source: Source) {
-self.files[file_id.0].line_starts =
-codespan_reporting::files::line_starts(source.input()).collect();
-self.files[file_id.0].source = source;
+/// Get the name of a file, assuming it was previously registered.
+pub fn filename(&self, file_id: FileId) -> &str {
+self.files
+.get(file_id)
+.expect("file should have been registered previously")
+.name()
 }

-pub fn tree_path(&self, file_id: FileId) -> Option<&VPath> {
+pub fn tree_path(&self, file_id: FileId) -> Option<&str> {
 match self.source(file_id) {
 Source::Tree { tree_path, .. } => Some(tree_path),
 Source::Other(_) => None,
@@ -140,49 +109,6 @@ impl Default for Treehouse {
 }
 }

-impl<'a> codespan_reporting::files::Files<'a> for Treehouse {
-type FileId = FileId;
-
-type Name = &'a VPath;
-
-type Source = &'a str;
-
-fn name(&'a self, id: Self::FileId) -> Result<Self::Name, codespan_reporting::files::Error> {
-Ok(self.path(id))
-}
-
-fn source(
-&'a self,
-id: Self::FileId,
-) -> Result<Self::Source, codespan_reporting::files::Error> {
-Ok(self.source(id).input())
-}
-
-fn line_index(
-&'a self,
-id: Self::FileId,
-byte_index: usize,
-) -> Result<usize, codespan_reporting::files::Error> {
-let file = &self.files[id.0];
-Ok(file
-.line_starts
-.binary_search(&byte_index)
-.unwrap_or_else(|next_line| next_line - 1))
-}
-
-fn line_range(
-&'a self,
-id: Self::FileId,
-line_index: usize,
-) -> Result<Range<usize>, codespan_reporting::files::Error> {
-let file = &self.files[id.0];
-let line_start = file.line_start(line_index)?;
-let next_line_start = file.line_start(line_index + 1)?;
-
-Ok(line_start..next_line_start)
-}
-}
-
 pub struct TomlError {
 pub message: String,
 pub span: Option<Range<usize>>,
@@ -209,11 +135,7 @@ pub fn toml_error_to_diagnostic(error: TomlError) -> Diagnostic<FileId> {
 }
 }

-#[instrument(skip(files, diagnostics))]
-pub fn report_diagnostics(
-files: &Treehouse,
-diagnostics: &[Diagnostic<FileId>],
-) -> anyhow::Result<()> {
+pub fn report_diagnostics(files: &Files, diagnostics: &[Diagnostic<FileId>]) -> anyhow::Result<()> {
 let writer = StandardStream::stderr(ColorChoice::Auto);
 let config = codespan_reporting::term::Config::default();
 for diagnostic in diagnostics {
@@ -96,7 +96,7 @@ impl SemaRoots {

 if successfully_parsed && attributes.title.is_empty() {
 attributes.title = match treehouse.source(file_id) {
-Source::Tree { tree_path, .. } => tree_path.to_string(),
+Source::Tree { tree_path, .. } => tree_path.clone(),
 _ => panic!("parse_attributes called for a non-.tree file"),
 }
 }
@@ -310,7 +310,7 @@ impl SemaBranch {
 "note: a generated id `{}` will be used, but this id is unstable and will not persist across generations",
 attributes.id
 ),
-format!("help: run `treehouse fix {}` to add missing ids to branches", treehouse.path(file_id)),
+format!("help: run `treehouse fix {}` to add missing ids to branches", treehouse.filename(file_id)),
 ],
 });
 }
@@ -334,26 +334,6 @@ impl SemaBranch {
 });
 }
 }
-
-// Resolve content.links.
-if let Content::Link(tree_path) = &attributes.content {
-if let Some(file_id) = treehouse.files_by_tree_path.get(tree_path) {
-attributes.content = Content::ResolvedLink(*file_id);
-} else {
-diagnostics.push(Diagnostic {
-severity: Severity::Error,
-code: Some("attr".into()),
-message: format!("linked tree `{tree_path}` does not exist"),
-labels: vec![Label {
-style: LabelStyle::Primary,
-file_id,
-range: attribute_warning_span.clone(),
-message: "".into(),
-}],
-notes: vec![],
-})
-}
-}
 }
 attributes
 }
@@ -2,8 +2,6 @@ use std::collections::HashMap;

 use serde::{Deserialize, Serialize};

-use crate::{state::FileId, vfs::VPathBuf};
-
 /// Top-level `%%` root attributes.
 #[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub struct RootAttributes {
@@ -120,12 +118,7 @@ pub enum Content {
 ///
 /// Note that `Link` branches must not contain any children. If a `Link` branch does contain
 /// children, an `attribute`-type error is raised.
-Link(VPathBuf),
-
-/// Valid link to another tree.
-/// This replaces `Content::Link` during semantic analysis.
-#[serde(skip)]
-ResolvedLink(FileId),
+Link(String),
 }

 #[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize)]