introduce the virtual filesystem everywhere

this unfortunately means I had to cut some features (bye bye commit history! for now)
stuff's not quite 100% working just yet (like branch links, which were and are still broken)
we also don't have content_version impls just yet
This commit is contained in:
liquidex 2024-11-17 22:34:43 +01:00
parent db0329077e
commit 377fbe4dab
42 changed files with 1613 additions and 1655 deletions

15
Cargo.lock generated
View file

@ -1545,20 +1545,6 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
[[package]]
name = "tower-livereload"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61d6cbbab4b2d3cafd21fb211cc4b06525a0df919c3e8ca3d36485b1c1bd4cd4"
dependencies = [
"bytes",
"http",
"http-body",
"pin-project-lite",
"tokio",
"tower",
]
[[package]] [[package]]
name = "tower-service" name = "tower-service"
version = "0.3.2" version = "0.3.2"
@ -1611,7 +1597,6 @@ dependencies = [
"serde_json", "serde_json",
"tokio", "tokio",
"toml_edit", "toml_edit",
"tower-livereload",
"treehouse-format", "treehouse-format",
"ulid", "ulid",
"url", "url",

View file

@ -1,8 +1,5 @@
%% title = "404" %% title = "404"
% id = "404"
- # 404
% id = "01HMF8KQ997F1ZTEGDNAE2S6F1" % id = "01HMF8KQ997F1ZTEGDNAE2S6F1"
- seems like the page you're looking for isn't here. - seems like the page you're looking for isn't here.

View file

@ -29,7 +29,6 @@ serde = { version = "1.0.183", features = ["derive"] }
serde_json = "1.0.105" serde_json = "1.0.105"
tokio = { version = "1.32.0", features = ["full"] } tokio = { version = "1.32.0", features = ["full"] }
toml_edit = { version = "0.19.14", features = ["serde"] } toml_edit = { version = "0.19.14", features = ["serde"] }
tower-livereload = "0.9.2"
walkdir = "2.3.3" walkdir = "2.3.3"
ulid = "1.0.0" ulid = "1.0.0"
url = "2.5.0" url = "2.5.0"

View file

@ -2,10 +2,10 @@ pub mod fix;
pub mod serve; pub mod serve;
pub mod wc; pub mod wc;
use std::path::{Path, PathBuf};
use clap::{Args, Parser, Subcommand}; use clap::{Args, Parser, Subcommand};
use crate::vfs::VPathBuf;
#[derive(Parser)] #[derive(Parser)]
pub struct ProgramArgs { pub struct ProgramArgs {
#[clap(subcommand)] #[clap(subcommand)]
@ -41,20 +41,13 @@ pub enum Command {
} }
#[derive(Args)] #[derive(Args)]
pub struct GenerateArgs { pub struct GenerateArgs {}
/// Only use commits as sources. This will cause the latest revision to be taken from the
/// Git history instead of the working tree.
///
/// Recommended for deployment.
#[clap(long)]
pub commits_only: bool,
}
#[derive(Args)] #[derive(Args)]
pub struct FixArgs { pub struct FixArgs {
/// Which file to fix. The fixed file will be printed into stdout so that you have a chance to /// Which file to fix. The fixed file will be printed into stdout so that you have a chance to
/// see the changes. /// see the changes.
pub file: PathBuf, pub file: VPathBuf,
/// If you're happy with the suggested changes, specifying this will apply them to the file /// If you're happy with the suggested changes, specifying this will apply them to the file
/// (overwrite it in place.) /// (overwrite it in place.)
@ -63,7 +56,7 @@ pub struct FixArgs {
/// Write the previous version back to the specified path. /// Write the previous version back to the specified path.
#[clap(long)] #[clap(long)]
pub backup: Option<PathBuf>, pub backup: Option<VPathBuf>,
} }
#[derive(Args)] #[derive(Args)]
@ -85,17 +78,5 @@ pub struct ServeArgs {
pub struct WcArgs { pub struct WcArgs {
/// A list of paths to report the word counts of. /// A list of paths to report the word counts of.
/// If no paths are provided, the entire tree is word-counted. /// If no paths are provided, the entire tree is word-counted.
pub paths: Vec<PathBuf>, pub paths: Vec<VPathBuf>,
}
#[derive(Debug, Clone, Copy)]
pub struct Paths<'a> {
pub target_dir: &'a Path,
pub template_target_dir: &'a Path,
pub static_dir: &'a Path,
pub template_dir: &'a Path,
pub content_dir: &'a Path,
pub config_file: &'a Path,
} }

View file

@ -1,16 +1,17 @@
use std::{ffi::OsStr, ops::Range}; use std::ops::{ControlFlow, Range};
use anyhow::Context; use anyhow::{anyhow, Context};
use codespan_reporting::diagnostic::Diagnostic; use codespan_reporting::diagnostic::Diagnostic;
use log::{error, info};
use treehouse_format::ast::Branch; use treehouse_format::ast::Branch;
use walkdir::WalkDir;
use crate::{ use crate::{
parse::{self, parse_toml_with_diagnostics, parse_tree_with_diagnostics}, parse::{self, parse_toml_with_diagnostics, parse_tree_with_diagnostics},
state::{report_diagnostics, FileId, Source, Treehouse}, state::{report_diagnostics, FileId, Source, Treehouse},
vfs::{self, Dir, Edit, VPath},
}; };
use super::{FixAllArgs, FixArgs, Paths}; use super::{FixAllArgs, FixArgs};
struct Fix { struct Fix {
range: Range<usize>, range: Range<usize>,
@ -132,68 +133,102 @@ pub fn fix_file(
}) })
} }
pub fn fix_file_cli(fix_args: FixArgs) -> anyhow::Result<()> { pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
let utf8_filename = fix_args.file.to_string_lossy().into_owned(); let file = if &*fix_args.file == VPath::new("-") {
let file = if utf8_filename == "-" {
std::io::read_to_string(std::io::stdin().lock()).context("cannot read file from stdin")? std::io::read_to_string(std::io::stdin().lock()).context("cannot read file from stdin")?
} else { } else {
std::fs::read_to_string(&fix_args.file).context("cannot read file to fix")? String::from_utf8(
root.content(&fix_args.file)
.ok_or_else(|| anyhow!("cannot read file to fix"))?,
)
.context("input file has invalid UTF-8")?
}; };
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let file_id = treehouse.add_file(utf8_filename, Source::Other(file)); let file_id = treehouse.add_file(fix_args.file.as_str().to_owned(), Source::Other(file));
let edit_path = root.edit_path(&fix_args.file).ok_or_else(|| {
anyhow!(
"{} is not an editable file (perhaps it is not in a persistent path?)",
fix_args.file
)
})?;
Ok(
if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) { if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
if fix_args.apply { if fix_args.apply {
// Try to write the backup first. If writing that fails, bail out without overwriting // Try to write the backup first. If writing that fails, bail out without overwriting
// the source file. // the source file.
if let Some(backup_path) = fix_args.backup { if let Some(backup_path) = fix_args.backup {
std::fs::write(backup_path, treehouse.source(file_id).input()) let backup_edit_path = root.edit_path(&backup_path).ok_or_else(|| {
.context("cannot write backup; original file will not be overwritten")?; anyhow!("backup file {backup_path} is not an editable file")
})?;
Edit::Seq(vec![
Edit::Write(
backup_edit_path,
treehouse.source(file_id).input().to_owned(),
),
Edit::Write(edit_path, fixed),
])
} else {
Edit::Write(edit_path, fixed)
} }
std::fs::write(&fix_args.file, fixed).context("cannot overwrite original file")?;
} else { } else {
println!("{fixed}"); println!("{fixed}");
Edit::NoOp
} }
} else { } else {
report_diagnostics(&treehouse.files, &diagnostics)?; report_diagnostics(&treehouse.files, &diagnostics)?;
Edit::NoOp
},
)
} }
Ok(()) pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Edit> {
} let mut edits = vec![];
pub fn fix_all_cli(fix_all_args: FixAllArgs, paths: &Paths<'_>) -> anyhow::Result<()> { fn fix_one(dir: &dyn Dir, path: &VPath) -> anyhow::Result<Edit> {
for entry in WalkDir::new(paths.content_dir) { if path.extension() == Some("tree") {
let entry = entry?; let Some(content) = dir.content(path) else {
if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) { return Ok(Edit::NoOp);
let file = std::fs::read_to_string(entry.path()) };
.with_context(|| format!("cannot read file to fix: {:?}", entry.path()))?; let content = String::from_utf8(content).context("file is not valid UTF-8")?;
let utf8_filename = entry.path().to_string_lossy();
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file)); let file_id = treehouse.add_file(path.as_str().to_string(), Source::Other(content));
let edit_path = dir.edit_path(path).context("path is not editable")?;
if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) { if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
if fixed != treehouse.source(file_id).input() { if fixed != treehouse.source(file_id).input() {
if fix_all_args.apply { return Ok(Edit::Write(edit_path, fixed));
println!("fixing: {:?}", entry.path());
std::fs::write(entry.path(), fixed).with_context(|| {
format!("cannot overwrite original file: {:?}", entry.path())
})?;
} else {
println!("will fix: {:?}", entry.path());
}
} }
} else { } else {
report_diagnostics(&treehouse.files, &diagnostics)?; report_diagnostics(&treehouse.files, &diagnostics)?;
} }
} }
}
if !fix_all_args.apply { Ok(Edit::NoOp)
println!("run with `--apply` to apply changes");
} }
Ok(()) info!("gathering edits");
vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
match fix_one(dir, path) {
Ok(Edit::NoOp) => (),
Ok(edit) => edits.push(edit),
Err(err) => error!("cannot fix {path}: {err:?}"),
}
ControlFlow::Continue(())
});
// NOTE: This number may be higher than you expect, because NoOp edits also count!
info!("{} edits to apply", edits.len());
if !fix_all_args.apply {
info!("dry run; add `--apply` to apply changes");
Ok(Edit::Dry(Box::new(Edit::All(edits))))
} else {
Ok(Edit::All(edits))
}
} }

View file

@ -2,131 +2,78 @@
mod live_reload; mod live_reload;
use std::fmt::Write; use std::fmt::Write;
use std::{net::Ipv4Addr, path::PathBuf, sync::Arc}; use std::{net::Ipv4Addr, sync::Arc};
use anyhow::Context;
use axum::{ use axum::{
extract::{Path, Query, RawQuery, State}, extract::{Path, Query, RawQuery, State},
http::{ http::{
header::{CACHE_CONTROL, CONTENT_TYPE, LOCATION}, header::{CACHE_CONTROL, CONTENT_TYPE},
HeaderValue, StatusCode, HeaderValue, StatusCode,
}, },
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
routing::get, routing::get,
Router, Router,
}; };
use log::{error, info}; use log::info;
use serde::Deserialize; use serde::Deserialize;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use crate::{ use crate::generate::Sources;
config::Config, use crate::vfs::asynch::AsyncDir;
html::EscapeHtml, use crate::vfs::VPath;
state::{Source, Treehouse}, use crate::{html::EscapeHtml, state::Source};
};
use super::Paths; mod system {
use crate::vfs::VPath;
struct SystemPages { pub const INDEX: &VPath = VPath::new_const("index");
index: String, pub const FOUR_OH_FOUR: &VPath = VPath::new_const("_treehouse/404");
four_oh_four: String, pub const B_DOCS: &VPath = VPath::new_const("_treehouse/b");
b_docs: String,
sandbox: String,
navmap: String,
} }
struct Server { struct Server {
config: Config, sources: Arc<Sources>,
treehouse: Treehouse, target: AsyncDir,
target_dir: PathBuf,
system_pages: SystemPages,
} }
pub async fn serve( pub async fn serve(sources: Arc<Sources>, target: AsyncDir, port: u16) -> anyhow::Result<()> {
config: Config,
treehouse: Treehouse,
paths: &Paths<'_>,
port: u16,
) -> anyhow::Result<()> {
let app = Router::new() let app = Router::new()
.route("/", get(index)) .route("/", get(index)) // needed explicitly because * does not match empty paths
.route("/*page", get(page)) .route("/*path", get(vfs_entry))
.route("/b", get(branch)) .route("/b", get(branch))
.route("/navmap.js", get(navmap))
.route("/sandbox", get(sandbox))
.route("/static/*file", get(static_file))
.fallback(get(four_oh_four)) .fallback(get(four_oh_four))
.with_state(Arc::new(Server { .with_state(Arc::new(Server { sources, target }));
config,
treehouse,
target_dir: paths.target_dir.to_owned(),
system_pages: SystemPages {
index: std::fs::read_to_string(paths.target_dir.join("index.html"))
.context("cannot read index page")?,
four_oh_four: std::fs::read_to_string(paths.target_dir.join("_treehouse/404.html"))
.context("cannot read 404 page")?,
b_docs: std::fs::read_to_string(paths.target_dir.join("_treehouse/b.html"))
.context("cannot read /b documentation page")?,
sandbox: std::fs::read_to_string(paths.target_dir.join("static/html/sandbox.html"))
.context("cannot read sandbox page")?,
navmap: std::fs::read_to_string(paths.target_dir.join("navmap.js"))
.context("cannot read navigation map")?,
},
}));
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
let app = live_reload::live_reload(app); let app = app.nest("/dev/live-reload", live_reload::router());
info!("serving on port {port}"); info!("serving on port {port}");
let listener = TcpListener::bind((Ipv4Addr::from([0u8, 0, 0, 0]), port)).await?; let listener = TcpListener::bind((Ipv4Addr::from([0u8, 0, 0, 0]), port)).await?;
Ok(axum::serve(listener, app).await?) Ok(axum::serve(listener, app).await?)
} }
fn get_content_type(path: &str) -> Option<&'static str> { fn get_content_type(extension: &str) -> Option<&'static str> {
match () { match extension {
_ if path.ends_with(".html") => Some("text/html"), "html" => Some("text/html"),
_ if path.ends_with(".js") => Some("text/javascript"), "js" => Some("text/javascript"),
_ if path.ends_with(".woff2") => Some("font/woff2"), "woff" => Some("font/woff2"),
_ if path.ends_with(".svg") => Some("image/svg+xml"), "svg" => Some("image/svg+xml"),
_ => None, _ => None,
} }
} }
async fn index(State(state): State<Arc<Server>>) -> Response {
Html(state.system_pages.index.clone()).into_response()
}
async fn navmap(State(state): State<Arc<Server>>) -> Response {
let mut response = state.system_pages.navmap.clone().into_response();
response
.headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static("text/javascript"));
response
}
async fn four_oh_four(State(state): State<Arc<Server>>) -> Response {
(
StatusCode::NOT_FOUND,
Html(state.system_pages.four_oh_four.clone()),
)
.into_response()
}
#[derive(Deserialize)] #[derive(Deserialize)]
struct StaticFileQuery { struct VfsQuery {
cache: Option<String>, #[serde(rename = "v")]
content_version: Option<String>,
} }
async fn static_file( async fn get_static_file(path: &str, query: &VfsQuery, state: &Server) -> Option<Response> {
Path(path): Path<String>, let vpath = VPath::try_new(path).ok()?;
Query(query): Query<StaticFileQuery>, let content = state.target.content(vpath).await?;
State(state): State<Arc<Server>>, let mut response = content.into_response();
) -> Response {
if let Ok(file) = tokio::fs::read(state.target_dir.join("static").join(&path)).await {
let mut response = file.into_response();
if let Some(content_type) = get_content_type(&path) { if let Some(content_type) = vpath.extension().and_then(get_content_type) {
response response
.headers_mut() .headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static(content_type)); .insert(CONTENT_TYPE, HeaderValue::from_static(content_type));
@ -134,77 +81,80 @@ async fn static_file(
response.headers_mut().remove(CONTENT_TYPE); response.headers_mut().remove(CONTENT_TYPE);
} }
if query.cache.is_some() { if query.content_version.is_some() {
response.headers_mut().insert( response.headers_mut().insert(
CACHE_CONTROL, CACHE_CONTROL,
HeaderValue::from_static("public, max-age=31536000, immutable"), HeaderValue::from_static("public, max-age=31536000, immutable"),
); );
} }
Some(response)
}
async fn vfs_entry(
Path(path): Path<String>,
Query(query): Query<VfsQuery>,
State(state): State<Arc<Server>>,
) -> Response {
if let Some(response) = get_static_file(&path, &query, &state).await {
response response
} else { } else {
four_oh_four(State(state)).await four_oh_four(State(state)).await
} }
} }
async fn page(Path(path): Path<String>, State(state): State<Arc<Server>>) -> Response { async fn system_page(target: &AsyncDir, path: &VPath) -> Response {
let bare_path = path.strip_suffix(".html").unwrap_or(&path); if let Some(content) = target.content(path).await {
if let Some(redirected_path) = state.config.redirects.page.get(bare_path) { (StatusCode::NOT_FOUND, Html(content)).into_response()
return ( } else {
StatusCode::MOVED_PERMANENTLY, (
[(LOCATION, format!("{}/{redirected_path}", state.config.site))], StatusCode::INTERNAL_SERVER_ERROR,
format!("500 Internal Server Error: system page {path} is not available"),
) )
.into_response(); .into_response()
}
let html_path = format!("{bare_path}.html");
if let Ok(file) = tokio::fs::read(state.target_dir.join(&*html_path)).await {
([(CONTENT_TYPE, "text/html")], file).into_response()
} else {
four_oh_four(State(state)).await
} }
} }
async fn sandbox(State(state): State<Arc<Server>>) -> Response { async fn index(State(state): State<Arc<Server>>) -> Response {
// Small hack to prevent the LiveReloadLayer from injecting itself into the sandbox. system_page(&state.target, system::INDEX).await
// The sandbox is always nested under a different page, so there's no need to do that.
let mut response = Html(state.system_pages.sandbox.clone()).into_response();
#[cfg(debug_assertions)]
{
response
.extensions_mut()
.insert(live_reload::DisableLiveReload);
}
// Debounce requests a bit. There's a tendency to have very many sandboxes on a page, and
// loading this page as many times as there are sandboxes doesn't seem like the best way to do
// things.
response
.headers_mut()
.insert(CACHE_CONTROL, HeaderValue::from_static("max-age=10"));
response
} }
async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Html<String> { async fn four_oh_four(State(state): State<Arc<Server>>) -> Response {
system_page(&state.target, system::FOUR_OH_FOUR).await
}
async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Response {
if let Some(named_id) = named_id { if let Some(named_id) = named_id {
let branch_id = state let branch_id = state
.sources
.treehouse .treehouse
.branches_by_named_id .branches_by_named_id
.get(&named_id) .get(&named_id)
.copied() .copied()
.or_else(|| state.treehouse.branch_redirects.get(&named_id).copied()); .or_else(|| {
state
.sources
.treehouse
.branch_redirects
.get(&named_id)
.copied()
});
if let Some(branch_id) = branch_id { if let Some(branch_id) = branch_id {
let branch = state.treehouse.tree.branch(branch_id); let branch = state.sources.treehouse.tree.branch(branch_id);
if let Source::Tree { if let Source::Tree {
input, target_path, .. input, target_path, ..
} = state.treehouse.source(branch.file_id) } = state.sources.treehouse.source(branch.file_id)
{ {
match std::fs::read_to_string(target_path) { if let Some(content) = state
Ok(content) => { .target
let branch_markdown_content = input[branch.content.clone()].trim(); .content(target_path)
.await
.and_then(|s| String::from_utf8(s).ok())
{
let branch_markup = input[branch.content.clone()].trim();
let mut per_page_metadata = let mut per_page_metadata =
String::from("<meta property=\"og:description\" content=\""); String::from("<meta property=\"og:description\" content=\"");
write!(per_page_metadata, "{}", EscapeHtml(branch_markdown_content)) write!(per_page_metadata, "{}", EscapeHtml(branch_markup)).unwrap();
.unwrap();
per_page_metadata.push_str("\">"); per_page_metadata.push_str("\">");
const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->"; const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";
@ -213,17 +163,20 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
&per_page_metadata, &per_page_metadata,
// Replace one under the assumption that it appears in all pages. // Replace one under the assumption that it appears in all pages.
1, 1,
)); ))
} .into_response();
Err(e) => { } else {
error!("error while reading file {target_path:?}: {e:?}"); return (
} StatusCode::INTERNAL_SERVER_ERROR,
format!("500 Internal Server Error: branch metadata points to entry {target_path} which does not have readable content")
)
.into_response();
} }
} }
} }
Html(state.system_pages.four_oh_four.clone()) system_page(&state.target, system::FOUR_OH_FOUR).await
} else { } else {
Html(state.system_pages.b_docs.clone()) system_page(&state.target, system::B_DOCS).await
} }
} }

View file

@ -1,21 +1,28 @@
use axum::{ use std::time::Duration;
http::{header::CONTENT_TYPE, Response},
Router,
};
#[derive(Debug, Clone, Copy)] use axum::{routing::get, Router};
pub struct DisableLiveReload; use tokio::time::sleep;
pub fn live_reload(router: Router) -> Router { pub fn router<S>() -> Router<S> {
router.layer(tower_livereload::LiveReloadLayer::new().response_predicate( let router = Router::new().route("/back-up", get(back_up));
|response: &Response<_>| {
let is_html = response // The endpoint for immediate reload is only enabled on debug builds.
.headers() // Release builds use the exponential backoff system that detects is the WebSocket is closed.
.get(CONTENT_TYPE) #[cfg(debug_assertions)]
.and_then(|v| v.to_str().ok()) let router = router.route("/stall", get(stall));
.is_some_and(|v| v.starts_with("text/html"));
let is_disabled = response.extensions().get::<DisableLiveReload>().is_some(); router.with_state(())
is_html && !is_disabled }
},
)) #[cfg(debug_assertions)]
async fn stall() -> String {
loop {
// Sleep for a day, I guess. Just to uphold the connection forever without really using any
// significant resources.
sleep(Duration::from_secs(60 * 60 * 24)).await;
}
}
async fn back_up() -> String {
"".into()
} }

View file

@ -1,12 +1,11 @@
use std::{ffi::OsStr, path::Path}; use std::ops::ControlFlow;
use anyhow::Context;
use treehouse_format::ast::{Branch, Roots}; use treehouse_format::ast::{Branch, Roots};
use walkdir::WalkDir;
use crate::{ use crate::{
parse::parse_tree_with_diagnostics, parse::parse_tree_with_diagnostics,
state::{report_diagnostics, Source, Treehouse}, state::{report_diagnostics, Source, Treehouse},
vfs::{self, Dir, VPath},
}; };
use super::WcArgs; use super::WcArgs;
@ -29,14 +28,14 @@ fn wc_roots(source: &str, roots: &Roots) -> usize {
.sum() .sum()
} }
pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> { pub fn wc_cli(content_dir: &dyn Dir, mut wc_args: WcArgs) -> anyhow::Result<()> {
if wc_args.paths.is_empty() { if wc_args.paths.is_empty() {
for entry in WalkDir::new(content_dir) { vfs::walk_dir_rec(content_dir, VPath::ROOT, &mut |path| {
let entry = entry?; if path.extension() == Some("tree") {
if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) { wc_args.paths.push(path.to_owned());
wc_args.paths.push(entry.into_path());
}
} }
ControlFlow::Continue(())
});
} }
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
@ -44,15 +43,11 @@ pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
let mut total = 0; let mut total = 0;
for path in &wc_args.paths { for path in &wc_args.paths {
let file = std::fs::read_to_string(path) if let Some(content) = content_dir
.with_context(|| format!("cannot read file to word count: {path:?}"))?; .content(path)
let path_without_ext = path.with_extension(""); .and_then(|b| String::from_utf8(b).ok())
let utf8_filename = path_without_ext {
.strip_prefix(content_dir) let file_id = treehouse.add_file(path.to_string(), Source::Other(content));
.expect("paths should be rooted within the content directory")
.to_string_lossy();
let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file));
match parse_tree_with_diagnostics(&mut treehouse, file_id) { match parse_tree_with_diagnostics(&mut treehouse, file_id) {
Ok(parsed) => { Ok(parsed) => {
let source = treehouse.source(file_id); let source = treehouse.source(file_id);
@ -65,6 +60,7 @@ pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
} }
} }
} }
}
println!("{total:>8} total"); println!("{total:>8} total");

View file

@ -1,11 +1,8 @@
use std::{ use std::{collections::HashMap, ops::ControlFlow};
collections::HashMap, ffi::OsStr, fs::File, io::BufReader, ops::ControlFlow, path::Path,
};
use anyhow::Context; use anyhow::{anyhow, Context};
use log::debug; use log::{debug, error};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use walkdir::WalkDir;
use crate::{ use crate::{
html::highlight::{ html::highlight::{
@ -13,7 +10,7 @@ use crate::{
Syntax, Syntax,
}, },
import_map::ImportRoot, import_map::ImportRoot,
vfs::{self, ReadFilesystem, VPath, VPathBuf}, vfs::{self, Dir, VPath, VPathBuf},
}; };
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
@ -102,8 +99,8 @@ pub enum Markup {
} }
impl Config { impl Config {
pub fn autopopulate_emoji(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> { pub fn autopopulate_emoji(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
vfs::walk_rec(dir, VPath::ROOT, &mut |path| { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
if path.extension().is_some_and(is_emoji_file) { if path.extension().is_some_and(is_emoji_file) {
if let Some(emoji_name) = path.file_stem() { if let Some(emoji_name) = path.file_stem() {
if !self.emoji.contains_key(emoji_name) { if !self.emoji.contains_key(emoji_name) {
@ -118,8 +115,8 @@ impl Config {
Ok(()) Ok(())
} }
pub fn autopopulate_pics(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> { pub fn autopopulate_pics(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
vfs::walk_rec(dir, VPath::ROOT, &mut |path| { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
if path.extension().is_some_and(is_pic_file) { if path.extension().is_some_and(is_pic_file) {
if let Some(pic_name) = path.file_stem() { if let Some(pic_name) = path.file_stem() {
let pic_id = pic_name let pic_id = pic_name
@ -142,38 +139,48 @@ impl Config {
format!("{}/{}", self.site, page) format!("{}/{}", self.site, page)
} }
pub fn pic_url(&self, pics_fs: &dyn ReadFilesystem, id: &str) -> String { pub fn pic_url(&self, pics_dir: &dyn Dir, id: &str) -> String {
vfs::url( vfs::url(
&self.site, &self.site,
pics_fs, pics_dir,
self.pics self.pics
.get(id) .get(id)
.map(|x| &**x) .map(|x| &**x)
.unwrap_or(VPath::new("404.png")), .unwrap_or(VPath::new("404.png")),
) )
.expect("pics_dir is not anchored anywhere")
} }
/// Loads all syntax definition files. /// Loads all syntax definition files.
pub fn load_syntaxes(&mut self, dir: &Path) -> anyhow::Result<()> { pub fn load_syntaxes(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
for entry in WalkDir::new(dir) { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
let entry = entry?; if path.extension() == Some("json") {
if entry.path().extension() == Some(OsStr::new("json")) { let name = path
let name = entry
.path()
.file_stem() .file_stem()
.expect("syntax file name should have a stem") .expect("syntax file name should have a stem due to the .json extension");
.to_string_lossy();
debug!("loading syntax {name:?}"); debug!("loading syntax {name:?}");
let syntax: Syntax = serde_json::from_reader(BufReader::new( let result: Result<Syntax, _> = dir
File::open(entry.path()).context("could not open syntax file")?, .content(path)
)) .ok_or_else(|| anyhow!("syntax .json is not a file"))
.context("could not deserialize syntax file")?; .and_then(|b| {
String::from_utf8(b).context("syntax .json contains invalid UTF-8")
})
.and_then(|s| {
serde_json::from_str(&s).context("could not deserialize syntax file")
});
match result {
Ok(syntax) => {
let compiled = compile_syntax(&syntax); let compiled = compile_syntax(&syntax);
self.syntaxes.insert(name.into_owned(), compiled); self.syntaxes.insert(name.to_owned(), compiled);
}
Err(err) => error!("error while loading syntax file `{path}`: {err}"),
} }
} }
ControlFlow::Continue(())
});
Ok(()) Ok(())
} }
} }

View file

@ -0,0 +1,15 @@
use crate::vfs::DynDir;
#[derive(Debug, Clone)]
pub struct Dirs {
pub root: DynDir,
pub content: DynDir,
pub static_: DynDir,
pub template: DynDir,
// `static` directories
pub pics: DynDir,
pub emoji: DynDir,
pub syntax: DynDir,
}

View file

@ -1,103 +1,53 @@
use std::{ mod dir_helper;
collections::HashMap, mod include_static_helper;
ffi::OsStr,
path::{Path, PathBuf},
time::Instant,
};
use anyhow::{anyhow, bail, Context}; use std::{collections::HashMap, fmt, ops::ControlFlow, sync::Arc};
use codespan_reporting::{
diagnostic::{Diagnostic, Label, LabelStyle, Severity}, use anyhow::{anyhow, ensure, Context};
files::Files as _, use codespan_reporting::diagnostic::Diagnostic;
}; use dir_helper::DirHelper;
use copy_dir::copy_dir;
use handlebars::{handlebars_helper, Handlebars}; use handlebars::{handlebars_helper, Handlebars};
use include_static_helper::IncludeStaticHelper;
use log::{debug, error, info}; use log::{debug, error, info};
use serde::Serialize; use serde::Serialize;
use walkdir::WalkDir;
use crate::{ use crate::{
cli::Paths,
config::Config, config::Config,
dirs::Dirs,
fun::seasons::Season, fun::seasons::Season,
history::History, html::{breadcrumbs::breadcrumbs_to_html, navmap::NavigationMap, tree::branches_to_html},
html::{
breadcrumbs::breadcrumbs_to_html,
navmap::{build_navigation_map, NavigationMap},
tree::branches_to_html,
},
import_map::ImportMap, import_map::ImportMap,
include_static::IncludeStatic,
parse::parse_tree_with_diagnostics, parse::parse_tree_with_diagnostics,
state::{has_errors, report_diagnostics, RevisionInfo, Source}, state::{report_diagnostics, Source},
static_urls::StaticUrls,
tree::SemaRoots, tree::SemaRoots,
vfs::{CdExt, ReadFilesystem, VPath, VPathBuf}, vfs::{self, Cd, Dir, DirEntry, DynDir, MemDir, Overlay, ToDynDir, VPath, VPathBuf},
}; };
use crate::state::{FileId, Treehouse}; use crate::state::{FileId, Treehouse};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LatestRevision {
/// The working tree is treated as the latest revision.
WorkingTree,
/// The latest commit is treated as the latest revision. The working tree is ignored.
LatestCommit,
}
struct Generator {
tree_files: Vec<PathBuf>,
git: git2::Repository,
history: History,
latest_revision: LatestRevision,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct ParsedTree { pub struct ParsedTree {
source_path: String,
root_key: String, root_key: String,
tree_path: String, tree_path: String,
file_id: FileId, file_id: FileId,
target_path: PathBuf, target_path: VPathBuf,
} }
#[derive(Serialize)] #[derive(Serialize)]
pub struct Page { struct Page {
pub title: String, title: String,
pub thumbnail: Option<Thumbnail>, thumbnail: Option<Thumbnail>,
pub scripts: Vec<String>, scripts: Vec<String>,
pub styles: Vec<String>, styles: Vec<String>,
pub breadcrumbs: String, breadcrumbs: String,
pub tree_path: Option<String>, tree_path: Option<String>,
pub tree: String, tree: String,
pub revision: RevisionInfo,
pub revision_url: String,
pub source_url: String,
pub history_url: String,
} }
#[derive(Serialize)] #[derive(Serialize)]
pub struct Commit { struct Thumbnail {
pub revision_number: usize, url: String,
pub hash: String, alt: Option<String>,
pub hash_short: String,
pub summary: String,
pub body: String,
}
#[derive(Serialize)]
pub struct HistoryPage {
pub title: String,
pub commits: Vec<Commit>,
pub tree_path: String,
pub is_history: bool, // always true
}
#[derive(Serialize)]
pub struct Thumbnail {
pub url: String,
pub alt: Option<String>,
} }
#[derive(Serialize)] #[derive(Serialize)]
@ -114,138 +64,61 @@ struct PageTemplateData<'a> {
page: Page, page: Page,
} }
#[derive(Serialize)] fn create_handlebars(site: &str, static_: DynDir) -> Handlebars<'static> {
struct HistoryTemplateData<'a> { let mut handlebars = Handlebars::new();
#[serde(flatten)]
base: &'a BaseTemplateData<'a>,
page: HistoryPage,
}
impl Generator {
fn add_directory_rec(&mut self, directory: &Path) -> anyhow::Result<()> {
for entry in WalkDir::new(directory) {
let entry = entry?;
if entry.path().extension() == Some(OsStr::new("tree")) {
self.tree_files.push(entry.path().to_owned());
}
}
Ok(())
}
fn init_handlebars(handlebars: &mut Handlebars<'_>, paths: &Paths<'_>, config: &Config) {
handlebars_helper!(cat: |a: String, b: String| a + &b); handlebars_helper!(cat: |a: String, b: String| a + &b);
handlebars.register_helper("cat", Box::new(cat)); handlebars.register_helper("cat", Box::new(cat));
handlebars.register_helper( handlebars.register_helper("asset", Box::new(DirHelper::new(site, static_.clone())));
"asset",
Box::new(StaticUrls::new(
paths.target_dir.join("static"),
format!("{}/static", config.site),
)),
);
handlebars.register_helper( handlebars.register_helper(
"include_static", "include_static",
Box::new(IncludeStatic { Box::new(IncludeStaticHelper::new(static_)),
// NOTE: Again, allow referring to generated static assets.
// This is necessary for import maps, for whom the <src> attribute is not
// currently supported.
base_dir: paths.target_dir.join("static"),
}),
); );
handlebars
} }
fn register_template( fn load_templates(handlebars: &mut Handlebars, dir: &dyn Dir) {
handlebars: &mut Handlebars<'_>, vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
treehouse: &mut Treehouse, if path.extension() == Some("hbs") {
diagnostics: &mut Vec<Diagnostic<FileId>>, if let Some(content) = dir.content(path).and_then(|b| String::from_utf8(b).ok()) {
name: &str, if let Err(err) = handlebars.register_template_string(path.as_str(), content) {
path: &Path, error!("in template: {err}");
) -> anyhow::Result<FileId> {
let source = std::fs::read_to_string(path)
.with_context(|| format!("cannot read template file {path:?}"))?;
let file_id =
treehouse.add_file(path.to_string_lossy().into_owned(), Source::Other(source));
let source = treehouse.source(file_id);
if let Err(error) = handlebars.register_template_string(name, source) {
Self::wrangle_handlebars_error_into_diagnostic(
treehouse,
diagnostics,
file_id,
error.line_no,
error.column_no,
error.reason().to_string(),
)?;
} }
Ok(file_id)
} }
fn wrangle_handlebars_error_into_diagnostic(
treehouse: &mut Treehouse,
diagnostics: &mut Vec<Diagnostic<FileId>>,
file_id: FileId,
line: Option<usize>,
column: Option<usize>,
message: String,
) -> anyhow::Result<()> {
if let (Some(line), Some(column)) = (line, column) {
let line_range = treehouse
.files
.line_range(file_id, line)
.expect("file was added to the list");
diagnostics.push(Diagnostic {
severity: Severity::Error,
code: Some("template".into()),
message,
labels: vec![Label {
style: LabelStyle::Primary,
file_id,
range: line_range.start + column..line_range.start + column + 1,
message: String::new(),
}],
notes: vec![],
})
} else {
let file = treehouse.filename(file_id);
bail!("template error in {file}: {message}");
} }
Ok(()) ControlFlow::Continue(())
});
} }
fn parse_tree( fn parse_tree(
treehouse: &mut Treehouse, treehouse: &mut Treehouse,
config: &Config, config: &Config,
source: String, source: String,
source_path: String, source_path: VPathBuf,
target_path: VPathBuf,
tree_path: String, tree_path: String,
target_path: PathBuf,
revision: RevisionInfo,
) -> anyhow::Result<(Option<ParsedTree>, Vec<Diagnostic<FileId>>)> { ) -> anyhow::Result<(Option<ParsedTree>, Vec<Diagnostic<FileId>>)> {
let file_id = treehouse.add_file( let file_id = treehouse.add_file(
format!("{source_path}@{}", revision.commit_short), source_path.as_str().to_owned(),
Source::Tree { Source::Tree {
input: source, input: source,
target_path: target_path.clone(), target_path: target_path.clone(),
tree_path: tree_path.clone(), tree_path: tree_path.clone(),
revision_info: revision.clone(),
}, },
); );
match parse_tree_with_diagnostics(treehouse, file_id) { match parse_tree_with_diagnostics(treehouse, file_id) {
Ok(roots) => { Ok(roots) => {
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let roots = let roots = SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
let root_key = if revision.is_latest { let root_key = tree_path.clone();
tree_path.clone()
} else {
format!("{tree_path}@{}", revision.number)
};
treehouse.roots.insert(root_key.clone(), roots); treehouse.roots.insert(root_key.clone(), roots);
Ok(( Ok((
Some(ParsedTree { Some(ParsedTree {
source_path,
root_key, root_key,
tree_path, tree_path,
file_id, file_id,
@ -259,233 +132,114 @@ impl Generator {
} }
fn parse_trees( fn parse_trees(
&self,
config: &Config, config: &Config,
paths: &Paths<'_>, dirs: &Dirs,
) -> anyhow::Result<(Treehouse, Vec<ParsedTree>, Vec<Diagnostic<FileId>>)> { ) -> anyhow::Result<(Treehouse, HashMap<VPathBuf, ParsedTree>)> {
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let mut parsed_trees = vec![]; let mut parsed_trees = HashMap::new();
for path in &self.tree_files { vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
let utf8_path = path.to_string_lossy(); if path.extension() == Some("tree") {
if let Some(source) = dirs
.content
.content(path)
.and_then(|b| String::from_utf8(b).ok())
{
let tree_path = path.with_extension("");
let target_path = path.with_extension("html");
let tree_path = path debug!("tree file: {path}");
.strip_prefix(paths.content_dir)
.unwrap_or(path)
.with_extension("")
.to_string_lossy()
.replace('\\', "/");
debug!("tree file: {path:?}");
let page_history = self.history.by_page.get(&utf8_path[..]); match parse_tree(
let working_revision_number = page_history
.map(|history| history.revisions.len() + 1)
.unwrap_or(1);
if self.latest_revision == LatestRevision::WorkingTree {
let source = std::fs::read_to_string(path)?;
let target_path = paths.target_dir.join(&tree_path).with_extension("html");
let (parsed_tree, mut parse_diagnostics) = Self::parse_tree(
&mut treehouse, &mut treehouse,
config, config,
source, source,
utf8_path.clone().into_owned(), path.to_owned(),
tree_path.clone(),
target_path, target_path,
RevisionInfo { tree_path.as_str().to_owned(),
is_latest: true, ) {
number: working_revision_number, Ok((parsed_tree, mut parse_diagnostics)) => {
commit: "working".into(),
commit_short: "working".into(),
},
)?;
diagnostics.append(&mut parse_diagnostics); diagnostics.append(&mut parse_diagnostics);
if let Some(parsed_tree) = parsed_tree { if let Some(parsed_tree) = parsed_tree {
parsed_trees.push(parsed_tree); parsed_trees.insert(tree_path, parsed_tree);
}
}
Err(err) => {
error!("failed to parse tree {path}: {err:?}")
}
}
} }
} }
if let Some(page_history) = page_history { ControlFlow::Continue(())
for (i, revision) in page_history.revisions.iter().enumerate() { });
let revision_number = page_history.revisions.len() - i;
let source = String::from_utf8( report_diagnostics(&treehouse.files, &diagnostics)?;
self.git.find_blob(revision.blob_oid)?.content().to_owned(),
)?;
let target_path = paths Ok((treehouse, parsed_trees))
.target_dir }
.join(format!("{tree_path}@{revision_number}"))
.with_extension("html");
let (parsed_tree, parse_diagnostics) = Self::parse_tree( // TODO: Generation of pages in static/html
&mut treehouse, //
config, // for (name, &file_id) in &template_file_ids {
source, // let filename = name.rsplit_once('/').unwrap_or(("", name)).1;
utf8_path.clone().into_owned(), // if !filename.starts_with('_') {
tree_path.clone(), // let templated_html = match handlebars.render(name, &base_template_data) {
target_path, // Ok(html) => html,
RevisionInfo { // Err(error) => {
is_latest: false, // Self::wrangle_handlebars_error_into_diagnostic(
number: revision_number, // treehouse,
commit: revision.commit_oid.to_string(), // &mut global_diagnostics,
commit_short: revision.commit_short(), // file_id,
}, // error.line_no,
)?; // error.column_no,
_ = parse_diagnostics; // We don't reemit diagnostics from old revisions. // error.desc,
if let Some(parsed_tree) = parsed_tree { // )?;
// If this commit is also considered to be the latest revision, we need // continue;
// to generate a second version of the page that will act as the // }
// latest one. // };
let is_latest = // std::fs::write(
self.latest_revision == LatestRevision::LatestCommit && i == 0; // paths.template_target_dir.join(name).with_extension("html"),
if is_latest { // templated_html,
let root_key = parsed_tree.tree_path.clone(); // )?;
treehouse.roots.insert( // }
root_key.clone(), // }
treehouse.roots.get(&parsed_tree.root_key).unwrap().clone(),
fn generate_tree(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
parsed_tree: &ParsedTree,
) -> anyhow::Result<String> {
let breadcrumbs = breadcrumbs_to_html(
&sources.config,
&sources.navigation_map,
&parsed_tree.root_key,
); );
let target_path =
paths.target_dir.join(&tree_path).with_extension("html");
let file_id = {
let file = treehouse.files.get(parsed_tree.file_id).unwrap();
let filename = file.name().clone();
let Source::Tree {
input,
tree_path,
target_path,
revision_info,
} = file.source().clone()
else {
panic!(".tree files must have Tree sources")
};
treehouse.add_file(
filename,
Source::Tree {
input,
tree_path,
target_path: target_path.clone(),
revision_info: RevisionInfo {
is_latest: true,
..revision_info
},
},
)
};
parsed_trees.push(ParsedTree {
root_key,
target_path,
file_id,
..parsed_tree.clone()
})
}
parsed_trees.push(parsed_tree);
}
}
}
}
Ok((treehouse, parsed_trees, diagnostics))
}
fn generate_all_files(
&self,
treehouse: &mut Treehouse,
config: &Config,
paths: &Paths<'_>,
root_fs: &dyn ReadFilesystem,
navigation_map: &NavigationMap,
parsed_trees: Vec<ParsedTree>,
) -> anyhow::Result<Vec<Diagnostic<FileId>>> {
let mut global_diagnostics = vec![];
let mut handlebars: Handlebars<'static> = Handlebars::new();
Self::init_handlebars(&mut handlebars, paths, config);
let mut template_file_ids = HashMap::new();
for entry in WalkDir::new(paths.template_dir) {
let entry = entry.context("cannot read directory entry")?;
let path = entry.path();
if !entry.file_type().is_dir() && path.extension() == Some(OsStr::new("hbs")) {
let relative_path = path
.strip_prefix(paths.template_dir)?
.to_string_lossy()
.into_owned()
.replace('\\', "/");
let file_id = Self::register_template(
&mut handlebars,
treehouse,
&mut global_diagnostics,
&relative_path,
path,
)?;
template_file_ids.insert(relative_path, file_id);
}
}
let import_map =
ImportMap::generate(config.site.clone(), &config.build.javascript.import_roots);
let base_template_data = BaseTemplateData {
config,
import_map: serde_json::to_string_pretty(&import_map)
.expect("import map should be serializable to JSON"),
season: Season::current(),
};
std::fs::create_dir_all(paths.template_target_dir)?;
for (name, &file_id) in &template_file_ids {
let filename = name.rsplit_once('/').unwrap_or(("", name)).1;
if !filename.starts_with('_') {
let templated_html = match handlebars.render(name, &base_template_data) {
Ok(html) => html,
Err(error) => {
Self::wrangle_handlebars_error_into_diagnostic(
treehouse,
&mut global_diagnostics,
file_id,
error.line_no,
error.column_no,
error.desc,
)?;
continue;
}
};
std::fs::write(
paths.template_target_dir.join(name).with_extension("html"),
templated_html,
)?;
}
}
for parsed_tree in parsed_trees {
debug!("generating: {:?}", parsed_tree.target_path);
let breadcrumbs = breadcrumbs_to_html(config, navigation_map, &parsed_tree.root_key);
let mut tree = String::new(); let mut tree = String::new();
// Temporarily steal the tree out of the treehouse. let roots = sources
let roots = treehouse .treehouse
.roots .roots
.remove(&parsed_tree.root_key) .get(&parsed_tree.root_key)
.expect("tree should have been added to the treehouse"); .expect("tree should have been added to the treehouse");
branches_to_html( branches_to_html(
&mut tree, &mut tree,
treehouse, &sources.treehouse,
config, &sources.config,
root_fs, dirs,
paths,
parsed_tree.file_id, parsed_tree.file_id,
&roots.branches, &roots.branches,
); );
let revision = treehouse let base_template_data = BaseTemplateData {
.revision_info(parsed_tree.file_id) config: &sources.config,
.expect(".tree files should have Tree sources"); import_map: serde_json::to_string_pretty(&sources.import_map)
.expect("import map should be serializable to JSON"),
season: Season::current(),
};
let template_data = PageTemplateData { let template_data = PageTemplateData {
base: &base_template_data, base: &base_template_data,
page: Page { page: Page {
@ -495,223 +249,203 @@ impl Generator {
.thumbnail .thumbnail
.as_ref() .as_ref()
.map(|thumbnail| Thumbnail { .map(|thumbnail| Thumbnail {
url: config url: sources.config.pic_url(&*dirs.pics, &thumbnail.id),
.pic_url(&root_fs.cd(VPathBuf::new("static/pics")), &thumbnail.id),
alt: thumbnail.alt.clone(), alt: thumbnail.alt.clone(),
}), }),
scripts: roots.attributes.scripts.clone(), scripts: roots.attributes.scripts.clone(),
styles: roots.attributes.styles.clone(), styles: roots.attributes.styles.clone(),
breadcrumbs, breadcrumbs,
tree_path: treehouse tree_path: sources
.treehouse
.tree_path(parsed_tree.file_id) .tree_path(parsed_tree.file_id)
.map(|s| s.to_owned()), .map(|s| s.to_owned()),
tree, tree,
revision_url: format!("{}/{}", config.site, parsed_tree.root_key),
source_url: format!(
"{}/{}/{}",
config.commit_base_url, revision.commit, parsed_tree.source_path,
),
history_url: format!("{}/h/{}", config.site, parsed_tree.tree_path),
revision: revision.clone(),
}, },
}; };
let mut template_name = roots let template_name = roots
.attributes .attributes
.template .template
.clone() .clone()
.unwrap_or_else(|| "_tree.hbs".into()); .unwrap_or_else(|| "_tree.hbs".into());
if !template_file_ids.contains_key(&template_name) { ensure!(
template_name = "_tree.hbs".into(); handlebars.has_template(&template_name),
"template {template_name} does not exist"
);
handlebars
.render(&template_name, &template_data)
.context("template rendering failed")
} }
// Reinsert the stolen roots. fn generate_tree_or_error(
treehouse.roots.insert(parsed_tree.root_key, roots); sources: &Sources,
dirs: &Dirs,
let templated_html = match handlebars.render(&template_name, &template_data) { handlebars: &Handlebars,
parsed_tree: &ParsedTree,
) -> String {
match generate_tree(sources, dirs, handlebars, parsed_tree) {
Ok(html) => html, Ok(html) => html,
Err(error) => { Err(error) => format!("error: {error:?}"),
Self::wrangle_handlebars_error_into_diagnostic(
treehouse,
// TODO: This should dump diagnostics out somewhere else.
&mut global_diagnostics,
template_file_ids[&template_name],
error.line_no,
error.column_no,
error.desc,
)?;
continue;
}
};
std::fs::create_dir_all(
parsed_tree
.target_path
.parent()
.expect("there should be a parent directory to generate files into"),
)?;
std::fs::write(parsed_tree.target_path, templated_html)?;
}
for (path, page_history) in &self.history.by_page {
let tree_path = path
.strip_prefix("content/")
.unwrap_or(path)
.strip_suffix(".tree")
.unwrap_or(path);
let target_path = paths
.target_dir
.join("h")
.join(path.strip_prefix("content/").unwrap_or(path))
.with_extension("html");
std::fs::create_dir_all(target_path.parent().unwrap())?;
let template_data = HistoryTemplateData {
base: &base_template_data,
page: HistoryPage {
title: format!("page history: {tree_path}"),
commits: page_history
.revisions
.iter()
.enumerate()
.map(|(i, revision)| Commit {
revision_number: page_history.revisions.len() - i,
hash: revision.commit_oid.to_string(),
hash_short: revision.commit_short(),
summary: self
.history
.commits
.get(&revision.commit_oid)
.map(|c| c.summary.as_str())
.unwrap_or("<no summary available>")
.to_owned(),
body: self
.history
.commits
.get(&revision.commit_oid)
.map(|c| c.body.as_str())
.unwrap_or("<no body available>")
.to_owned(),
})
.collect(),
tree_path: tree_path.to_owned(),
is_history: true,
},
};
let templated_html = match handlebars.render("_history.hbs", &template_data) {
Ok(html) => html,
Err(error) => {
Self::wrangle_handlebars_error_into_diagnostic(
treehouse,
// TODO: This should dump diagnostics out somewhere else.
&mut global_diagnostics,
template_file_ids["_history.hbs"],
error.line_no,
error.column_no,
error.desc,
)?;
continue;
}
};
std::fs::write(target_path, templated_html)?;
}
Ok(global_diagnostics)
} }
} }
pub fn generate( pub struct Sources {
paths: &Paths<'_>, pub config: Config,
src: &dyn ReadFilesystem, pub treehouse: Treehouse,
latest_revision: LatestRevision, pub parsed_trees: HashMap<VPathBuf, ParsedTree>,
) -> anyhow::Result<(Config, Treehouse)> { pub navigation_map: NavigationMap,
let start = Instant::now(); pub import_map: ImportMap,
}
impl Sources {
pub fn load(dirs: &Dirs) -> anyhow::Result<Self> {
info!("loading config"); info!("loading config");
let mut config: Config = toml_edit::de::from_str( let mut config: Config = toml_edit::de::from_str(
&src.content(VPath::new("treehouse.toml")) &dirs
.root
.content(VPath::new("treehouse.toml"))
.map(String::from_utf8) .map(String::from_utf8)
.ok_or_else(|| anyhow!("config file does not exist"))??, .ok_or_else(|| anyhow!("config file does not exist"))??,
) )
.context("failed to deserialize config")?; .context("failed to deserialize config")?;
config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site); config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
config.autopopulate_emoji(&src.cd(VPathBuf::new("static/emoji")))?; config.autopopulate_emoji(&*dirs.emoji)?;
config.autopopulate_pics(&src.cd(VPathBuf::new("static/pic")))?; config.autopopulate_pics(&*dirs.pics)?;
config.load_syntaxes(&paths.static_dir.join("syntax"))?; config.load_syntaxes(&*dirs.syntax)?;
// TODO: WriteFilesystem, such that we can write into the target directory? info!("parsing tree files");
let (treehouse, parsed_trees) = parse_trees(&config, dirs)?;
info!("cleaning target directory"); info!("constructing navigation map");
let _ = std::fs::remove_dir_all(paths.target_dir); let navigation_map = NavigationMap::build(&treehouse, "index");
std::fs::create_dir_all(paths.target_dir)?;
info!("copying static directory to target directory"); info!("constructing import map");
copy_dir(paths.static_dir, paths.target_dir.join("static"))?; let import_map = ImportMap::generate(
&config.site,
&Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
&config.build.javascript.import_roots,
);
info!("getting history"); Ok(Sources {
let git = git2::Repository::open(".")?; config,
let history = History::get(&git)?; treehouse,
info!("parsing tree");
let mut generator = Generator {
tree_files: vec![],
git,
history,
latest_revision,
};
generator.add_directory_rec(paths.content_dir)?;
let (mut treehouse, parsed_trees, diagnostics) = generator.parse_trees(&config, paths)?;
report_diagnostics(&treehouse.files, &diagnostics)?;
if has_errors(&diagnostics) {
bail!("diagnostics emitted during parsing");
}
// NOTE: The navigation map is a legacy feature that is lazy-loaded when fragment-based
// navigation is used.
// I couldn't be bothered with adding it to the import map since fragment-based navigation is
// only used on very old links. Adding caching to the navigation map is probably not worth it.
info!("generating navigation map");
let navigation_map = build_navigation_map(&treehouse, "index");
std::fs::write(
paths.target_dir.join("navmap.js"),
navigation_map.to_javascript(),
)?;
info!("generating standalone pages");
let diagnostics = generator.generate_all_files(
&mut treehouse,
&config,
paths,
src,
&navigation_map,
parsed_trees, parsed_trees,
)?; navigation_map,
report_diagnostics(&treehouse.files, &diagnostics)?; import_map,
})
}
}
info!("generating change history pages"); /// Acceleration structure for `dir` operations on [`TreehouseDir`]s.
#[derive(Debug, Default)]
struct DirIndex {
full_path: VPathBuf,
children: HashMap<VPathBuf, DirIndex>,
}
let duration = start.elapsed(); impl DirIndex {
info!("generation done in {duration:?}"); pub fn new<'a>(paths: impl Iterator<Item = &'a VPath>) -> Self {
let mut root = DirIndex::default();
if !has_errors(&diagnostics) { for path in paths {
Ok((config, treehouse)) let mut parent = &mut root;
let mut full_path = VPath::ROOT.to_owned();
for segment in path.segments() {
full_path.push(segment);
let child = parent
.children
.entry(segment.to_owned())
.or_insert_with(|| DirIndex {
full_path: full_path.clone(),
children: HashMap::new(),
});
parent = child;
}
}
root
}
}
struct TreehouseDir {
dirs: Arc<Dirs>,
sources: Arc<Sources>,
dir_index: DirIndex,
handlebars: Handlebars<'static>,
}
impl TreehouseDir {
fn new(dirs: Arc<Dirs>, sources: Arc<Sources>, dir_index: DirIndex) -> Self {
let mut handlebars = create_handlebars(&sources.config.site, dirs.static_.clone());
load_templates(&mut handlebars, &dirs.template);
Self {
dirs,
sources,
dir_index,
handlebars,
}
}
}
impl Dir for TreehouseDir {
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
let mut index = &self.dir_index;
for component in path.segments() {
if let Some(child) = index.children.get(component) {
index = child;
} else { } else {
bail!("generation errors occurred; diagnostics were emitted with detailed descriptions"); // There cannot possibly be any entries under an invalid path.
// Bail early.
return vec![];
} }
} }
pub fn regenerate_or_report_error( index
paths: &Paths<'_>, .children
src: &dyn ReadFilesystem, .values()
latest_revision: LatestRevision, .map(|child| DirEntry {
) -> anyhow::Result<(Config, Treehouse)> { path: child.full_path.clone(),
info!("regenerating site content"); })
.collect()
}
let result = generate(paths, src, latest_revision); fn content(&self, path: &VPath) -> Option<Vec<u8>> {
if let Err(e) = &result { debug!("content({path})");
error!("{e:?}");
let path = if path.is_root() {
VPath::new_const("index")
} else {
path
};
let mut path = path.to_owned();
if path.extension() == Some("html") {
path.set_extension("");
} }
result
self.sources.parsed_trees.get(&path).map(|parsed_tree| {
generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, parsed_tree).into()
})
}
fn content_version(&self, _path: &VPath) -> Option<String> {
None
}
}
impl fmt::Debug for TreehouseDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("TreehouseDir")
}
}
pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
let mut root = MemDir::new();
root.add(VPath::new("static"), dirs.static_.clone());
let dir_index = DirIndex::new(sources.parsed_trees.keys().map(|x| &**x));
let tree_view = TreehouseDir::new(dirs, sources, dir_index);
Overlay::new(tree_view.to_dyn(), root.to_dyn()).to_dyn()
} }

View file

@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
use crate::vfs::{self, DynDir, VPath};
pub struct DirHelper {
site: String,
dir: DynDir,
}
impl DirHelper {
pub fn new(site: &str, dir: DynDir) -> Self {
Self {
site: site.to_owned(),
dir,
}
}
}
impl HelperDef for DirHelper {
fn call_inner<'reg: 'rc, 'rc>(
&self,
h: &Helper<'reg, 'rc>,
_: &'reg Handlebars<'reg>,
_: &'rc Context,
_: &mut RenderContext<'reg, 'rc>,
) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
if let Some(path) = h.param(0).and_then(|v| v.value().as_str()) {
let vpath = VPath::try_new(path).map_err(|e| RenderError::new(e.to_string()))?;
let url = vfs::url(&self.site, &self.dir, vpath)
.ok_or_else(|| RenderError::new("path is not anchored anywhere"))?;
Ok(ScopedJson::Derived(Value::String(url)))
} else {
Err(RenderError::new("missing path string"))
}
}
}

View file

@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
use crate::vfs::{DynDir, VPath};
pub struct IncludeStaticHelper {
dir: DynDir,
}
impl IncludeStaticHelper {
pub fn new(dir: DynDir) -> Self {
Self { dir }
}
}
impl HelperDef for IncludeStaticHelper {
fn call_inner<'reg: 'rc, 'rc>(
&self,
h: &Helper<'reg, 'rc>,
_: &'reg Handlebars<'reg>,
_: &'rc Context,
_: &mut RenderContext<'reg, 'rc>,
) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
if let Some(path) = h.param(0).and_then(|v| v.value().as_str()) {
let vpath = VPath::try_new(path).map_err(|e| RenderError::new(e.to_string()))?;
let url = String::from_utf8(
self.dir
.content(vpath)
.ok_or_else(|| RenderError::new("file does not exist"))?,
)
.map_err(|_| RenderError::new("included file does not contain UTF-8 text"))?;
Ok(ScopedJson::Derived(Value::String(url)))
} else {
Err(RenderError::new("missing path string"))
}
}
}

View file

@ -17,10 +17,10 @@ use jotdown::OrderedListNumbering::*;
use jotdown::SpanLinkType; use jotdown::SpanLinkType;
use crate::config::Config; use crate::config::Config;
use crate::dirs::Dirs;
use crate::state::FileId; use crate::state::FileId;
use crate::state::Treehouse; use crate::state::Treehouse;
use crate::vfs; use crate::vfs;
use crate::vfs::ReadFilesystem;
use super::highlight::highlight; use super::highlight::highlight;
@ -28,10 +28,9 @@ use super::highlight::highlight;
pub struct Renderer<'a> { pub struct Renderer<'a> {
pub config: &'a Config, pub config: &'a Config,
pub emoji_fs: &'a dyn ReadFilesystem, pub dirs: &'a Dirs,
pub pics_fs: &'a dyn ReadFilesystem,
pub treehouse: &'a mut Treehouse, pub treehouse: &'a Treehouse,
pub file_id: FileId, pub file_id: FileId,
pub page_id: String, pub page_id: String,
} }
@ -376,7 +375,7 @@ impl<'a> Writer<'a> {
let pic_url = self let pic_url = self
.renderer .renderer
.config .config
.pic_url(self.renderer.pics_fs, placeholder_pic_id); .pic_url(&*self.renderer.dirs.pics, placeholder_pic_id);
write_attr(&pic_url, out); write_attr(&pic_url, out);
out.push('"'); out.push('"');
@ -563,7 +562,12 @@ impl<'a> Writer<'a> {
out.push_str(r#"">"#) out.push_str(r#"">"#)
} }
let url = vfs::url(&self.renderer.config.site, self.renderer.emoji_fs, vpath); let url = vfs::url(
&self.renderer.config.site,
&*self.renderer.dirs.emoji,
vpath,
)
.expect("emoji directory is not anchored anywhere");
// TODO: this could do with better alt text // TODO: this could do with better alt text
write!( write!(
@ -644,7 +648,7 @@ impl<'a> Writer<'a> {
) )
}), }),
"page" => Some(config.page_url(linked)), "page" => Some(config.page_url(linked)),
"pic" => Some(config.pic_url(self.renderer.pics_fs, linked)), "pic" => Some(config.pic_url(&*self.renderer.dirs.pics, linked)),
_ => None, _ => None,
}) })
} }

View file

@ -1,50 +1,39 @@
use std::collections::HashMap; use std::collections::HashMap;
use serde::Serialize;
use crate::{ use crate::{
state::Treehouse, state::Treehouse,
tree::{attributes::Content, SemaBranchId}, tree::{attributes::Content, SemaBranchId},
}; };
#[derive(Debug, Clone, Default, Serialize)] #[derive(Debug, Clone, Default)]
struct NavigationMapBuilder {
stack: Vec<String>,
navigation_map: NavigationMap,
}
impl NavigationMapBuilder {
fn enter_tree(&mut self, tree: String) {
self.stack.push(tree.clone());
self.navigation_map.paths.insert(tree, self.stack.clone());
}
fn exit_tree(&mut self) {
self.stack.pop();
}
fn finish(self) -> NavigationMap {
self.navigation_map
}
}
#[derive(Debug, Clone, Default)]
pub struct NavigationMap { pub struct NavigationMap {
/// Tells you which pages need to be opened to get to the key. /// Tells you which pages need to be opened to get to the key.
pub paths: HashMap<String, Vec<String>>, pub paths: HashMap<String, Vec<String>>,
} }
impl NavigationMap { impl NavigationMap {
pub fn to_javascript(&self) -> String { pub fn build(treehouse: &Treehouse, root_tree_path: &str) -> Self {
format!(
"export const navigationMap = {};",
serde_json::to_string(&self.paths)
.expect("serialization of the navigation map should not fail")
)
}
}
#[derive(Debug, Clone, Default)]
pub struct NavigationMapBuilder {
stack: Vec<String>,
navigation_map: NavigationMap,
}
impl NavigationMapBuilder {
pub fn enter_tree(&mut self, tree: String) {
self.stack.push(tree.clone());
self.navigation_map.paths.insert(tree, self.stack.clone());
}
pub fn exit_tree(&mut self) {
self.stack.pop();
}
pub fn finish(self) -> NavigationMap {
self.navigation_map
}
}
pub fn build_navigation_map(treehouse: &Treehouse, root_tree_path: &str) -> NavigationMap {
let mut builder = NavigationMapBuilder::default(); let mut builder = NavigationMapBuilder::default();
fn rec_branch( fn rec_branch(
@ -80,3 +69,4 @@ pub fn build_navigation_map(treehouse: &Treehouse, root_tree_path: &str) -> Navi
builder.finish() builder.finish()
} }
}

View file

@ -3,25 +3,23 @@ use std::{borrow::Cow, fmt::Write};
use treehouse_format::pull::BranchKind; use treehouse_format::pull::BranchKind;
use crate::{ use crate::{
cli::Paths,
config::Config, config::Config,
dirs::Dirs,
html::EscapeAttribute, html::EscapeAttribute,
state::{FileId, Treehouse}, state::{FileId, Treehouse},
tree::{ tree::{
attributes::{Content, Stage}, attributes::{Content, Stage},
mini_template, SemaBranchId, mini_template, SemaBranchId,
}, },
vfs::{CdExt, ReadFilesystem, VPathBuf},
}; };
use super::{djot, EscapeHtml}; use super::{djot, EscapeHtml};
pub fn branch_to_html( pub fn branch_to_html(
s: &mut String, s: &mut String,
treehouse: &mut Treehouse, treehouse: &Treehouse,
config: &Config, config: &Config,
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges dirs: &Dirs,
paths: &Paths<'_>,
file_id: FileId, file_id: FileId,
branch_id: SemaBranchId, branch_id: SemaBranchId,
) { ) {
@ -116,13 +114,7 @@ pub fn branch_to_html(
} }
if branch.attributes.template { if branch.attributes.template {
final_markup = mini_template::render( final_markup = mini_template::render(config, treehouse, dirs, &final_markup);
config,
treehouse,
paths,
&root_fs.cd(VPathBuf::new("static/pics")),
&final_markup,
);
} }
s.push_str("<th-bc>"); s.push_str("<th-bc>");
@ -137,8 +129,7 @@ pub fn branch_to_html(
.to_owned(), .to_owned(),
config, config,
emoji_fs: &root_fs.cd(VPathBuf::new("static/emoji")), dirs,
pics_fs: &root_fs.cd(VPathBuf::new("static/pics")),
treehouse, treehouse,
file_id, file_id,
@ -195,7 +186,7 @@ pub fn branch_to_html(
let num_children = branch.children.len(); let num_children = branch.children.len();
for i in 0..num_children { for i in 0..num_children {
let child_id = treehouse.tree.branch(branch_id).children[i]; let child_id = treehouse.tree.branch(branch_id).children[i];
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child_id); branch_to_html(s, treehouse, config, dirs, file_id, child_id);
} }
s.push_str("</ul>"); s.push_str("</ul>");
} }
@ -209,16 +200,15 @@ pub fn branch_to_html(
pub fn branches_to_html( pub fn branches_to_html(
s: &mut String, s: &mut String,
treehouse: &mut Treehouse, treehouse: &Treehouse,
config: &Config, config: &Config,
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges dirs: &Dirs,
paths: &Paths<'_>,
file_id: FileId, file_id: FileId,
branches: &[SemaBranchId], branches: &[SemaBranchId],
) { ) {
s.push_str("<ul>"); s.push_str("<ul>");
for &child in branches { for &child in branches {
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child); branch_to_html(s, treehouse, config, dirs, file_id, child);
} }
s.push_str("</ul>"); s.push_str("</ul>");
} }

View file

@ -1,11 +1,9 @@
use std::{ffi::OsStr, path::PathBuf}; use std::ops::ControlFlow;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::warn;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use walkdir::WalkDir;
use crate::static_urls::StaticUrls; use crate::vfs::{self, Dir, VPathBuf};
#[derive(Debug, Clone, Serialize)] #[derive(Debug, Clone, Serialize)]
pub struct ImportMap { pub struct ImportMap {
@ -15,49 +13,30 @@ pub struct ImportMap {
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ImportRoot { pub struct ImportRoot {
pub name: String, pub name: String,
pub path: String, pub path: VPathBuf,
} }
impl ImportMap { impl ImportMap {
pub fn generate(base_url: String, import_roots: &[ImportRoot]) -> Self { pub fn generate(site: &str, root: &dyn Dir, import_roots: &[ImportRoot]) -> Self {
let mut import_map = ImportMap { let mut import_map = ImportMap {
imports: IndexMap::new(), imports: IndexMap::new(),
}; };
for root in import_roots { for import_root in import_roots {
let static_urls = StaticUrls::new( vfs::walk_dir_rec(root, &import_root.path, &mut |path| {
PathBuf::from(&root.path), if path.extension() == Some("js") {
format!("{base_url}/{}", root.path), import_map.imports.insert(
format!(
"{}/{}",
import_root.name,
path.strip_prefix(&import_root.path).unwrap_or(path)
),
vfs::url(site, root, path)
.expect("import directory is not anchored anywhere"),
); );
for entry in WalkDir::new(&root.path) {
let entry = match entry {
Ok(entry) => entry,
Err(error) => {
warn!("directory walk failed: {error}");
continue;
}
};
if !entry.file_type().is_dir() && entry.path().extension() == Some(OsStr::new("js"))
{
let normalized_path = entry
.path()
.strip_prefix(&root.path)
.unwrap_or(entry.path())
.to_string_lossy()
.replace('\\', "/");
match static_urls.get(&normalized_path) {
Ok(url) => {
import_map
.imports
.insert(format!("{}/{normalized_path}", root.name), url);
}
Err(error) => {
warn!("could not get static url for {normalized_path}: {error}")
}
}
}
} }
ControlFlow::Continue(())
});
} }
import_map.imports.sort_unstable_keys(); import_map.imports.sort_unstable_keys();

View file

@ -1,28 +0,0 @@
use std::path::PathBuf;
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
pub struct IncludeStatic {
pub base_dir: PathBuf,
}
impl HelperDef for IncludeStatic {
fn call_inner<'reg: 'rc, 'rc>(
&self,
helper: &Helper<'reg, 'rc>,
_: &'reg Handlebars<'reg>,
_: &'rc Context,
_: &mut RenderContext<'reg, 'rc>,
) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
if let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) {
return Ok(ScopedJson::Derived(Value::String(
std::fs::read_to_string(self.base_dir.join(param)).map_err(|error| {
RenderError::new(format!("cannot read static asset {param}: {error}"))
})?,
)));
}
Err(RenderError::new("asset path must be provided"))
}
}

View file

@ -1,15 +1,13 @@
pub mod cli; pub mod cli;
pub mod config; pub mod config;
pub mod dirs;
pub mod fun; pub mod fun;
pub mod generate; pub mod generate;
pub mod history; pub mod history;
pub mod html; pub mod html;
pub mod import_map; pub mod import_map;
pub mod include_static;
pub mod parse; pub mod parse;
pub mod paths; pub mod paths;
pub mod state; pub mod state;
pub mod static_urls;
pub mod templater;
pub mod tree; pub mod tree;
pub mod vfs; pub mod vfs;

View file

@ -1,76 +1,78 @@
use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
use std::{fs, path::Path}; use std::sync::Arc;
use anyhow::Context;
use clap::Parser; use clap::Parser;
use log::error; use log::error;
use treehouse::generate::{regenerate_or_report_error, LatestRevision}; use treehouse::cli::serve::serve;
use treehouse::vfs::PhysicalDir; use treehouse::dirs::Dirs;
use treehouse::vfs::{AnchoredAtExt, VPathBuf}; use treehouse::generate::{self, Sources};
use treehouse::vfs::asynch::AsyncDir;
use treehouse::vfs::{AnchoredAtExt, DynDir, ToDynDir, VPathBuf};
use treehouse::vfs::{Cd, PhysicalDir};
use treehouse::{ use treehouse::{
cli::{ cli::{
fix::{fix_all_cli, fix_file_cli}, fix::{fix_all_cli, fix_file_cli},
serve::serve,
wc::wc_cli, wc::wc_cli,
Command, Paths, ProgramArgs, Command, ProgramArgs,
}, },
vfs::{BufferedFile, MountPoints, ReadFilesystem, VPath}, vfs::{BufferedFile, MemDir, VPath},
}; };
fn vfs_sources() -> anyhow::Result<impl ReadFilesystem> { fn vfs_sources() -> anyhow::Result<DynDir> {
let mut root = MountPoints::new(); let mut root = MemDir::new();
root.add( root.add(
VPath::new("treehouse.toml"), VPath::new("treehouse.toml"),
Box::new(BufferedFile::new(fs::read("treehouse.toml")?)), BufferedFile::new(fs::read("treehouse.toml")?).to_dyn(),
); );
root.add( root.add(
VPath::new("static"), VPath::new("static"),
Box::new(PhysicalDir::new(PathBuf::from("static")).anchored_at(VPathBuf::new("static"))), PhysicalDir::new(PathBuf::from("static"))
.anchored_at(VPathBuf::new("static"))
.to_dyn(),
); );
root.add( root.add(
VPath::new("template"), VPath::new("template"),
Box::new(PhysicalDir::new(PathBuf::from("template"))), PhysicalDir::new(PathBuf::from("template")).to_dyn(),
); );
root.add( root.add(
VPath::new("content"), VPath::new("content"),
Box::new(PhysicalDir::new(PathBuf::from("content"))), PhysicalDir::new(PathBuf::from("content")).to_dyn(),
); );
Ok(root) Ok(root.to_dyn())
} }
async fn fallible_main() -> anyhow::Result<()> { async fn fallible_main() -> anyhow::Result<()> {
let args = ProgramArgs::parse(); let args = ProgramArgs::parse();
let paths = Paths {
target_dir: Path::new("target/site"),
template_target_dir: Path::new("target/site/static/html"),
config_file: Path::new("treehouse.toml"),
static_dir: Path::new("static"),
template_dir: Path::new("template"),
content_dir: Path::new("content"),
};
let src = vfs_sources()?; let src = vfs_sources()?;
let dirs = Arc::new(Dirs {
root: src.clone(),
content: Cd::new(src.clone(), VPathBuf::new("content")).to_dyn(),
static_: Cd::new(src.clone(), VPathBuf::new("static")).to_dyn(),
template: Cd::new(src.clone(), VPathBuf::new("template")).to_dyn(),
pics: Cd::new(src.clone(), VPathBuf::new("static/pics")).to_dyn(),
emoji: Cd::new(src.clone(), VPathBuf::new("static/emoji")).to_dyn(),
syntax: Cd::new(src.clone(), VPathBuf::new("static/syntax")).to_dyn(),
});
match args.command { match args.command {
Command::Serve { Command::Serve {
generate: generate_args, generate: _,
serve: serve_args, serve: serve_args,
} => { } => {
let latest_revision = match generate_args.commits_only { let sources = Arc::new(Sources::load(&dirs).context("failed to load sources")?);
true => LatestRevision::LatestCommit, let target = generate::target(dirs, sources.clone());
false => LatestRevision::WorkingTree, serve(sources, AsyncDir::new(target), serve_args.port).await?;
};
let (config, treehouse) = regenerate_or_report_error(&paths, &src, latest_revision)?;
serve(config, treehouse, &paths, serve_args.port).await?;
} }
Command::Fix(fix_args) => fix_file_cli(fix_args)?, Command::Fix(fix_args) => fix_file_cli(fix_args, &*dirs.content)?.apply().await?,
Command::FixAll(fix_args) => fix_all_cli(fix_args, &paths)?, Command::FixAll(fix_args) => fix_all_cli(fix_args, &*dirs.content)?.apply().await?,
Command::Wc(wc_args) => wc_cli(paths.content_dir, wc_args)?, Command::Wc(wc_args) => wc_cli(&dirs.content, wc_args)?,
Command::Ulid => { Command::Ulid => {
let mut rng = rand::thread_rng(); let mut rng = rand::thread_rng();

View file

@ -1,4 +1,4 @@
use std::{collections::HashMap, ops::Range, path::PathBuf}; use std::{collections::HashMap, ops::Range};
use anyhow::Context; use anyhow::Context;
use codespan_reporting::{ use codespan_reporting::{
@ -6,26 +6,19 @@ use codespan_reporting::{
files::SimpleFiles, files::SimpleFiles,
term::termcolor::{ColorChoice, StandardStream}, term::termcolor::{ColorChoice, StandardStream},
}; };
use serde::Serialize;
use ulid::Ulid; use ulid::Ulid;
use crate::tree::{SemaBranchId, SemaRoots, SemaTree}; use crate::{
tree::{SemaBranchId, SemaRoots, SemaTree},
#[derive(Debug, Clone, Serialize)] vfs::VPathBuf,
pub struct RevisionInfo { };
pub is_latest: bool,
pub number: usize,
pub commit: String,
pub commit_short: String,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Source { pub enum Source {
Tree { Tree {
input: String, input: String,
tree_path: String, tree_path: String,
target_path: PathBuf, target_path: VPathBuf,
revision_info: RevisionInfo,
}, },
Other(String), Other(String),
} }
@ -103,13 +96,6 @@ impl Treehouse {
} }
} }
pub fn revision_info(&self, file_id: FileId) -> Option<&RevisionInfo> {
match self.source(file_id) {
Source::Tree { revision_info, .. } => Some(revision_info),
Source::Other(_) => None,
}
}
pub fn next_missingno(&mut self) -> Ulid { pub fn next_missingno(&mut self) -> Ulid {
self.missingno_generator self.missingno_generator
.generate() .generate()

View file

@ -1,89 +0,0 @@
use std::{
collections::HashMap,
fs::File,
io::{self, BufReader},
path::PathBuf,
sync::{Mutex, RwLock},
};
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
pub struct StaticUrls {
base_dir: PathBuf,
base_url: String,
// Really annoying that we have to use an RwLock for this. We only ever generate in a
// single-threaded environment.
// Honestly it would be a lot more efficient if Handlebars just assumed single-threadedness
// and required you to clone it over to different threads.
// Stuff like this is why I really want to implement my own templating engine...
hash_cache: RwLock<HashMap<String, String>>,
missing_files: Mutex<Vec<MissingFile>>,
}
pub struct MissingFile {
pub path: String,
}
impl StaticUrls {
pub fn new(base_dir: PathBuf, base_url: String) -> Self {
Self {
base_dir,
base_url,
hash_cache: RwLock::new(HashMap::new()),
missing_files: Mutex::new(vec![]),
}
}
pub fn get(&self, filename: &str) -> Result<String, io::Error> {
let hash_cache = self.hash_cache.read().unwrap();
if let Some(cached) = hash_cache.get(filename) {
return Ok(cached.to_owned());
}
drop(hash_cache);
let mut hasher = blake3::Hasher::new();
let file = BufReader::new(File::open(self.base_dir.join(filename))?);
hasher.update_reader(file)?;
// NOTE: Here the hash is truncated to 8 characters. This is fine, because we don't
// care about security here - only detecting changes in files.
let hash = format!(
"{}/{}?cache=b3-{}",
self.base_url,
filename,
&hasher.finalize().to_hex()[0..8]
);
{
let mut hash_cache = self.hash_cache.write().unwrap();
hash_cache.insert(filename.to_owned(), hash.clone());
}
Ok(hash)
}
pub fn take_missing_files(&self) -> Vec<MissingFile> {
std::mem::take(&mut self.missing_files.lock().unwrap())
}
}
impl HelperDef for StaticUrls {
fn call_inner<'reg: 'rc, 'rc>(
&self,
helper: &Helper<'reg, 'rc>,
_: &'reg Handlebars<'reg>,
_: &'rc Context,
_: &mut RenderContext<'reg, 'rc>,
) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
if let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) {
return Ok(ScopedJson::Derived(Value::String(
self.get(param).unwrap_or_else(|_| {
self.missing_files.lock().unwrap().push(MissingFile {
path: param.to_owned(),
});
format!("{}/{}", self.base_url, param)
}),
)));
}
Err(RenderError::new("asset path must be provided"))
}
}

View file

@ -1,13 +0,0 @@
use handlebars::Handlebars;
pub struct Templater {
handlebars: Handlebars<'static>,
}
impl Templater {
pub fn new() -> Self {
Self {
handlebars: Handlebars::new(),
}
}
}

View file

@ -163,13 +163,7 @@ impl SemaBranch {
) -> SemaBranchId { ) -> SemaBranchId {
let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch); let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch);
let revision_info = treehouse let named_id = attributes.id.to_owned();
.revision_info(file_id)
.expect(".tree files must have Tree-type sources");
let named_id = match revision_info.is_latest {
true => attributes.id.to_owned(),
false => format!("{}@{}", attributes.id, revision_info.commit_short),
};
let html_id = format!( let html_id = format!(
"{}:{}", "{}:{}",
treehouse.tree_path(file_id).unwrap(), treehouse.tree_path(file_id).unwrap(),

View file

@ -7,7 +7,13 @@
use std::fmt::Write; use std::fmt::Write;
use std::ops::Range; use std::ops::Range;
use crate::{cli::Paths, config::Config, html::EscapeHtml, state::Treehouse, vfs::ReadFilesystem}; use crate::{
config::Config,
dirs::Dirs,
html::EscapeHtml,
state::Treehouse,
vfs::{Dir, VPath},
};
struct Lexer<'a> { struct Lexer<'a> {
input: &'a str, input: &'a str,
@ -148,13 +154,7 @@ impl Renderer<'_> {
self.output.push_str(&self.lexer.input[token.range.clone()]); self.output.push_str(&self.lexer.input[token.range.clone()]);
} }
fn render( fn render(&mut self, config: &Config, treehouse: &Treehouse, dirs: &Dirs) {
&mut self,
config: &Config,
treehouse: &Treehouse,
paths: &Paths<'_>,
pics_fs: &dyn ReadFilesystem,
) {
let kind_of = |token: &Token| token.kind; let kind_of = |token: &Token| token.kind;
while let Some(token) = self.lexer.next() { while let Some(token) = self.lexer.next() {
@ -171,8 +171,7 @@ impl Renderer<'_> {
match Self::render_template( match Self::render_template(
config, config,
treehouse, treehouse,
pics_fs, dirs,
paths,
self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(), self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(),
) { ) {
Ok(s) => match escaping { Ok(s) => match escaping {
@ -199,31 +198,27 @@ impl Renderer<'_> {
fn render_template( fn render_template(
config: &Config, config: &Config,
_treehouse: &Treehouse, _treehouse: &Treehouse,
pics_fs: &dyn ReadFilesystem, dirs: &Dirs,
paths: &Paths<'_>,
template: &str, template: &str,
) -> Result<String, InvalidTemplate> { ) -> Result<String, InvalidTemplate> {
let (function, arguments) = template.split_once(' ').unwrap_or((template, "")); let (function, arguments) = template.split_once(' ').unwrap_or((template, ""));
match function { match function {
"pic" => Ok(config.pic_url(pics_fs, arguments)), "pic" => Ok(config.pic_url(&*dirs.pics, arguments)),
"include_static" => std::fs::read_to_string(paths.static_dir.join(arguments)) "include_static" => VPath::try_new(arguments)
.map_err(|_| InvalidTemplate), .ok()
.and_then(|vpath| dirs.static_.content(vpath))
.and_then(|content| String::from_utf8(content).ok())
.ok_or(InvalidTemplate),
_ => Err(InvalidTemplate), _ => Err(InvalidTemplate),
} }
} }
} }
pub fn render( pub fn render(config: &Config, treehouse: &Treehouse, dirs: &Dirs, input: &str) -> String {
config: &Config,
treehouse: &Treehouse,
paths: &Paths<'_>,
pics_fs: &dyn ReadFilesystem,
input: &str,
) -> String {
let mut renderer = Renderer { let mut renderer = Renderer {
lexer: Lexer::new(input), lexer: Lexer::new(input),
output: String::new(), output: String::new(),
}; };
renderer.render(config, treehouse, paths, pics_fs); renderer.render(config, treehouse, dirs);
renderer.output renderer.output
} }

View file

@ -1,250 +1,82 @@
//! The treehouse virtual file system.
//!
//! Unlike traditional file systems, there is no separation between directories and files.
//! Instead, our file system is based on _entries_, which may have specific, optional, well-typed
//! metadata attached to them.
//! A directory is formed by returning a list of paths from [`dir`][Dir::dir], and a file is
//! formed by returning `Some` from [`content`][Dir::content].
//!
//! This makes using the file system simpler, as you do not have to differentiate between different
//! entry kinds. All paths act as if they _could_ return byte content, and all paths act as if they
//! _could_ have children.
//!
//! # Composability
//!
//! [`Dir`]s are composable. The [`Dir`] itself starts off with the root path ([`VPath::ROOT`]),
//! which may contain further [`dir`][Dir::dir] entries, or content by itself.
//! This makes it possible to nest a [`Dir`] under another [`Dir`].
//!
//! Additionally, there's also the inverse operation, [`Cd`] (named after the `cd`
//! _change directory_ shell command), which returns a [`Dir`] viewing a subpath within another
//! [`Dir`].
//!
//! # Building directories
//!
//! In-memory directories can be composed using the following primitives:
//!
//! - [`EmptyEntry`] - has no metadata whatsoever.
//! - [`BufferedFile`] - root path content is the provided byte vector.
//! - [`MemDir`] - a [`Dir`] containing a single level of other [`Dir`]s inside.
//!
//! Additionally, for interfacing with the OS file system, [`PhysicalDir`] is available,
//! representing a directory stored on the disk.
//!
//! # Virtual paths
//!
//! Entries within directories are referenced using [`VPath`]s (**v**irtual **path**s).
//! A virtual path is composed out of any amount of `/`-separated components.
//!
//! There are no special directories like `.` and `..` (those are just normal entries, though using
//! them is discouraged). [`VPath`]s are always relative to the root of the [`Dir`] you're querying.
//!
//! A leading or trailing slash is not allowed, because they would have no meaning.
//!
//! [`VPath`] also has an owned version, [`VPathBuf`].
use std::{ use std::{
borrow::Borrow,
fmt::{self, Debug}, fmt::{self, Debug},
ops::{ControlFlow, Deref}, ops::{ControlFlow, Deref},
sync::Arc,
}; };
use anyhow::ensure;
use serde::{Deserialize, Serialize};
mod anchored; mod anchored;
pub mod asynch;
mod cd; mod cd;
mod edit;
mod empty; mod empty;
mod file; mod file;
mod mount_points; mod mem_dir;
mod overlay;
mod path;
mod physical; mod physical;
pub use anchored::*; pub use anchored::*;
pub use cd::*; pub use cd::*;
pub use edit::*;
pub use empty::*; pub use empty::*;
pub use file::*; pub use file::*;
pub use mount_points::*; pub use mem_dir::*;
pub use overlay::*;
pub use path::*;
pub use physical::*; pub use physical::*;
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPath {
path: str,
}
impl VPath {
pub const SEPARATOR: char = '/';
pub const ROOT: &Self = unsafe { Self::new_unchecked("") };
pub fn try_new(s: &str) -> anyhow::Result<&Self> {
ensure!(
!s.ends_with(Self::SEPARATOR),
"path must not end with '{}' (got {s:?})",
Self::SEPARATOR
);
ensure!(
!s.starts_with(Self::SEPARATOR),
"paths are always absolute and must not start with '{}' (got {s:?})",
Self::SEPARATOR
);
Ok(unsafe { Self::new_unchecked(s) })
}
pub fn new(s: &str) -> &Self {
Self::try_new(s).expect("invalid path")
}
const unsafe fn new_unchecked(s: &str) -> &Self {
std::mem::transmute::<_, &Self>(s)
}
pub fn try_join(&self, sub: &str) -> anyhow::Result<VPathBuf> {
let mut buf = VPathBuf::from(self);
if !sub.is_empty() {
let sub = VPath::try_new(sub)?;
buf.path.push('/');
buf.path.push_str(&sub.path);
}
Ok(buf)
}
pub fn join(&self, sub: &str) -> VPathBuf {
self.try_join(sub).expect("invalid subpath")
}
pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
if self == prefix {
Some(VPath::ROOT)
} else {
self.path
.strip_prefix(&prefix.path)
.and_then(|p| p.strip_prefix('/'))
// SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
// nor a leading slash.
.map(|p| unsafe { VPath::new_unchecked(p) })
}
}
pub fn depth(&self) -> usize {
self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
}
pub fn segments(&self) -> impl Iterator<Item = &Self> {
self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
// SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
Self::new_unchecked(s)
})
}
pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
// SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
Self::new_unchecked(s)
})
}
pub fn file_name(&self) -> Option<&str> {
self.rsegments().next().map(Self::as_str)
}
pub fn extension(&self) -> Option<&str> {
let file_name = self.file_name()?;
let (left, right) = file_name.rsplit_once('.')?;
if left.is_empty() {
None
} else {
Some(right)
}
}
pub fn file_stem(&self) -> Option<&str> {
let file_name = self.file_name()?;
if let Some(extension) = self.extension() {
Some(&file_name[..file_name.len() - extension.len() - 1])
} else {
Some(file_name)
}
}
pub fn as_str(&self) -> &str {
&self.path
}
}
impl ToOwned for VPath {
type Owned = VPathBuf;
fn to_owned(&self) -> Self::Owned {
VPathBuf::from(self)
}
}
impl fmt::Debug for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl fmt::Display for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
path: String,
}
impl VPathBuf {
pub fn new(path: impl Into<String>) -> Self {
Self::try_new(path).expect("invalid path")
}
pub fn try_new(path: impl Into<String>) -> anyhow::Result<Self> {
let path = path.into();
match VPath::try_new(&path) {
Ok(_) => Ok(Self { path }),
Err(e) => Err(e),
}
}
unsafe fn new_unchecked(path: String) -> Self {
Self { path }
}
}
impl Deref for VPathBuf {
type Target = VPath;
fn deref(&self) -> &Self::Target {
unsafe { VPath::new_unchecked(&self.path) }
}
}
impl fmt::Debug for VPathBuf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl fmt::Display for VPathBuf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl From<&VPath> for VPathBuf {
fn from(value: &VPath) -> Self {
unsafe { Self::new_unchecked(value.path.to_owned()) }
}
}
impl Borrow<VPath> for VPathBuf {
fn borrow(&self) -> &VPath {
self
}
}
impl<'de> Deserialize<'de> for VPathBuf {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::de;
struct Visitor;
impl de::Visitor<'_> for Visitor {
type Value = VPathBuf;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("virtual path")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
VPathBuf::try_new(v).map_err(de::Error::custom)
}
}
deserializer.deserialize_str(Visitor)
}
}
impl Serialize for VPathBuf {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.as_str())
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct DirEntry { pub struct DirEntry {
pub path: VPathBuf, pub path: VPathBuf,
} }
pub trait ReadFilesystem: Debug { pub trait Dir: Debug {
/// List all files under the provided path. /// List all entries under the provided path.
fn dir(&self, path: &VPath) -> Vec<DirEntry>; fn dir(&self, path: &VPath) -> Vec<DirEntry>;
/// Return the byte content of the entry at the given path. /// Return the byte content of the entry at the given path.
@ -264,14 +96,96 @@ pub trait ReadFilesystem: Debug {
None None
} }
/// Optimization for [`ReadFilesystemCombinators::cd`] that allows for avoiding wrapping /// If a file can be written persistently, returns an [`EditPath`] representing the file in
/// `Cd`s in `Cd`s. /// persistent storage.
#[doc(hidden)] ///
fn cd_optimization(&self, _subpath: &VPath) -> Option<Cd<'_>> { /// An edit path can then be made into an [`Edit`].
fn edit_path(&self, _path: &VPath) -> Option<EditPath> {
None None
} }
} }
impl<T> Dir for &T
where
T: Dir,
{
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
(**self).dir(path)
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
(**self).content(path)
}
fn content_version(&self, path: &VPath) -> Option<String> {
(**self).content_version(path)
}
fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
(**self).anchor(path)
}
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
(**self).edit_path(path)
}
}
#[derive(Clone)]
pub struct DynDir {
arc: Arc<dyn Dir + Send + Sync>,
}
impl Dir for DynDir {
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.arc.dir(path)
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
self.arc.content(path)
}
fn content_version(&self, path: &VPath) -> Option<String> {
self.arc.content_version(path)
}
fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.arc.anchor(path)
}
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
self.arc.edit_path(path)
}
}
impl fmt::Debug for DynDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&*self.arc, f)
}
}
impl Deref for DynDir {
type Target = dyn Dir + Send + Sync;
fn deref(&self) -> &Self::Target {
&*self.arc
}
}
pub trait ToDynDir {
fn to_dyn(self) -> DynDir;
}
impl<T> ToDynDir for T
where
T: Dir + Send + Sync + 'static,
{
fn to_dyn(self) -> DynDir {
DynDir {
arc: Arc::new(self),
}
}
}
pub trait AnchoredAtExt { pub trait AnchoredAtExt {
fn anchored_at(self, at: VPathBuf) -> Anchored<Self> fn anchored_at(self, at: VPathBuf) -> Anchored<Self>
where where
@ -280,53 +194,28 @@ pub trait AnchoredAtExt {
impl<T> AnchoredAtExt for T impl<T> AnchoredAtExt for T
where where
T: ReadFilesystem, T: Dir,
{ {
fn anchored_at(self, at: VPathBuf) -> Anchored<Self> { fn anchored_at(self, at: VPathBuf) -> Anchored<Self> {
Anchored::new(self, at) Anchored::new(self, at)
} }
} }
pub trait CdExt { pub fn walk_dir_rec(dir: &dyn Dir, path: &VPath, f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>) {
fn cd<'a>(self, into: VPathBuf) -> Cd<'a> for entry in dir.dir(path) {
where
Self: 'a;
}
impl CdExt for &dyn ReadFilesystem {
fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
where
Self: 'a,
{
if let Some(cd) = self.cd_optimization(&into) {
cd
} else {
Cd::new(self, into)
}
}
}
pub fn walk_rec(
fs: &dyn ReadFilesystem,
path: &VPath,
f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>,
) {
for entry in fs.dir(path) {
match f(&entry.path) { match f(&entry.path) {
ControlFlow::Continue(_) => (), ControlFlow::Continue(_) => (),
ControlFlow::Break(_) => return, ControlFlow::Break(_) => return,
} }
walk_rec(fs, &entry.path, f); walk_dir_rec(dir, &entry.path, f);
} }
} }
pub fn url(site: &str, fs: &dyn ReadFilesystem, path: &VPath) -> String { pub fn url(site: &str, dir: &dyn Dir, path: &VPath) -> Option<String> {
let Some(anchor) = fs.anchor(path) else { let anchor = dir.anchor(path)?;
panic!("filesystem {fs:?} is not anchored anywhere and a URL of it cannot be produced") if let Some(version) = dir.content_version(path) {
}; Some(format!("{}/{anchor}?v={version}", site))
if let Some(version) = fs.content_version(path) {
format!("{}/{anchor}?v={version}", site)
} else { } else {
format!("{}/{anchor}", site) Some(format!("{}/{anchor}", site))
} }
} }

View file

@ -1,6 +1,6 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, VPath, VPathBuf};
pub struct Anchored<T> { pub struct Anchored<T> {
inner: T, inner: T,
@ -13,9 +13,9 @@ impl<T> Anchored<T> {
} }
} }
impl<T> ReadFilesystem for Anchored<T> impl<T> Dir for Anchored<T>
where where
T: ReadFilesystem, T: Dir,
{ {
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.inner.dir(path) self.inner.dir(path)
@ -30,7 +30,7 @@ where
} }
fn anchor(&self, path: &VPath) -> Option<VPathBuf> { fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
Some(self.at.join(path.as_str())) Some(self.at.join(path))
} }
} }

View file

@ -0,0 +1,23 @@
use super::{Dir, DynDir, VPath};
#[derive(Debug, Clone)]
pub struct AsyncDir {
inner: DynDir,
}
impl AsyncDir {
pub fn new(inner: DynDir) -> Self {
Self { inner }
}
pub async fn content(&self, path: &VPath) -> Option<Vec<u8>> {
let this = self.clone();
let path = path.to_owned();
// NOTE: Performance impact of spawning a blocking task may be a bit high in case
// we add caching.
// Measure throughput here.
tokio::task::spawn_blocking(move || this.inner.content(&path))
.await
.unwrap()
}
}

View file

@ -1,22 +1,25 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};
pub struct Cd<'fs> { pub struct Cd<T> {
parent: &'fs dyn ReadFilesystem, parent: T,
path: VPathBuf, path: VPathBuf,
} }
impl<'fs> Cd<'fs> { impl<T> Cd<T> {
pub fn new(parent: &'fs dyn ReadFilesystem, path: VPathBuf) -> Self { pub fn new(parent: T, path: VPathBuf) -> Self {
Self { parent, path } Self { parent, path }
} }
} }
impl ReadFilesystem for Cd<'_> { impl<T> Dir for Cd<T>
where
T: Dir,
{
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.parent self.parent
.dir(&self.path.join(path.as_str())) .dir(&self.path.join(path))
.into_iter() .into_iter()
.map(|entry| DirEntry { .map(|entry| DirEntry {
path: entry path: entry
@ -29,23 +32,26 @@ impl ReadFilesystem for Cd<'_> {
} }
fn content_version(&self, path: &VPath) -> Option<String> { fn content_version(&self, path: &VPath) -> Option<String> {
self.parent.content_version(&self.path.join(path.as_str())) self.parent.content_version(&self.path.join(path))
} }
fn content(&self, path: &VPath) -> Option<Vec<u8>> { fn content(&self, path: &VPath) -> Option<Vec<u8>> {
self.parent.content(&self.path.join(path.as_str())) self.parent.content(&self.path.join(path))
} }
fn anchor(&self, path: &VPath) -> Option<VPathBuf> { fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.parent.anchor(&self.path.join(path.as_str())) self.parent.anchor(&self.path.join(path))
} }
fn cd_optimization(&self, subpath: &VPath) -> Option<Cd<'_>> { fn edit_path(&self, path: &VPath) -> Option<EditPath> {
Some(Cd::new(self, subpath.to_owned())) self.parent.edit_path(&self.path.join(path))
} }
} }
impl fmt::Debug for Cd<'_> { impl<T> fmt::Debug for Cd<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}/{:?}", self.parent, self.path) write!(f, "{:?}/{:?}", self.parent, self.path)
} }

View file

@ -0,0 +1,92 @@
use std::{error::Error, fmt, future::Future, path::PathBuf};
use log::{error, info};
use tokio::task::JoinSet;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct EditPath {
pub(super) path: PathBuf,
}
/// Represents a pending edit operation that can be written to persistent storage later.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Edit {
/// An edit that doesn't do anything.
NoOp,
/// Write the given string to a file.
Write(EditPath, String),
/// Execute a sequence of edits in order.
Seq(Vec<Edit>),
/// Execute the provided edits in parallel.
All(Vec<Edit>),
/// Makes an edit dry.
///
/// A dry edit only logs what operations would be performed, does not perform the I/O.
Dry(Box<Edit>),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ApplyFailed;
impl Edit {
#[expect(clippy::manual_async_fn)]
pub fn apply(self) -> impl Future<Output = Result<(), ApplyFailed>> + Send {
async {
match self {
Edit::NoOp => (),
Edit::Write(edit_path, content) => {
tokio::fs::write(&edit_path.path, &content)
.await
.inspect_err(|err| error!("write to {edit_path:?} failed: {err:?}"))
.map_err(|_| ApplyFailed)?;
}
Edit::Seq(vec) => {
for edit in vec {
Box::pin(edit.apply()).await?;
}
}
Edit::All(vec) => {
let mut set = JoinSet::new();
for edit in vec {
set.spawn(edit.apply());
}
while let Some(result) = set.try_join_next() {
result.map_err(|_| ApplyFailed)??;
}
}
Edit::Dry(edit) => edit.dry(),
}
Ok(())
}
}
pub fn dry(&self) {
match self {
Edit::NoOp => (),
Edit::Write(edit_path, content) => {
info!("{edit_path:?}: would write {:?} bytes", content.len());
}
Edit::Seq(edits) => edits.iter().for_each(Self::dry),
Edit::All(edits) => edits.iter().for_each(Self::dry),
Edit::Dry(edit) => edit.dry(),
}
}
}
impl fmt::Display for ApplyFailed {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("failed to apply some edits")
}
}
impl Error for ApplyFailed {}
impl fmt::Debug for EditPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.path, f)
}
}

View file

@ -1,9 +1,9 @@
use super::{DirEntry, ReadFilesystem, VPath}; use super::{Dir, DirEntry, VPath};
#[derive(Debug)] #[derive(Debug)]
pub struct EmptyFilesystem; pub struct EmptyEntry;
impl ReadFilesystem for EmptyFilesystem { impl Dir for EmptyEntry {
fn dir(&self, _path: &VPath) -> Vec<DirEntry> { fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
vec![] vec![]
} }

View file

@ -1,6 +1,6 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath}; use super::{DirEntry, Dir, VPath};
pub struct BufferedFile { pub struct BufferedFile {
pub content: Vec<u8>, pub content: Vec<u8>,
@ -12,7 +12,7 @@ impl BufferedFile {
} }
} }
impl ReadFilesystem for BufferedFile { impl Dir for BufferedFile {
fn dir(&self, _path: &VPath) -> Vec<DirEntry> { fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
vec![] vec![]
} }

View file

@ -1,29 +1,29 @@
use std::{collections::HashMap, fmt}; use std::{collections::HashMap, fmt};
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};
pub struct MountPoints { pub struct MemDir {
mount_points: HashMap<String, Box<dyn ReadFilesystem>>, mount_points: HashMap<String, DynDir>,
} }
enum Resolved<'fs, 'path> { enum Resolved<'fs, 'path> {
Root, Root,
MountPoint { MountPoint {
fs: &'fs dyn ReadFilesystem, fs: &'fs dyn Dir,
fs_path: &'path VPath, fs_path: &'path VPath,
subpath: &'path VPath, subpath: &'path VPath,
}, },
None, None,
} }
impl MountPoints { impl MemDir {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
mount_points: HashMap::new(), mount_points: HashMap::new(),
} }
} }
pub fn add(&mut self, path: &VPath, fs: Box<dyn ReadFilesystem>) { pub fn add(&mut self, path: &VPath, dir: DynDir) {
assert_eq!( assert_eq!(
path.depth(), 0, path.depth(), 0,
"path must be situated at root. MountPoints does not support nested paths, but you can nest MountPoints within other MountPoints" "path must be situated at root. MountPoints does not support nested paths, but you can nest MountPoints within other MountPoints"
@ -31,7 +31,7 @@ impl MountPoints {
assert!( assert!(
self.mount_points self.mount_points
.insert(path.as_str().to_owned(), fs) .insert(path.as_str().to_owned(), dir)
.is_none(), .is_none(),
"duplicate mount point at {path:?}" "duplicate mount point at {path:?}"
); );
@ -57,13 +57,13 @@ impl MountPoints {
} }
} }
impl Default for MountPoints { impl Default for MemDir {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
} }
} }
impl ReadFilesystem for MountPoints { impl Dir for MemDir {
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
match self.resolve(path) { match self.resolve(path) {
Resolved::Root => self Resolved::Root => self
@ -81,7 +81,7 @@ impl ReadFilesystem for MountPoints {
.dir(subpath) .dir(subpath)
.into_iter() .into_iter()
.map(|entry| DirEntry { .map(|entry| DirEntry {
path: fs_path.join(entry.path.as_str()), path: fs_path.join(&entry.path),
}) })
.collect(), .collect(),
Resolved::None => vec![], Resolved::None => vec![],
@ -120,9 +120,20 @@ impl ReadFilesystem for MountPoints {
Resolved::Root | Resolved::None => None, Resolved::Root | Resolved::None => None,
} }
} }
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
match self.resolve(path) {
Resolved::MountPoint {
fs,
fs_path: _,
subpath,
} => fs.edit_path(subpath),
Resolved::Root | Resolved::None => None,
}
}
} }
impl fmt::Debug for MountPoints { impl fmt::Debug for MemDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("MountPoints") f.write_str("MountPoints")
} }

View file

@ -0,0 +1,52 @@
use std::fmt;
use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};
/// Two directory trees layered on top of each other.
///
/// Content lookups consult `overlay` first and fall back to `base`;
/// directory listings are the merged union of both layers.
pub struct Overlay {
    base: DynDir,
    overlay: DynDir,
}

impl Overlay {
    /// Creates an overlay in which `overlay` shadows `base`.
    pub fn new(base: DynDir, overlay: DynDir) -> Self {
        Self { base, overlay }
    }
}
impl Dir for Overlay {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        // Merge listings from both layers; sorting before dedup collapses
        // entries that are present in the base as well as in the overlay.
        let mut entries = self.base.dir(path);
        entries.extend(self.overlay.dir(path));
        entries.sort();
        entries.dedup();
        entries
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        // The overlay always shadows the base.
        match self.overlay.content(path) {
            Some(content) => Some(content),
            None => self.base.content(path),
        }
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        match self.overlay.content_version(path) {
            Some(version) => Some(version),
            None => self.base.content_version(path),
        }
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        match self.overlay.anchor(path) {
            Some(anchor) => Some(anchor),
            None => self.base.anchor(path),
        }
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        match self.overlay.edit_path(path) {
            Some(edit_path) => Some(edit_path),
            None => self.base.edit_path(path),
        }
    }
}
impl fmt::Debug for Overlay {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { base, overlay } = self;
        write!(f, "Overlay({base:?}, {overlay:?})")
    }
}

View file

@ -0,0 +1,305 @@
use std::{borrow::Borrow, error::Error, fmt, ops::Deref, str::FromStr};
use serde::{Deserialize, Serialize};
/// A virtual filesystem path.
///
/// Invariant: the string never starts nor ends with `/`; the empty string
/// represents the root.
//
// `repr(transparent)` guarantees that the layout matches `str`, which the
// `&str` -> `&VPath` transmute in `VPath::new_unchecked` relies on.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct VPath {
    path: str,
}
impl VPath {
    pub const SEPARATOR_BYTE: u8 = b'/';
    pub const SEPARATOR: char = Self::SEPARATOR_BYTE as char;

    /// The root path, represented by the empty string.
    pub const ROOT: &Self = unsafe { Self::new_unchecked("") };

    /// Validates `s` and borrows it as a `VPath`.
    ///
    /// Paths must not start with `/` (they are always relative to the VFS
    /// root) and must not end with `/`. The empty string is the root.
    pub const fn try_new(s: &str) -> Result<&Self, InvalidPathError> {
        if s.is_empty() {
            return Ok(Self::ROOT);
        }

        let b = s.as_bytes();
        if b[b.len() - 1] == Self::SEPARATOR_BYTE {
            return Err(InvalidPathError::TrailingSlash);
        }
        if b[0] == Self::SEPARATOR_BYTE {
            return Err(InvalidPathError::LeadingSlash);
        }

        Ok(unsafe { Self::new_unchecked(s) })
    }

    /// Like [`try_new`][Self::try_new], but panics if `s` is not a valid path.
    pub fn new(s: &str) -> &Self {
        Self::try_new(s).expect("invalid path")
    }

    /// `const` version of [`new`][Self::new]. This has worse error messages, so prefer `new` whenever possible.
    pub const fn new_const(s: &str) -> &Self {
        match Self::try_new(s) {
            Ok(p) => p,
            Err(_) => panic!("invalid path"),
        }
    }

    // SAFETY: callers must guarantee that `s` neither starts nor ends with a
    // separator. The transmute relies on `VPath` being layout-compatible with
    // `str` — see the note on the struct definition.
    const unsafe fn new_unchecked(s: &str) -> &Self {
        std::mem::transmute::<_, &Self>(s)
    }

    /// Returns whether the underlying string is empty.
    pub fn is_empty(&self) -> bool {
        self.path.is_empty()
    }

    /// Returns whether this is the root path. Synonym of
    /// [`is_empty`][Self::is_empty].
    pub fn is_root(&self) -> bool {
        self.is_empty()
    }

    /// Returns a new owned path with `sub` appended.
    pub fn join(&self, sub: &VPath) -> VPathBuf {
        let mut buf = self.to_owned();
        buf.push(sub);
        buf
    }

    /// Returns the path with its last segment removed, or `None` for the root.
    pub fn parent(&self) -> Option<&VPath> {
        if self.is_root() {
            None
        } else if self.depth() == 0 {
            // A single segment's parent is the root.
            Some(VPath::ROOT)
        } else {
            // Split off the *last* segment. `split_once` would split on the
            // first separator and yield the top-level ancestor (e.g.
            // "a" for "a/b/c") instead of the parent ("a/b").
            let (left, _right) = self
                .path
                .rsplit_once(Self::SEPARATOR)
                .expect("path with depth > 0 must have separators");
            // SAFETY: We're splitting on a `/`, so there cannot be a trailing `/` in `left`.
            Some(unsafe { VPath::new_unchecked(left) })
        }
    }

    /// Strips a leading `prefix` (a whole number of segments) from this path.
    /// Returns `None` if `prefix` is not an ancestor of (or equal to) `self`.
    pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
        if self == prefix {
            Some(VPath::ROOT)
        } else {
            self.path
                .strip_prefix(&prefix.path)
                .and_then(|p| p.strip_prefix(Self::SEPARATOR))
                // SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
                // nor a leading slash.
                .map(|p| unsafe { VPath::new_unchecked(p) })
        }
    }

    /// Returns the number of separators in the path.
    pub fn depth(&self) -> usize {
        self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
    }

    /// Iterates over the path's segments, from first to last.
    pub fn segments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    /// Iterates over the path's segments, from last to first.
    pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    /// Returns the last segment of the path.
    /// NOTE: for the root this is `Some("")`, not `None`.
    pub fn file_name(&self) -> Option<&str> {
        self.rsegments().next().map(Self::as_str)
    }

    /// Returns the text after the last `.` in the file name, if any.
    /// Dotfiles (names whose only `.` is leading) have no extension.
    pub fn extension(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        let (left, right) = file_name.rsplit_once('.')?;
        if left.is_empty() {
            None
        } else {
            Some(right)
        }
    }

    /// Returns a copy of this path with the extension replaced.
    /// If the path has no extension, the copy is returned unchanged.
    pub fn with_extension(&self, extension: &str) -> VPathBuf {
        let mut buf = self.to_owned();
        buf.set_extension(extension);
        buf
    }

    /// Returns the file name with its extension (and the `.`) removed.
    pub fn file_stem(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        if let Some(extension) = self.extension() {
            Some(&file_name[..file_name.len() - extension.len() - 1])
        } else {
            Some(file_name)
        }
    }

    /// Borrows the path as a plain string slice.
    pub fn as_str(&self) -> &str {
        &self.path
    }
}
impl ToOwned for VPath {
type Owned = VPathBuf;
fn to_owned(&self) -> Self::Owned {
VPathBuf::from(self)
}
}
// Debug and Display intentionally render the same: the raw path string,
// without quoting.
impl fmt::Debug for VPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl fmt::Display for VPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
/// Why a string failed to validate as a virtual path.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InvalidPathError {
    TrailingSlash,
    LeadingSlash,
}

impl fmt::Display for InvalidPathError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let message = match self {
            InvalidPathError::TrailingSlash => "paths must not end with a trailing `/`",
            InvalidPathError::LeadingSlash => {
                "paths are always absolute and must not start with `/`"
            }
        };
        f.write_str(message)
    }
}

impl Error for InvalidPathError {}
/// An owned virtual path; the owned counterpart of `VPath`.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
    // Invariant: same as `VPath` — never starts or ends with `/`.
    path: String,
}
impl VPathBuf {
    /// Creates an owned path, panicking if `path` is invalid.
    pub fn new(path: impl Into<String>) -> Self {
        Self::try_new(path).expect("invalid path")
    }

    /// Creates an owned path, validating the string first.
    /// See `VPath::try_new` for what makes a path valid.
    pub fn try_new(path: impl Into<String>) -> Result<Self, InvalidPathError> {
        let path = path.into();
        match VPath::try_new(&path) {
            Ok(_) => Ok(Self { path }),
            Err(e) => Err(e),
        }
    }

    // SAFETY: callers must guarantee `path` upholds the `VPath` invariant
    // (no leading or trailing separator).
    unsafe fn new_unchecked(path: String) -> Self {
        Self { path }
    }

    /// Appends `sub` to this path in place. Appending the root is a no-op.
    pub fn push(&mut self, sub: &VPath) {
        if !sub.is_empty() {
            // Only insert a separator when this path is non-empty: pushing
            // onto the root must not produce a leading `/`, which would
            // violate the path invariant.
            if !self.path.is_empty() {
                self.path.push(VPath::SEPARATOR);
            }
            self.path.push_str(&sub.path);
        }
    }

    /// Replaces the extension with `new_extension`; pass `""` to remove the
    /// extension (including its `.`) entirely.
    /// Does nothing if the path has no extension yet.
    pub fn set_extension(&mut self, new_extension: &str) {
        if let Some(existing) = self.extension() {
            let mut chop_len = existing.len();
            if new_extension.is_empty() {
                chop_len += 1; // also chop off the `.`
            }
            let range = self.path.len() - chop_len..;
            self.path.replace_range(range, new_extension);
        }
    }
}
impl Default for VPathBuf {
fn default() -> Self {
VPath::ROOT.to_owned()
}
}
impl Deref for VPathBuf {
    type Target = VPath;

    /// Borrows the owned buffer as a `&VPath`, exposing all `VPath` methods.
    fn deref(&self) -> &Self::Target {
        // SAFETY: `self.path` upholds the `VPath` invariant (no leading or
        // trailing `/`): the validated constructors check it, and
        // `new_unchecked` callers promise it.
        unsafe { VPath::new_unchecked(&self.path) }
    }
}
// Debug and Display match `VPath`'s: the raw path string, without quoting.
impl fmt::Debug for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl fmt::Display for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
impl From<&VPath> for VPathBuf {
    fn from(value: &VPath) -> Self {
        // SAFETY: `value` is already a valid `VPath`, so copying its string
        // preserves the invariant.
        unsafe { Self::new_unchecked(value.path.to_owned()) }
    }
}
impl Borrow<VPath> for VPathBuf {
fn borrow(&self) -> &VPath {
self
}
}
impl<'de> Deserialize<'de> for VPathBuf {
    /// Deserializes a path from a plain string, rejecting invalid paths.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de;

        struct VPathVisitor;

        impl de::Visitor<'_> for VPathVisitor {
            type Value = VPathBuf;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("virtual path")
            }

            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                VPathBuf::try_new(v).map_err(de::Error::custom)
            }
        }

        deserializer.deserialize_str(VPathVisitor)
    }
}
impl Serialize for VPathBuf {
    /// Serializes the path as a plain string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.path)
    }
}
impl FromStr for VPathBuf {
type Err = InvalidPathError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::try_new(s)
}
}

View file

@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
use log::error; use log::error;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct PhysicalDir { pub struct PhysicalDir {
@ -15,7 +15,7 @@ impl PhysicalDir {
} }
} }
impl ReadFilesystem for PhysicalDir { impl Dir for PhysicalDir {
fn dir(&self, vpath: &VPath) -> Vec<DirEntry> { fn dir(&self, vpath: &VPath) -> Vec<DirEntry> {
let physical = self.root.join(physical_path(vpath)); let physical = self.root.join(physical_path(vpath));
if !physical.is_dir() { if !physical.is_dir() {
@ -68,6 +68,12 @@ impl ReadFilesystem for PhysicalDir {
.inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",)) .inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",))
.ok() .ok()
} }
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
Some(EditPath {
path: self.root.join(physical_path(path)),
})
}
} }
fn physical_path(path: &VPath) -> &Path { fn physical_path(path: &VPath) -> &Path {

View file

@ -1,33 +1,31 @@
use treehouse::vfs::{ use treehouse::vfs::{BufferedFile, Cd, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};
BufferedFile, Cd, CdExt, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf,
};
const HEWWO: &[u8] = b"hewwo :3"; const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-"; const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<"; const BOOP: &[u8] = b"boop >w<";
fn vfs() -> MountPoints { fn vfs() -> MemDir {
let file1 = BufferedFile::new(HEWWO.to_vec()); let file1 = BufferedFile::new(HEWWO.to_vec());
let file2 = BufferedFile::new(FWOOFEE.to_vec()); let file2 = BufferedFile::new(FWOOFEE.to_vec());
let file3 = BufferedFile::new(BOOP.to_vec()); let file3 = BufferedFile::new(BOOP.to_vec());
let mut innermost = MountPoints::new(); let mut innermost = MemDir::new();
innermost.add(VPath::new("file3.txt"), Box::new(file3)); innermost.add(VPath::new("file3.txt"), file3.to_dyn());
let mut inner = MountPoints::new(); let mut inner = MemDir::new();
inner.add(VPath::new("file1.txt"), Box::new(file1)); inner.add(VPath::new("file1.txt"), file1.to_dyn());
inner.add(VPath::new("file2.txt"), Box::new(file2)); inner.add(VPath::new("file2.txt"), file2.to_dyn());
inner.add(VPath::new("innermost"), Box::new(innermost)); inner.add(VPath::new("innermost"), innermost.to_dyn());
let mut vfs = MountPoints::new(); let mut vfs = MemDir::new();
vfs.add(VPath::new("inner"), Box::new(inner)); vfs.add(VPath::new("inner"), inner.to_dyn());
vfs vfs
} }
#[test] #[test]
fn dir1() { fn dir1() {
let outer = vfs(); let outer = vfs();
let inner = Cd::new(&outer, VPathBuf::new("inner")); let inner = Cd::new(outer, VPathBuf::new("inner"));
let mut dir = inner.dir(VPath::ROOT); let mut dir = inner.dir(VPath::ROOT);
dir.sort(); dir.sort();
@ -49,23 +47,6 @@ fn dir1() {
#[test] #[test]
fn dir2() { fn dir2() {
let outer = vfs();
let outer: &dyn ReadFilesystem = &outer;
let inner: &dyn ReadFilesystem = &outer.cd(VPathBuf::new("inner"));
let innermost = inner.cd(VPathBuf::new("innermost"));
let mut dir = innermost.dir(VPath::ROOT);
dir.sort();
assert_eq!(
dir,
vec![DirEntry {
path: VPathBuf::new("file3.txt"),
},]
);
}
#[test]
fn dir3() {
let outer = vfs(); let outer = vfs();
let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost")); let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost"));

View file

@ -1,16 +1,16 @@
use treehouse::vfs::{EmptyFilesystem, ReadFilesystem, VPath}; use treehouse::vfs::{Dir, EmptyEntry, VPath};
#[test] #[test]
fn dir() { fn dir() {
assert!(EmptyFilesystem.dir(VPath::ROOT).is_empty()); assert!(EmptyEntry.dir(VPath::ROOT).is_empty());
} }
#[test] #[test]
fn content_version() { fn content_version() {
assert!(EmptyFilesystem.content_version(VPath::ROOT).is_none()); assert!(EmptyEntry.content_version(VPath::ROOT).is_none());
} }
#[test] #[test]
fn content() { fn content() {
assert!(EmptyFilesystem.content(VPath::ROOT).is_none()); assert!(EmptyEntry.content(VPath::ROOT).is_none());
} }

View file

@ -1,4 +1,4 @@
use treehouse::vfs::{BufferedFile, ReadFilesystem, VPath}; use treehouse::vfs::{BufferedFile, Dir, VPath};
fn vfs() -> BufferedFile { fn vfs() -> BufferedFile {
BufferedFile::new(b"hewwo :3".to_vec()) BufferedFile::new(b"hewwo :3".to_vec())

View file

@ -1,21 +1,23 @@
use treehouse::vfs::{BufferedFile, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf}; use std::sync::Arc;
use treehouse::vfs::{BufferedFile, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};
const HEWWO: &[u8] = b"hewwo :3"; const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-"; const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<"; const BOOP: &[u8] = b"boop >w<";
fn vfs() -> MountPoints { fn vfs() -> MemDir {
let file1 = BufferedFile::new(HEWWO.to_vec()); let file1 = BufferedFile::new(HEWWO.to_vec());
let file2 = BufferedFile::new(FWOOFEE.to_vec()); let file2 = BufferedFile::new(FWOOFEE.to_vec());
let file3 = BufferedFile::new(BOOP.to_vec()); let file3 = BufferedFile::new(BOOP.to_vec());
let mut inner = MountPoints::new(); let mut inner = MemDir::new();
inner.add(VPath::new("file3.txt"), Box::new(file3)); inner.add(VPath::new("file3.txt"), file3.to_dyn());
let mut vfs = MountPoints::new(); let mut vfs = MemDir::new();
vfs.add(VPath::new("file1.txt"), Box::new(file1)); vfs.add(VPath::new("file1.txt"), file1.to_dyn());
vfs.add(VPath::new("file2.txt"), Box::new(file2)); vfs.add(VPath::new("file2.txt"), file2.to_dyn());
vfs.add(VPath::new("inner"), Box::new(inner)); vfs.add(VPath::new("inner"), inner.to_dyn());
vfs vfs
} }

View file

@ -1,6 +1,6 @@
use std::path::Path; use std::path::Path;
use treehouse::vfs::{DirEntry, PhysicalDir, ReadFilesystem, VPath, VPathBuf}; use treehouse::vfs::{DirEntry, PhysicalDir, Dir, VPath, VPathBuf};
fn vfs() -> PhysicalDir { fn vfs() -> PhysicalDir {
let root = Path::new("tests/it/vfs_physical").to_path_buf(); let root = Path::new("tests/it/vfs_physical").to_path_buf();

View file

@ -57,7 +57,7 @@ description = "a place on the Internet I like to call home"
[build.javascript] [build.javascript]
import_roots = [ import_roots = [
{ name = "treehouse", path = "static/js" }, { name = "treehouse", path = "" },
{ name = "tairu", path = "static/js/components/tairu" }, { name = "tairu", path = "components/tairu" },
{ name = "haku", path = "static/js/components/haku" }, { name = "haku", path = "components/haku" },
] ]