introduce the virtual filesystem everywhere

this unfortunately means I had to cut some features (bye bye commit history! for now)
stuff's not quite 100% working just yet (like branch links, which were and are still broken)
we also don't have content_version impls just yet
This commit is contained in:
liquidex 2024-11-17 22:34:43 +01:00
parent db0329077e
commit 377fbe4dab
42 changed files with 1613 additions and 1655 deletions

15
Cargo.lock generated
View file

@ -1545,20 +1545,6 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
[[package]]
name = "tower-livereload"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61d6cbbab4b2d3cafd21fb211cc4b06525a0df919c3e8ca3d36485b1c1bd4cd4"
dependencies = [
"bytes",
"http",
"http-body",
"pin-project-lite",
"tokio",
"tower",
]
[[package]] [[package]]
name = "tower-service" name = "tower-service"
version = "0.3.2" version = "0.3.2"
@ -1611,7 +1597,6 @@ dependencies = [
"serde_json", "serde_json",
"tokio", "tokio",
"toml_edit", "toml_edit",
"tower-livereload",
"treehouse-format", "treehouse-format",
"ulid", "ulid",
"url", "url",

View file

@ -1,10 +1,7 @@
%% title = "404" %% title = "404"
% id = "404" % id = "01HMF8KQ997F1ZTEGDNAE2S6F1"
- # 404 - seems like the page you're looking for isn't here.
% id = "01HMF8KQ997F1ZTEGDNAE2S6F1" % id = "01HMF8KQ99XNMEP67NE3QH5698"
- seems like the page you're looking for isn't here. - care to go [back to the index][branch:treehouse]?
% id = "01HMF8KQ99XNMEP67NE3QH5698"
- care to go [back to the index][branch:treehouse]?

View file

@ -29,7 +29,6 @@ serde = { version = "1.0.183", features = ["derive"] }
serde_json = "1.0.105" serde_json = "1.0.105"
tokio = { version = "1.32.0", features = ["full"] } tokio = { version = "1.32.0", features = ["full"] }
toml_edit = { version = "0.19.14", features = ["serde"] } toml_edit = { version = "0.19.14", features = ["serde"] }
tower-livereload = "0.9.2"
walkdir = "2.3.3" walkdir = "2.3.3"
ulid = "1.0.0" ulid = "1.0.0"
url = "2.5.0" url = "2.5.0"

View file

@ -2,10 +2,10 @@ pub mod fix;
pub mod serve; pub mod serve;
pub mod wc; pub mod wc;
use std::path::{Path, PathBuf};
use clap::{Args, Parser, Subcommand}; use clap::{Args, Parser, Subcommand};
use crate::vfs::VPathBuf;
#[derive(Parser)] #[derive(Parser)]
pub struct ProgramArgs { pub struct ProgramArgs {
#[clap(subcommand)] #[clap(subcommand)]
@ -41,20 +41,13 @@ pub enum Command {
} }
#[derive(Args)] #[derive(Args)]
pub struct GenerateArgs { pub struct GenerateArgs {}
/// Only use commits as sources. This will cause the latest revision to be taken from the
/// Git history instead of the working tree.
///
/// Recommended for deployment.
#[clap(long)]
pub commits_only: bool,
}
#[derive(Args)] #[derive(Args)]
pub struct FixArgs { pub struct FixArgs {
/// Which file to fix. The fixed file will be printed into stdout so that you have a chance to /// Which file to fix. The fixed file will be printed into stdout so that you have a chance to
/// see the changes. /// see the changes.
pub file: PathBuf, pub file: VPathBuf,
/// If you're happy with the suggested changes, specifying this will apply them to the file /// If you're happy with the suggested changes, specifying this will apply them to the file
/// (overwrite it in place.) /// (overwrite it in place.)
@ -63,7 +56,7 @@ pub struct FixArgs {
/// Write the previous version back to the specified path. /// Write the previous version back to the specified path.
#[clap(long)] #[clap(long)]
pub backup: Option<PathBuf>, pub backup: Option<VPathBuf>,
} }
#[derive(Args)] #[derive(Args)]
@ -85,17 +78,5 @@ pub struct ServeArgs {
pub struct WcArgs { pub struct WcArgs {
/// A list of paths to report the word counts of. /// A list of paths to report the word counts of.
/// If no paths are provided, the entire tree is word-counted. /// If no paths are provided, the entire tree is word-counted.
pub paths: Vec<PathBuf>, pub paths: Vec<VPathBuf>,
}
#[derive(Debug, Clone, Copy)]
pub struct Paths<'a> {
pub target_dir: &'a Path,
pub template_target_dir: &'a Path,
pub static_dir: &'a Path,
pub template_dir: &'a Path,
pub content_dir: &'a Path,
pub config_file: &'a Path,
} }

View file

@ -1,16 +1,17 @@
use std::{ffi::OsStr, ops::Range}; use std::ops::{ControlFlow, Range};
use anyhow::Context; use anyhow::{anyhow, Context};
use codespan_reporting::diagnostic::Diagnostic; use codespan_reporting::diagnostic::Diagnostic;
use log::{error, info};
use treehouse_format::ast::Branch; use treehouse_format::ast::Branch;
use walkdir::WalkDir;
use crate::{ use crate::{
parse::{self, parse_toml_with_diagnostics, parse_tree_with_diagnostics}, parse::{self, parse_toml_with_diagnostics, parse_tree_with_diagnostics},
state::{report_diagnostics, FileId, Source, Treehouse}, state::{report_diagnostics, FileId, Source, Treehouse},
vfs::{self, Dir, Edit, VPath},
}; };
use super::{FixAllArgs, FixArgs, Paths}; use super::{FixAllArgs, FixArgs};
struct Fix { struct Fix {
range: Range<usize>, range: Range<usize>,
@ -132,68 +133,102 @@ pub fn fix_file(
}) })
} }
pub fn fix_file_cli(fix_args: FixArgs) -> anyhow::Result<()> { pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
let utf8_filename = fix_args.file.to_string_lossy().into_owned(); let file = if &*fix_args.file == VPath::new("-") {
let file = if utf8_filename == "-" {
std::io::read_to_string(std::io::stdin().lock()).context("cannot read file from stdin")? std::io::read_to_string(std::io::stdin().lock()).context("cannot read file from stdin")?
} else { } else {
std::fs::read_to_string(&fix_args.file).context("cannot read file to fix")? String::from_utf8(
root.content(&fix_args.file)
.ok_or_else(|| anyhow!("cannot read file to fix"))?,
)
.context("input file has invalid UTF-8")?
}; };
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let file_id = treehouse.add_file(utf8_filename, Source::Other(file)); let file_id = treehouse.add_file(fix_args.file.as_str().to_owned(), Source::Other(file));
let edit_path = root.edit_path(&fix_args.file).ok_or_else(|| {
anyhow!(
"{} is not an editable file (perhaps it is not in a persistent path?)",
fix_args.file
)
})?;
if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) { Ok(
if fix_args.apply { if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
// Try to write the backup first. If writing that fails, bail out without overwriting if fix_args.apply {
// the source file. // Try to write the backup first. If writing that fails, bail out without overwriting
if let Some(backup_path) = fix_args.backup { // the source file.
std::fs::write(backup_path, treehouse.source(file_id).input()) if let Some(backup_path) = fix_args.backup {
.context("cannot write backup; original file will not be overwritten")?; let backup_edit_path = root.edit_path(&backup_path).ok_or_else(|| {
anyhow!("backup file {backup_path} is not an editable file")
})?;
Edit::Seq(vec![
Edit::Write(
backup_edit_path,
treehouse.source(file_id).input().to_owned(),
),
Edit::Write(edit_path, fixed),
])
} else {
Edit::Write(edit_path, fixed)
}
} else {
println!("{fixed}");
Edit::NoOp
} }
std::fs::write(&fix_args.file, fixed).context("cannot overwrite original file")?;
} else { } else {
println!("{fixed}"); report_diagnostics(&treehouse.files, &diagnostics)?;
} Edit::NoOp
} else { },
report_diagnostics(&treehouse.files, &diagnostics)?; )
}
Ok(())
} }
pub fn fix_all_cli(fix_all_args: FixAllArgs, paths: &Paths<'_>) -> anyhow::Result<()> { pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Edit> {
for entry in WalkDir::new(paths.content_dir) { let mut edits = vec![];
let entry = entry?;
if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) { fn fix_one(dir: &dyn Dir, path: &VPath) -> anyhow::Result<Edit> {
let file = std::fs::read_to_string(entry.path()) if path.extension() == Some("tree") {
.with_context(|| format!("cannot read file to fix: {:?}", entry.path()))?; let Some(content) = dir.content(path) else {
let utf8_filename = entry.path().to_string_lossy(); return Ok(Edit::NoOp);
};
let content = String::from_utf8(content).context("file is not valid UTF-8")?;
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file)); let file_id = treehouse.add_file(path.as_str().to_string(), Source::Other(content));
let edit_path = dir.edit_path(path).context("path is not editable")?;
if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) { if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
if fixed != treehouse.source(file_id).input() { if fixed != treehouse.source(file_id).input() {
if fix_all_args.apply { return Ok(Edit::Write(edit_path, fixed));
println!("fixing: {:?}", entry.path());
std::fs::write(entry.path(), fixed).with_context(|| {
format!("cannot overwrite original file: {:?}", entry.path())
})?;
} else {
println!("will fix: {:?}", entry.path());
}
} }
} else { } else {
report_diagnostics(&treehouse.files, &diagnostics)?; report_diagnostics(&treehouse.files, &diagnostics)?;
} }
} }
}
if !fix_all_args.apply { Ok(Edit::NoOp)
println!("run with `--apply` to apply changes");
} }
Ok(()) info!("gathering edits");
vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
match fix_one(dir, path) {
Ok(Edit::NoOp) => (),
Ok(edit) => edits.push(edit),
Err(err) => error!("cannot fix {path}: {err:?}"),
}
ControlFlow::Continue(())
});
// NOTE: This number may be higher than you expect, because NoOp edits also count!
info!("{} edits to apply", edits.len());
if !fix_all_args.apply {
info!("dry run; add `--apply` to apply changes");
Ok(Edit::Dry(Box::new(Edit::All(edits))))
} else {
Ok(Edit::All(edits))
}
} }

View file

@ -2,228 +2,181 @@
mod live_reload; mod live_reload;
use std::fmt::Write; use std::fmt::Write;
use std::{net::Ipv4Addr, path::PathBuf, sync::Arc}; use std::{net::Ipv4Addr, sync::Arc};
use anyhow::Context;
use axum::{ use axum::{
extract::{Path, Query, RawQuery, State}, extract::{Path, Query, RawQuery, State},
http::{ http::{
header::{CACHE_CONTROL, CONTENT_TYPE, LOCATION}, header::{CACHE_CONTROL, CONTENT_TYPE},
HeaderValue, StatusCode, HeaderValue, StatusCode,
}, },
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
routing::get, routing::get,
Router, Router,
}; };
use log::{error, info}; use log::info;
use serde::Deserialize; use serde::Deserialize;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use crate::{ use crate::generate::Sources;
config::Config, use crate::vfs::asynch::AsyncDir;
html::EscapeHtml, use crate::vfs::VPath;
state::{Source, Treehouse}, use crate::{html::EscapeHtml, state::Source};
};
use super::Paths; mod system {
use crate::vfs::VPath;
struct SystemPages { pub const INDEX: &VPath = VPath::new_const("index");
index: String, pub const FOUR_OH_FOUR: &VPath = VPath::new_const("_treehouse/404");
four_oh_four: String, pub const B_DOCS: &VPath = VPath::new_const("_treehouse/b");
b_docs: String,
sandbox: String,
navmap: String,
} }
struct Server { struct Server {
config: Config, sources: Arc<Sources>,
treehouse: Treehouse, target: AsyncDir,
target_dir: PathBuf,
system_pages: SystemPages,
} }
pub async fn serve( pub async fn serve(sources: Arc<Sources>, target: AsyncDir, port: u16) -> anyhow::Result<()> {
config: Config,
treehouse: Treehouse,
paths: &Paths<'_>,
port: u16,
) -> anyhow::Result<()> {
let app = Router::new() let app = Router::new()
.route("/", get(index)) .route("/", get(index)) // needed explicitly because * does not match empty paths
.route("/*page", get(page)) .route("/*path", get(vfs_entry))
.route("/b", get(branch)) .route("/b", get(branch))
.route("/navmap.js", get(navmap))
.route("/sandbox", get(sandbox))
.route("/static/*file", get(static_file))
.fallback(get(four_oh_four)) .fallback(get(four_oh_four))
.with_state(Arc::new(Server { .with_state(Arc::new(Server { sources, target }));
config,
treehouse,
target_dir: paths.target_dir.to_owned(),
system_pages: SystemPages {
index: std::fs::read_to_string(paths.target_dir.join("index.html"))
.context("cannot read index page")?,
four_oh_four: std::fs::read_to_string(paths.target_dir.join("_treehouse/404.html"))
.context("cannot read 404 page")?,
b_docs: std::fs::read_to_string(paths.target_dir.join("_treehouse/b.html"))
.context("cannot read /b documentation page")?,
sandbox: std::fs::read_to_string(paths.target_dir.join("static/html/sandbox.html"))
.context("cannot read sandbox page")?,
navmap: std::fs::read_to_string(paths.target_dir.join("navmap.js"))
.context("cannot read navigation map")?,
},
}));
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
let app = live_reload::live_reload(app); let app = app.nest("/dev/live-reload", live_reload::router());
info!("serving on port {port}"); info!("serving on port {port}");
let listener = TcpListener::bind((Ipv4Addr::from([0u8, 0, 0, 0]), port)).await?; let listener = TcpListener::bind((Ipv4Addr::from([0u8, 0, 0, 0]), port)).await?;
Ok(axum::serve(listener, app).await?) Ok(axum::serve(listener, app).await?)
} }
fn get_content_type(path: &str) -> Option<&'static str> { fn get_content_type(extension: &str) -> Option<&'static str> {
match () { match extension {
_ if path.ends_with(".html") => Some("text/html"), "html" => Some("text/html"),
_ if path.ends_with(".js") => Some("text/javascript"), "js" => Some("text/javascript"),
_ if path.ends_with(".woff2") => Some("font/woff2"), "woff" => Some("font/woff2"),
_ if path.ends_with(".svg") => Some("image/svg+xml"), "svg" => Some("image/svg+xml"),
_ => None, _ => None,
} }
} }
async fn index(State(state): State<Arc<Server>>) -> Response { #[derive(Deserialize)]
Html(state.system_pages.index.clone()).into_response() struct VfsQuery {
#[serde(rename = "v")]
content_version: Option<String>,
} }
async fn navmap(State(state): State<Arc<Server>>) -> Response { async fn get_static_file(path: &str, query: &VfsQuery, state: &Server) -> Option<Response> {
let mut response = state.system_pages.navmap.clone().into_response(); let vpath = VPath::try_new(path).ok()?;
response let content = state.target.content(vpath).await?;
.headers_mut() let mut response = content.into_response();
.insert(CONTENT_TYPE, HeaderValue::from_static("text/javascript"));
response if let Some(content_type) = vpath.extension().and_then(get_content_type) {
response
.headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static(content_type));
} else {
response.headers_mut().remove(CONTENT_TYPE);
}
if query.content_version.is_some() {
response.headers_mut().insert(
CACHE_CONTROL,
HeaderValue::from_static("public, max-age=31536000, immutable"),
);
}
Some(response)
}
async fn vfs_entry(
Path(path): Path<String>,
Query(query): Query<VfsQuery>,
State(state): State<Arc<Server>>,
) -> Response {
if let Some(response) = get_static_file(&path, &query, &state).await {
response
} else {
four_oh_four(State(state)).await
}
}
async fn system_page(target: &AsyncDir, path: &VPath) -> Response {
if let Some(content) = target.content(path).await {
(StatusCode::NOT_FOUND, Html(content)).into_response()
} else {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("500 Internal Server Error: system page {path} is not available"),
)
.into_response()
}
}
async fn index(State(state): State<Arc<Server>>) -> Response {
system_page(&state.target, system::INDEX).await
} }
async fn four_oh_four(State(state): State<Arc<Server>>) -> Response { async fn four_oh_four(State(state): State<Arc<Server>>) -> Response {
( system_page(&state.target, system::FOUR_OH_FOUR).await
StatusCode::NOT_FOUND,
Html(state.system_pages.four_oh_four.clone()),
)
.into_response()
} }
#[derive(Deserialize)] async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Response {
struct StaticFileQuery {
cache: Option<String>,
}
async fn static_file(
Path(path): Path<String>,
Query(query): Query<StaticFileQuery>,
State(state): State<Arc<Server>>,
) -> Response {
if let Ok(file) = tokio::fs::read(state.target_dir.join("static").join(&path)).await {
let mut response = file.into_response();
if let Some(content_type) = get_content_type(&path) {
response
.headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static(content_type));
} else {
response.headers_mut().remove(CONTENT_TYPE);
}
if query.cache.is_some() {
response.headers_mut().insert(
CACHE_CONTROL,
HeaderValue::from_static("public, max-age=31536000, immutable"),
);
}
response
} else {
four_oh_four(State(state)).await
}
}
async fn page(Path(path): Path<String>, State(state): State<Arc<Server>>) -> Response {
let bare_path = path.strip_suffix(".html").unwrap_or(&path);
if let Some(redirected_path) = state.config.redirects.page.get(bare_path) {
return (
StatusCode::MOVED_PERMANENTLY,
[(LOCATION, format!("{}/{redirected_path}", state.config.site))],
)
.into_response();
}
let html_path = format!("{bare_path}.html");
if let Ok(file) = tokio::fs::read(state.target_dir.join(&*html_path)).await {
([(CONTENT_TYPE, "text/html")], file).into_response()
} else {
four_oh_four(State(state)).await
}
}
async fn sandbox(State(state): State<Arc<Server>>) -> Response {
// Small hack to prevent the LiveReloadLayer from injecting itself into the sandbox.
// The sandbox is always nested under a different page, so there's no need to do that.
let mut response = Html(state.system_pages.sandbox.clone()).into_response();
#[cfg(debug_assertions)]
{
response
.extensions_mut()
.insert(live_reload::DisableLiveReload);
}
// Debounce requests a bit. There's a tendency to have very many sandboxes on a page, and
// loading this page as many times as there are sandboxes doesn't seem like the best way to do
// things.
response
.headers_mut()
.insert(CACHE_CONTROL, HeaderValue::from_static("max-age=10"));
response
}
async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Html<String> {
if let Some(named_id) = named_id { if let Some(named_id) = named_id {
let branch_id = state let branch_id = state
.sources
.treehouse .treehouse
.branches_by_named_id .branches_by_named_id
.get(&named_id) .get(&named_id)
.copied() .copied()
.or_else(|| state.treehouse.branch_redirects.get(&named_id).copied()); .or_else(|| {
state
.sources
.treehouse
.branch_redirects
.get(&named_id)
.copied()
});
if let Some(branch_id) = branch_id { if let Some(branch_id) = branch_id {
let branch = state.treehouse.tree.branch(branch_id); let branch = state.sources.treehouse.tree.branch(branch_id);
if let Source::Tree { if let Source::Tree {
input, target_path, .. input, target_path, ..
} = state.treehouse.source(branch.file_id) } = state.sources.treehouse.source(branch.file_id)
{ {
match std::fs::read_to_string(target_path) { if let Some(content) = state
Ok(content) => { .target
let branch_markdown_content = input[branch.content.clone()].trim(); .content(target_path)
let mut per_page_metadata = .await
String::from("<meta property=\"og:description\" content=\""); .and_then(|s| String::from_utf8(s).ok())
write!(per_page_metadata, "{}", EscapeHtml(branch_markdown_content)) {
.unwrap(); let branch_markup = input[branch.content.clone()].trim();
per_page_metadata.push_str("\">"); let mut per_page_metadata =
String::from("<meta property=\"og:description\" content=\"");
write!(per_page_metadata, "{}", EscapeHtml(branch_markup)).unwrap();
per_page_metadata.push_str("\">");
const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->"; const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";
return Html(content.replacen( return Html(content.replacen(
PER_PAGE_METADATA_REPLACEMENT_STRING, PER_PAGE_METADATA_REPLACEMENT_STRING,
&per_page_metadata, &per_page_metadata,
// Replace one under the assumption that it appears in all pages. // Replace one under the assumption that it appears in all pages.
1, 1,
)); ))
} .into_response();
Err(e) => { } else {
error!("error while reading file {target_path:?}: {e:?}"); return (
} StatusCode::INTERNAL_SERVER_ERROR,
format!("500 Internal Server Error: branch metadata points to entry {target_path} which does not have readable content")
)
.into_response();
} }
} }
} }
Html(state.system_pages.four_oh_four.clone()) system_page(&state.target, system::FOUR_OH_FOUR).await
} else { } else {
Html(state.system_pages.b_docs.clone()) system_page(&state.target, system::B_DOCS).await
} }
} }

View file

@ -1,21 +1,28 @@
use axum::{ use std::time::Duration;
http::{header::CONTENT_TYPE, Response},
Router,
};
#[derive(Debug, Clone, Copy)] use axum::{routing::get, Router};
pub struct DisableLiveReload; use tokio::time::sleep;
pub fn live_reload(router: Router) -> Router { pub fn router<S>() -> Router<S> {
router.layer(tower_livereload::LiveReloadLayer::new().response_predicate( let router = Router::new().route("/back-up", get(back_up));
|response: &Response<_>| {
let is_html = response // The endpoint for immediate reload is only enabled on debug builds.
.headers() // Release builds use the exponential backoff system that detects is the WebSocket is closed.
.get(CONTENT_TYPE) #[cfg(debug_assertions)]
.and_then(|v| v.to_str().ok()) let router = router.route("/stall", get(stall));
.is_some_and(|v| v.starts_with("text/html"));
let is_disabled = response.extensions().get::<DisableLiveReload>().is_some(); router.with_state(())
is_html && !is_disabled }
},
)) #[cfg(debug_assertions)]
async fn stall() -> String {
loop {
// Sleep for a day, I guess. Just to uphold the connection forever without really using any
// significant resources.
sleep(Duration::from_secs(60 * 60 * 24)).await;
}
}
async fn back_up() -> String {
"".into()
} }

View file

@ -1,12 +1,11 @@
use std::{ffi::OsStr, path::Path}; use std::ops::ControlFlow;
use anyhow::Context;
use treehouse_format::ast::{Branch, Roots}; use treehouse_format::ast::{Branch, Roots};
use walkdir::WalkDir;
use crate::{ use crate::{
parse::parse_tree_with_diagnostics, parse::parse_tree_with_diagnostics,
state::{report_diagnostics, Source, Treehouse}, state::{report_diagnostics, Source, Treehouse},
vfs::{self, Dir, VPath},
}; };
use super::WcArgs; use super::WcArgs;
@ -29,14 +28,14 @@ fn wc_roots(source: &str, roots: &Roots) -> usize {
.sum() .sum()
} }
pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> { pub fn wc_cli(content_dir: &dyn Dir, mut wc_args: WcArgs) -> anyhow::Result<()> {
if wc_args.paths.is_empty() { if wc_args.paths.is_empty() {
for entry in WalkDir::new(content_dir) { vfs::walk_dir_rec(content_dir, VPath::ROOT, &mut |path| {
let entry = entry?; if path.extension() == Some("tree") {
if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) { wc_args.paths.push(path.to_owned());
wc_args.paths.push(entry.into_path());
} }
} ControlFlow::Continue(())
});
} }
let mut treehouse = Treehouse::new(); let mut treehouse = Treehouse::new();
@ -44,24 +43,21 @@ pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
let mut total = 0; let mut total = 0;
for path in &wc_args.paths { for path in &wc_args.paths {
let file = std::fs::read_to_string(path) if let Some(content) = content_dir
.with_context(|| format!("cannot read file to word count: {path:?}"))?; .content(path)
let path_without_ext = path.with_extension(""); .and_then(|b| String::from_utf8(b).ok())
let utf8_filename = path_without_ext {
.strip_prefix(content_dir) let file_id = treehouse.add_file(path.to_string(), Source::Other(content));
.expect("paths should be rooted within the content directory") match parse_tree_with_diagnostics(&mut treehouse, file_id) {
.to_string_lossy(); Ok(parsed) => {
let source = treehouse.source(file_id);
let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file)); let word_count = wc_roots(source.input(), &parsed);
match parse_tree_with_diagnostics(&mut treehouse, file_id) { println!("{word_count:>8} {}", treehouse.filename(file_id));
Ok(parsed) => { total += word_count;
let source = treehouse.source(file_id); }
let word_count = wc_roots(source.input(), &parsed); Err(diagnostics) => {
println!("{word_count:>8} {}", treehouse.filename(file_id)); report_diagnostics(&treehouse.files, &diagnostics)?;
total += word_count; }
}
Err(diagnostics) => {
report_diagnostics(&treehouse.files, &diagnostics)?;
} }
} }
} }

View file

@ -1,11 +1,8 @@
use std::{ use std::{collections::HashMap, ops::ControlFlow};
collections::HashMap, ffi::OsStr, fs::File, io::BufReader, ops::ControlFlow, path::Path,
};
use anyhow::Context; use anyhow::{anyhow, Context};
use log::debug; use log::{debug, error};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use walkdir::WalkDir;
use crate::{ use crate::{
html::highlight::{ html::highlight::{
@ -13,7 +10,7 @@ use crate::{
Syntax, Syntax,
}, },
import_map::ImportRoot, import_map::ImportRoot,
vfs::{self, ReadFilesystem, VPath, VPathBuf}, vfs::{self, Dir, VPath, VPathBuf},
}; };
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
@ -102,8 +99,8 @@ pub enum Markup {
} }
impl Config { impl Config {
pub fn autopopulate_emoji(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> { pub fn autopopulate_emoji(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
vfs::walk_rec(dir, VPath::ROOT, &mut |path| { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
if path.extension().is_some_and(is_emoji_file) { if path.extension().is_some_and(is_emoji_file) {
if let Some(emoji_name) = path.file_stem() { if let Some(emoji_name) = path.file_stem() {
if !self.emoji.contains_key(emoji_name) { if !self.emoji.contains_key(emoji_name) {
@ -118,8 +115,8 @@ impl Config {
Ok(()) Ok(())
} }
pub fn autopopulate_pics(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> { pub fn autopopulate_pics(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
vfs::walk_rec(dir, VPath::ROOT, &mut |path| { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
if path.extension().is_some_and(is_pic_file) { if path.extension().is_some_and(is_pic_file) {
if let Some(pic_name) = path.file_stem() { if let Some(pic_name) = path.file_stem() {
let pic_id = pic_name let pic_id = pic_name
@ -142,37 +139,47 @@ impl Config {
format!("{}/{}", self.site, page) format!("{}/{}", self.site, page)
} }
pub fn pic_url(&self, pics_fs: &dyn ReadFilesystem, id: &str) -> String { pub fn pic_url(&self, pics_dir: &dyn Dir, id: &str) -> String {
vfs::url( vfs::url(
&self.site, &self.site,
pics_fs, pics_dir,
self.pics self.pics
.get(id) .get(id)
.map(|x| &**x) .map(|x| &**x)
.unwrap_or(VPath::new("404.png")), .unwrap_or(VPath::new("404.png")),
) )
.expect("pics_dir is not anchored anywhere")
} }
/// Loads all syntax definition files. /// Loads all syntax definition files.
pub fn load_syntaxes(&mut self, dir: &Path) -> anyhow::Result<()> { pub fn load_syntaxes(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
for entry in WalkDir::new(dir) { vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
let entry = entry?; if path.extension() == Some("json") {
if entry.path().extension() == Some(OsStr::new("json")) { let name = path
let name = entry
.path()
.file_stem() .file_stem()
.expect("syntax file name should have a stem") .expect("syntax file name should have a stem due to the .json extension");
.to_string_lossy();
debug!("loading syntax {name:?}"); debug!("loading syntax {name:?}");
let syntax: Syntax = serde_json::from_reader(BufReader::new( let result: Result<Syntax, _> = dir
File::open(entry.path()).context("could not open syntax file")?, .content(path)
)) .ok_or_else(|| anyhow!("syntax .json is not a file"))
.context("could not deserialize syntax file")?; .and_then(|b| {
let compiled = compile_syntax(&syntax); String::from_utf8(b).context("syntax .json contains invalid UTF-8")
self.syntaxes.insert(name.into_owned(), compiled); })
.and_then(|s| {
serde_json::from_str(&s).context("could not deserialize syntax file")
});
match result {
Ok(syntax) => {
let compiled = compile_syntax(&syntax);
self.syntaxes.insert(name.to_owned(), compiled);
}
Err(err) => error!("error while loading syntax file `{path}`: {err}"),
}
} }
}
ControlFlow::Continue(())
});
Ok(()) Ok(())
} }

View file

@ -0,0 +1,15 @@
use crate::vfs::DynDir;
/// Bundle of virtual-filesystem directory handles used throughout the generator.
///
/// Passing one `Dirs` around replaces the old `Paths<'a>` struct of raw
/// `&Path`s. NOTE(review): the exact semantics of each directory are inferred
/// from their names and from usage elsewhere in this commit — confirm against
/// the call sites that construct `Dirs`.
#[derive(Debug, Clone)]
pub struct Dirs {
    pub root: DynDir,
    pub content: DynDir,
    pub static_: DynDir,
    pub template: DynDir,

    // `static` directories
    pub pics: DynDir,
    pub emoji: DynDir,
    pub syntax: DynDir,
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
use crate::vfs::{self, DynDir, VPath};
/// Handlebars helper that resolves a virtual path inside `dir` to a full URL
/// rooted at `site`.
pub struct DirHelper {
    site: String,
    dir: DynDir,
}

impl DirHelper {
    /// Creates a helper serving URLs under `site` for entries of `dir`.
    pub fn new(site: &str, dir: DynDir) -> Self {
        Self {
            site: site.to_owned(),
            dir,
        }
    }
}

impl HelperDef for DirHelper {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        h: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        // The first helper parameter must be a string: the path to resolve.
        let raw_path = match h.param(0).and_then(|param| param.value().as_str()) {
            Some(s) => s,
            None => return Err(RenderError::new("missing path string")),
        };
        // Validate the string as a virtual path before resolving it.
        let vpath = VPath::try_new(raw_path).map_err(|e| RenderError::new(e.to_string()))?;
        match vfs::url(&self.site, &self.dir, vpath) {
            Some(url) => Ok(ScopedJson::Derived(Value::String(url))),
            // `vfs::url` yields None when the directory has no anchor to build
            // a URL from.
            None => Err(RenderError::new("path is not anchored anywhere")),
        }
    }
}

View file

@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
use crate::vfs::{DynDir, VPath};
/// Handlebars helper that inlines the UTF-8 content of a file from `dir`
/// directly into the rendered template.
pub struct IncludeStaticHelper {
    dir: DynDir,
}

impl IncludeStaticHelper {
    /// Creates a helper reading included files from `dir`.
    pub fn new(dir: DynDir) -> Self {
        Self { dir }
    }
}

impl HelperDef for IncludeStaticHelper {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        h: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        // The first helper parameter must be a string: the path of the file
        // whose content should be included.
        let raw_path = match h.param(0).and_then(|param| param.value().as_str()) {
            Some(s) => s,
            None => return Err(RenderError::new("missing path string")),
        };
        let vpath = VPath::try_new(raw_path).map_err(|e| RenderError::new(e.to_string()))?;
        // Read the raw bytes, then require them to be valid UTF-8 since the
        // result is spliced into text output.
        let bytes = self
            .dir
            .content(vpath)
            .ok_or_else(|| RenderError::new("file does not exist"))?;
        let text = String::from_utf8(bytes)
            .map_err(|_| RenderError::new("included file does not contain UTF-8 text"))?;
        Ok(ScopedJson::Derived(Value::String(text)))
    }
}

View file

@ -17,10 +17,10 @@ use jotdown::OrderedListNumbering::*;
use jotdown::SpanLinkType; use jotdown::SpanLinkType;
use crate::config::Config; use crate::config::Config;
use crate::dirs::Dirs;
use crate::state::FileId; use crate::state::FileId;
use crate::state::Treehouse; use crate::state::Treehouse;
use crate::vfs; use crate::vfs;
use crate::vfs::ReadFilesystem;
use super::highlight::highlight; use super::highlight::highlight;
@ -28,10 +28,9 @@ use super::highlight::highlight;
pub struct Renderer<'a> { pub struct Renderer<'a> {
pub config: &'a Config, pub config: &'a Config,
pub emoji_fs: &'a dyn ReadFilesystem, pub dirs: &'a Dirs,
pub pics_fs: &'a dyn ReadFilesystem,
pub treehouse: &'a mut Treehouse, pub treehouse: &'a Treehouse,
pub file_id: FileId, pub file_id: FileId,
pub page_id: String, pub page_id: String,
} }
@ -376,7 +375,7 @@ impl<'a> Writer<'a> {
let pic_url = self let pic_url = self
.renderer .renderer
.config .config
.pic_url(self.renderer.pics_fs, placeholder_pic_id); .pic_url(&*self.renderer.dirs.pics, placeholder_pic_id);
write_attr(&pic_url, out); write_attr(&pic_url, out);
out.push('"'); out.push('"');
@ -563,7 +562,12 @@ impl<'a> Writer<'a> {
out.push_str(r#"">"#) out.push_str(r#"">"#)
} }
let url = vfs::url(&self.renderer.config.site, self.renderer.emoji_fs, vpath); let url = vfs::url(
&self.renderer.config.site,
&*self.renderer.dirs.emoji,
vpath,
)
.expect("emoji directory is not anchored anywhere");
// TODO: this could do with better alt text // TODO: this could do with better alt text
write!( write!(
@ -644,7 +648,7 @@ impl<'a> Writer<'a> {
) )
}), }),
"page" => Some(config.page_url(linked)), "page" => Some(config.page_url(linked)),
"pic" => Some(config.pic_url(self.renderer.pics_fs, linked)), "pic" => Some(config.pic_url(&*self.renderer.dirs.pics, linked)),
_ => None, _ => None,
}) })
} }

View file

@ -1,82 +1,72 @@
use std::collections::HashMap; use std::collections::HashMap;
use serde::Serialize;
use crate::{ use crate::{
state::Treehouse, state::Treehouse,
tree::{attributes::Content, SemaBranchId}, tree::{attributes::Content, SemaBranchId},
}; };
#[derive(Debug, Clone, Default, Serialize)] #[derive(Debug, Clone, Default)]
struct NavigationMapBuilder {
stack: Vec<String>,
navigation_map: NavigationMap,
}
impl NavigationMapBuilder {
fn enter_tree(&mut self, tree: String) {
self.stack.push(tree.clone());
self.navigation_map.paths.insert(tree, self.stack.clone());
}
fn exit_tree(&mut self) {
self.stack.pop();
}
fn finish(self) -> NavigationMap {
self.navigation_map
}
}
#[derive(Debug, Clone, Default)]
pub struct NavigationMap { pub struct NavigationMap {
/// Tells you which pages need to be opened to get to the key. /// Tells you which pages need to be opened to get to the key.
pub paths: HashMap<String, Vec<String>>, pub paths: HashMap<String, Vec<String>>,
} }
impl NavigationMap { impl NavigationMap {
pub fn to_javascript(&self) -> String { pub fn build(treehouse: &Treehouse, root_tree_path: &str) -> Self {
format!( let mut builder = NavigationMapBuilder::default();
"export const navigationMap = {};",
serde_json::to_string(&self.paths)
.expect("serialization of the navigation map should not fail")
)
}
}
#[derive(Debug, Clone, Default)] fn rec_branch(
pub struct NavigationMapBuilder { treehouse: &Treehouse,
stack: Vec<String>, builder: &mut NavigationMapBuilder,
navigation_map: NavigationMap, branch_id: SemaBranchId,
} ) {
let branch = treehouse.tree.branch(branch_id);
impl NavigationMapBuilder { if let Content::Link(linked) = &branch.attributes.content {
pub fn enter_tree(&mut self, tree: String) { rec_tree(treehouse, builder, linked);
self.stack.push(tree.clone()); } else {
self.navigation_map.paths.insert(tree, self.stack.clone()); for &child_id in &branch.children {
} rec_branch(treehouse, builder, child_id);
pub fn exit_tree(&mut self) {
self.stack.pop();
}
pub fn finish(self) -> NavigationMap {
self.navigation_map
}
}
pub fn build_navigation_map(treehouse: &Treehouse, root_tree_path: &str) -> NavigationMap {
let mut builder = NavigationMapBuilder::default();
fn rec_branch(
treehouse: &Treehouse,
builder: &mut NavigationMapBuilder,
branch_id: SemaBranchId,
) {
let branch = treehouse.tree.branch(branch_id);
if let Content::Link(linked) = &branch.attributes.content {
rec_tree(treehouse, builder, linked);
} else {
for &child_id in &branch.children {
rec_branch(treehouse, builder, child_id);
}
}
}
fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, tree_path: &str) {
if let Some(roots) = treehouse.roots.get(tree_path) {
// Pages can link to each other causing infinite recursion, so we need to handle that
// case by skipping pages that already have been analyzed.
if !builder.navigation_map.paths.contains_key(tree_path) {
builder.enter_tree(tree_path.to_owned());
for &branch_id in &roots.branches {
rec_branch(treehouse, builder, branch_id);
} }
builder.exit_tree();
} }
} }
fn rec_tree(treehouse: &Treehouse, builder: &mut NavigationMapBuilder, tree_path: &str) {
if let Some(roots) = treehouse.roots.get(tree_path) {
// Pages can link to each other causing infinite recursion, so we need to handle that
// case by skipping pages that already have been analyzed.
if !builder.navigation_map.paths.contains_key(tree_path) {
builder.enter_tree(tree_path.to_owned());
for &branch_id in &roots.branches {
rec_branch(treehouse, builder, branch_id);
}
builder.exit_tree();
}
}
}
rec_tree(treehouse, &mut builder, root_tree_path);
builder.finish()
} }
rec_tree(treehouse, &mut builder, root_tree_path);
builder.finish()
} }

View file

@ -3,25 +3,23 @@ use std::{borrow::Cow, fmt::Write};
use treehouse_format::pull::BranchKind; use treehouse_format::pull::BranchKind;
use crate::{ use crate::{
cli::Paths,
config::Config, config::Config,
dirs::Dirs,
html::EscapeAttribute, html::EscapeAttribute,
state::{FileId, Treehouse}, state::{FileId, Treehouse},
tree::{ tree::{
attributes::{Content, Stage}, attributes::{Content, Stage},
mini_template, SemaBranchId, mini_template, SemaBranchId,
}, },
vfs::{CdExt, ReadFilesystem, VPathBuf},
}; };
use super::{djot, EscapeHtml}; use super::{djot, EscapeHtml};
pub fn branch_to_html( pub fn branch_to_html(
s: &mut String, s: &mut String,
treehouse: &mut Treehouse, treehouse: &Treehouse,
config: &Config, config: &Config,
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges dirs: &Dirs,
paths: &Paths<'_>,
file_id: FileId, file_id: FileId,
branch_id: SemaBranchId, branch_id: SemaBranchId,
) { ) {
@ -116,13 +114,7 @@ pub fn branch_to_html(
} }
if branch.attributes.template { if branch.attributes.template {
final_markup = mini_template::render( final_markup = mini_template::render(config, treehouse, dirs, &final_markup);
config,
treehouse,
paths,
&root_fs.cd(VPathBuf::new("static/pics")),
&final_markup,
);
} }
s.push_str("<th-bc>"); s.push_str("<th-bc>");
@ -137,8 +129,7 @@ pub fn branch_to_html(
.to_owned(), .to_owned(),
config, config,
emoji_fs: &root_fs.cd(VPathBuf::new("static/emoji")), dirs,
pics_fs: &root_fs.cd(VPathBuf::new("static/pics")),
treehouse, treehouse,
file_id, file_id,
@ -195,7 +186,7 @@ pub fn branch_to_html(
let num_children = branch.children.len(); let num_children = branch.children.len();
for i in 0..num_children { for i in 0..num_children {
let child_id = treehouse.tree.branch(branch_id).children[i]; let child_id = treehouse.tree.branch(branch_id).children[i];
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child_id); branch_to_html(s, treehouse, config, dirs, file_id, child_id);
} }
s.push_str("</ul>"); s.push_str("</ul>");
} }
@ -209,16 +200,15 @@ pub fn branch_to_html(
pub fn branches_to_html( pub fn branches_to_html(
s: &mut String, s: &mut String,
treehouse: &mut Treehouse, treehouse: &Treehouse,
config: &Config, config: &Config,
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges dirs: &Dirs,
paths: &Paths<'_>,
file_id: FileId, file_id: FileId,
branches: &[SemaBranchId], branches: &[SemaBranchId],
) { ) {
s.push_str("<ul>"); s.push_str("<ul>");
for &child in branches { for &child in branches {
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child); branch_to_html(s, treehouse, config, dirs, file_id, child);
} }
s.push_str("</ul>"); s.push_str("</ul>");
} }

View file

@ -1,11 +1,9 @@
use std::{ffi::OsStr, path::PathBuf}; use std::ops::ControlFlow;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::warn;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use walkdir::WalkDir;
use crate::static_urls::StaticUrls; use crate::vfs::{self, Dir, VPathBuf};
#[derive(Debug, Clone, Serialize)] #[derive(Debug, Clone, Serialize)]
pub struct ImportMap { pub struct ImportMap {
@ -15,49 +13,30 @@ pub struct ImportMap {
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ImportRoot { pub struct ImportRoot {
pub name: String, pub name: String,
pub path: String, pub path: VPathBuf,
} }
impl ImportMap { impl ImportMap {
pub fn generate(base_url: String, import_roots: &[ImportRoot]) -> Self { pub fn generate(site: &str, root: &dyn Dir, import_roots: &[ImportRoot]) -> Self {
let mut import_map = ImportMap { let mut import_map = ImportMap {
imports: IndexMap::new(), imports: IndexMap::new(),
}; };
for root in import_roots { for import_root in import_roots {
let static_urls = StaticUrls::new( vfs::walk_dir_rec(root, &import_root.path, &mut |path| {
PathBuf::from(&root.path), if path.extension() == Some("js") {
format!("{base_url}/{}", root.path), import_map.imports.insert(
); format!(
for entry in WalkDir::new(&root.path) { "{}/{}",
let entry = match entry { import_root.name,
Ok(entry) => entry, path.strip_prefix(&import_root.path).unwrap_or(path)
Err(error) => { ),
warn!("directory walk failed: {error}"); vfs::url(site, root, path)
continue; .expect("import directory is not anchored anywhere"),
} );
};
if !entry.file_type().is_dir() && entry.path().extension() == Some(OsStr::new("js"))
{
let normalized_path = entry
.path()
.strip_prefix(&root.path)
.unwrap_or(entry.path())
.to_string_lossy()
.replace('\\', "/");
match static_urls.get(&normalized_path) {
Ok(url) => {
import_map
.imports
.insert(format!("{}/{normalized_path}", root.name), url);
}
Err(error) => {
warn!("could not get static url for {normalized_path}: {error}")
}
}
} }
} ControlFlow::Continue(())
});
} }
import_map.imports.sort_unstable_keys(); import_map.imports.sort_unstable_keys();

View file

@ -1,28 +0,0 @@
use std::path::PathBuf;
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
/// Handlebars helper that inlines the contents of a static asset file into the template.
pub struct IncludeStatic {
    /// Directory that asset paths passed to the helper are resolved against.
    pub base_dir: PathBuf,
}
impl HelperDef for IncludeStatic {
    /// Resolves the first helper parameter against `base_dir` and returns the
    /// file's contents as a string value.
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        helper: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) else {
            return Err(RenderError::new("asset path must be provided"));
        };
        let contents = std::fs::read_to_string(self.base_dir.join(param)).map_err(|error| {
            RenderError::new(format!("cannot read static asset {param}: {error}"))
        })?;
        Ok(ScopedJson::Derived(Value::String(contents)))
    }
}

View file

@ -1,15 +1,13 @@
pub mod cli; pub mod cli;
pub mod config; pub mod config;
pub mod dirs;
pub mod fun; pub mod fun;
pub mod generate; pub mod generate;
pub mod history; pub mod history;
pub mod html; pub mod html;
pub mod import_map; pub mod import_map;
pub mod include_static;
pub mod parse; pub mod parse;
pub mod paths; pub mod paths;
pub mod state; pub mod state;
pub mod static_urls;
pub mod templater;
pub mod tree; pub mod tree;
pub mod vfs; pub mod vfs;

View file

@ -1,76 +1,78 @@
use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
use std::{fs, path::Path}; use std::sync::Arc;
use anyhow::Context;
use clap::Parser; use clap::Parser;
use log::error; use log::error;
use treehouse::generate::{regenerate_or_report_error, LatestRevision}; use treehouse::cli::serve::serve;
use treehouse::vfs::PhysicalDir; use treehouse::dirs::Dirs;
use treehouse::vfs::{AnchoredAtExt, VPathBuf}; use treehouse::generate::{self, Sources};
use treehouse::vfs::asynch::AsyncDir;
use treehouse::vfs::{AnchoredAtExt, DynDir, ToDynDir, VPathBuf};
use treehouse::vfs::{Cd, PhysicalDir};
use treehouse::{ use treehouse::{
cli::{ cli::{
fix::{fix_all_cli, fix_file_cli}, fix::{fix_all_cli, fix_file_cli},
serve::serve,
wc::wc_cli, wc::wc_cli,
Command, Paths, ProgramArgs, Command, ProgramArgs,
}, },
vfs::{BufferedFile, MountPoints, ReadFilesystem, VPath}, vfs::{BufferedFile, MemDir, VPath},
}; };
fn vfs_sources() -> anyhow::Result<impl ReadFilesystem> { fn vfs_sources() -> anyhow::Result<DynDir> {
let mut root = MountPoints::new(); let mut root = MemDir::new();
root.add( root.add(
VPath::new("treehouse.toml"), VPath::new("treehouse.toml"),
Box::new(BufferedFile::new(fs::read("treehouse.toml")?)), BufferedFile::new(fs::read("treehouse.toml")?).to_dyn(),
); );
root.add( root.add(
VPath::new("static"), VPath::new("static"),
Box::new(PhysicalDir::new(PathBuf::from("static")).anchored_at(VPathBuf::new("static"))), PhysicalDir::new(PathBuf::from("static"))
.anchored_at(VPathBuf::new("static"))
.to_dyn(),
); );
root.add( root.add(
VPath::new("template"), VPath::new("template"),
Box::new(PhysicalDir::new(PathBuf::from("template"))), PhysicalDir::new(PathBuf::from("template")).to_dyn(),
); );
root.add( root.add(
VPath::new("content"), VPath::new("content"),
Box::new(PhysicalDir::new(PathBuf::from("content"))), PhysicalDir::new(PathBuf::from("content")).to_dyn(),
); );
Ok(root) Ok(root.to_dyn())
} }
async fn fallible_main() -> anyhow::Result<()> { async fn fallible_main() -> anyhow::Result<()> {
let args = ProgramArgs::parse(); let args = ProgramArgs::parse();
let paths = Paths {
target_dir: Path::new("target/site"),
template_target_dir: Path::new("target/site/static/html"),
config_file: Path::new("treehouse.toml"),
static_dir: Path::new("static"),
template_dir: Path::new("template"),
content_dir: Path::new("content"),
};
let src = vfs_sources()?; let src = vfs_sources()?;
let dirs = Arc::new(Dirs {
root: src.clone(),
content: Cd::new(src.clone(), VPathBuf::new("content")).to_dyn(),
static_: Cd::new(src.clone(), VPathBuf::new("static")).to_dyn(),
template: Cd::new(src.clone(), VPathBuf::new("template")).to_dyn(),
pics: Cd::new(src.clone(), VPathBuf::new("static/pics")).to_dyn(),
emoji: Cd::new(src.clone(), VPathBuf::new("static/emoji")).to_dyn(),
syntax: Cd::new(src.clone(), VPathBuf::new("static/syntax")).to_dyn(),
});
match args.command { match args.command {
Command::Serve { Command::Serve {
generate: generate_args, generate: _,
serve: serve_args, serve: serve_args,
} => { } => {
let latest_revision = match generate_args.commits_only { let sources = Arc::new(Sources::load(&dirs).context("failed to load sources")?);
true => LatestRevision::LatestCommit, let target = generate::target(dirs, sources.clone());
false => LatestRevision::WorkingTree, serve(sources, AsyncDir::new(target), serve_args.port).await?;
};
let (config, treehouse) = regenerate_or_report_error(&paths, &src, latest_revision)?;
serve(config, treehouse, &paths, serve_args.port).await?;
} }
Command::Fix(fix_args) => fix_file_cli(fix_args)?, Command::Fix(fix_args) => fix_file_cli(fix_args, &*dirs.content)?.apply().await?,
Command::FixAll(fix_args) => fix_all_cli(fix_args, &paths)?, Command::FixAll(fix_args) => fix_all_cli(fix_args, &*dirs.content)?.apply().await?,
Command::Wc(wc_args) => wc_cli(paths.content_dir, wc_args)?, Command::Wc(wc_args) => wc_cli(&dirs.content, wc_args)?,
Command::Ulid => { Command::Ulid => {
let mut rng = rand::thread_rng(); let mut rng = rand::thread_rng();

View file

@ -1,4 +1,4 @@
use std::{collections::HashMap, ops::Range, path::PathBuf}; use std::{collections::HashMap, ops::Range};
use anyhow::Context; use anyhow::Context;
use codespan_reporting::{ use codespan_reporting::{
@ -6,26 +6,19 @@ use codespan_reporting::{
files::SimpleFiles, files::SimpleFiles,
term::termcolor::{ColorChoice, StandardStream}, term::termcolor::{ColorChoice, StandardStream},
}; };
use serde::Serialize;
use ulid::Ulid; use ulid::Ulid;
use crate::tree::{SemaBranchId, SemaRoots, SemaTree}; use crate::{
tree::{SemaBranchId, SemaRoots, SemaTree},
#[derive(Debug, Clone, Serialize)] vfs::VPathBuf,
pub struct RevisionInfo { };
pub is_latest: bool,
pub number: usize,
pub commit: String,
pub commit_short: String,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Source { pub enum Source {
Tree { Tree {
input: String, input: String,
tree_path: String, tree_path: String,
target_path: PathBuf, target_path: VPathBuf,
revision_info: RevisionInfo,
}, },
Other(String), Other(String),
} }
@ -103,13 +96,6 @@ impl Treehouse {
} }
} }
pub fn revision_info(&self, file_id: FileId) -> Option<&RevisionInfo> {
match self.source(file_id) {
Source::Tree { revision_info, .. } => Some(revision_info),
Source::Other(_) => None,
}
}
pub fn next_missingno(&mut self) -> Ulid { pub fn next_missingno(&mut self) -> Ulid {
self.missingno_generator self.missingno_generator
.generate() .generate()

View file

@ -1,89 +0,0 @@
use std::{
collections::HashMap,
fs::File,
io::{self, BufReader},
path::PathBuf,
sync::{Mutex, RwLock},
};
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;
/// Handlebars helper that maps static asset paths to cache-busting URLs.
pub struct StaticUrls {
    /// Directory the asset files are read from when hashing.
    base_dir: PathBuf,
    /// URL prefix prepended to every generated asset URL.
    base_url: String,
    // Really annoying that we have to use an RwLock for this. We only ever generate in a
    // single-threaded environment.
    // Honestly it would be a lot more efficient if Handlebars just assumed single-threadedness
    // and required you to clone it over to different threads.
    // Stuff like this is why I really want to implement my own templating engine...
    /// Cache of content hashes keyed by filename, so each file is hashed only once.
    hash_cache: RwLock<HashMap<String, String>>,
    /// Files requested by templates that could not be read from disk.
    missing_files: Mutex<Vec<MissingFile>>,
}
/// Record of a static asset that a template referenced but which could not be read.
pub struct MissingFile {
    /// The asset path exactly as requested by the template.
    pub path: String,
}
impl StaticUrls {
    /// Creates a helper serving assets from `base_dir` under the `base_url` prefix.
    pub fn new(base_dir: PathBuf, base_url: String) -> Self {
        Self {
            base_dir,
            base_url,
            hash_cache: RwLock::new(HashMap::new()),
            missing_files: Mutex::new(vec![]),
        }
    }

    /// Returns a cache-busting URL for `filename`, hashing the file's contents on
    /// first use and serving subsequent lookups from the in-memory cache.
    pub fn get(&self, filename: &str) -> Result<String, io::Error> {
        // Fast path: the hash for this file was already computed earlier.
        if let Some(cached) = self.hash_cache.read().unwrap().get(filename) {
            return Ok(cached.clone());
        }

        let mut hasher = blake3::Hasher::new();
        let file = BufReader::new(File::open(self.base_dir.join(filename))?);
        hasher.update_reader(file)?;
        // NOTE: Here the hash is truncated to 8 characters. This is fine, because we don't
        // care about security here - only detecting changes in files.
        let hash = format!(
            "{}/{}?cache=b3-{}",
            self.base_url,
            filename,
            &hasher.finalize().to_hex()[0..8]
        );
        self.hash_cache
            .write()
            .unwrap()
            .insert(filename.to_owned(), hash.clone());
        Ok(hash)
    }

    /// Drains and returns the list of files that template rendering failed to find.
    pub fn take_missing_files(&self) -> Vec<MissingFile> {
        std::mem::take(&mut self.missing_files.lock().unwrap())
    }
}
impl HelperDef for StaticUrls {
    /// Resolves the first helper parameter to a cache-busting URL; if hashing
    /// fails, records the file as missing and falls back to an unhashed URL.
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        helper: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) else {
            return Err(RenderError::new("asset path must be provided"));
        };
        let url = self.get(param).unwrap_or_else(|_| {
            self.missing_files.lock().unwrap().push(MissingFile {
                path: param.to_owned(),
            });
            format!("{}/{}", self.base_url, param)
        });
        Ok(ScopedJson::Derived(Value::String(url)))
    }
}

View file

@ -1,13 +0,0 @@
use handlebars::Handlebars;
/// Wrapper around a Handlebars template registry.
pub struct Templater {
    /// The underlying Handlebars instance templates are registered with.
    handlebars: Handlebars<'static>,
}
impl Templater {
    /// Creates a templater with a fresh, default-configured Handlebars registry.
    pub fn new() -> Self {
        Self {
            handlebars: Handlebars::new(),
        }
    }
}

// `Default` mirrors `Templater::new` so the type works with generic code that
// requires `Default` (and satisfies clippy's `new_without_default` lint).
impl Default for Templater {
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -163,13 +163,7 @@ impl SemaBranch {
) -> SemaBranchId { ) -> SemaBranchId {
let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch); let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch);
let revision_info = treehouse let named_id = attributes.id.to_owned();
.revision_info(file_id)
.expect(".tree files must have Tree-type sources");
let named_id = match revision_info.is_latest {
true => attributes.id.to_owned(),
false => format!("{}@{}", attributes.id, revision_info.commit_short),
};
let html_id = format!( let html_id = format!(
"{}:{}", "{}:{}",
treehouse.tree_path(file_id).unwrap(), treehouse.tree_path(file_id).unwrap(),

View file

@ -7,7 +7,13 @@
use std::fmt::Write; use std::fmt::Write;
use std::ops::Range; use std::ops::Range;
use crate::{cli::Paths, config::Config, html::EscapeHtml, state::Treehouse, vfs::ReadFilesystem}; use crate::{
config::Config,
dirs::Dirs,
html::EscapeHtml,
state::Treehouse,
vfs::{Dir, VPath},
};
struct Lexer<'a> { struct Lexer<'a> {
input: &'a str, input: &'a str,
@ -148,13 +154,7 @@ impl Renderer<'_> {
self.output.push_str(&self.lexer.input[token.range.clone()]); self.output.push_str(&self.lexer.input[token.range.clone()]);
} }
fn render( fn render(&mut self, config: &Config, treehouse: &Treehouse, dirs: &Dirs) {
&mut self,
config: &Config,
treehouse: &Treehouse,
paths: &Paths<'_>,
pics_fs: &dyn ReadFilesystem,
) {
let kind_of = |token: &Token| token.kind; let kind_of = |token: &Token| token.kind;
while let Some(token) = self.lexer.next() { while let Some(token) = self.lexer.next() {
@ -171,8 +171,7 @@ impl Renderer<'_> {
match Self::render_template( match Self::render_template(
config, config,
treehouse, treehouse,
pics_fs, dirs,
paths,
self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(), self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(),
) { ) {
Ok(s) => match escaping { Ok(s) => match escaping {
@ -199,31 +198,27 @@ impl Renderer<'_> {
fn render_template( fn render_template(
config: &Config, config: &Config,
_treehouse: &Treehouse, _treehouse: &Treehouse,
pics_fs: &dyn ReadFilesystem, dirs: &Dirs,
paths: &Paths<'_>,
template: &str, template: &str,
) -> Result<String, InvalidTemplate> { ) -> Result<String, InvalidTemplate> {
let (function, arguments) = template.split_once(' ').unwrap_or((template, "")); let (function, arguments) = template.split_once(' ').unwrap_or((template, ""));
match function { match function {
"pic" => Ok(config.pic_url(pics_fs, arguments)), "pic" => Ok(config.pic_url(&*dirs.pics, arguments)),
"include_static" => std::fs::read_to_string(paths.static_dir.join(arguments)) "include_static" => VPath::try_new(arguments)
.map_err(|_| InvalidTemplate), .ok()
.and_then(|vpath| dirs.static_.content(vpath))
.and_then(|content| String::from_utf8(content).ok())
.ok_or(InvalidTemplate),
_ => Err(InvalidTemplate), _ => Err(InvalidTemplate),
} }
} }
} }
pub fn render( pub fn render(config: &Config, treehouse: &Treehouse, dirs: &Dirs, input: &str) -> String {
config: &Config,
treehouse: &Treehouse,
paths: &Paths<'_>,
pics_fs: &dyn ReadFilesystem,
input: &str,
) -> String {
let mut renderer = Renderer { let mut renderer = Renderer {
lexer: Lexer::new(input), lexer: Lexer::new(input),
output: String::new(), output: String::new(),
}; };
renderer.render(config, treehouse, paths, pics_fs); renderer.render(config, treehouse, dirs);
renderer.output renderer.output
} }

View file

@ -1,250 +1,82 @@
//! The treehouse virtual file system.
//!
//! Unlike traditional file systems, there is no separation between directories and files.
//! Instead, our file system is based on _entries_, which may have specific, optional, well-typed
//! metadata attached to them.
//! A directory is formed by returning a list of paths from [`dir`][Dir::dir], and a file is
//! formed by returning `Some` from [`content`][Dir::content].
//!
//! This makes using the file system simpler, as you do not have to differentiate between different
//! entry kinds. All paths act as if they _could_ return byte content, and all paths act as if they
//! _could_ have children.
//!
//! # Composability
//!
//! [`Dir`]s are composable. The [`Dir`] itself starts off with the root path ([`VPath::ROOT`]),
//! which may contain further [`dir`][Dir::dir] entries, or content by itself.
//! This makes it possible to nest a [`Dir`] under another [`Dir`].
//!
//! Additionally, there's also the inverse operation, [`Cd`] (named after the `cd`
//! _change directory_ shell command), which returns a [`Dir`] viewing a subpath within another
//! [`Dir`].
//!
//! # Building directories
//!
//! In-memory directories can be composed using the following primitives:
//!
//! - [`EmptyEntry`] - has no metadata whatsoever.
//! - [`BufferedFile`] - root path content is the provided byte vector.
//! - [`MemDir`] - a [`Dir`] containing a single level of other [`Dir`]s inside.
//!
//! Additionally, for interfacing with the OS file system, [`PhysicalDir`] is available,
//! representing a directory stored on the disk.
//!
//! # Virtual paths
//!
//! Entries within directories are referenced using [`VPath`]s (**v**irtual **path**s).
//! A virtual path is composed out of any amount of `/`-separated components.
//!
//! There are no special directories like `.` and `..` (those are just normal entries, though using
//! them is discouraged). [`VPath`]s are always relative to the root of the [`Dir`] you're querying.
//!
//! A leading or trailing slash is not allowed, because they would have no meaning.
//!
//! [`VPath`] also has an owned version, [`VPathBuf`].
use std::{ use std::{
borrow::Borrow,
fmt::{self, Debug}, fmt::{self, Debug},
ops::{ControlFlow, Deref}, ops::{ControlFlow, Deref},
sync::Arc,
}; };
use anyhow::ensure;
use serde::{Deserialize, Serialize};
mod anchored; mod anchored;
pub mod asynch;
mod cd; mod cd;
mod edit;
mod empty; mod empty;
mod file; mod file;
mod mount_points; mod mem_dir;
mod overlay;
mod path;
mod physical; mod physical;
pub use anchored::*; pub use anchored::*;
pub use cd::*; pub use cd::*;
pub use edit::*;
pub use empty::*; pub use empty::*;
pub use file::*; pub use file::*;
pub use mount_points::*; pub use mem_dir::*;
pub use overlay::*;
pub use path::*;
pub use physical::*; pub use physical::*;
/// A borrowed, `/`-separated virtual path slice (the unsized counterpart of `VPathBuf`).
///
/// Invariant: never starts or ends with `/`.
///
/// `#[repr(transparent)]` is required for soundness: `VPath::new_unchecked`
/// transmutes `&str` to `&VPath`, and without a guaranteed layout that
/// transmute's behavior is unspecified.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct VPath {
    path: str,
}
impl VPath {
    /// Separator between path segments.
    pub const SEPARATOR: char = '/';

    /// The empty path, which every other path is relative to.
    // SAFETY: The empty string neither starts nor ends with the separator.
    pub const ROOT: &Self = unsafe { Self::new_unchecked("") };

    /// Validates that `s` does not start or end with `/` and wraps it as a path.
    pub fn try_new(s: &str) -> anyhow::Result<&Self> {
        ensure!(
            !s.ends_with(Self::SEPARATOR),
            "path must not end with '{}' (got {s:?})",
            Self::SEPARATOR
        );
        ensure!(
            !s.starts_with(Self::SEPARATOR),
            "paths are always absolute and must not start with '{}' (got {s:?})",
            Self::SEPARATOR
        );
        // SAFETY: Both invariants were just checked above.
        Ok(unsafe { Self::new_unchecked(s) })
    }

    /// Like [`VPath::try_new`], but panics when the path is invalid.
    pub fn new(s: &str) -> &Self {
        Self::try_new(s).expect("invalid path")
    }

    // Callers must guarantee `s` neither starts nor ends with SEPARATOR.
    // NOTE(review): the &str -> &VPath transmute assumes VPath is layout-identical
    // to its str field; ensure the struct is #[repr(transparent)].
    const unsafe fn new_unchecked(s: &str) -> &Self {
        std::mem::transmute::<_, &Self>(s)
    }

    /// Appends `sub` (which must itself be a valid path, or empty) to `self`.
    pub fn try_join(&self, sub: &str) -> anyhow::Result<VPathBuf> {
        let mut buf = VPathBuf::from(self);
        if !sub.is_empty() {
            let sub = VPath::try_new(sub)?;
            buf.path.push('/');
            buf.path.push_str(&sub.path);
        }
        Ok(buf)
    }

    /// Like [`VPath::try_join`], but panics when `sub` is invalid.
    pub fn join(&self, sub: &str) -> VPathBuf {
        self.try_join(sub).expect("invalid subpath")
    }

    /// Strips a whole number of leading segments (`prefix`) from `self`,
    /// returning the remainder, or `None` if `prefix` is not a segment prefix.
    pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
        if prefix.path.is_empty() {
            // Every path is relative to the root, so stripping ROOT is a no-op.
            // (Falling through would incorrectly return None, because there is
            // no '/' after an empty prefix.)
            Some(self)
        } else if self == prefix {
            Some(VPath::ROOT)
        } else {
            self.path
                .strip_prefix(&prefix.path)
                .and_then(|p| p.strip_prefix('/'))
                // SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
                // nor a leading slash.
                .map(|p| unsafe { VPath::new_unchecked(p) })
        }
    }

    /// Number of separators in the path (one less than the number of segments).
    pub fn depth(&self) -> usize {
        self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
    }

    /// Iterates over the `/`-separated segments, front to back.
    pub fn segments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    /// Iterates over the `/`-separated segments, back to front.
    pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    /// The last segment of the path, if any.
    pub fn file_name(&self) -> Option<&str> {
        self.rsegments().next().map(Self::as_str)
    }

    /// The part of the file name after the last `.`. Names that start with the
    /// dot (dotfiles such as `.gitignore`) have no extension.
    pub fn extension(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        let (left, right) = file_name.rsplit_once('.')?;
        if left.is_empty() {
            None
        } else {
            Some(right)
        }
    }

    /// The file name with its extension (and the separating dot) removed.
    pub fn file_stem(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        if let Some(extension) = self.extension() {
            Some(&file_name[..file_name.len() - extension.len() - 1])
        } else {
            Some(file_name)
        }
    }

    /// Views the path as a plain `&str`.
    pub fn as_str(&self) -> &str {
        &self.path
    }
}
impl ToOwned for VPath {
type Owned = VPathBuf;
fn to_owned(&self) -> Self::Owned {
VPathBuf::from(self)
}
}
impl fmt::Debug for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl fmt::Display for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
/// An owned, heap-allocated virtual path (the owned counterpart of [`VPath`]).
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
    // Invariant: never starts or ends with '/' (enforced by the constructors).
    path: String,
}
impl VPathBuf {
    /// Validates and wraps `path`, panicking if it starts or ends with `/`.
    pub fn new(path: impl Into<String>) -> Self {
        Self::try_new(path).expect("invalid path")
    }

    /// Validates and wraps `path`, returning an error if it starts or ends with `/`.
    pub fn try_new(path: impl Into<String>) -> anyhow::Result<Self> {
        let path = path.into();
        // Reuse the borrowed-path validation; only the success/failure matters here.
        VPath::try_new(&path)?;
        Ok(Self { path })
    }

    // Callers must guarantee `path` neither starts nor ends with '/'.
    unsafe fn new_unchecked(path: String) -> Self {
        Self { path }
    }
}
impl Deref for VPathBuf {
    type Target = VPath;

    fn deref(&self) -> &Self::Target {
        // SAFETY: The buffer upholds the VPath invariant (no leading/trailing slash).
        unsafe { VPath::new_unchecked(&self.path) }
    }
}

impl fmt::Debug for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the borrowed form so both types print identically.
        fmt::Debug::fmt(&**self, f)
    }
}

impl fmt::Display for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl From<&VPath> for VPathBuf {
    fn from(value: &VPath) -> Self {
        // SAFETY: `value` already upholds the invariant, so its owned copy does too.
        unsafe { Self::new_unchecked(value.path.to_owned()) }
    }
}

impl Borrow<VPath> for VPathBuf {
    fn borrow(&self) -> &VPath {
        &**self
    }
}
impl<'de> Deserialize<'de> for VPathBuf {
    /// Deserializes from a string, rejecting strings that are not valid paths.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de;

        // Visits a string and validates it as a virtual path.
        struct PathVisitor;

        impl de::Visitor<'_> for PathVisitor {
            type Value = VPathBuf;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("virtual path")
            }

            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                VPathBuf::try_new(v).map_err(de::Error::custom)
            }
        }

        deserializer.deserialize_str(PathVisitor)
    }
}

impl Serialize for VPathBuf {
    /// Serializes as a plain string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct DirEntry { pub struct DirEntry {
pub path: VPathBuf, pub path: VPathBuf,
} }
pub trait ReadFilesystem: Debug { pub trait Dir: Debug {
/// List all files under the provided path. /// List all entries under the provided path.
fn dir(&self, path: &VPath) -> Vec<DirEntry>; fn dir(&self, path: &VPath) -> Vec<DirEntry>;
/// Return the byte content of the entry at the given path. /// Return the byte content of the entry at the given path.
@ -264,14 +96,96 @@ pub trait ReadFilesystem: Debug {
None None
} }
/// Optimization for [`ReadFilesystemCombinators::cd`] that allows for avoiding wrapping /// If a file can be written persistently, returns an [`EditPath`] representing the file in
/// `Cd`s in `Cd`s. /// persistent storage.
#[doc(hidden)] ///
fn cd_optimization(&self, _subpath: &VPath) -> Option<Cd<'_>> { /// An edit path can then be made into an [`Edit`].
fn edit_path(&self, _path: &VPath) -> Option<EditPath> {
None None
} }
} }
// Blanket impl: a shared reference to a Dir is itself a Dir, forwarding every method.
impl<T> Dir for &T
where
    T: Dir,
{
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        (**self).dir(path)
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        (**self).content(path)
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        (**self).content_version(path)
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        (**self).anchor(path)
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        (**self).edit_path(path)
    }
}
/// A cheaply clonable, reference-counted handle to a type-erased [`Dir`].
#[derive(Clone)]
pub struct DynDir {
    // Shared ownership of the erased directory; clones only bump the refcount.
    arc: Arc<dyn Dir + Send + Sync>,
}
// DynDir is itself a Dir: every method forwards to the erased inner directory.
impl Dir for DynDir {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.arc.dir(path)
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.arc.content(path)
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.arc.content_version(path)
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        self.arc.anchor(path)
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        self.arc.edit_path(path)
    }
}
impl fmt::Debug for DynDir {
    // Transparent: prints exactly like the wrapped directory.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&*self.arc, f)
    }
}

impl Deref for DynDir {
    type Target = dyn Dir + Send + Sync;

    fn deref(&self) -> &Self::Target {
        &*self.arc
    }
}
/// Conversion into a type-erased, reference-counted [`DynDir`] handle.
pub trait ToDynDir {
    fn to_dyn(self) -> DynDir;
}

impl<T> ToDynDir for T
where
    T: Dir + Send + Sync + 'static,
{
    fn to_dyn(self) -> DynDir {
        let arc: Arc<dyn Dir + Send + Sync> = Arc::new(self);
        DynDir { arc }
    }
}
pub trait AnchoredAtExt { pub trait AnchoredAtExt {
fn anchored_at(self, at: VPathBuf) -> Anchored<Self> fn anchored_at(self, at: VPathBuf) -> Anchored<Self>
where where
@ -280,53 +194,28 @@ pub trait AnchoredAtExt {
impl<T> AnchoredAtExt for T impl<T> AnchoredAtExt for T
where where
T: ReadFilesystem, T: Dir,
{ {
fn anchored_at(self, at: VPathBuf) -> Anchored<Self> { fn anchored_at(self, at: VPathBuf) -> Anchored<Self> {
Anchored::new(self, at) Anchored::new(self, at)
} }
} }
pub trait CdExt { pub fn walk_dir_rec(dir: &dyn Dir, path: &VPath, f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>) {
fn cd<'a>(self, into: VPathBuf) -> Cd<'a> for entry in dir.dir(path) {
where
Self: 'a;
}
impl CdExt for &dyn ReadFilesystem {
fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
where
Self: 'a,
{
if let Some(cd) = self.cd_optimization(&into) {
cd
} else {
Cd::new(self, into)
}
}
}
pub fn walk_rec(
fs: &dyn ReadFilesystem,
path: &VPath,
f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>,
) {
for entry in fs.dir(path) {
match f(&entry.path) { match f(&entry.path) {
ControlFlow::Continue(_) => (), ControlFlow::Continue(_) => (),
ControlFlow::Break(_) => return, ControlFlow::Break(_) => return,
} }
walk_rec(fs, &entry.path, f); walk_dir_rec(dir, &entry.path, f);
} }
} }
pub fn url(site: &str, fs: &dyn ReadFilesystem, path: &VPath) -> String { pub fn url(site: &str, dir: &dyn Dir, path: &VPath) -> Option<String> {
let Some(anchor) = fs.anchor(path) else { let anchor = dir.anchor(path)?;
panic!("filesystem {fs:?} is not anchored anywhere and a URL of it cannot be produced") if let Some(version) = dir.content_version(path) {
}; Some(format!("{}/{anchor}?v={version}", site))
if let Some(version) = fs.content_version(path) {
format!("{}/{anchor}?v={version}", site)
} else { } else {
format!("{}/{anchor}", site) Some(format!("{}/{anchor}", site))
} }
} }

View file

@ -1,6 +1,6 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, VPath, VPathBuf};
pub struct Anchored<T> { pub struct Anchored<T> {
inner: T, inner: T,
@ -13,9 +13,9 @@ impl<T> Anchored<T> {
} }
} }
impl<T> ReadFilesystem for Anchored<T> impl<T> Dir for Anchored<T>
where where
T: ReadFilesystem, T: Dir,
{ {
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.inner.dir(path) self.inner.dir(path)
@ -30,7 +30,7 @@ where
} }
fn anchor(&self, path: &VPath) -> Option<VPathBuf> { fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
Some(self.at.join(path.as_str())) Some(self.at.join(path))
} }
} }

View file

@ -0,0 +1,23 @@
use super::{Dir, DynDir, VPath};
#[derive(Debug, Clone)]
pub struct AsyncDir {
inner: DynDir,
}
impl AsyncDir {
pub fn new(inner: DynDir) -> Self {
Self { inner }
}
pub async fn content(&self, path: &VPath) -> Option<Vec<u8>> {
let this = self.clone();
let path = path.to_owned();
// NOTE: Performance impact of spawning a blocking task may be a bit high in case
// we add caching.
// Measure throughput here.
tokio::task::spawn_blocking(move || this.inner.content(&path))
.await
.unwrap()
}
}

View file

@ -1,22 +1,25 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};
pub struct Cd<'fs> { pub struct Cd<T> {
parent: &'fs dyn ReadFilesystem, parent: T,
path: VPathBuf, path: VPathBuf,
} }
impl<'fs> Cd<'fs> { impl<T> Cd<T> {
pub fn new(parent: &'fs dyn ReadFilesystem, path: VPathBuf) -> Self { pub fn new(parent: T, path: VPathBuf) -> Self {
Self { parent, path } Self { parent, path }
} }
} }
impl ReadFilesystem for Cd<'_> { impl<T> Dir for Cd<T>
where
T: Dir,
{
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.parent self.parent
.dir(&self.path.join(path.as_str())) .dir(&self.path.join(path))
.into_iter() .into_iter()
.map(|entry| DirEntry { .map(|entry| DirEntry {
path: entry path: entry
@ -29,23 +32,26 @@ impl ReadFilesystem for Cd<'_> {
} }
fn content_version(&self, path: &VPath) -> Option<String> { fn content_version(&self, path: &VPath) -> Option<String> {
self.parent.content_version(&self.path.join(path.as_str())) self.parent.content_version(&self.path.join(path))
} }
fn content(&self, path: &VPath) -> Option<Vec<u8>> { fn content(&self, path: &VPath) -> Option<Vec<u8>> {
self.parent.content(&self.path.join(path.as_str())) self.parent.content(&self.path.join(path))
} }
fn anchor(&self, path: &VPath) -> Option<VPathBuf> { fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.parent.anchor(&self.path.join(path.as_str())) self.parent.anchor(&self.path.join(path))
} }
fn cd_optimization(&self, subpath: &VPath) -> Option<Cd<'_>> { fn edit_path(&self, path: &VPath) -> Option<EditPath> {
Some(Cd::new(self, subpath.to_owned())) self.parent.edit_path(&self.path.join(path))
} }
} }
impl fmt::Debug for Cd<'_> { impl<T> fmt::Debug for Cd<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}/{:?}", self.parent, self.path) write!(f, "{:?}/{:?}", self.parent, self.path)
} }

View file

@ -0,0 +1,92 @@
use std::{error::Error, fmt, future::Future, path::PathBuf};
use log::{error, info};
use tokio::task::JoinSet;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct EditPath {
pub(super) path: PathBuf,
}
/// Represents a pending edit operation that can be written to persistent storage later.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Edit {
/// An edit that doesn't do anything.
NoOp,
/// Write the given string to a file.
Write(EditPath, String),
/// Execute a sequence of edits in order.
Seq(Vec<Edit>),
/// Execute the provided edits in parallel.
All(Vec<Edit>),
/// Makes an edit dry.
///
/// A dry edit only logs what operations would be performed, does not perform the I/O.
Dry(Box<Edit>),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ApplyFailed;
impl Edit {
#[expect(clippy::manual_async_fn)]
pub fn apply(self) -> impl Future<Output = Result<(), ApplyFailed>> + Send {
async {
match self {
Edit::NoOp => (),
Edit::Write(edit_path, content) => {
tokio::fs::write(&edit_path.path, &content)
.await
.inspect_err(|err| error!("write to {edit_path:?} failed: {err:?}"))
.map_err(|_| ApplyFailed)?;
}
Edit::Seq(vec) => {
for edit in vec {
Box::pin(edit.apply()).await?;
}
}
Edit::All(vec) => {
let mut set = JoinSet::new();
for edit in vec {
set.spawn(edit.apply());
}
while let Some(result) = set.try_join_next() {
result.map_err(|_| ApplyFailed)??;
}
}
Edit::Dry(edit) => edit.dry(),
}
Ok(())
}
}
pub fn dry(&self) {
match self {
Edit::NoOp => (),
Edit::Write(edit_path, content) => {
info!("{edit_path:?}: would write {:?} bytes", content.len());
}
Edit::Seq(edits) => edits.iter().for_each(Self::dry),
Edit::All(edits) => edits.iter().for_each(Self::dry),
Edit::Dry(edit) => edit.dry(),
}
}
}
impl fmt::Display for ApplyFailed {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("failed to apply some edits")
}
}
impl Error for ApplyFailed {}
impl fmt::Debug for EditPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.path, f)
}
}

View file

@ -1,9 +1,9 @@
use super::{DirEntry, ReadFilesystem, VPath}; use super::{Dir, DirEntry, VPath};
#[derive(Debug)] #[derive(Debug)]
pub struct EmptyFilesystem; pub struct EmptyEntry;
impl ReadFilesystem for EmptyFilesystem { impl Dir for EmptyEntry {
fn dir(&self, _path: &VPath) -> Vec<DirEntry> { fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
vec![] vec![]
} }

View file

@ -1,6 +1,6 @@
use std::fmt; use std::fmt;
use super::{DirEntry, ReadFilesystem, VPath}; use super::{DirEntry, Dir, VPath};
pub struct BufferedFile { pub struct BufferedFile {
pub content: Vec<u8>, pub content: Vec<u8>,
@ -12,7 +12,7 @@ impl BufferedFile {
} }
} }
impl ReadFilesystem for BufferedFile { impl Dir for BufferedFile {
fn dir(&self, _path: &VPath) -> Vec<DirEntry> { fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
vec![] vec![]
} }

View file

@ -1,29 +1,29 @@
use std::{collections::HashMap, fmt}; use std::{collections::HashMap, fmt};
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};
pub struct MountPoints { pub struct MemDir {
mount_points: HashMap<String, Box<dyn ReadFilesystem>>, mount_points: HashMap<String, DynDir>,
} }
enum Resolved<'fs, 'path> { enum Resolved<'fs, 'path> {
Root, Root,
MountPoint { MountPoint {
fs: &'fs dyn ReadFilesystem, fs: &'fs dyn Dir,
fs_path: &'path VPath, fs_path: &'path VPath,
subpath: &'path VPath, subpath: &'path VPath,
}, },
None, None,
} }
impl MountPoints { impl MemDir {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
mount_points: HashMap::new(), mount_points: HashMap::new(),
} }
} }
pub fn add(&mut self, path: &VPath, fs: Box<dyn ReadFilesystem>) { pub fn add(&mut self, path: &VPath, dir: DynDir) {
assert_eq!( assert_eq!(
path.depth(), 0, path.depth(), 0,
"path must be situated at root. MountPoints does not support nested paths, but you can nest MountPoints within other MountPoints" "path must be situated at root. MountPoints does not support nested paths, but you can nest MountPoints within other MountPoints"
@ -31,7 +31,7 @@ impl MountPoints {
assert!( assert!(
self.mount_points self.mount_points
.insert(path.as_str().to_owned(), fs) .insert(path.as_str().to_owned(), dir)
.is_none(), .is_none(),
"duplicate mount point at {path:?}" "duplicate mount point at {path:?}"
); );
@ -57,13 +57,13 @@ impl MountPoints {
} }
} }
impl Default for MountPoints { impl Default for MemDir {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
} }
} }
impl ReadFilesystem for MountPoints { impl Dir for MemDir {
fn dir(&self, path: &VPath) -> Vec<DirEntry> { fn dir(&self, path: &VPath) -> Vec<DirEntry> {
match self.resolve(path) { match self.resolve(path) {
Resolved::Root => self Resolved::Root => self
@ -81,7 +81,7 @@ impl ReadFilesystem for MountPoints {
.dir(subpath) .dir(subpath)
.into_iter() .into_iter()
.map(|entry| DirEntry { .map(|entry| DirEntry {
path: fs_path.join(entry.path.as_str()), path: fs_path.join(&entry.path),
}) })
.collect(), .collect(),
Resolved::None => vec![], Resolved::None => vec![],
@ -120,9 +120,20 @@ impl ReadFilesystem for MountPoints {
Resolved::Root | Resolved::None => None, Resolved::Root | Resolved::None => None,
} }
} }
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
match self.resolve(path) {
Resolved::MountPoint {
fs,
fs_path: _,
subpath,
} => fs.edit_path(subpath),
Resolved::Root | Resolved::None => None,
}
}
} }
impl fmt::Debug for MountPoints { impl fmt::Debug for MemDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("MountPoints") f.write_str("MountPoints")
} }

View file

@ -0,0 +1,52 @@
use std::fmt;
use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};
pub struct Overlay {
base: DynDir,
overlay: DynDir,
}
impl Overlay {
pub fn new(base: DynDir, overlay: DynDir) -> Self {
Self { base, overlay }
}
}
impl Dir for Overlay {
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
let mut dir = self.base.dir(path);
dir.append(&mut self.overlay.dir(path));
dir.sort();
dir.dedup();
dir
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
self.overlay
.content(path)
.or_else(|| self.base.content(path))
}
fn content_version(&self, path: &VPath) -> Option<String> {
self.overlay
.content_version(path)
.or_else(|| self.base.content_version(path))
}
fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.overlay.anchor(path).or_else(|| self.base.anchor(path))
}
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
self.overlay
.edit_path(path)
.or_else(|| self.base.edit_path(path))
}
}
impl fmt::Debug for Overlay {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Overlay({:?}, {:?})", self.base, self.overlay)
}
}

View file

@ -0,0 +1,305 @@
use std::{borrow::Borrow, error::Error, fmt, ops::Deref, str::FromStr};
use serde::{Deserialize, Serialize};
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPath {
path: str,
}
impl VPath {
pub const SEPARATOR_BYTE: u8 = b'/';
pub const SEPARATOR: char = Self::SEPARATOR_BYTE as char;
pub const ROOT: &Self = unsafe { Self::new_unchecked("") };
pub const fn try_new(s: &str) -> Result<&Self, InvalidPathError> {
if s.is_empty() {
return Ok(Self::ROOT);
}
let b = s.as_bytes();
if b[b.len() - 1] == Self::SEPARATOR_BYTE {
return Err(InvalidPathError::TrailingSlash);
}
if b[0] == Self::SEPARATOR_BYTE {
return Err(InvalidPathError::LeadingSlash);
}
Ok(unsafe { Self::new_unchecked(s) })
}
pub fn new(s: &str) -> &Self {
Self::try_new(s).expect("invalid path")
}
/// `const` version of [`new`][Self::new]. This has worse error messages, so prefer `new` whenever possible.
pub const fn new_const(s: &str) -> &Self {
match Self::try_new(s) {
Ok(p) => p,
Err(_) => panic!("invalid path"),
}
}
const unsafe fn new_unchecked(s: &str) -> &Self {
std::mem::transmute::<_, &Self>(s)
}
pub fn is_empty(&self) -> bool {
self.path.is_empty()
}
pub fn is_root(&self) -> bool {
self.is_empty()
}
pub fn join(&self, sub: &VPath) -> VPathBuf {
let mut buf = self.to_owned();
buf.push(sub);
buf
}
pub fn parent(&self) -> Option<&VPath> {
if self.is_root() {
None
} else if self.depth() == 0 {
Some(VPath::ROOT)
} else {
let (left, _right) = self
.path
.split_once(Self::SEPARATOR)
.expect("path with depth > 0 must have separators");
// SAFETY: We're splitting on a `/`, so there cannot be a trailing `/` in `left`.
Some(unsafe { VPath::new_unchecked(left) })
}
}
pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
if self == prefix {
Some(VPath::ROOT)
} else {
self.path
.strip_prefix(&prefix.path)
.and_then(|p| p.strip_prefix(Self::SEPARATOR))
// SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
// nor a leading slash.
.map(|p| unsafe { VPath::new_unchecked(p) })
}
}
pub fn depth(&self) -> usize {
self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
}
pub fn segments(&self) -> impl Iterator<Item = &Self> {
self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
// SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
Self::new_unchecked(s)
})
}
pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
// SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
Self::new_unchecked(s)
})
}
pub fn file_name(&self) -> Option<&str> {
self.rsegments().next().map(Self::as_str)
}
pub fn extension(&self) -> Option<&str> {
let file_name = self.file_name()?;
let (left, right) = file_name.rsplit_once('.')?;
if left.is_empty() {
None
} else {
Some(right)
}
}
pub fn with_extension(&self, extension: &str) -> VPathBuf {
let mut buf = self.to_owned();
buf.set_extension(extension);
buf
}
pub fn file_stem(&self) -> Option<&str> {
let file_name = self.file_name()?;
if let Some(extension) = self.extension() {
Some(&file_name[..file_name.len() - extension.len() - 1])
} else {
Some(file_name)
}
}
pub fn as_str(&self) -> &str {
&self.path
}
}
impl ToOwned for VPath {
type Owned = VPathBuf;
fn to_owned(&self) -> Self::Owned {
VPathBuf::from(self)
}
}
impl fmt::Debug for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl fmt::Display for VPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InvalidPathError {
TrailingSlash,
LeadingSlash,
}
impl fmt::Display for InvalidPathError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
InvalidPathError::TrailingSlash => {
f.write_str("paths must not end with a trailing `/`")
}
InvalidPathError::LeadingSlash => {
f.write_str("paths are always absolute and must not start with `/`")
}
}
}
}
impl Error for InvalidPathError {}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
path: String,
}
impl VPathBuf {
pub fn new(path: impl Into<String>) -> Self {
Self::try_new(path).expect("invalid path")
}
pub fn try_new(path: impl Into<String>) -> Result<Self, InvalidPathError> {
let path = path.into();
match VPath::try_new(&path) {
Ok(_) => Ok(Self { path }),
Err(e) => Err(e),
}
}
unsafe fn new_unchecked(path: String) -> Self {
Self { path }
}
pub fn push(&mut self, sub: &VPath) {
if !sub.is_empty() {
self.path.push('/');
self.path.push_str(&sub.path);
}
}
pub fn set_extension(&mut self, new_extension: &str) {
if let Some(existing) = self.extension() {
let mut chop_len = existing.len();
if new_extension.is_empty() {
chop_len += 1; // also chop off the `.`
}
let range = self.path.len() - chop_len..;
self.path.replace_range(range, new_extension);
}
}
}
impl Default for VPathBuf {
fn default() -> Self {
VPath::ROOT.to_owned()
}
}
impl Deref for VPathBuf {
type Target = VPath;
fn deref(&self) -> &Self::Target {
unsafe { VPath::new_unchecked(&self.path) }
}
}
impl fmt::Debug for VPathBuf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl fmt::Display for VPathBuf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.path)
}
}
impl From<&VPath> for VPathBuf {
fn from(value: &VPath) -> Self {
unsafe { Self::new_unchecked(value.path.to_owned()) }
}
}
impl Borrow<VPath> for VPathBuf {
fn borrow(&self) -> &VPath {
self
}
}
impl<'de> Deserialize<'de> for VPathBuf {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::de;
struct Visitor;
impl de::Visitor<'_> for Visitor {
type Value = VPathBuf;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("virtual path")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
VPathBuf::try_new(v).map_err(de::Error::custom)
}
}
deserializer.deserialize_str(Visitor)
}
}
impl Serialize for VPathBuf {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.as_str())
}
}
impl FromStr for VPathBuf {
type Err = InvalidPathError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::try_new(s)
}
}

View file

@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
use log::error; use log::error;
use super::{DirEntry, ReadFilesystem, VPath, VPathBuf}; use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct PhysicalDir { pub struct PhysicalDir {
@ -15,7 +15,7 @@ impl PhysicalDir {
} }
} }
impl ReadFilesystem for PhysicalDir { impl Dir for PhysicalDir {
fn dir(&self, vpath: &VPath) -> Vec<DirEntry> { fn dir(&self, vpath: &VPath) -> Vec<DirEntry> {
let physical = self.root.join(physical_path(vpath)); let physical = self.root.join(physical_path(vpath));
if !physical.is_dir() { if !physical.is_dir() {
@ -68,6 +68,12 @@ impl ReadFilesystem for PhysicalDir {
.inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",)) .inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",))
.ok() .ok()
} }
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
Some(EditPath {
path: self.root.join(physical_path(path)),
})
}
} }
fn physical_path(path: &VPath) -> &Path { fn physical_path(path: &VPath) -> &Path {

View file

@ -1,33 +1,31 @@
use treehouse::vfs::{ use treehouse::vfs::{BufferedFile, Cd, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};
BufferedFile, Cd, CdExt, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf,
};
const HEWWO: &[u8] = b"hewwo :3"; const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-"; const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<"; const BOOP: &[u8] = b"boop >w<";
fn vfs() -> MountPoints { fn vfs() -> MemDir {
let file1 = BufferedFile::new(HEWWO.to_vec()); let file1 = BufferedFile::new(HEWWO.to_vec());
let file2 = BufferedFile::new(FWOOFEE.to_vec()); let file2 = BufferedFile::new(FWOOFEE.to_vec());
let file3 = BufferedFile::new(BOOP.to_vec()); let file3 = BufferedFile::new(BOOP.to_vec());
let mut innermost = MountPoints::new(); let mut innermost = MemDir::new();
innermost.add(VPath::new("file3.txt"), Box::new(file3)); innermost.add(VPath::new("file3.txt"), file3.to_dyn());
let mut inner = MountPoints::new(); let mut inner = MemDir::new();
inner.add(VPath::new("file1.txt"), Box::new(file1)); inner.add(VPath::new("file1.txt"), file1.to_dyn());
inner.add(VPath::new("file2.txt"), Box::new(file2)); inner.add(VPath::new("file2.txt"), file2.to_dyn());
inner.add(VPath::new("innermost"), Box::new(innermost)); inner.add(VPath::new("innermost"), innermost.to_dyn());
let mut vfs = MountPoints::new(); let mut vfs = MemDir::new();
vfs.add(VPath::new("inner"), Box::new(inner)); vfs.add(VPath::new("inner"), inner.to_dyn());
vfs vfs
} }
#[test] #[test]
fn dir1() { fn dir1() {
let outer = vfs(); let outer = vfs();
let inner = Cd::new(&outer, VPathBuf::new("inner")); let inner = Cd::new(outer, VPathBuf::new("inner"));
let mut dir = inner.dir(VPath::ROOT); let mut dir = inner.dir(VPath::ROOT);
dir.sort(); dir.sort();
@ -49,23 +47,6 @@ fn dir1() {
#[test] #[test]
fn dir2() { fn dir2() {
let outer = vfs();
let outer: &dyn ReadFilesystem = &outer;
let inner: &dyn ReadFilesystem = &outer.cd(VPathBuf::new("inner"));
let innermost = inner.cd(VPathBuf::new("innermost"));
let mut dir = innermost.dir(VPath::ROOT);
dir.sort();
assert_eq!(
dir,
vec![DirEntry {
path: VPathBuf::new("file3.txt"),
},]
);
}
#[test]
fn dir3() {
let outer = vfs(); let outer = vfs();
let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost")); let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost"));

View file

@ -1,16 +1,16 @@
use treehouse::vfs::{EmptyFilesystem, ReadFilesystem, VPath}; use treehouse::vfs::{Dir, EmptyEntry, VPath};
#[test] #[test]
fn dir() { fn dir() {
assert!(EmptyFilesystem.dir(VPath::ROOT).is_empty()); assert!(EmptyEntry.dir(VPath::ROOT).is_empty());
} }
#[test] #[test]
fn content_version() { fn content_version() {
assert!(EmptyFilesystem.content_version(VPath::ROOT).is_none()); assert!(EmptyEntry.content_version(VPath::ROOT).is_none());
} }
#[test] #[test]
fn content() { fn content() {
assert!(EmptyFilesystem.content(VPath::ROOT).is_none()); assert!(EmptyEntry.content(VPath::ROOT).is_none());
} }

View file

@ -1,4 +1,4 @@
use treehouse::vfs::{BufferedFile, ReadFilesystem, VPath}; use treehouse::vfs::{BufferedFile, Dir, VPath};
fn vfs() -> BufferedFile { fn vfs() -> BufferedFile {
BufferedFile::new(b"hewwo :3".to_vec()) BufferedFile::new(b"hewwo :3".to_vec())

View file

@ -1,21 +1,23 @@
use treehouse::vfs::{BufferedFile, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf}; use std::sync::Arc;
use treehouse::vfs::{BufferedFile, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};
const HEWWO: &[u8] = b"hewwo :3"; const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-"; const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<"; const BOOP: &[u8] = b"boop >w<";
fn vfs() -> MountPoints { fn vfs() -> MemDir {
let file1 = BufferedFile::new(HEWWO.to_vec()); let file1 = BufferedFile::new(HEWWO.to_vec());
let file2 = BufferedFile::new(FWOOFEE.to_vec()); let file2 = BufferedFile::new(FWOOFEE.to_vec());
let file3 = BufferedFile::new(BOOP.to_vec()); let file3 = BufferedFile::new(BOOP.to_vec());
let mut inner = MountPoints::new(); let mut inner = MemDir::new();
inner.add(VPath::new("file3.txt"), Box::new(file3)); inner.add(VPath::new("file3.txt"), file3.to_dyn());
let mut vfs = MountPoints::new(); let mut vfs = MemDir::new();
vfs.add(VPath::new("file1.txt"), Box::new(file1)); vfs.add(VPath::new("file1.txt"), file1.to_dyn());
vfs.add(VPath::new("file2.txt"), Box::new(file2)); vfs.add(VPath::new("file2.txt"), file2.to_dyn());
vfs.add(VPath::new("inner"), Box::new(inner)); vfs.add(VPath::new("inner"), inner.to_dyn());
vfs vfs
} }

View file

@ -1,6 +1,6 @@
use std::path::Path; use std::path::Path;
use treehouse::vfs::{DirEntry, PhysicalDir, ReadFilesystem, VPath, VPathBuf}; use treehouse::vfs::{DirEntry, PhysicalDir, Dir, VPath, VPathBuf};
fn vfs() -> PhysicalDir { fn vfs() -> PhysicalDir {
let root = Path::new("tests/it/vfs_physical").to_path_buf(); let root = Path::new("tests/it/vfs_physical").to_path_buf();

View file

@ -57,7 +57,7 @@ description = "a place on the Internet I like to call home"
[build.javascript] [build.javascript]
import_roots = [ import_roots = [
{ name = "treehouse", path = "static/js" }, { name = "treehouse", path = "" },
{ name = "tairu", path = "static/js/components/tairu" }, { name = "tairu", path = "components/tairu" },
{ name = "haku", path = "static/js/components/haku" }, { name = "haku", path = "components/haku" },
] ]