introduce the virtual filesystem everywhere
this unfortunately means I had to cut some features (bye bye commit history! for now). stuff's not quite 100% working just yet: branch links, which were broken before, are still broken. we also don't have content_version impls just yet.
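
For orientation while reading the diff below: nearly every hunk programs against a `Dir` trait from the new `vfs` module. The trait itself is not part of this diff, so the following is only a sketch reconstructed from the call sites that do appear (`dir`, `content`, `content_version`, `edit_path`); the `EditPath` type name in particular is a guess.

    // Sketch of the vfs Dir trait, inferred from usage in this commit.
    // VPath/VPathBuf are the virtual path types (analogous to Path/PathBuf).
    pub trait Dir {
        /// Lists the entries directly under `path`.
        fn dir(&self, path: &VPath) -> Vec<DirEntry>;

        /// Returns the file's bytes, or `None` if `path` is not a readable file.
        fn content(&self, path: &VPath) -> Option<Vec<u8>>;

        /// A cache-busting version string for the content (the `?v=` query
        /// parameter); no impls exist yet, as the commit message notes.
        fn content_version(&self, path: &VPath) -> Option<String>;

        /// Resolves `path` to a writable location, or `None` when the file is
        /// not editable (e.g. generated rather than persistent). `EditPath` is
        /// a hypothetical name; the diff only shows the method being called.
        fn edit_path(&self, path: &VPath) -> Option<EditPath>;
    }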

parent db0329077e
commit 377fbe4dab

Cargo.lock (generated): 15 lines changed

@@ -1545,20 +1545,6 @@ version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
 
-[[package]]
-name = "tower-livereload"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61d6cbbab4b2d3cafd21fb211cc4b06525a0df919c3e8ca3d36485b1c1bd4cd4"
-dependencies = [
- "bytes",
- "http",
- "http-body",
- "pin-project-lite",
- "tokio",
- "tower",
-]
-
 [[package]]
 name = "tower-service"
 version = "0.3.2"

@@ -1611,7 +1597,6 @@ dependencies = [
  "serde_json",
  "tokio",
  "toml_edit",
- "tower-livereload",
  "treehouse-format",
  "ulid",
  "url",

@@ -1,8 +1,5 @@
 %% title = "404"
 
-% id = "404"
-- # 404
-
 % id = "01HMF8KQ997F1ZTEGDNAE2S6F1"
 - seems like the page you're looking for isn't here.
 

@@ -29,7 +29,6 @@ serde = { version = "1.0.183", features = ["derive"] }
 serde_json = "1.0.105"
 tokio = { version = "1.32.0", features = ["full"] }
 toml_edit = { version = "0.19.14", features = ["serde"] }
-tower-livereload = "0.9.2"
 walkdir = "2.3.3"
 ulid = "1.0.0"
 url = "2.5.0"

@@ -2,10 +2,10 @@ pub mod fix;
 pub mod serve;
 pub mod wc;
 
-use std::path::{Path, PathBuf};
-
 use clap::{Args, Parser, Subcommand};
 
+use crate::vfs::VPathBuf;
+
 #[derive(Parser)]
 pub struct ProgramArgs {
     #[clap(subcommand)]

@@ -41,20 +41,13 @@ pub enum Command {
 }
 
 #[derive(Args)]
-pub struct GenerateArgs {
-    /// Only use commits as sources. This will cause the latest revision to be taken from the
-    /// Git history instead of the working tree.
-    ///
-    /// Recommended for deployment.
-    #[clap(long)]
-    pub commits_only: bool,
-}
+pub struct GenerateArgs {}
 
 #[derive(Args)]
 pub struct FixArgs {
     /// Which file to fix. The fixed file will be printed into stdout so that you have a chance to
     /// see the changes.
-    pub file: PathBuf,
+    pub file: VPathBuf,
 
     /// If you're happy with the suggested changes, specifying this will apply them to the file
     /// (overwrite it in place.)

@@ -63,7 +56,7 @@ pub struct FixArgs {
 
     /// Write the previous version back to the specified path.
     #[clap(long)]
-    pub backup: Option<PathBuf>,
+    pub backup: Option<VPathBuf>,
 }
 
 #[derive(Args)]

@@ -85,17 +78,5 @@ pub struct ServeArgs {
 pub struct WcArgs {
     /// A list of paths to report the word counts of.
     /// If no paths are provided, the entire tree is word-counted.
-    pub paths: Vec<PathBuf>,
+    pub paths: Vec<VPathBuf>,
 }
-
-#[derive(Debug, Clone, Copy)]
-pub struct Paths<'a> {
-    pub target_dir: &'a Path,
-    pub template_target_dir: &'a Path,
-
-    pub static_dir: &'a Path,
-    pub template_dir: &'a Path,
-    pub content_dir: &'a Path,
-
-    pub config_file: &'a Path,
-}

@@ -1,16 +1,17 @@
-use std::{ffi::OsStr, ops::Range};
+use std::ops::{ControlFlow, Range};
 
-use anyhow::Context;
+use anyhow::{anyhow, Context};
 use codespan_reporting::diagnostic::Diagnostic;
+use log::{error, info};
 use treehouse_format::ast::Branch;
-use walkdir::WalkDir;
 
 use crate::{
     parse::{self, parse_toml_with_diagnostics, parse_tree_with_diagnostics},
     state::{report_diagnostics, FileId, Source, Treehouse},
+    vfs::{self, Dir, Edit, VPath},
 };
 
-use super::{FixAllArgs, FixArgs, Paths};
+use super::{FixAllArgs, FixArgs};
 
 struct Fix {
     range: Range<usize>,

@@ -132,68 +133,102 @@ pub fn fix_file(
     })
 }
 
-pub fn fix_file_cli(fix_args: FixArgs) -> anyhow::Result<()> {
-    let utf8_filename = fix_args.file.to_string_lossy().into_owned();
-    let file = if utf8_filename == "-" {
+pub fn fix_file_cli(fix_args: FixArgs, root: &dyn Dir) -> anyhow::Result<Edit> {
+    let file = if &*fix_args.file == VPath::new("-") {
         std::io::read_to_string(std::io::stdin().lock()).context("cannot read file from stdin")?
     } else {
-        std::fs::read_to_string(&fix_args.file).context("cannot read file to fix")?
+        String::from_utf8(
+            root.content(&fix_args.file)
+                .ok_or_else(|| anyhow!("cannot read file to fix"))?,
+        )
+        .context("input file has invalid UTF-8")?
     };
 
     let mut treehouse = Treehouse::new();
     let mut diagnostics = vec![];
-    let file_id = treehouse.add_file(utf8_filename, Source::Other(file));
+    let file_id = treehouse.add_file(fix_args.file.as_str().to_owned(), Source::Other(file));
+    let edit_path = root.edit_path(&fix_args.file).ok_or_else(|| {
+        anyhow!(
+            "{} is not an editable file (perhaps it is not in a persistent path?)",
+            fix_args.file
+        )
+    })?;
 
+    Ok(
         if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
             if fix_args.apply {
                 // Try to write the backup first. If writing that fails, bail out without overwriting
                 // the source file.
                 if let Some(backup_path) = fix_args.backup {
-                    std::fs::write(backup_path, treehouse.source(file_id).input())
-                        .context("cannot write backup; original file will not be overwritten")?;
+                    let backup_edit_path = root.edit_path(&backup_path).ok_or_else(|| {
+                        anyhow!("backup file {backup_path} is not an editable file")
+                    })?;
+                    Edit::Seq(vec![
+                        Edit::Write(
+                            backup_edit_path,
+                            treehouse.source(file_id).input().to_owned(),
+                        ),
+                        Edit::Write(edit_path, fixed),
+                    ])
+                } else {
+                    Edit::Write(edit_path, fixed)
                 }
-                std::fs::write(&fix_args.file, fixed).context("cannot overwrite original file")?;
             } else {
                 println!("{fixed}");
+                Edit::NoOp
             }
         } else {
             report_diagnostics(&treehouse.files, &diagnostics)?;
+            Edit::NoOp
-        }
+        },
+    )
 }
 
-    Ok(())
-}
-
-pub fn fix_all_cli(fix_all_args: FixAllArgs, paths: &Paths<'_>) -> anyhow::Result<()> {
-    for entry in WalkDir::new(paths.content_dir) {
-        let entry = entry?;
-        if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) {
-            let file = std::fs::read_to_string(entry.path())
-                .with_context(|| format!("cannot read file to fix: {:?}", entry.path()))?;
-            let utf8_filename = entry.path().to_string_lossy();
+pub fn fix_all_cli(fix_all_args: FixAllArgs, dir: &dyn Dir) -> anyhow::Result<Edit> {
+    let mut edits = vec![];
+
+    fn fix_one(dir: &dyn Dir, path: &VPath) -> anyhow::Result<Edit> {
+        if path.extension() == Some("tree") {
+            let Some(content) = dir.content(path) else {
+                return Ok(Edit::NoOp);
+            };
+            let content = String::from_utf8(content).context("file is not valid UTF-8")?;
 
             let mut treehouse = Treehouse::new();
             let mut diagnostics = vec![];
-            let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file));
+            let file_id = treehouse.add_file(path.as_str().to_string(), Source::Other(content));
+            let edit_path = dir.edit_path(path).context("path is not editable")?;
 
             if let Ok(fixed) = fix_file(&mut treehouse, &mut diagnostics, file_id) {
                 if fixed != treehouse.source(file_id).input() {
-                    if fix_all_args.apply {
-                        println!("fixing: {:?}", entry.path());
-                        std::fs::write(entry.path(), fixed).with_context(|| {
-                            format!("cannot overwrite original file: {:?}", entry.path())
-                        })?;
-                    } else {
-                        println!("will fix: {:?}", entry.path());
-                    }
+                    return Ok(Edit::Write(edit_path, fixed));
                 }
             } else {
                 report_diagnostics(&treehouse.files, &diagnostics)?;
             }
         }
-    }
-    if !fix_all_args.apply {
-        println!("run with `--apply` to apply changes");
+
+        Ok(Edit::NoOp)
     }
 
-    Ok(())
+    info!("gathering edits");
+    vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
+        match fix_one(dir, path) {
+            Ok(Edit::NoOp) => (),
+            Ok(edit) => edits.push(edit),
+            Err(err) => error!("cannot fix {path}: {err:?}"),
+        }
+
+        ControlFlow::Continue(())
+    });
+
+    // NOTE: This number may be higher than you expect, because NoOp edits also count!
+    info!("{} edits to apply", edits.len());
+
+    if !fix_all_args.apply {
+        info!("dry run; add `--apply` to apply changes");
+        Ok(Edit::Dry(Box::new(Edit::All(edits))))
+    } else {
+        Ok(Edit::All(edits))
+    }
 }
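
The fix commands above no longer write to disk themselves; they return an `Edit` value describing the writes to perform. The enum's definition is not included in this diff; judging from the variants used above, its shape is roughly the following (the doc comments are my reading of the call sites, not the author's):

    // Reconstructed shape; the real definition lives in the vfs module.
    pub enum Edit {
        /// Nothing to do.
        NoOp,
        /// Overwrite the file at an editable path with new content.
        Write(EditPath, String),
        /// Apply edits in order (used above to write the backup before the fix).
        Seq(Vec<Edit>),
        /// Apply a batch of independent edits (one per fixed file).
        All(Vec<Edit>),
        /// A dry run: carry the edits around without applying them.
        Dry(Box<Edit>),
    }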

@@ -2,131 +2,78 @@
 mod live_reload;
 
 use std::fmt::Write;
-use std::{net::Ipv4Addr, path::PathBuf, sync::Arc};
+use std::{net::Ipv4Addr, sync::Arc};
 
 use anyhow::Context;
 use axum::{
     extract::{Path, Query, RawQuery, State},
     http::{
-        header::{CACHE_CONTROL, CONTENT_TYPE, LOCATION},
+        header::{CACHE_CONTROL, CONTENT_TYPE},
         HeaderValue, StatusCode,
     },
     response::{Html, IntoResponse, Response},
     routing::get,
     Router,
 };
-use log::{error, info};
+use log::info;
 use serde::Deserialize;
 use tokio::net::TcpListener;
 
-use crate::{
-    config::Config,
-    html::EscapeHtml,
-    state::{Source, Treehouse},
-};
+use crate::generate::Sources;
+use crate::vfs::asynch::AsyncDir;
+use crate::vfs::VPath;
+use crate::{html::EscapeHtml, state::Source};
 
-use super::Paths;
+mod system {
+    use crate::vfs::VPath;
 
-struct SystemPages {
-    index: String,
-    four_oh_four: String,
-    b_docs: String,
-    sandbox: String,
-
-    navmap: String,
+    pub const INDEX: &VPath = VPath::new_const("index");
+    pub const FOUR_OH_FOUR: &VPath = VPath::new_const("_treehouse/404");
+    pub const B_DOCS: &VPath = VPath::new_const("_treehouse/b");
 }
 
 struct Server {
-    config: Config,
-    treehouse: Treehouse,
-    target_dir: PathBuf,
-    system_pages: SystemPages,
+    sources: Arc<Sources>,
+    target: AsyncDir,
 }
 
-pub async fn serve(
-    config: Config,
-    treehouse: Treehouse,
-    paths: &Paths<'_>,
-    port: u16,
-) -> anyhow::Result<()> {
+pub async fn serve(sources: Arc<Sources>, target: AsyncDir, port: u16) -> anyhow::Result<()> {
     let app = Router::new()
-        .route("/", get(index))
-        .route("/*page", get(page))
+        .route("/", get(index)) // needed explicitly because * does not match empty paths
+        .route("/*path", get(vfs_entry))
         .route("/b", get(branch))
-        .route("/navmap.js", get(navmap))
-        .route("/sandbox", get(sandbox))
-        .route("/static/*file", get(static_file))
         .fallback(get(four_oh_four))
-        .with_state(Arc::new(Server {
-            config,
-            treehouse,
-            target_dir: paths.target_dir.to_owned(),
-            system_pages: SystemPages {
-                index: std::fs::read_to_string(paths.target_dir.join("index.html"))
-                    .context("cannot read index page")?,
-                four_oh_four: std::fs::read_to_string(paths.target_dir.join("_treehouse/404.html"))
-                    .context("cannot read 404 page")?,
-                b_docs: std::fs::read_to_string(paths.target_dir.join("_treehouse/b.html"))
-                    .context("cannot read /b documentation page")?,
-                sandbox: std::fs::read_to_string(paths.target_dir.join("static/html/sandbox.html"))
-                    .context("cannot read sandbox page")?,
-                navmap: std::fs::read_to_string(paths.target_dir.join("navmap.js"))
-                    .context("cannot read navigation map")?,
-            },
-        }));
+        .with_state(Arc::new(Server { sources, target }));
 
     #[cfg(debug_assertions)]
-    let app = live_reload::live_reload(app);
+    let app = app.nest("/dev/live-reload", live_reload::router());
 
     info!("serving on port {port}");
     let listener = TcpListener::bind((Ipv4Addr::from([0u8, 0, 0, 0]), port)).await?;
     Ok(axum::serve(listener, app).await?)
 }
 
-fn get_content_type(path: &str) -> Option<&'static str> {
-    match () {
-        _ if path.ends_with(".html") => Some("text/html"),
-        _ if path.ends_with(".js") => Some("text/javascript"),
-        _ if path.ends_with(".woff2") => Some("font/woff2"),
-        _ if path.ends_with(".svg") => Some("image/svg+xml"),
+fn get_content_type(extension: &str) -> Option<&'static str> {
+    match extension {
+        "html" => Some("text/html"),
+        "js" => Some("text/javascript"),
+        "woff" => Some("font/woff2"),
+        "svg" => Some("image/svg+xml"),
         _ => None,
     }
 }
 
-async fn index(State(state): State<Arc<Server>>) -> Response {
-    Html(state.system_pages.index.clone()).into_response()
-}
-
-async fn navmap(State(state): State<Arc<Server>>) -> Response {
-    let mut response = state.system_pages.navmap.clone().into_response();
-    response
-        .headers_mut()
-        .insert(CONTENT_TYPE, HeaderValue::from_static("text/javascript"));
-    response
-}
-
-async fn four_oh_four(State(state): State<Arc<Server>>) -> Response {
-    (
-        StatusCode::NOT_FOUND,
-        Html(state.system_pages.four_oh_four.clone()),
-    )
-        .into_response()
-}
-
 #[derive(Deserialize)]
-struct StaticFileQuery {
-    cache: Option<String>,
+struct VfsQuery {
+    #[serde(rename = "v")]
+    content_version: Option<String>,
 }
 
-async fn static_file(
-    Path(path): Path<String>,
-    Query(query): Query<StaticFileQuery>,
-    State(state): State<Arc<Server>>,
-) -> Response {
-    if let Ok(file) = tokio::fs::read(state.target_dir.join("static").join(&path)).await {
-        let mut response = file.into_response();
+async fn get_static_file(path: &str, query: &VfsQuery, state: &Server) -> Option<Response> {
+    let vpath = VPath::try_new(path).ok()?;
+    let content = state.target.content(vpath).await?;
+    let mut response = content.into_response();
 
-        if let Some(content_type) = get_content_type(&path) {
+    if let Some(content_type) = vpath.extension().and_then(get_content_type) {
         response
             .headers_mut()
             .insert(CONTENT_TYPE, HeaderValue::from_static(content_type));

@@ -134,77 +81,80 @@ async fn static_file(
         response.headers_mut().remove(CONTENT_TYPE);
     }
 
-    if query.cache.is_some() {
+    if query.content_version.is_some() {
         response.headers_mut().insert(
             CACHE_CONTROL,
             HeaderValue::from_static("public, max-age=31536000, immutable"),
         );
     }
 
+    Some(response)
+}
+
+async fn vfs_entry(
+    Path(path): Path<String>,
+    Query(query): Query<VfsQuery>,
+    State(state): State<Arc<Server>>,
+) -> Response {
+    if let Some(response) = get_static_file(&path, &query, &state).await {
+        response
+    } else {
+        four_oh_four(State(state)).await
+    }
+}
+
-async fn page(Path(path): Path<String>, State(state): State<Arc<Server>>) -> Response {
-    let bare_path = path.strip_suffix(".html").unwrap_or(&path);
-    if let Some(redirected_path) = state.config.redirects.page.get(bare_path) {
-        return (
-            StatusCode::MOVED_PERMANENTLY,
-            [(LOCATION, format!("{}/{redirected_path}", state.config.site))],
+async fn system_page(target: &AsyncDir, path: &VPath) -> Response {
+    if let Some(content) = target.content(path).await {
+        (StatusCode::NOT_FOUND, Html(content)).into_response()
+    } else {
+        (
+            StatusCode::INTERNAL_SERVER_ERROR,
+            format!("500 Internal Server Error: system page {path} is not available"),
         )
-            .into_response();
-    }
-
-    let html_path = format!("{bare_path}.html");
-    if let Ok(file) = tokio::fs::read(state.target_dir.join(&*html_path)).await {
-        ([(CONTENT_TYPE, "text/html")], file).into_response()
-    } else {
-        four_oh_four(State(state)).await
+            .into_response()
     }
 }
 
-async fn sandbox(State(state): State<Arc<Server>>) -> Response {
-    // Small hack to prevent the LiveReloadLayer from injecting itself into the sandbox.
-    // The sandbox is always nested under a different page, so there's no need to do that.
-    let mut response = Html(state.system_pages.sandbox.clone()).into_response();
-    #[cfg(debug_assertions)]
-    {
-        response
-            .extensions_mut()
-            .insert(live_reload::DisableLiveReload);
-    }
-    // Debounce requests a bit. There's a tendency to have very many sandboxes on a page, and
-    // loading this page as many times as there are sandboxes doesn't seem like the best way to do
-    // things.
-    response
-        .headers_mut()
-        .insert(CACHE_CONTROL, HeaderValue::from_static("max-age=10"));
-    response
+async fn index(State(state): State<Arc<Server>>) -> Response {
+    system_page(&state.target, system::INDEX).await
 }
 
-async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Html<String> {
+async fn four_oh_four(State(state): State<Arc<Server>>) -> Response {
+    system_page(&state.target, system::FOUR_OH_FOUR).await
+}
+
+async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>) -> Response {
     if let Some(named_id) = named_id {
         let branch_id = state
+            .sources
             .treehouse
             .branches_by_named_id
             .get(&named_id)
             .copied()
-            .or_else(|| state.treehouse.branch_redirects.get(&named_id).copied());
+            .or_else(|| {
+                state
+                    .sources
+                    .treehouse
+                    .branch_redirects
+                    .get(&named_id)
+                    .copied()
+            });
         if let Some(branch_id) = branch_id {
-            let branch = state.treehouse.tree.branch(branch_id);
+            let branch = state.sources.treehouse.tree.branch(branch_id);
             if let Source::Tree {
                 input, target_path, ..
-            } = state.treehouse.source(branch.file_id)
+            } = state.sources.treehouse.source(branch.file_id)
             {
-                match std::fs::read_to_string(target_path) {
-                    Ok(content) => {
-                        let branch_markdown_content = input[branch.content.clone()].trim();
+                if let Some(content) = state
+                    .target
+                    .content(target_path)
+                    .await
+                    .and_then(|s| String::from_utf8(s).ok())
+                {
+                    let branch_markup = input[branch.content.clone()].trim();
                     let mut per_page_metadata =
                         String::from("<meta property=\"og:description\" content=\"");
-                    write!(per_page_metadata, "{}", EscapeHtml(branch_markdown_content))
-                        .unwrap();
+                    write!(per_page_metadata, "{}", EscapeHtml(branch_markup)).unwrap();
                     per_page_metadata.push_str("\">");
 
                     const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";

@@ -213,17 +163,20 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
                         &per_page_metadata,
                         // Replace one under the assumption that it appears in all pages.
                         1,
-                    ));
-                }
-                Err(e) => {
-                    error!("error while reading file {target_path:?}: {e:?}");
-                }
+                    ))
+                    .into_response();
+                } else {
+                    return (
+                        StatusCode::INTERNAL_SERVER_ERROR,
+                        format!("500 Internal Server Error: branch metadata points to entry {target_path} which does not have readable content")
+                    )
+                        .into_response();
                 }
             }
         }
 
-        Html(state.system_pages.four_oh_four.clone())
+        system_page(&state.target, system::FOUR_OH_FOUR).await
     } else {
-        Html(state.system_pages.b_docs.clone())
+        system_page(&state.target, system::B_DOCS).await
    }
 }
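
`AsyncDir`, which the server now reads everything through, is also not defined in this diff. A plausible minimal wrapper, assuming the goal is just to keep the blocking `Dir::content` call off the async executor:

    use tokio::task::spawn_blocking;

    // Hypothetical implementation sketch; only the `content` signature
    // (async, returning Option<Vec<u8>>) is confirmed by the code above.
    #[derive(Debug, Clone)]
    pub struct AsyncDir {
        inner: DynDir, // cheaply clonable handle to a dyn Dir
    }

    impl AsyncDir {
        pub fn new(inner: DynDir) -> Self {
            Self { inner }
        }

        pub async fn content(&self, path: &VPath) -> Option<Vec<u8>> {
            let dir = self.inner.clone();
            let path = path.to_owned();
            // Run the synchronous vfs read on the blocking thread pool.
            spawn_blocking(move || dir.content(&path))
                .await
                .expect("vfs read should not panic")
        }
    }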

@@ -1,21 +1,28 @@
-use axum::{
-    http::{header::CONTENT_TYPE, Response},
-    Router,
-};
+use std::time::Duration;
 
-#[derive(Debug, Clone, Copy)]
-pub struct DisableLiveReload;
+use axum::{routing::get, Router};
+use tokio::time::sleep;
 
-pub fn live_reload(router: Router) -> Router {
-    router.layer(tower_livereload::LiveReloadLayer::new().response_predicate(
-        |response: &Response<_>| {
-            let is_html = response
-                .headers()
-                .get(CONTENT_TYPE)
-                .and_then(|v| v.to_str().ok())
-                .is_some_and(|v| v.starts_with("text/html"));
-            let is_disabled = response.extensions().get::<DisableLiveReload>().is_some();
-            is_html && !is_disabled
-        },
-    ))
+pub fn router<S>() -> Router<S> {
+    let router = Router::new().route("/back-up", get(back_up));
+
+    // The endpoint for immediate reload is only enabled on debug builds.
+    // Release builds use the exponential backoff system that detects if the WebSocket is closed.
+    #[cfg(debug_assertions)]
+    let router = router.route("/stall", get(stall));
+
+    router.with_state(())
 }
+
+#[cfg(debug_assertions)]
+async fn stall() -> String {
+    loop {
+        // Sleep for a day, I guess. Just to uphold the connection forever without really using any
+        // significant resources.
+        sleep(Duration::from_secs(60 * 60 * 24)).await;
+    }
+}
+
+async fn back_up() -> String {
+    "".into()
+}

@@ -1,12 +1,11 @@
-use std::{ffi::OsStr, path::Path};
+use std::ops::ControlFlow;
 
-use anyhow::Context;
 use treehouse_format::ast::{Branch, Roots};
-use walkdir::WalkDir;
 
 use crate::{
     parse::parse_tree_with_diagnostics,
     state::{report_diagnostics, Source, Treehouse},
+    vfs::{self, Dir, VPath},
 };
 
 use super::WcArgs;

@@ -29,14 +28,14 @@ fn wc_roots(source: &str, roots: &Roots) -> usize {
         .sum()
 }
 
-pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
+pub fn wc_cli(content_dir: &dyn Dir, mut wc_args: WcArgs) -> anyhow::Result<()> {
     if wc_args.paths.is_empty() {
-        for entry in WalkDir::new(content_dir) {
-            let entry = entry?;
-            if entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("tree")) {
-                wc_args.paths.push(entry.into_path());
+        vfs::walk_dir_rec(content_dir, VPath::ROOT, &mut |path| {
+            if path.extension() == Some("tree") {
+                wc_args.paths.push(path.to_owned());
             }
-        }
+            ControlFlow::Continue(())
+        });
     }
 
     let mut treehouse = Treehouse::new();

@@ -44,15 +43,11 @@ pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
     let mut total = 0;
 
     for path in &wc_args.paths {
-        let file = std::fs::read_to_string(path)
-            .with_context(|| format!("cannot read file to word count: {path:?}"))?;
-        let path_without_ext = path.with_extension("");
-        let utf8_filename = path_without_ext
-            .strip_prefix(content_dir)
-            .expect("paths should be rooted within the content directory")
-            .to_string_lossy();
-
-        let file_id = treehouse.add_file(utf8_filename.into_owned(), Source::Other(file));
+        if let Some(content) = content_dir
+            .content(path)
+            .and_then(|b| String::from_utf8(b).ok())
+        {
+            let file_id = treehouse.add_file(path.to_string(), Source::Other(content));
             match parse_tree_with_diagnostics(&mut treehouse, file_id) {
                 Ok(parsed) => {
                     let source = treehouse.source(file_id);

@@ -65,6 +60,7 @@ pub fn wc_cli(content_dir: &Path, mut wc_args: WcArgs) -> anyhow::Result<()> {
             }
         }
+        }
     }
 
     println!("{total:>8} total");
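
`vfs::walk_dir_rec` with a `ControlFlow` callback now replaces every `WalkDir` loop in this commit. Its implementation is not shown anywhere in the diff; a minimal recursive version consistent with the call sites would be:

    use std::ops::ControlFlow;

    // Assumed implementation, for illustration only.
    pub fn walk_dir_rec(
        dir: &dyn Dir,
        path: &VPath,
        f: &mut dyn FnMut(&VPath) -> ControlFlow<()>,
    ) {
        for entry in dir.dir(path) {
            if f(&entry.path).is_break() {
                return;
            }
            // Paths with no children return an empty Vec from `dir`,
            // which terminates the recursion.
            walk_dir_rec(dir, &entry.path, f);
        }
    }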

@@ -1,11 +1,8 @@
-use std::{
-    collections::HashMap, ffi::OsStr, fs::File, io::BufReader, ops::ControlFlow, path::Path,
-};
+use std::{collections::HashMap, ops::ControlFlow};
 
-use anyhow::Context;
-use log::debug;
+use anyhow::{anyhow, Context};
+use log::{debug, error};
 use serde::{Deserialize, Serialize};
-use walkdir::WalkDir;
 
 use crate::{
     html::highlight::{

@@ -13,7 +10,7 @@ use crate::{
         Syntax,
     },
     import_map::ImportRoot,
-    vfs::{self, ReadFilesystem, VPath, VPathBuf},
+    vfs::{self, Dir, VPath, VPathBuf},
 };
 
 #[derive(Debug, Clone, Deserialize, Serialize)]

@@ -102,8 +99,8 @@ pub enum Markup {
 }
 
 impl Config {
-    pub fn autopopulate_emoji(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> {
-        vfs::walk_rec(dir, VPath::ROOT, &mut |path| {
+    pub fn autopopulate_emoji(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
+        vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
             if path.extension().is_some_and(is_emoji_file) {
                 if let Some(emoji_name) = path.file_stem() {
                     if !self.emoji.contains_key(emoji_name) {

@@ -118,8 +115,8 @@ impl Config {
         Ok(())
     }
 
-    pub fn autopopulate_pics(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> {
-        vfs::walk_rec(dir, VPath::ROOT, &mut |path| {
+    pub fn autopopulate_pics(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
+        vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
             if path.extension().is_some_and(is_pic_file) {
                 if let Some(pic_name) = path.file_stem() {
                     let pic_id = pic_name

@@ -142,38 +139,48 @@ impl Config {
         format!("{}/{}", self.site, page)
     }
 
-    pub fn pic_url(&self, pics_fs: &dyn ReadFilesystem, id: &str) -> String {
+    pub fn pic_url(&self, pics_dir: &dyn Dir, id: &str) -> String {
         vfs::url(
             &self.site,
-            pics_fs,
+            pics_dir,
             self.pics
                 .get(id)
                 .map(|x| &**x)
                 .unwrap_or(VPath::new("404.png")),
         )
+        .expect("pics_dir is not anchored anywhere")
     }
 
     /// Loads all syntax definition files.
-    pub fn load_syntaxes(&mut self, dir: &Path) -> anyhow::Result<()> {
-        for entry in WalkDir::new(dir) {
-            let entry = entry?;
-            if entry.path().extension() == Some(OsStr::new("json")) {
-                let name = entry
-                    .path()
+    pub fn load_syntaxes(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
+        vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
+            if path.extension() == Some("json") {
+                let name = path
                     .file_stem()
-                    .expect("syntax file name should have a stem")
-                    .to_string_lossy();
+                    .expect("syntax file name should have a stem due to the .json extension");
                 debug!("loading syntax {name:?}");
 
-                let syntax: Syntax = serde_json::from_reader(BufReader::new(
-                    File::open(entry.path()).context("could not open syntax file")?,
-                ))
-                .context("could not deserialize syntax file")?;
+                let result: Result<Syntax, _> = dir
+                    .content(path)
+                    .ok_or_else(|| anyhow!("syntax .json is not a file"))
+                    .and_then(|b| {
+                        String::from_utf8(b).context("syntax .json contains invalid UTF-8")
+                    })
+                    .and_then(|s| {
+                        serde_json::from_str(&s).context("could not deserialize syntax file")
+                    });
+                match result {
+                    Ok(syntax) => {
                         let compiled = compile_syntax(&syntax);
-                self.syntaxes.insert(name.into_owned(), compiled);
+                        self.syntaxes.insert(name.to_owned(), compiled);
                     }
+                    Err(err) => error!("error while loading syntax file `{path}`: {err}"),
+                }
             }
+
+            ControlFlow::Continue(())
+        });
 
         Ok(())
     }
 }

crates/treehouse/src/dirs.rs (new file): 15 lines
@@ -0,0 +1,15 @@
+use crate::vfs::DynDir;
+
+#[derive(Debug, Clone)]
+pub struct Dirs {
+    pub root: DynDir,
+
+    pub content: DynDir,
+    pub static_: DynDir,
+    pub template: DynDir,
+
+    // `static` directories
+    pub pics: DynDir,
+    pub emoji: DynDir,
+    pub syntax: DynDir,
+}
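
How a `Dirs` gets populated is not part of this commit. Assuming the repository root is already wrapped in a `DynDir`, the wiring presumably uses the `Cd` (change directory) and `ToDynDir` helpers that appear elsewhere in this diff, with subdirectory names taken from the paths visible in the other files (`content`, `template`, `static`, `static/pic`, `static/emoji`, `static/syntax`):

    use crate::vfs::{Cd, DynDir, ToDynDir, VPathBuf};

    // Hypothetical constructor, for illustration.
    pub fn dirs_from(root: DynDir) -> Dirs {
        let static_ = Cd::new(root.clone(), VPathBuf::new("static")).to_dyn();
        Dirs {
            content: Cd::new(root.clone(), VPathBuf::new("content")).to_dyn(),
            template: Cd::new(root.clone(), VPathBuf::new("template")).to_dyn(),
            pics: Cd::new(static_.clone(), VPathBuf::new("pic")).to_dyn(),
            emoji: Cd::new(static_.clone(), VPathBuf::new("emoji")).to_dyn(),
            syntax: Cd::new(static_.clone(), VPathBuf::new("syntax")).to_dyn(),
            static_,
            root,
        }
    }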

@@ -1,103 +1,53 @@
-use std::{
-    collections::HashMap,
-    ffi::OsStr,
-    path::{Path, PathBuf},
-    time::Instant,
-};
+mod dir_helper;
+mod include_static_helper;
 
-use anyhow::{anyhow, bail, Context};
-use codespan_reporting::{
-    diagnostic::{Diagnostic, Label, LabelStyle, Severity},
-    files::Files as _,
-};
-use copy_dir::copy_dir;
+use std::{collections::HashMap, fmt, ops::ControlFlow, sync::Arc};
 
+use anyhow::{anyhow, ensure, Context};
+use codespan_reporting::diagnostic::Diagnostic;
+use dir_helper::DirHelper;
 use handlebars::{handlebars_helper, Handlebars};
+use include_static_helper::IncludeStaticHelper;
 use log::{debug, error, info};
 use serde::Serialize;
-use walkdir::WalkDir;
 
 use crate::{
-    cli::Paths,
     config::Config,
+    dirs::Dirs,
     fun::seasons::Season,
-    history::History,
-    html::{
-        breadcrumbs::breadcrumbs_to_html,
-        navmap::{build_navigation_map, NavigationMap},
-        tree::branches_to_html,
-    },
+    html::{breadcrumbs::breadcrumbs_to_html, navmap::NavigationMap, tree::branches_to_html},
     import_map::ImportMap,
-    include_static::IncludeStatic,
     parse::parse_tree_with_diagnostics,
-    state::{has_errors, report_diagnostics, RevisionInfo, Source},
-    static_urls::StaticUrls,
+    state::{report_diagnostics, Source},
     tree::SemaRoots,
-    vfs::{CdExt, ReadFilesystem, VPath, VPathBuf},
+    vfs::{self, Cd, Dir, DirEntry, DynDir, MemDir, Overlay, ToDynDir, VPath, VPathBuf},
 };
 
 use crate::state::{FileId, Treehouse};
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum LatestRevision {
-    /// The working tree is treated as the latest revision.
-    WorkingTree,
-    /// The latest commit is treated as the latest revision. The working tree is ignored.
-    LatestCommit,
-}
-
-struct Generator {
-    tree_files: Vec<PathBuf>,
-    git: git2::Repository,
-    history: History,
-    latest_revision: LatestRevision,
-}
-
+#[derive(Debug, Clone)]
-struct ParsedTree {
-    source_path: String,
+pub struct ParsedTree {
     root_key: String,
     tree_path: String,
     file_id: FileId,
-    target_path: PathBuf,
+    target_path: VPathBuf,
 }
 
 #[derive(Serialize)]
-pub struct Page {
-    pub title: String,
-    pub thumbnail: Option<Thumbnail>,
-    pub scripts: Vec<String>,
-    pub styles: Vec<String>,
-    pub breadcrumbs: String,
-    pub tree_path: Option<String>,
-    pub tree: String,
-
-    pub revision: RevisionInfo,
-    pub revision_url: String,
-    pub source_url: String,
-    pub history_url: String,
+struct Page {
+    title: String,
+    thumbnail: Option<Thumbnail>,
+    scripts: Vec<String>,
+    styles: Vec<String>,
+    breadcrumbs: String,
+    tree_path: Option<String>,
+    tree: String,
 }
 
 #[derive(Serialize)]
-pub struct Commit {
-    pub revision_number: usize,
-    pub hash: String,
-    pub hash_short: String,
-    pub summary: String,
-    pub body: String,
-}
-
-#[derive(Serialize)]
-pub struct HistoryPage {
-    pub title: String,
-    pub commits: Vec<Commit>,
-    pub tree_path: String,
-    pub is_history: bool, // always true
-}
-
-#[derive(Serialize)]
-pub struct Thumbnail {
-    pub url: String,
-    pub alt: Option<String>,
+struct Thumbnail {
+    url: String,
+    alt: Option<String>,
 }
 
 #[derive(Serialize)]

@@ -114,138 +64,61 @@ struct PageTemplateData<'a> {
     page: Page,
 }
 
-#[derive(Serialize)]
-struct HistoryTemplateData<'a> {
-    #[serde(flatten)]
-    base: &'a BaseTemplateData<'a>,
-    page: HistoryPage,
-}
+fn create_handlebars(site: &str, static_: DynDir) -> Handlebars<'static> {
+    let mut handlebars = Handlebars::new();
 
-impl Generator {
-    fn add_directory_rec(&mut self, directory: &Path) -> anyhow::Result<()> {
-        for entry in WalkDir::new(directory) {
-            let entry = entry?;
-            if entry.path().extension() == Some(OsStr::new("tree")) {
-                self.tree_files.push(entry.path().to_owned());
-            }
-        }
-        Ok(())
-    }
-
-    fn init_handlebars(handlebars: &mut Handlebars<'_>, paths: &Paths<'_>, config: &Config) {
     handlebars_helper!(cat: |a: String, b: String| a + &b);
 
     handlebars.register_helper("cat", Box::new(cat));
-    handlebars.register_helper(
-        "asset",
-        Box::new(StaticUrls::new(
-            paths.target_dir.join("static"),
-            format!("{}/static", config.site),
-        )),
-    );
+    handlebars.register_helper("asset", Box::new(DirHelper::new(site, static_.clone())));
     handlebars.register_helper(
         "include_static",
-        Box::new(IncludeStatic {
-            // NOTE: Again, allow referring to generated static assets.
-            // This is necessary for import maps, for whom the <src> attribute is not
-            // currently supported.
-            base_dir: paths.target_dir.join("static"),
-        }),
+        Box::new(IncludeStaticHelper::new(static_)),
     );
 
+    handlebars
 }
 
-    fn register_template(
-        handlebars: &mut Handlebars<'_>,
-        treehouse: &mut Treehouse,
-        diagnostics: &mut Vec<Diagnostic<FileId>>,
-        name: &str,
-        path: &Path,
-    ) -> anyhow::Result<FileId> {
-        let source = std::fs::read_to_string(path)
-            .with_context(|| format!("cannot read template file {path:?}"))?;
-        let file_id =
-            treehouse.add_file(path.to_string_lossy().into_owned(), Source::Other(source));
-        let source = treehouse.source(file_id);
-        if let Err(error) = handlebars.register_template_string(name, source) {
-            Self::wrangle_handlebars_error_into_diagnostic(
-                treehouse,
-                diagnostics,
-                file_id,
-                error.line_no,
-                error.column_no,
-                error.reason().to_string(),
-            )?;
+fn load_templates(handlebars: &mut Handlebars, dir: &dyn Dir) {
+    vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
+        if path.extension() == Some("hbs") {
+            if let Some(content) = dir.content(path).and_then(|b| String::from_utf8(b).ok()) {
+                if let Err(err) = handlebars.register_template_string(path.as_str(), content) {
+                    error!("in template: {err}");
+                }
+            }
         }
-        Ok(file_id)
-    }
 
-    fn wrangle_handlebars_error_into_diagnostic(
-        treehouse: &mut Treehouse,
-        diagnostics: &mut Vec<Diagnostic<FileId>>,
-        file_id: FileId,
-        line: Option<usize>,
-        column: Option<usize>,
-        message: String,
-    ) -> anyhow::Result<()> {
-        if let (Some(line), Some(column)) = (line, column) {
-            let line_range = treehouse
-                .files
-                .line_range(file_id, line)
-                .expect("file was added to the list");
-            diagnostics.push(Diagnostic {
-                severity: Severity::Error,
-                code: Some("template".into()),
-                message,
-                labels: vec![Label {
-                    style: LabelStyle::Primary,
-                    file_id,
-                    range: line_range.start + column..line_range.start + column + 1,
-                    message: String::new(),
-                }],
-                notes: vec![],
-            })
-        } else {
-            let file = treehouse.filename(file_id);
-            bail!("template error in {file}: {message}");
-        }
-        Ok(())
+        ControlFlow::Continue(())
+    });
 }
 
 fn parse_tree(
     treehouse: &mut Treehouse,
     config: &Config,
     source: String,
-    source_path: String,
+    source_path: VPathBuf,
+    target_path: VPathBuf,
     tree_path: String,
-    target_path: PathBuf,
-    revision: RevisionInfo,
 ) -> anyhow::Result<(Option<ParsedTree>, Vec<Diagnostic<FileId>>)> {
     let file_id = treehouse.add_file(
-        format!("{source_path}@{}", revision.commit_short),
+        source_path.as_str().to_owned(),
         Source::Tree {
             input: source,
             target_path: target_path.clone(),
             tree_path: tree_path.clone(),
-            revision_info: revision.clone(),
         },
     );
 
     match parse_tree_with_diagnostics(treehouse, file_id) {
         Ok(roots) => {
             let mut diagnostics = vec![];
-            let roots =
-                SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
+            let roots = SemaRoots::from_roots(treehouse, &mut diagnostics, config, file_id, roots);
 
-            let root_key = if revision.is_latest {
-                tree_path.clone()
-            } else {
-                format!("{tree_path}@{}", revision.number)
-            };
+            let root_key = tree_path.clone();
             treehouse.roots.insert(root_key.clone(), roots);
 
             Ok((
                 Some(ParsedTree {
-                    source_path,
                     root_key,
                     tree_path,
                     file_id,

@@ -259,233 +132,114 @@ impl Generator {
     }
 
     fn parse_trees(
-        &self,
         config: &Config,
-        paths: &Paths<'_>,
-    ) -> anyhow::Result<(Treehouse, Vec<ParsedTree>, Vec<Diagnostic<FileId>>)> {
+        dirs: &Dirs,
+    ) -> anyhow::Result<(Treehouse, HashMap<VPathBuf, ParsedTree>)> {
     let mut treehouse = Treehouse::new();
     let mut diagnostics = vec![];
-    let mut parsed_trees = vec![];
+    let mut parsed_trees = HashMap::new();
 
-    for path in &self.tree_files {
-        let utf8_path = path.to_string_lossy();
+    vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
+        if path.extension() == Some("tree") {
+            if let Some(source) = dirs
+                .content
+                .content(path)
+                .and_then(|b| String::from_utf8(b).ok())
+            {
+                let tree_path = path.with_extension("");
+                let target_path = path.with_extension("html");
 
-        let tree_path = path
-            .strip_prefix(paths.content_dir)
-            .unwrap_or(path)
-            .with_extension("")
-            .to_string_lossy()
-            .replace('\\', "/");
-        debug!("tree file: {path:?}");
+                debug!("tree file: {path}");
 
-        let page_history = self.history.by_page.get(&utf8_path[..]);
-        let working_revision_number = page_history
-            .map(|history| history.revisions.len() + 1)
-            .unwrap_or(1);
-
-        if self.latest_revision == LatestRevision::WorkingTree {
-            let source = std::fs::read_to_string(path)?;
-            let target_path = paths.target_dir.join(&tree_path).with_extension("html");
-            let (parsed_tree, mut parse_diagnostics) = Self::parse_tree(
+                match parse_tree(
                     &mut treehouse,
                     config,
                     source,
-                utf8_path.clone().into_owned(),
-                tree_path.clone(),
+                    path.to_owned(),
                     target_path,
-                RevisionInfo {
-                    is_latest: true,
-                    number: working_revision_number,
-                    commit: "working".into(),
-                    commit_short: "working".into(),
-                },
-            )?;
+                    tree_path.as_str().to_owned(),
+                ) {
+                    Ok((parsed_tree, mut parse_diagnostics)) => {
                         diagnostics.append(&mut parse_diagnostics);
                         if let Some(parsed_tree) = parsed_tree {
-                parsed_trees.push(parsed_tree);
+                            parsed_trees.insert(tree_path, parsed_tree);
                         }
                     }
+                    Err(err) => {
+                        error!("failed to parse tree {path}: {err:?}")
+                    }
+                }
+            }
         }
 
-        if let Some(page_history) = page_history {
-            for (i, revision) in page_history.revisions.iter().enumerate() {
-                let revision_number = page_history.revisions.len() - i;
+        ControlFlow::Continue(())
+    });
 
-                let source = String::from_utf8(
-                    self.git.find_blob(revision.blob_oid)?.content().to_owned(),
-                )?;
+    report_diagnostics(&treehouse.files, &diagnostics)?;
 
-                let target_path = paths
-                    .target_dir
-                    .join(format!("{tree_path}@{revision_number}"))
-                    .with_extension("html");
+    Ok((treehouse, parsed_trees))
+}
 
-                let (parsed_tree, parse_diagnostics) = Self::parse_tree(
-                    &mut treehouse,
-                    config,
-                    source,
-                    utf8_path.clone().into_owned(),
-                    tree_path.clone(),
-                    target_path,
-                    RevisionInfo {
-                        is_latest: false,
-                        number: revision_number,
-                        commit: revision.commit_oid.to_string(),
-                        commit_short: revision.commit_short(),
-                    },
-                )?;
-                _ = parse_diagnostics; // We don't reemit diagnostics from old revisions.
-                if let Some(parsed_tree) = parsed_tree {
-                    // If this commit is also considered to be the latest revision, we need
-                    // to generate a second version of the page that will act as the
-                    // latest one.
-                    let is_latest =
-                        self.latest_revision == LatestRevision::LatestCommit && i == 0;
-                    if is_latest {
-                        let root_key = parsed_tree.tree_path.clone();
-                        treehouse.roots.insert(
-                            root_key.clone(),
-                            treehouse.roots.get(&parsed_tree.root_key).unwrap().clone(),
+// TODO: Generation of pages in static/html
+//
+// for (name, &file_id) in &template_file_ids {
+//     let filename = name.rsplit_once('/').unwrap_or(("", name)).1;
+//     if !filename.starts_with('_') {
+//         let templated_html = match handlebars.render(name, &base_template_data) {
+//             Ok(html) => html,
+//             Err(error) => {
+//                 Self::wrangle_handlebars_error_into_diagnostic(
+//                     treehouse,
+//                     &mut global_diagnostics,
+//                     file_id,
+//                     error.line_no,
+//                     error.column_no,
+//                     error.desc,
+//                 )?;
+//                 continue;
+//             }
+//         };
+//         std::fs::write(
+//             paths.template_target_dir.join(name).with_extension("html"),
+//             templated_html,
+//         )?;
+//     }
+// }
 
+fn generate_tree(
+    sources: &Sources,
+    dirs: &Dirs,
+    handlebars: &Handlebars,
+    parsed_tree: &ParsedTree,
+) -> anyhow::Result<String> {
+    let breadcrumbs = breadcrumbs_to_html(
+        &sources.config,
+        &sources.navigation_map,
+        &parsed_tree.root_key,
     );
 
-                        let target_path =
-                            paths.target_dir.join(&tree_path).with_extension("html");
-                        let file_id = {
-                            let file = treehouse.files.get(parsed_tree.file_id).unwrap();
-                            let filename = file.name().clone();
-                            let Source::Tree {
-                                input,
-                                tree_path,
-                                target_path,
-                                revision_info,
-                            } = file.source().clone()
-                            else {
-                                panic!(".tree files must have Tree sources")
-                            };
-                            treehouse.add_file(
-                                filename,
-                                Source::Tree {
-                                    input,
-                                    tree_path,
-                                    target_path: target_path.clone(),
-                                    revision_info: RevisionInfo {
-                                        is_latest: true,
-                                        ..revision_info
-                                    },
-                                },
-                            )
-                        };
-
-                        parsed_trees.push(ParsedTree {
-                            root_key,
-                            target_path,
-                            file_id,
-                            ..parsed_tree.clone()
-                        })
-                    }
-
-                    parsed_trees.push(parsed_tree);
-                }
-            }
-        }
-    }
-
-    Ok((treehouse, parsed_trees, diagnostics))
-}
-
-    fn generate_all_files(
-        &self,
-        treehouse: &mut Treehouse,
-        config: &Config,
-        paths: &Paths<'_>,
-        root_fs: &dyn ReadFilesystem,
-        navigation_map: &NavigationMap,
-        parsed_trees: Vec<ParsedTree>,
-    ) -> anyhow::Result<Vec<Diagnostic<FileId>>> {
-        let mut global_diagnostics = vec![];
-
-        let mut handlebars: Handlebars<'static> = Handlebars::new();
-        Self::init_handlebars(&mut handlebars, paths, config);
-
-        let mut template_file_ids = HashMap::new();
-        for entry in WalkDir::new(paths.template_dir) {
-            let entry = entry.context("cannot read directory entry")?;
-            let path = entry.path();
-            if !entry.file_type().is_dir() && path.extension() == Some(OsStr::new("hbs")) {
-                let relative_path = path
-                    .strip_prefix(paths.template_dir)?
-                    .to_string_lossy()
-                    .into_owned()
-                    .replace('\\', "/");
-                let file_id = Self::register_template(
-                    &mut handlebars,
-                    treehouse,
-                    &mut global_diagnostics,
-                    &relative_path,
-                    path,
-                )?;
-                template_file_ids.insert(relative_path, file_id);
-            }
-        }
-
-        let import_map =
-            ImportMap::generate(config.site.clone(), &config.build.javascript.import_roots);
-
-        let base_template_data = BaseTemplateData {
-            config,
-            import_map: serde_json::to_string_pretty(&import_map)
-                .expect("import map should be serializable to JSON"),
-            season: Season::current(),
-        };
-
-        std::fs::create_dir_all(paths.template_target_dir)?;
-        for (name, &file_id) in &template_file_ids {
-            let filename = name.rsplit_once('/').unwrap_or(("", name)).1;
-            if !filename.starts_with('_') {
-                let templated_html = match handlebars.render(name, &base_template_data) {
-                    Ok(html) => html,
-                    Err(error) => {
-                        Self::wrangle_handlebars_error_into_diagnostic(
-                            treehouse,
-                            &mut global_diagnostics,
-                            file_id,
-                            error.line_no,
-                            error.column_no,
-                            error.desc,
-                        )?;
-                        continue;
-                    }
-                };
-                std::fs::write(
-                    paths.template_target_dir.join(name).with_extension("html"),
-                    templated_html,
-                )?;
-            }
-        }
-
-        for parsed_tree in parsed_trees {
-            debug!("generating: {:?}", parsed_tree.target_path);
-
-            let breadcrumbs = breadcrumbs_to_html(config, navigation_map, &parsed_tree.root_key);
-
     let mut tree = String::new();
-            // Temporarily steal the tree out of the treehouse.
-            let roots = treehouse
+    let roots = sources
+        .treehouse
         .roots
-                .remove(&parsed_tree.root_key)
+        .get(&parsed_tree.root_key)
         .expect("tree should have been added to the treehouse");
     branches_to_html(
         &mut tree,
-                treehouse,
-                config,
-                root_fs,
-                paths,
+        &sources.treehouse,
+        &sources.config,
+        dirs,
         parsed_tree.file_id,
         &roots.branches,
     );
 
-            let revision = treehouse
-                .revision_info(parsed_tree.file_id)
-                .expect(".tree files should have Tree sources");
+    let base_template_data = BaseTemplateData {
+        config: &sources.config,
+        import_map: serde_json::to_string_pretty(&sources.import_map)
+            .expect("import map should be serializable to JSON"),
+        season: Season::current(),
+    };
 
     let template_data = PageTemplateData {
         base: &base_template_data,
         page: Page {
@ -495,223 +249,203 @@ impl Generator {
|
|||
.thumbnail
|
||||
.as_ref()
|
||||
.map(|thumbnail| Thumbnail {
|
||||
url: config
|
||||
.pic_url(&root_fs.cd(VPathBuf::new("static/pics")), &thumbnail.id),
|
||||
url: sources.config.pic_url(&*dirs.pics, &thumbnail.id),
|
||||
alt: thumbnail.alt.clone(),
|
||||
}),
|
||||
scripts: roots.attributes.scripts.clone(),
|
||||
styles: roots.attributes.styles.clone(),
|
||||
breadcrumbs,
|
||||
tree_path: treehouse
|
||||
tree_path: sources
|
||||
.treehouse
|
||||
.tree_path(parsed_tree.file_id)
|
||||
.map(|s| s.to_owned()),
|
||||
tree,
|
||||
|
||||
revision_url: format!("{}/{}", config.site, parsed_tree.root_key),
|
||||
source_url: format!(
|
||||
"{}/{}/{}",
|
||||
config.commit_base_url, revision.commit, parsed_tree.source_path,
|
||||
),
|
||||
history_url: format!("{}/h/{}", config.site, parsed_tree.tree_path),
|
||||
revision: revision.clone(),
|
||||
},
|
||||
};
|
||||
let mut template_name = roots
|
||||
let template_name = roots
|
||||
.attributes
|
||||
.template
|
||||
.clone()
|
||||
.unwrap_or_else(|| "_tree.hbs".into());
|
||||
|
||||
if !template_file_ids.contains_key(&template_name) {
|
||||
template_name = "_tree.hbs".into();
|
||||
ensure!(
|
||||
handlebars.has_template(&template_name),
|
||||
"template {template_name} does not exist"
|
||||
);
|
||||
|
||||
handlebars
|
||||
.render(&template_name, &template_data)
|
||||
.context("template rendering failed")
|
||||
}
|
||||
|
||||
// Reinsert the stolen roots.
|
||||
treehouse.roots.insert(parsed_tree.root_key, roots);
|
||||
|
||||
let templated_html = match handlebars.render(&template_name, &template_data) {
|
||||
fn generate_tree_or_error(
|
||||
sources: &Sources,
|
||||
dirs: &Dirs,
|
||||
handlebars: &Handlebars,
|
||||
parsed_tree: &ParsedTree,
|
||||
) -> String {
|
||||
match generate_tree(sources, dirs, handlebars, parsed_tree) {
|
||||
Ok(html) => html,
|
||||
Err(error) => {
|
||||
Self::wrangle_handlebars_error_into_diagnostic(
|
||||
treehouse,
|
||||
// TODO: This should dump diagnostics out somewhere else.
|
||||
&mut global_diagnostics,
|
||||
template_file_ids[&template_name],
|
||||
error.line_no,
|
||||
error.column_no,
|
||||
error.desc,
|
||||
)?;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
std::fs::create_dir_all(
|
||||
parsed_tree
|
||||
.target_path
|
||||
.parent()
|
||||
.expect("there should be a parent directory to generate files into"),
|
||||
)?;
|
||||
std::fs::write(parsed_tree.target_path, templated_html)?;
|
||||
}
|
||||
|
||||
for (path, page_history) in &self.history.by_page {
|
||||
let tree_path = path
|
||||
.strip_prefix("content/")
|
||||
.unwrap_or(path)
|
||||
.strip_suffix(".tree")
|
||||
.unwrap_or(path);
|
||||
let target_path = paths
|
||||
.target_dir
|
||||
.join("h")
|
||||
.join(path.strip_prefix("content/").unwrap_or(path))
|
||||
.with_extension("html");
|
||||
std::fs::create_dir_all(target_path.parent().unwrap())?;
|
||||
|
||||
let template_data = HistoryTemplateData {
|
||||
base: &base_template_data,
|
||||
page: HistoryPage {
|
||||
title: format!("page history: {tree_path}"),
|
||||
commits: page_history
|
||||
.revisions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, revision)| Commit {
|
||||
revision_number: page_history.revisions.len() - i,
|
||||
hash: revision.commit_oid.to_string(),
|
||||
hash_short: revision.commit_short(),
|
||||
summary: self
|
||||
.history
|
||||
.commits
|
||||
.get(&revision.commit_oid)
|
||||
.map(|c| c.summary.as_str())
|
||||
.unwrap_or("<no summary available>")
|
||||
.to_owned(),
|
||||
body: self
|
||||
.history
|
||||
.commits
|
||||
.get(&revision.commit_oid)
|
||||
.map(|c| c.body.as_str())
|
||||
.unwrap_or("<no body available>")
|
||||
.to_owned(),
|
||||
})
|
||||
.collect(),
|
||||
tree_path: tree_path.to_owned(),
|
||||
is_history: true,
|
||||
},
|
||||
};
|
||||
let templated_html = match handlebars.render("_history.hbs", &template_data) {
|
||||
Ok(html) => html,
|
||||
Err(error) => {
|
||||
Self::wrangle_handlebars_error_into_diagnostic(
|
||||
treehouse,
|
||||
// TODO: This should dump diagnostics out somewhere else.
|
||||
&mut global_diagnostics,
|
||||
template_file_ids["_history.hbs"],
|
||||
error.line_no,
|
||||
error.column_no,
|
||||
error.desc,
|
||||
)?;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
std::fs::write(target_path, templated_html)?;
|
||||
}
|
||||
|
||||
Ok(global_diagnostics)
|
||||
Err(error) => format!("error: {error:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate(
|
||||
paths: &Paths<'_>,
|
||||
src: &dyn ReadFilesystem,
|
||||
latest_revision: LatestRevision,
|
||||
) -> anyhow::Result<(Config, Treehouse)> {
|
||||
let start = Instant::now();
|
||||
pub struct Sources {
|
||||
pub config: Config,
|
||||
pub treehouse: Treehouse,
|
||||
pub parsed_trees: HashMap<VPathBuf, ParsedTree>,
|
||||
pub navigation_map: NavigationMap,
|
||||
pub import_map: ImportMap,
|
||||
}
|
||||
|
||||
impl Sources {
|
||||
pub fn load(dirs: &Dirs) -> anyhow::Result<Self> {
|
||||
info!("loading config");
|
||||
let mut config: Config = toml_edit::de::from_str(
|
||||
&src.content(VPath::new("treehouse.toml"))
|
||||
&dirs
|
||||
.root
|
||||
.content(VPath::new("treehouse.toml"))
|
||||
.map(String::from_utf8)
|
||||
.ok_or_else(|| anyhow!("config file does not exist"))??,
|
||||
)
|
||||
.context("failed to deserialize config")?;
|
||||
config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
|
||||
config.autopopulate_emoji(&src.cd(VPathBuf::new("static/emoji")))?;
|
||||
config.autopopulate_pics(&src.cd(VPathBuf::new("static/pic")))?;
|
||||
config.load_syntaxes(&paths.static_dir.join("syntax"))?;
|
||||
config.autopopulate_emoji(&*dirs.emoji)?;
|
||||
config.autopopulate_pics(&*dirs.pics)?;
|
||||
config.load_syntaxes(&*dirs.syntax)?;
|
||||
|
||||
// TODO: WriteFilesystem, such that we can write into the target directory?
|
||||
info!("parsing tree files");
|
||||
let (treehouse, parsed_trees) = parse_trees(&config, dirs)?;
|
||||
|
||||
info!("cleaning target directory");
|
||||
let _ = std::fs::remove_dir_all(paths.target_dir);
|
||||
std::fs::create_dir_all(paths.target_dir)?;
|
||||
info!("constructing navigation map");
|
||||
let navigation_map = NavigationMap::build(&treehouse, "index");
|
||||
|
||||
info!("copying static directory to target directory");
|
||||
copy_dir(paths.static_dir, paths.target_dir.join("static"))?;
|
||||
info!("constructing import map");
|
||||
let import_map = ImportMap::generate(
|
||||
&config.site,
|
||||
&Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
|
||||
&config.build.javascript.import_roots,
|
||||
);
|
||||
|
||||
info!("getting history");
|
||||
let git = git2::Repository::open(".")?;
|
||||
let history = History::get(&git)?;
|
||||
|
||||
info!("parsing tree");
|
||||
let mut generator = Generator {
|
||||
tree_files: vec![],
|
||||
git,
|
||||
history,
|
||||
latest_revision,
|
||||
};
|
||||
generator.add_directory_rec(paths.content_dir)?;
|
||||
let (mut treehouse, parsed_trees, diagnostics) = generator.parse_trees(&config, paths)?;
|
||||
report_diagnostics(&treehouse.files, &diagnostics)?;
|
||||
if has_errors(&diagnostics) {
|
||||
bail!("diagnostics emitted during parsing");
|
||||
}
|
||||
|
||||
// NOTE: The navigation map is a legacy feature that is lazy-loaded when fragment-based
|
||||
// navigation is used.
|
||||
// I couldn't be bothered with adding it to the import map since fragment-based navigation is
|
||||
// only used on very old links. Adding caching to the navigation map is probably not worth it.
|
||||
info!("generating navigation map");
|
||||
let navigation_map = build_navigation_map(&treehouse, "index");
|
||||
std::fs::write(
|
||||
paths.target_dir.join("navmap.js"),
|
||||
navigation_map.to_javascript(),
|
||||
)?;
|
||||
|
||||
info!("generating standalone pages");
|
||||
let diagnostics = generator.generate_all_files(
|
||||
&mut treehouse,
|
||||
&config,
|
||||
paths,
|
||||
src,
|
||||
&navigation_map,
|
||||
Ok(Sources {
|
||||
config,
|
||||
treehouse,
|
||||
parsed_trees,
|
||||
)?;
|
||||
report_diagnostics(&treehouse.files, &diagnostics)?;
|
||||
navigation_map,
|
||||
import_map,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
info!("generating change history pages");
|
||||
/// Acceleration structure for `dir` operations on [`TreehouseDir`]s.
|
||||
#[derive(Debug, Default)]
|
||||
struct DirIndex {
|
||||
full_path: VPathBuf,
|
||||
children: HashMap<VPathBuf, DirIndex>,
|
||||
}
|
||||
|
||||
let duration = start.elapsed();
|
||||
info!("generation done in {duration:?}");
|
||||
impl DirIndex {
|
||||
pub fn new<'a>(paths: impl Iterator<Item = &'a VPath>) -> Self {
|
||||
let mut root = DirIndex::default();
|
||||
|
||||
if !has_errors(&diagnostics) {
|
||||
Ok((config, treehouse))
|
||||
for path in paths {
|
||||
let mut parent = &mut root;
|
||||
let mut full_path = VPath::ROOT.to_owned();
|
||||
for segment in path.segments() {
|
||||
full_path.push(segment);
|
||||
let child = parent
|
||||
.children
|
||||
.entry(segment.to_owned())
|
||||
.or_insert_with(|| DirIndex {
|
||||
full_path: full_path.clone(),
|
||||
children: HashMap::new(),
|
||||
});
|
||||
parent = child;
|
||||
}
|
||||
}
|
||||
|
||||
root
|
||||
}
|
||||
}
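`DirIndex` is a little prefix tree over every parsed tree path, so the `dir` queries below can be answered with a few hash lookups instead of a scan over all pages. A rough sketch of how it behaves (not part of the commit; it assumes the `VPath`/`VPathBuf` API introduced later in this diff):

```rust
// Hypothetical set of parsed tree paths.
let paths = [
    VPathBuf::new("index"),
    VPathBuf::new("treehouse/new"),
    VPathBuf::new("treehouse/changelog"),
];
let index = DirIndex::new(paths.iter().map(|p| &**p));

// Each level of the tree is keyed by a single path segment, so listing
// "treehouse" is one HashMap walk plus an iteration over its children.
let node = index
    .children
    .get(VPath::new("treehouse"))
    .expect("indexed above");
assert_eq!(node.children.len(), 2);
```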

struct TreehouseDir {
    dirs: Arc<Dirs>,
    sources: Arc<Sources>,
    dir_index: DirIndex,
    handlebars: Handlebars<'static>,
}

impl TreehouseDir {
    fn new(dirs: Arc<Dirs>, sources: Arc<Sources>, dir_index: DirIndex) -> Self {
        let mut handlebars = create_handlebars(&sources.config.site, dirs.static_.clone());
        load_templates(&mut handlebars, &dirs.template);

        Self {
            dirs,
            sources,
            dir_index,
            handlebars,
        }
    }
}
        bail!("generation errors occurred; diagnostics were emitted with detailed descriptions");

pub fn regenerate_or_report_error(
    paths: &Paths<'_>,
    src: &dyn ReadFilesystem,
    latest_revision: LatestRevision,
) -> anyhow::Result<(Config, Treehouse)> {
    info!("regenerating site content");

    let result = generate(paths, src, latest_revision);
    if let Err(e) = &result {
        error!("{e:?}");

    result

impl Dir for TreehouseDir {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        let mut index = &self.dir_index;
        for component in path.segments() {
            if let Some(child) = index.children.get(component) {
                index = child;
            } else {
                // There cannot possibly be any entries under an invalid path.
                // Bail early.
                return vec![];
            }
        }

        index
            .children
            .values()
            .map(|child| DirEntry {
                path: child.full_path.clone(),
            })
            .collect()
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        debug!("content({path})");

        let path = if path.is_root() {
            VPath::new_const("index")
        } else {
            path
        };
        let mut path = path.to_owned();
        if path.extension() == Some("html") {
            path.set_extension("");
        }

        self.sources.parsed_trees.get(&path).map(|parsed_tree| {
            generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, parsed_tree).into()
        })
    }

    fn content_version(&self, _path: &VPath) -> Option<String> {
        None
    }
}

impl fmt::Debug for TreehouseDir {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("TreehouseDir")
    }
}

pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
    let mut root = MemDir::new();
    root.add(VPath::new("static"), dirs.static_.clone());

    let dir_index = DirIndex::new(sources.parsed_trees.keys().map(|x| &**x));
    let tree_view = TreehouseDir::new(dirs, sources, dir_index);

    Overlay::new(tree_view.to_dyn(), root.to_dyn()).to_dyn()
}
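Worth spelling out the resolution order here: `root` (with `static` mounted) is passed as the *overlay*, so it gets first pick, and everything that is not a static file falls through to `TreehouseDir`, which renders pages on demand. A sketch (paths hypothetical):

```rust
let site = target(dirs, sources);

// Resolved by the MemDir overlay, straight from the static mount:
let css = site.content(VPath::new("static/css/tree.css"));

// No "static"-like mount matches "index", so this falls through to
// TreehouseDir::content, which renders the page from its parsed tree:
let page = site.content(VPath::new("index"));
```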

crates/treehouse/src/generate/dir_helper.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;

use crate::vfs::{self, DynDir, VPath};

pub struct DirHelper {
    site: String,
    dir: DynDir,
}

impl DirHelper {
    pub fn new(site: &str, dir: DynDir) -> Self {
        Self {
            site: site.to_owned(),
            dir,
        }
    }
}

impl HelperDef for DirHelper {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        h: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        if let Some(path) = h.param(0).and_then(|v| v.value().as_str()) {
            let vpath = VPath::try_new(path).map_err(|e| RenderError::new(e.to_string()))?;
            let url = vfs::url(&self.site, &self.dir, vpath)
                .ok_or_else(|| RenderError::new("path is not anchored anywhere"))?;
            Ok(ScopedJson::Derived(Value::String(url)))
        } else {
            Err(RenderError::new("missing path string"))
        }
    }
}
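The helper itself is only half the story; it still has to be registered with Handlebars under some name, which presumably happens in `create_handlebars` (not shown in this diff). A guess at the wiring, with the helper name assumed:

```rust
// Assumed registration (the helper name "dir" is hypothetical):
let mut handlebars = Handlebars::new();
handlebars.register_helper(
    "dir",
    Box::new(DirHelper::new(&config.site, dirs.static_.clone())),
);

// A template can then write {{ dir 'css/tree.css' }} and get back an
// absolute URL produced by vfs::url against the static DynDir.
```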

crates/treehouse/src/generate/include_static_helper.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;

use crate::vfs::{DynDir, VPath};

pub struct IncludeStaticHelper {
    dir: DynDir,
}

impl IncludeStaticHelper {
    pub fn new(dir: DynDir) -> Self {
        Self { dir }
    }
}

impl HelperDef for IncludeStaticHelper {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        h: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        if let Some(path) = h.param(0).and_then(|v| v.value().as_str()) {
            let vpath = VPath::try_new(path).map_err(|e| RenderError::new(e.to_string()))?;
            let url = String::from_utf8(
                self.dir
                    .content(vpath)
                    .ok_or_else(|| RenderError::new("file does not exist"))?,
            )
            .map_err(|_| RenderError::new("included file does not contain UTF-8 text"))?;
            Ok(ScopedJson::Derived(Value::String(url)))
        } else {
            Err(RenderError::new("missing path string"))
        }
    }
}

@@ -17,10 +17,10 @@ use jotdown::OrderedListNumbering::*;
use jotdown::SpanLinkType;

use crate::config::Config;
use crate::dirs::Dirs;
use crate::state::FileId;
use crate::state::Treehouse;
use crate::vfs;
use crate::vfs::ReadFilesystem;

use super::highlight::highlight;

@@ -28,10 +28,9 @@ use super::highlight::highlight;
pub struct Renderer<'a> {
    pub config: &'a Config,

    pub emoji_fs: &'a dyn ReadFilesystem,
    pub pics_fs: &'a dyn ReadFilesystem,
    pub dirs: &'a Dirs,

    pub treehouse: &'a mut Treehouse,
    pub treehouse: &'a Treehouse,
    pub file_id: FileId,
    pub page_id: String,
}

@@ -376,7 +375,7 @@ impl<'a> Writer<'a> {
        let pic_url = self
            .renderer
            .config
            .pic_url(self.renderer.pics_fs, placeholder_pic_id);
            .pic_url(&*self.renderer.dirs.pics, placeholder_pic_id);
        write_attr(&pic_url, out);
        out.push('"');

@@ -563,7 +562,12 @@ impl<'a> Writer<'a> {
            out.push_str(r#"">"#)
        }

        let url = vfs::url(&self.renderer.config.site, self.renderer.emoji_fs, vpath);
        let url = vfs::url(
            &self.renderer.config.site,
            &*self.renderer.dirs.emoji,
            vpath,
        )
        .expect("emoji directory is not anchored anywhere");

        // TODO: this could do with better alt text
        write!(

@@ -644,7 +648,7 @@ impl<'a> Writer<'a> {
                )
            }),
            "page" => Some(config.page_url(linked)),
            "pic" => Some(config.pic_url(self.renderer.pics_fs, linked)),
            "pic" => Some(config.pic_url(&*self.renderer.dirs.pics, linked)),
            _ => None,
        })
    }

@@ -1,50 +1,39 @@
use std::collections::HashMap;

use serde::Serialize;

use crate::{
    state::Treehouse,
    tree::{attributes::Content, SemaBranchId},
};

#[derive(Debug, Clone, Default, Serialize)]
#[derive(Debug, Clone, Default)]
struct NavigationMapBuilder {
    stack: Vec<String>,
    navigation_map: NavigationMap,
}

impl NavigationMapBuilder {
    fn enter_tree(&mut self, tree: String) {
        self.stack.push(tree.clone());
        self.navigation_map.paths.insert(tree, self.stack.clone());
    }

    fn exit_tree(&mut self) {
        self.stack.pop();
    }

    fn finish(self) -> NavigationMap {
        self.navigation_map
    }
}

#[derive(Debug, Clone, Default)]
pub struct NavigationMap {
    /// Tells you which pages need to be opened to get to the key.
    pub paths: HashMap<String, Vec<String>>,
}

impl NavigationMap {
    pub fn to_javascript(&self) -> String {
        format!(
            "export const navigationMap = {};",
            serde_json::to_string(&self.paths)
                .expect("serialization of the navigation map should not fail")
        )
    }
}

#[derive(Debug, Clone, Default)]
pub struct NavigationMapBuilder {
    stack: Vec<String>,
    navigation_map: NavigationMap,
}

impl NavigationMapBuilder {
    pub fn enter_tree(&mut self, tree: String) {
        self.stack.push(tree.clone());
        self.navigation_map.paths.insert(tree, self.stack.clone());
    }

    pub fn exit_tree(&mut self) {
        self.stack.pop();
    }

    pub fn finish(self) -> NavigationMap {
        self.navigation_map
    }
}

pub fn build_navigation_map(treehouse: &Treehouse, root_tree_path: &str) -> NavigationMap {
    pub fn build(treehouse: &Treehouse, root_tree_path: &str) -> Self {
        let mut builder = NavigationMapBuilder::default();

        fn rec_branch(

@@ -80,3 +69,4 @@ pub fn build_navigation_map(treehouse: &Treehouse, root_tree_path: &str) -> Navi
        builder.finish()
    }
}
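The builder is driven by `build` as it walks the tree; what comes out the other end is a plain map from page to the chain of pages that lead to it. Roughly (illustrative values):

```rust
let mut builder = NavigationMapBuilder::default();
builder.enter_tree("index".into());
builder.enter_tree("index/treehouse".into());
builder.exit_tree();
builder.exit_tree();

let map = builder.finish();
// map.paths now contains:
//   "index"           => ["index"]
//   "index/treehouse" => ["index", "index/treehouse"]
// and to_javascript() wraps that JSON in an ES module:
//   export const navigationMap = {"index":["index"], ...};
assert!(map.to_javascript().starts_with("export const navigationMap"));
```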

@@ -3,25 +3,23 @@ use std::{borrow::Cow, fmt::Write};
use treehouse_format::pull::BranchKind;

use crate::{
    cli::Paths,
    config::Config,
    dirs::Dirs,
    html::EscapeAttribute,
    state::{FileId, Treehouse},
    tree::{
        attributes::{Content, Stage},
        mini_template, SemaBranchId,
    },
    vfs::{CdExt, ReadFilesystem, VPathBuf},
};

use super::{djot, EscapeHtml};

pub fn branch_to_html(
    s: &mut String,
    treehouse: &mut Treehouse,
    treehouse: &Treehouse,
    config: &Config,
    root_fs: &dyn ReadFilesystem, // TODO: Lower privileges
    paths: &Paths<'_>,
    dirs: &Dirs,
    file_id: FileId,
    branch_id: SemaBranchId,
) {

@@ -116,13 +114,7 @@ pub fn branch_to_html(
    }

    if branch.attributes.template {
        final_markup = mini_template::render(
            config,
            treehouse,
            paths,
            &root_fs.cd(VPathBuf::new("static/pics")),
            &final_markup,
        );
        final_markup = mini_template::render(config, treehouse, dirs, &final_markup);
    }
    s.push_str("<th-bc>");

@@ -137,8 +129,7 @@ pub fn branch_to_html(
            .to_owned(),

        config,
        emoji_fs: &root_fs.cd(VPathBuf::new("static/emoji")),
        pics_fs: &root_fs.cd(VPathBuf::new("static/pics")),
        dirs,

        treehouse,
        file_id,

@@ -195,7 +186,7 @@ pub fn branch_to_html(
    let num_children = branch.children.len();
    for i in 0..num_children {
        let child_id = treehouse.tree.branch(branch_id).children[i];
        branch_to_html(s, treehouse, config, root_fs, paths, file_id, child_id);
        branch_to_html(s, treehouse, config, dirs, file_id, child_id);
    }
    s.push_str("</ul>");
}

@@ -209,16 +200,15 @@ pub fn branch_to_html(

pub fn branches_to_html(
    s: &mut String,
    treehouse: &mut Treehouse,
    treehouse: &Treehouse,
    config: &Config,
    root_fs: &dyn ReadFilesystem, // TODO: Lower privileges
    paths: &Paths<'_>,
    dirs: &Dirs,
    file_id: FileId,
    branches: &[SemaBranchId],
) {
    s.push_str("<ul>");
    for &child in branches {
        branch_to_html(s, treehouse, config, root_fs, paths, file_id, child);
        branch_to_html(s, treehouse, config, dirs, file_id, child);
    }
    s.push_str("</ul>");
}

@@ -1,11 +1,9 @@
use std::{ffi::OsStr, path::PathBuf};
use std::ops::ControlFlow;

use indexmap::IndexMap;
use log::warn;
use serde::{Deserialize, Serialize};
use walkdir::WalkDir;

use crate::static_urls::StaticUrls;
use crate::vfs::{self, Dir, VPathBuf};

#[derive(Debug, Clone, Serialize)]
pub struct ImportMap {

@@ -15,49 +13,30 @@ pub struct ImportMap {
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ImportRoot {
    pub name: String,
    pub path: String,
    pub path: VPathBuf,
}

impl ImportMap {
    pub fn generate(base_url: String, import_roots: &[ImportRoot]) -> Self {
    pub fn generate(site: &str, root: &dyn Dir, import_roots: &[ImportRoot]) -> Self {
        let mut import_map = ImportMap {
            imports: IndexMap::new(),
        };

        for root in import_roots {
            let static_urls = StaticUrls::new(
                PathBuf::from(&root.path),
                format!("{base_url}/{}", root.path),
        for import_root in import_roots {
            vfs::walk_dir_rec(root, &import_root.path, &mut |path| {
                if path.extension() == Some("js") {
                    import_map.imports.insert(
                        format!(
                            "{}/{}",
                            import_root.name,
                            path.strip_prefix(&import_root.path).unwrap_or(path)
                        ),
                        vfs::url(site, root, path)
                            .expect("import directory is not anchored anywhere"),
                    );
            for entry in WalkDir::new(&root.path) {
                let entry = match entry {
                    Ok(entry) => entry,
                    Err(error) => {
                        warn!("directory walk failed: {error}");
                        continue;
                    }
                };

                if !entry.file_type().is_dir() && entry.path().extension() == Some(OsStr::new("js"))
                {
                    let normalized_path = entry
                        .path()
                        .strip_prefix(&root.path)
                        .unwrap_or(entry.path())
                        .to_string_lossy()
                        .replace('\\', "/");
                    match static_urls.get(&normalized_path) {
                        Ok(url) => {
                            import_map
                                .imports
                                .insert(format!("{}/{normalized_path}", root.name), url);
                        }
                        Err(error) => {
                            warn!("could not get static url for {normalized_path}: {error}")
                        }
                    }
                }
            }
                ControlFlow::Continue(())
            });
        }

        import_map.imports.sort_unstable_keys();
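Concretely, with the call from `Sources::load` (a `Cd` into the static `js` directory as the root), a file `navigation.js` under an import root named, say, `treehouse` at the root path would come out as one entry of the sorted `imports` map, which is what eventually lands in a `<script type="importmap">`. A sketch with the names assumed:

```rust
// Hypothetical import root:
//   ImportRoot { name: "treehouse".into(), path: VPathBuf::new("") }
let import_map = ImportMap::generate(
    &config.site,
    &Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
    &config.build.javascript.import_roots,
);
// imports would then contain something like:
//   "treehouse/navigation.js" => "<site>/static/js/navigation.js"
// (with ?v=... appended once content_version impls exist)
let json = serde_json::to_string(&import_map).expect("ImportMap is Serialize");
```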

@@ -1,28 +0,0 @@
use std::path::PathBuf;

use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;

pub struct IncludeStatic {
    pub base_dir: PathBuf,
}

impl HelperDef for IncludeStatic {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        helper: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        if let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) {
            return Ok(ScopedJson::Derived(Value::String(
                std::fs::read_to_string(self.base_dir.join(param)).map_err(|error| {
                    RenderError::new(format!("cannot read static asset {param}: {error}"))
                })?,
            )));
        }

        Err(RenderError::new("asset path must be provided"))
    }
}

@@ -1,15 +1,13 @@
pub mod cli;
pub mod config;
pub mod dirs;
pub mod fun;
pub mod generate;
pub mod history;
pub mod html;
pub mod import_map;
pub mod include_static;
pub mod parse;
pub mod paths;
pub mod state;
pub mod static_urls;
pub mod templater;
pub mod tree;
pub mod vfs;

@@ -1,76 +1,78 @@
use std::fs;
use std::path::PathBuf;
use std::{fs, path::Path};
use std::sync::Arc;

use anyhow::Context;
use clap::Parser;
use log::error;
use treehouse::generate::{regenerate_or_report_error, LatestRevision};
use treehouse::vfs::PhysicalDir;
use treehouse::vfs::{AnchoredAtExt, VPathBuf};
use treehouse::cli::serve::serve;
use treehouse::dirs::Dirs;
use treehouse::generate::{self, Sources};
use treehouse::vfs::asynch::AsyncDir;
use treehouse::vfs::{AnchoredAtExt, DynDir, ToDynDir, VPathBuf};
use treehouse::vfs::{Cd, PhysicalDir};
use treehouse::{
    cli::{
        fix::{fix_all_cli, fix_file_cli},
        serve::serve,
        wc::wc_cli,
        Command, Paths, ProgramArgs,
        Command, ProgramArgs,
    },
    vfs::{BufferedFile, MountPoints, ReadFilesystem, VPath},
    vfs::{BufferedFile, MemDir, VPath},
};

fn vfs_sources() -> anyhow::Result<impl ReadFilesystem> {
    let mut root = MountPoints::new();
fn vfs_sources() -> anyhow::Result<DynDir> {
    let mut root = MemDir::new();

    root.add(
        VPath::new("treehouse.toml"),
        Box::new(BufferedFile::new(fs::read("treehouse.toml")?)),
        BufferedFile::new(fs::read("treehouse.toml")?).to_dyn(),
    );
    root.add(
        VPath::new("static"),
        Box::new(PhysicalDir::new(PathBuf::from("static")).anchored_at(VPathBuf::new("static"))),
        PhysicalDir::new(PathBuf::from("static"))
            .anchored_at(VPathBuf::new("static"))
            .to_dyn(),
    );
    root.add(
        VPath::new("template"),
        Box::new(PhysicalDir::new(PathBuf::from("template"))),
        PhysicalDir::new(PathBuf::from("template")).to_dyn(),
    );
    root.add(
        VPath::new("content"),
        Box::new(PhysicalDir::new(PathBuf::from("content"))),
        PhysicalDir::new(PathBuf::from("content")).to_dyn(),
    );

    Ok(root)
    Ok(root.to_dyn())
}

async fn fallible_main() -> anyhow::Result<()> {
    let args = ProgramArgs::parse();

    let paths = Paths {
        target_dir: Path::new("target/site"),
        template_target_dir: Path::new("target/site/static/html"),

        config_file: Path::new("treehouse.toml"),
        static_dir: Path::new("static"),
        template_dir: Path::new("template"),
        content_dir: Path::new("content"),
    };

    let src = vfs_sources()?;
    let dirs = Arc::new(Dirs {
        root: src.clone(),
        content: Cd::new(src.clone(), VPathBuf::new("content")).to_dyn(),
        static_: Cd::new(src.clone(), VPathBuf::new("static")).to_dyn(),
        template: Cd::new(src.clone(), VPathBuf::new("template")).to_dyn(),
        pics: Cd::new(src.clone(), VPathBuf::new("static/pics")).to_dyn(),
        emoji: Cd::new(src.clone(), VPathBuf::new("static/emoji")).to_dyn(),
        syntax: Cd::new(src.clone(), VPathBuf::new("static/syntax")).to_dyn(),
    });
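Each of these `Cd` views is just a path-prefixing wrapper over the same source tree, so nothing is copied, and a narrow view agrees with the root by construction. A tiny illustration (the file name is made up):

```rust
// Equivalent lookups through the narrow view and through the root:
let via_view = dirs.pics.content(VPath::new("somewhere.webp"));
let via_root = src.content(VPath::new("static/pics/somewhere.webp"));
assert_eq!(via_view, via_root);
```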

    match args.command {
        Command::Serve {
            generate: generate_args,
            generate: _,
            serve: serve_args,
        } => {
            let latest_revision = match generate_args.commits_only {
                true => LatestRevision::LatestCommit,
                false => LatestRevision::WorkingTree,
            };
            let (config, treehouse) = regenerate_or_report_error(&paths, &src, latest_revision)?;
            serve(config, treehouse, &paths, serve_args.port).await?;
            let sources = Arc::new(Sources::load(&dirs).context("failed to load sources")?);
            let target = generate::target(dirs, sources.clone());
            serve(sources, AsyncDir::new(target), serve_args.port).await?;
        }

        Command::Fix(fix_args) => fix_file_cli(fix_args)?,
        Command::FixAll(fix_args) => fix_all_cli(fix_args, &paths)?,
        Command::Fix(fix_args) => fix_file_cli(fix_args, &*dirs.content)?.apply().await?,
        Command::FixAll(fix_args) => fix_all_cli(fix_args, &*dirs.content)?.apply().await?,

        Command::Wc(wc_args) => wc_cli(paths.content_dir, wc_args)?,
        Command::Wc(wc_args) => wc_cli(&dirs.content, wc_args)?,

        Command::Ulid => {
            let mut rng = rand::thread_rng();

@@ -1,4 +1,4 @@
use std::{collections::HashMap, ops::Range, path::PathBuf};
use std::{collections::HashMap, ops::Range};

use anyhow::Context;
use codespan_reporting::{

@@ -6,26 +6,19 @@ use codespan_reporting::{
    files::SimpleFiles,
    term::termcolor::{ColorChoice, StandardStream},
};
use serde::Serialize;
use ulid::Ulid;

use crate::tree::{SemaBranchId, SemaRoots, SemaTree};

#[derive(Debug, Clone, Serialize)]
pub struct RevisionInfo {
    pub is_latest: bool,
    pub number: usize,
    pub commit: String,
    pub commit_short: String,
}
use crate::{
    tree::{SemaBranchId, SemaRoots, SemaTree},
    vfs::VPathBuf,
};

#[derive(Debug, Clone)]
pub enum Source {
    Tree {
        input: String,
        tree_path: String,
        target_path: PathBuf,
        revision_info: RevisionInfo,
        target_path: VPathBuf,
    },
    Other(String),
}

@@ -103,13 +96,6 @@ impl Treehouse {
    }
}

    pub fn revision_info(&self, file_id: FileId) -> Option<&RevisionInfo> {
        match self.source(file_id) {
            Source::Tree { revision_info, .. } => Some(revision_info),
            Source::Other(_) => None,
        }
    }

    pub fn next_missingno(&mut self) -> Ulid {
        self.missingno_generator
            .generate()

@@ -1,89 +0,0 @@
use std::{
    collections::HashMap,
    fs::File,
    io::{self, BufReader},
    path::PathBuf,
    sync::{Mutex, RwLock},
};

use handlebars::{Context, Handlebars, Helper, HelperDef, RenderContext, RenderError, ScopedJson};
use serde_json::Value;

pub struct StaticUrls {
    base_dir: PathBuf,
    base_url: String,
    // Really annoying that we have to use an RwLock for this. We only ever generate in a
    // single-threaded environment.
    // Honestly it would be a lot more efficient if Handlebars just assumed single-threadedness
    // and required you to clone it over to different threads.
    // Stuff like this is why I really want to implement my own templating engine...
    hash_cache: RwLock<HashMap<String, String>>,
    missing_files: Mutex<Vec<MissingFile>>,
}

pub struct MissingFile {
    pub path: String,
}

impl StaticUrls {
    pub fn new(base_dir: PathBuf, base_url: String) -> Self {
        Self {
            base_dir,
            base_url,
            hash_cache: RwLock::new(HashMap::new()),
            missing_files: Mutex::new(vec![]),
        }
    }

    pub fn get(&self, filename: &str) -> Result<String, io::Error> {
        let hash_cache = self.hash_cache.read().unwrap();
        if let Some(cached) = hash_cache.get(filename) {
            return Ok(cached.to_owned());
        }
        drop(hash_cache);

        let mut hasher = blake3::Hasher::new();
        let file = BufReader::new(File::open(self.base_dir.join(filename))?);
        hasher.update_reader(file)?;
        // NOTE: Here the hash is truncated to 8 characters. This is fine, because we don't
        // care about security here - only detecting changes in files.
        let hash = format!(
            "{}/{}?cache=b3-{}",
            self.base_url,
            filename,
            &hasher.finalize().to_hex()[0..8]
        );
        {
            let mut hash_cache = self.hash_cache.write().unwrap();
            hash_cache.insert(filename.to_owned(), hash.clone());
        }
        Ok(hash)
    }

    pub fn take_missing_files(&self) -> Vec<MissingFile> {
        std::mem::take(&mut self.missing_files.lock().unwrap())
    }
}

impl HelperDef for StaticUrls {
    fn call_inner<'reg: 'rc, 'rc>(
        &self,
        helper: &Helper<'reg, 'rc>,
        _: &'reg Handlebars<'reg>,
        _: &'rc Context,
        _: &mut RenderContext<'reg, 'rc>,
    ) -> Result<ScopedJson<'reg, 'rc>, RenderError> {
        if let Some(param) = helper.param(0).and_then(|v| v.value().as_str()) {
            return Ok(ScopedJson::Derived(Value::String(
                self.get(param).unwrap_or_else(|_| {
                    self.missing_files.lock().unwrap().push(MissingFile {
                        path: param.to_owned(),
                    });
                    format!("{}/{}", self.base_url, param)
                }),
            )));
        }

        Err(RenderError::new("asset path must be provided"))
    }
}

@@ -1,13 +0,0 @@
use handlebars::Handlebars;

pub struct Templater {
    handlebars: Handlebars<'static>,
}

impl Templater {
    pub fn new() -> Self {
        Self {
            handlebars: Handlebars::new(),
        }
    }
}

@@ -163,13 +163,7 @@ impl SemaBranch {
    ) -> SemaBranchId {
        let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch);

        let revision_info = treehouse
            .revision_info(file_id)
            .expect(".tree files must have Tree-type sources");
        let named_id = match revision_info.is_latest {
            true => attributes.id.to_owned(),
            false => format!("{}@{}", attributes.id, revision_info.commit_short),
        };
        let named_id = attributes.id.to_owned();
        let html_id = format!(
            "{}:{}",
            treehouse.tree_path(file_id).unwrap(),

@@ -7,7 +7,13 @@
use std::fmt::Write;
use std::ops::Range;

use crate::{cli::Paths, config::Config, html::EscapeHtml, state::Treehouse, vfs::ReadFilesystem};
use crate::{
    config::Config,
    dirs::Dirs,
    html::EscapeHtml,
    state::Treehouse,
    vfs::{Dir, VPath},
};

struct Lexer<'a> {
    input: &'a str,

@@ -148,13 +154,7 @@ impl Renderer<'_> {
        self.output.push_str(&self.lexer.input[token.range.clone()]);
    }

    fn render(
        &mut self,
        config: &Config,
        treehouse: &Treehouse,
        paths: &Paths<'_>,
        pics_fs: &dyn ReadFilesystem,
    ) {
    fn render(&mut self, config: &Config, treehouse: &Treehouse, dirs: &Dirs) {
        let kind_of = |token: &Token| token.kind;

        while let Some(token) = self.lexer.next() {

@@ -171,8 +171,7 @@ impl Renderer<'_> {
                match Self::render_template(
                    config,
                    treehouse,
                    pics_fs,
                    paths,
                    dirs,
                    self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(),
                ) {
                    Ok(s) => match escaping {

@@ -199,31 +198,27 @@ impl Renderer<'_> {
    fn render_template(
        config: &Config,
        _treehouse: &Treehouse,
        pics_fs: &dyn ReadFilesystem,
        paths: &Paths<'_>,
        dirs: &Dirs,
        template: &str,
    ) -> Result<String, InvalidTemplate> {
        let (function, arguments) = template.split_once(' ').unwrap_or((template, ""));
        match function {
            "pic" => Ok(config.pic_url(pics_fs, arguments)),
            "include_static" => std::fs::read_to_string(paths.static_dir.join(arguments))
                .map_err(|_| InvalidTemplate),
            "pic" => Ok(config.pic_url(&*dirs.pics, arguments)),
            "include_static" => VPath::try_new(arguments)
                .ok()
                .and_then(|vpath| dirs.static_.content(vpath))
                .and_then(|content| String::from_utf8(content).ok())
                .ok_or(InvalidTemplate),
            _ => Err(InvalidTemplate),
        }
    }
}

pub fn render(
    config: &Config,
    treehouse: &Treehouse,
    paths: &Paths<'_>,
    pics_fs: &dyn ReadFilesystem,
    input: &str,
) -> String {
pub fn render(config: &Config, treehouse: &Treehouse, dirs: &Dirs, input: &str) -> String {
    let mut renderer = Renderer {
        lexer: Lexer::new(input),
        output: String::new(),
    };
    renderer.render(config, treehouse, paths, pics_fs);
    renderer.render(config, treehouse, dirs);
    renderer.output
}

@@ -1,250 +1,82 @@
//! The treehouse virtual file system.
//!
//! Unlike traditional file systems, there is no separation between directories and files.
//! Instead, our file system is based on _entries_, which may have specific, optional, well-typed
//! metadata attached to them.
//! A directory is formed by returning a list of paths from [`dir`][Dir::dir], and a file is
//! formed by returning `Some` from [`content`][Dir::content].
//!
//! This makes using the file system simpler, as you do not have to differentiate between different
//! entry kinds. All paths act as if they _could_ return byte content, and all paths act as if they
//! _could_ have children.
//!
//! # Composability
//!
//! [`Dir`]s are composable. The [`Dir`] itself starts off with the root path ([`VPath::ROOT`]),
//! which may contain further [`dir`][Dir::dir] entries, or content by itself.
//! This makes it possible to nest a [`Dir`] under another [`Dir`].
//!
//! Additionally, there's also the inverse operation, [`Cd`] (named after the `cd`
//! _change directory_ shell command), which returns a [`Dir`] viewing a subpath within another
//! [`Dir`].
//!
//! # Building directories
//!
//! In-memory directories can be composed using the following primitives:
//!
//! - [`EmptyEntry`] - has no metadata whatsoever.
//! - [`BufferedFile`] - root path content is the provided byte vector.
//! - [`MemDir`] - a [`Dir`] containing a single level of other [`Dir`]s inside.
//!
//! Additionally, for interfacing with the OS file system, [`PhysicalDir`] is available,
//! representing a directory stored on the disk.
//!
//! # Virtual paths
//!
//! Entries within directories are referenced using [`VPath`]s (**v**irtual **path**s).
//! A virtual path is composed out of any amount of `/`-separated components.
//!
//! There are no special directories like `.` and `..` (those are just normal entries, though using
//! them is discouraged). [`VPath`]s are always relative to the root of the [`Dir`] you're querying.
//!
//! A leading or trailing slash is not allowed, because they would have no meaning.
//!
//! [`VPath`] also has an owned version, [`VPathBuf`].
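To make the docs concrete, here's a tiny composition in the spirit of the above (illustrative only; it assumes `BufferedFile` serves its bytes at the root path, as described under "Building directories"):

```rust
let mut root = MemDir::new();
root.add(
    VPath::new("hello.txt"),
    BufferedFile::new(b"hi there".to_vec()).to_dyn(),
);
let root = root.to_dyn();

// "hello.txt" is a file because its content() is Some...
assert_eq!(
    root.content(VPath::new("hello.txt")),
    Some(b"hi there".to_vec()),
);
// ...and the root is a directory because it has entries.
assert_eq!(root.dir(VPath::ROOT).len(), 1);
```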

use std::{
    borrow::Borrow,
    fmt::{self, Debug},
    ops::{ControlFlow, Deref},
    sync::Arc,
};

use anyhow::ensure;
use serde::{Deserialize, Serialize};

mod anchored;
pub mod asynch;
mod cd;
mod edit;
mod empty;
mod file;
mod mount_points;
mod mem_dir;
mod overlay;
mod path;
mod physical;

pub use anchored::*;
pub use cd::*;
pub use edit::*;
pub use empty::*;
pub use file::*;
pub use mount_points::*;
pub use mem_dir::*;
pub use overlay::*;
pub use path::*;
pub use physical::*;

#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPath {
    path: str,
}

impl VPath {
    pub const SEPARATOR: char = '/';
    pub const ROOT: &Self = unsafe { Self::new_unchecked("") };

    pub fn try_new(s: &str) -> anyhow::Result<&Self> {
        ensure!(
            !s.ends_with(Self::SEPARATOR),
            "path must not end with '{}' (got {s:?})",
            Self::SEPARATOR
        );
        ensure!(
            !s.starts_with(Self::SEPARATOR),
            "paths are always absolute and must not start with '{}' (got {s:?})",
            Self::SEPARATOR
        );

        Ok(unsafe { Self::new_unchecked(s) })
    }

    pub fn new(s: &str) -> &Self {
        Self::try_new(s).expect("invalid path")
    }

    const unsafe fn new_unchecked(s: &str) -> &Self {
        std::mem::transmute::<_, &Self>(s)
    }

    pub fn try_join(&self, sub: &str) -> anyhow::Result<VPathBuf> {
        let mut buf = VPathBuf::from(self);
        if !sub.is_empty() {
            let sub = VPath::try_new(sub)?;
            buf.path.push('/');
            buf.path.push_str(&sub.path);
        }
        Ok(buf)
    }

    pub fn join(&self, sub: &str) -> VPathBuf {
        self.try_join(sub).expect("invalid subpath")
    }

    pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
        if self == prefix {
            Some(VPath::ROOT)
        } else {
            self.path
                .strip_prefix(&prefix.path)
                .and_then(|p| p.strip_prefix('/'))
                // SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
                // nor a leading slash.
                .map(|p| unsafe { VPath::new_unchecked(p) })
        }
    }

    pub fn depth(&self) -> usize {
        self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
    }

    pub fn segments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn file_name(&self) -> Option<&str> {
        self.rsegments().next().map(Self::as_str)
    }

    pub fn extension(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        let (left, right) = file_name.rsplit_once('.')?;
        if left.is_empty() {
            None
        } else {
            Some(right)
        }
    }

    pub fn file_stem(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        if let Some(extension) = self.extension() {
            Some(&file_name[..file_name.len() - extension.len() - 1])
        } else {
            Some(file_name)
        }
    }

    pub fn as_str(&self) -> &str {
        &self.path
    }
}

impl ToOwned for VPath {
    type Owned = VPathBuf;

    fn to_owned(&self) -> Self::Owned {
        VPathBuf::from(self)
    }
}

impl fmt::Debug for VPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

impl fmt::Display for VPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
    path: String,
}

impl VPathBuf {
    pub fn new(path: impl Into<String>) -> Self {
        Self::try_new(path).expect("invalid path")
    }

    pub fn try_new(path: impl Into<String>) -> anyhow::Result<Self> {
        let path = path.into();
        match VPath::try_new(&path) {
            Ok(_) => Ok(Self { path }),
            Err(e) => Err(e),
        }
    }

    unsafe fn new_unchecked(path: String) -> Self {
        Self { path }
    }
}

impl Deref for VPathBuf {
    type Target = VPath;

    fn deref(&self) -> &Self::Target {
        unsafe { VPath::new_unchecked(&self.path) }
    }
}

impl fmt::Debug for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

impl fmt::Display for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

impl From<&VPath> for VPathBuf {
    fn from(value: &VPath) -> Self {
        unsafe { Self::new_unchecked(value.path.to_owned()) }
    }
}

impl Borrow<VPath> for VPathBuf {
    fn borrow(&self) -> &VPath {
        self
    }
}

impl<'de> Deserialize<'de> for VPathBuf {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de;

        struct Visitor;

        impl de::Visitor<'_> for Visitor {
            type Value = VPathBuf;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("virtual path")
            }

            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                VPathBuf::try_new(v).map_err(de::Error::custom)
            }
        }

        deserializer.deserialize_str(Visitor)
    }
}

impl Serialize for VPathBuf {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct DirEntry {
    pub path: VPathBuf,
}

pub trait ReadFilesystem: Debug {
    /// List all files under the provided path.
pub trait Dir: Debug {
    /// List all entries under the provided path.
    fn dir(&self, path: &VPath) -> Vec<DirEntry>;

    /// Return the byte content of the entry at the given path.

@@ -264,14 +96,96 @@ pub trait ReadFilesystem: Debug {
        None
    }

    /// Optimization for [`ReadFilesystemCombinators::cd`] that allows for avoiding wrapping
    /// `Cd`s in `Cd`s.
    #[doc(hidden)]
    fn cd_optimization(&self, _subpath: &VPath) -> Option<Cd<'_>> {
    /// If a file can be written persistently, returns an [`EditPath`] representing the file in
    /// persistent storage.
    ///
    /// An edit path can then be made into an [`Edit`].
    fn edit_path(&self, _path: &VPath) -> Option<EditPath> {
        None
    }
}

impl<T> Dir for &T
where
    T: Dir,
{
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        (**self).dir(path)
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        (**self).content(path)
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        (**self).content_version(path)
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        (**self).anchor(path)
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        (**self).edit_path(path)
    }
}

#[derive(Clone)]
pub struct DynDir {
    arc: Arc<dyn Dir + Send + Sync>,
}

impl Dir for DynDir {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.arc.dir(path)
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.arc.content(path)
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.arc.content_version(path)
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        self.arc.anchor(path)
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        self.arc.edit_path(path)
    }
}

impl fmt::Debug for DynDir {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&*self.arc, f)
    }
}

impl Deref for DynDir {
    type Target = dyn Dir + Send + Sync;

    fn deref(&self) -> &Self::Target {
        &*self.arc
    }
}

pub trait ToDynDir {
    fn to_dyn(self) -> DynDir;
}

impl<T> ToDynDir for T
where
    T: Dir + Send + Sync + 'static,
{
    fn to_dyn(self) -> DynDir {
        DynDir {
            arc: Arc::new(self),
        }
    }
}

pub trait AnchoredAtExt {
    fn anchored_at(self, at: VPathBuf) -> Anchored<Self>
    where

@@ -280,53 +194,28 @@ pub trait AnchoredAtExt {

impl<T> AnchoredAtExt for T
where
    T: ReadFilesystem,
    T: Dir,
{
    fn anchored_at(self, at: VPathBuf) -> Anchored<Self> {
        Anchored::new(self, at)
    }
}

pub trait CdExt {
    fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
    where
        Self: 'a;
}

impl CdExt for &dyn ReadFilesystem {
    fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
    where
        Self: 'a,
    {
        if let Some(cd) = self.cd_optimization(&into) {
            cd
        } else {
            Cd::new(self, into)
        }
    }
}

pub fn walk_rec(
    fs: &dyn ReadFilesystem,
    path: &VPath,
    f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>,
) {
    for entry in fs.dir(path) {
pub fn walk_dir_rec(dir: &dyn Dir, path: &VPath, f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>) {
    for entry in dir.dir(path) {
        match f(&entry.path) {
            ControlFlow::Continue(_) => (),
            ControlFlow::Break(_) => return,
        }
        walk_rec(fs, &entry.path, f);
        walk_dir_rec(dir, &entry.path, f);
    }
}

pub fn url(site: &str, fs: &dyn ReadFilesystem, path: &VPath) -> String {
    let Some(anchor) = fs.anchor(path) else {
        panic!("filesystem {fs:?} is not anchored anywhere and a URL of it cannot be produced")
    };
    if let Some(version) = fs.content_version(path) {
        format!("{}/{anchor}?v={version}", site)
pub fn url(site: &str, dir: &dyn Dir, path: &VPath) -> Option<String> {
    let anchor = dir.anchor(path)?;
    if let Some(version) = dir.content_version(path) {
        Some(format!("{}/{anchor}?v={version}", site))
    } else {
        format!("{}/{anchor}", site)
        Some(format!("{}/{anchor}", site))
    }
}
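The new `url` only succeeds for dirs that are anchored somewhere, and a dir that answers `None` from `content_version` (all of them, for now) gets a plain URL with no cache-buster. A sketch mirroring the mounts from `main.rs`:

```rust
let static_ = PhysicalDir::new(PathBuf::from("static"))
    .anchored_at(VPathBuf::new("static"))
    .to_dyn();

// Anchored at "static", so a URL can be produced:
let url = vfs::url("https://example.com", &static_, VPath::new("css/tree.css"));
assert_eq!(url.as_deref(), Some("https://example.com/static/css/tree.css"));

// An unanchored dir simply yields None instead of panicking like the old code:
assert_eq!(vfs::url("https://example.com", &MemDir::new(), VPath::ROOT), None);
```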

@@ -1,6 +1,6 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};
use super::{Dir, DirEntry, VPath, VPathBuf};

pub struct Anchored<T> {
    inner: T,

@@ -13,9 +13,9 @@ impl<T> Anchored<T> {
    }
}

impl<T> ReadFilesystem for Anchored<T>
impl<T> Dir for Anchored<T>
where
    T: ReadFilesystem,
    T: Dir,
{
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.inner.dir(path)

@@ -30,7 +30,7 @@ where
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        Some(self.at.join(path.as_str()))
        Some(self.at.join(path))
    }
}

crates/treehouse/src/vfs/asynch.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
use super::{Dir, DynDir, VPath};

#[derive(Debug, Clone)]
pub struct AsyncDir {
    inner: DynDir,
}

impl AsyncDir {
    pub fn new(inner: DynDir) -> Self {
        Self { inner }
    }

    pub async fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        let this = self.clone();
        let path = path.to_owned();
        // NOTE: Performance impact of spawning a blocking task may be a bit high in case
        // we add caching.
        // Measure throughput here.
        tokio::task::spawn_blocking(move || this.inner.content(&path))
            .await
            .unwrap()
    }
}
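A sketch of how this is meant to be consumed from the async server side (the handler shape is assumed; only `AsyncDir::content` is real):

```rust
// Somewhere in a tokio-powered request handler:
async fn serve_entry(site: &AsyncDir, path: &VPath) -> Option<Vec<u8>> {
    // Dir::content is blocking, but AsyncDir moves it onto the blocking
    // thread pool, so the async executor never stalls on file I/O.
    site.content(path).await
}
```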

@@ -1,22 +1,25 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};
use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};

pub struct Cd<'fs> {
    parent: &'fs dyn ReadFilesystem,
pub struct Cd<T> {
    parent: T,
    path: VPathBuf,
}

impl<'fs> Cd<'fs> {
    pub fn new(parent: &'fs dyn ReadFilesystem, path: VPathBuf) -> Self {
impl<T> Cd<T> {
    pub fn new(parent: T, path: VPathBuf) -> Self {
        Self { parent, path }
    }
}

impl ReadFilesystem for Cd<'_> {
impl<T> Dir for Cd<T>
where
    T: Dir,
{
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.parent
            .dir(&self.path.join(path.as_str()))
            .dir(&self.path.join(path))
            .into_iter()
            .map(|entry| DirEntry {
                path: entry

@@ -29,23 +32,26 @@ impl ReadFilesystem for Cd<'_> {
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.parent.content_version(&self.path.join(path.as_str()))
        self.parent.content_version(&self.path.join(path))
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.parent.content(&self.path.join(path.as_str()))
        self.parent.content(&self.path.join(path))
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        self.parent.anchor(&self.path.join(path.as_str()))
        self.parent.anchor(&self.path.join(path))
    }

    fn cd_optimization(&self, subpath: &VPath) -> Option<Cd<'_>> {
        Some(Cd::new(self, subpath.to_owned()))
    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        self.parent.edit_path(&self.path.join(path))
    }
}

impl fmt::Debug for Cd<'_> {
impl<T> fmt::Debug for Cd<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}/{:?}", self.parent, self.path)
    }

crates/treehouse/src/vfs/edit.rs (new file, 92 lines)
@@ -0,0 +1,92 @@
use std::{error::Error, fmt, future::Future, path::PathBuf};

use log::{error, info};
use tokio::task::JoinSet;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct EditPath {
    pub(super) path: PathBuf,
}

/// Represents a pending edit operation that can be written to persistent storage later.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Edit {
    /// An edit that doesn't do anything.
    NoOp,

    /// Write the given string to a file.
    Write(EditPath, String),

    /// Execute a sequence of edits in order.
    Seq(Vec<Edit>),
    /// Execute the provided edits in parallel.
    All(Vec<Edit>),

    /// Makes an edit dry.
    ///
    /// A dry edit only logs what operations would be performed, does not perform the I/O.
    Dry(Box<Edit>),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ApplyFailed;

impl Edit {
    #[expect(clippy::manual_async_fn)]
    pub fn apply(self) -> impl Future<Output = Result<(), ApplyFailed>> + Send {
        async {
            match self {
                Edit::NoOp => (),
                Edit::Write(edit_path, content) => {
                    tokio::fs::write(&edit_path.path, &content)
                        .await
                        .inspect_err(|err| error!("write to {edit_path:?} failed: {err:?}"))
                        .map_err(|_| ApplyFailed)?;
                }
                Edit::Seq(vec) => {
                    for edit in vec {
                        Box::pin(edit.apply()).await?;
                    }
                }
                Edit::All(vec) => {
                    let mut set = JoinSet::new();
                    for edit in vec {
                        set.spawn(edit.apply());
                    }
                    while let Some(result) = set.try_join_next() {
                        result.map_err(|_| ApplyFailed)??;
                    }
                }
                Edit::Dry(edit) => edit.dry(),
            }

            Ok(())
        }
    }

    pub fn dry(&self) {
        match self {
            Edit::NoOp => (),
            Edit::Write(edit_path, content) => {
                info!("{edit_path:?}: would write {:?} bytes", content.len());
            }
            Edit::Seq(edits) => edits.iter().for_each(Self::dry),
            Edit::All(edits) => edits.iter().for_each(Self::dry),
            Edit::Dry(edit) => edit.dry(),
        }
    }
}

impl fmt::Display for ApplyFailed {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("failed to apply some edits")
    }
}

impl Error for ApplyFailed {}

impl fmt::Debug for EditPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.path, f)
    }
}
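This is what lets `fix` and `fix-all` stay pure until the very end of `main`: they return a single `Edit`, and dry-run behavior is a `Dry` wrapper rather than an `if` at every write site. A sketch (paths and contents hypothetical; `EditPath`s come from `Dir::edit_path`):

```rust
let edit = Edit::Seq(vec![
    Edit::Write(backup_path, original_text), // hypothetical EditPath + String
    Edit::Write(fixed_path, fixed_text),
]);

// Apply for real...
edit.clone().apply().await?;
// ...or only log what would be written:
Edit::Dry(Box::new(edit)).apply().await?;
```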

@@ -1,9 +1,9 @@
use super::{DirEntry, ReadFilesystem, VPath};
use super::{Dir, DirEntry, VPath};

#[derive(Debug)]
pub struct EmptyFilesystem;
pub struct EmptyEntry;

impl ReadFilesystem for EmptyFilesystem {
impl Dir for EmptyEntry {
    fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
        vec![]
    }

@@ -1,6 +1,6 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath};
use super::{DirEntry, Dir, VPath};

pub struct BufferedFile {
    pub content: Vec<u8>,

@@ -12,7 +12,7 @@ impl BufferedFile {
    }
}

impl ReadFilesystem for BufferedFile {
impl Dir for BufferedFile {
    fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
        vec![]
    }

@@ -1,29 +1,29 @@
use std::{collections::HashMap, fmt};

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};
use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};

pub struct MountPoints {
    mount_points: HashMap<String, Box<dyn ReadFilesystem>>,
pub struct MemDir {
    mount_points: HashMap<String, DynDir>,
}

enum Resolved<'fs, 'path> {
    Root,
    MountPoint {
        fs: &'fs dyn ReadFilesystem,
        fs: &'fs dyn Dir,
        fs_path: &'path VPath,
        subpath: &'path VPath,
    },
    None,
}

impl MountPoints {
impl MemDir {
    pub fn new() -> Self {
        Self {
            mount_points: HashMap::new(),
        }
    }

    pub fn add(&mut self, path: &VPath, fs: Box<dyn ReadFilesystem>) {
    pub fn add(&mut self, path: &VPath, dir: DynDir) {
        assert_eq!(
            path.depth(), 0,
            "path must be situated at root. MountPoints does not support nested paths, but you can nest MountPoints within other MountPoints"

@@ -31,7 +31,7 @@ impl MountPoints {

        assert!(
            self.mount_points
                .insert(path.as_str().to_owned(), fs)
                .insert(path.as_str().to_owned(), dir)
                .is_none(),
            "duplicate mount point at {path:?}"
        );

@@ -57,13 +57,13 @@ impl MountPoints {
    }
}

impl Default for MountPoints {
impl Default for MemDir {
    fn default() -> Self {
        Self::new()
    }
}

impl ReadFilesystem for MountPoints {
impl Dir for MemDir {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        match self.resolve(path) {
            Resolved::Root => self

@@ -81,7 +81,7 @@ impl ReadFilesystem for MountPoints {
                .dir(subpath)
                .into_iter()
                .map(|entry| DirEntry {
                    path: fs_path.join(entry.path.as_str()),
                    path: fs_path.join(&entry.path),
                })
                .collect(),
            Resolved::None => vec![],

@@ -120,9 +120,20 @@ impl ReadFilesystem for MountPoints {
            Resolved::Root | Resolved::None => None,
        }
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        match self.resolve(path) {
            Resolved::MountPoint {
                fs,
                fs_path: _,
                subpath,
            } => fs.edit_path(subpath),
            Resolved::Root | Resolved::None => None,
        }
    }
}

impl fmt::Debug for MountPoints {
impl fmt::Debug for MemDir {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("MountPoints")
    }

crates/treehouse/src/vfs/overlay.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
use std::fmt;

use super::{Dir, DirEntry, DynDir, EditPath, VPath, VPathBuf};

pub struct Overlay {
    base: DynDir,
    overlay: DynDir,
}

impl Overlay {
    pub fn new(base: DynDir, overlay: DynDir) -> Self {
        Self { base, overlay }
    }
}

impl Dir for Overlay {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        let mut dir = self.base.dir(path);
        dir.append(&mut self.overlay.dir(path));
        dir.sort();
        dir.dedup();
        dir
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.overlay
            .content(path)
            .or_else(|| self.base.content(path))
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.overlay
            .content_version(path)
            .or_else(|| self.base.content_version(path))
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        self.overlay.anchor(path).or_else(|| self.base.anchor(path))
    }

    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
        self.overlay
            .edit_path(path)
            .or_else(|| self.base.edit_path(path))
    }
}

impl fmt::Debug for Overlay {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Overlay({:?}, {:?})", self.base, self.overlay)
    }
}
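The asymmetry is the point: reads prefer the overlay and fall back to the base, while listings are the union of both. In `generate::target` this is what lets the static `MemDir` shadow the rendered tree (names below are made up):

```rust
let site = Overlay::new(rendered_pages.to_dyn(), static_files.to_dyn()).to_dyn();
// content()/anchor()/edit_path() try `static_files` first, then `rendered_pages`;
// dir() merges both listings, sorted and deduplicated.
```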

crates/treehouse/src/vfs/path.rs (new file, 305 lines)
@@ -0,0 +1,305 @@
use std::{borrow::Borrow, error::Error, fmt, ops::Deref, str::FromStr};

use serde::{Deserialize, Serialize};

#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPath {
    path: str,
}

impl VPath {
    pub const SEPARATOR_BYTE: u8 = b'/';
    pub const SEPARATOR: char = Self::SEPARATOR_BYTE as char;
    pub const ROOT: &Self = unsafe { Self::new_unchecked("") };

    pub const fn try_new(s: &str) -> Result<&Self, InvalidPathError> {
        if s.is_empty() {
            return Ok(Self::ROOT);
        }

        let b = s.as_bytes();
        if b[b.len() - 1] == Self::SEPARATOR_BYTE {
            return Err(InvalidPathError::TrailingSlash);
        }
        if b[0] == Self::SEPARATOR_BYTE {
            return Err(InvalidPathError::LeadingSlash);
        }

        Ok(unsafe { Self::new_unchecked(s) })
    }

    pub fn new(s: &str) -> &Self {
        Self::try_new(s).expect("invalid path")
    }

    /// `const` version of [`new`][Self::new]. This has worse error messages, so prefer `new` whenever possible.
    pub const fn new_const(s: &str) -> &Self {
        match Self::try_new(s) {
            Ok(p) => p,
            Err(_) => panic!("invalid path"),
        }
    }

    const unsafe fn new_unchecked(s: &str) -> &Self {
        std::mem::transmute::<_, &Self>(s)
    }

    pub fn is_empty(&self) -> bool {
        self.path.is_empty()
    }

    pub fn is_root(&self) -> bool {
        self.is_empty()
    }

    pub fn join(&self, sub: &VPath) -> VPathBuf {
        let mut buf = self.to_owned();
        buf.push(sub);
        buf
    }

    pub fn parent(&self) -> Option<&VPath> {
        if self.is_root() {
            None
        } else if self.depth() == 0 {
            Some(VPath::ROOT)
        } else {
            let (left, _right) = self
                .path
                .split_once(Self::SEPARATOR)
                .expect("path with depth > 0 must have separators");
            // SAFETY: We're splitting on a `/`, so there cannot be a trailing `/` in `left`.
            Some(unsafe { VPath::new_unchecked(left) })
        }
    }

    pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
        if self == prefix {
            Some(VPath::ROOT)
        } else {
            self.path
                .strip_prefix(&prefix.path)
                .and_then(|p| p.strip_prefix(Self::SEPARATOR))
                // SAFETY: If `self` starts with `prefix`, `p` will end up not being prefixed by `self`
                // nor a leading slash.
                .map(|p| unsafe { VPath::new_unchecked(p) })
        }
    }

    pub fn depth(&self) -> usize {
        self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
    }

    pub fn segments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn file_name(&self) -> Option<&str> {
        self.rsegments().next().map(Self::as_str)
    }

    pub fn extension(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        let (left, right) = file_name.rsplit_once('.')?;
        if left.is_empty() {
            None
        } else {
            Some(right)
        }
    }

    pub fn with_extension(&self, extension: &str) -> VPathBuf {
        let mut buf = self.to_owned();
        buf.set_extension(extension);
        buf
    }

    pub fn file_stem(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        if let Some(extension) = self.extension() {
            Some(&file_name[..file_name.len() - extension.len() - 1])
        } else {
            Some(file_name)
        }
    }

    pub fn as_str(&self) -> &str {
        &self.path
    }
}
|
||||
|
||||
impl ToOwned for VPath {
|
||||
type Owned = VPathBuf;
|
||||
|
||||
fn to_owned(&self) -> Self::Owned {
|
||||
VPathBuf::from(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for VPath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.path)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for VPath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.path)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum InvalidPathError {
|
||||
TrailingSlash,
|
||||
LeadingSlash,
|
||||
}
|
||||
|
||||
impl fmt::Display for InvalidPathError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
InvalidPathError::TrailingSlash => {
|
||||
f.write_str("paths must not end with a trailing `/`")
|
||||
}
|
||||
InvalidPathError::LeadingSlash => {
|
||||
f.write_str("paths are always absolute and must not start with `/`")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for InvalidPathError {}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct VPathBuf {
|
||||
path: String,
|
||||
}
|
||||
|
||||
impl VPathBuf {
|
||||
pub fn new(path: impl Into<String>) -> Self {
|
||||
Self::try_new(path).expect("invalid path")
|
||||
}
|
||||
|
||||
pub fn try_new(path: impl Into<String>) -> Result<Self, InvalidPathError> {
|
||||
let path = path.into();
|
||||
match VPath::try_new(&path) {
|
||||
Ok(_) => Ok(Self { path }),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn new_unchecked(path: String) -> Self {
|
||||
Self { path }
|
||||
}
|
||||
|
||||
pub fn push(&mut self, sub: &VPath) {
|
||||
if !sub.is_empty() {
|
||||
self.path.push('/');
|
||||
self.path.push_str(&sub.path);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_extension(&mut self, new_extension: &str) {
|
||||
if let Some(existing) = self.extension() {
|
||||
let mut chop_len = existing.len();
|
||||
if new_extension.is_empty() {
|
||||
chop_len += 1; // also chop off the `.`
|
||||
}
|
||||
|
||||
let range = self.path.len() - chop_len..;
|
||||
self.path.replace_range(range, new_extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for VPathBuf {
|
||||
fn default() -> Self {
|
||||
VPath::ROOT.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for VPathBuf {
|
||||
type Target = VPath;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
unsafe { VPath::new_unchecked(&self.path) }
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for VPathBuf {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.path)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for VPathBuf {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.path)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&VPath> for VPathBuf {
|
||||
fn from(value: &VPath) -> Self {
|
||||
unsafe { Self::new_unchecked(value.path.to_owned()) }
|
||||
}
|
||||
}
|
||||
|
||||
impl Borrow<VPath> for VPathBuf {
|
||||
fn borrow(&self) -> &VPath {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for VPathBuf {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
use serde::de;
|
||||
|
||||
struct Visitor;
|
||||
|
||||
impl de::Visitor<'_> for Visitor {
|
||||
type Value = VPathBuf;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("virtual path")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
VPathBuf::try_new(v).map_err(de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_str(Visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for VPathBuf {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for VPathBuf {
|
||||
type Err = InvalidPathError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Self::try_new(s)
|
||||
}
|
||||
}
|
|
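Since `path.rs` carries the invariants everything else relies on (root-relative, no leading or trailing `/`), a few worked examples of the API above may help. This is a sketch for illustration, not part of the diff; it assumes `VPath`/`VPathBuf` are re-exported from `treehouse::vfs`, as the tests below suggest, and the path literals are made up.

```rust
use treehouse::vfs::{VPath, VPathBuf};

fn vpath_examples() {
    let path = VPath::new("blog/posts/vfs.md");

    assert_eq!(path.depth(), 2); // depth counts separators
    assert_eq!(path.file_name(), Some("vfs.md"));
    assert_eq!(path.file_stem(), Some("vfs"));
    assert_eq!(path.extension(), Some("md"));
    assert_eq!(path.parent(), Some(VPath::new("blog/posts")));
    assert_eq!(
        path.strip_prefix(VPath::new("blog")),
        Some(VPath::new("posts/vfs.md"))
    );

    // Joining from the root must not introduce a leading slash.
    let joined: VPathBuf = VPath::ROOT.join(path);
    assert_eq!(joined.as_str(), "blog/posts/vfs.md");

    // Virtual paths are always root-relative and never end in `/`.
    assert!(VPathBuf::try_new("/etc/passwd").is_err());
    assert!(VPathBuf::try_new("dir/").is_err());
}
```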
@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};

use log::error;

-use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};
+use super::{Dir, DirEntry, EditPath, VPath, VPathBuf};

#[derive(Debug, Clone)]
pub struct PhysicalDir {

@@ -15,7 +15,7 @@ impl PhysicalDir {
    }
}

-impl ReadFilesystem for PhysicalDir {
+impl Dir for PhysicalDir {
    fn dir(&self, vpath: &VPath) -> Vec<DirEntry> {
        let physical = self.root.join(physical_path(vpath));
        if !physical.is_dir() {

@@ -68,6 +68,12 @@ impl ReadFilesystem for PhysicalDir {
            .inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",))
            .ok()
    }
+
+    fn edit_path(&self, path: &VPath) -> Option<EditPath> {
+        Some(EditPath {
+            path: self.root.join(physical_path(path)),
+        })
+    }
}

fn physical_path(path: &VPath) -> &Path {
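On a `PhysicalDir`, `edit_path` always succeeds: it resolves the virtual path to the on-disk location that a write-back (such as applying fixes) should target. A small sketch of obtaining the handle, not part of the diff; the vpath literal is made up, and how an `EditPath` is consumed is defined elsewhere in this commit.

```rust
use treehouse::vfs::{Dir, PhysicalDir, VPath};

fn edit_target(root: &PhysicalDir) {
    // Hypothetical vpath, for illustration only.
    let edit = root
        .edit_path(VPath::new("content/index.tree"))
        .expect("a PhysicalDir can edit any path");
    // `edit` now carries the physical `<root>/content/index.tree` location.
    let _ = edit;
}
```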
@@ -1,33 +1,31 @@
-use treehouse::vfs::{
-    BufferedFile, Cd, CdExt, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf,
-};
+use treehouse::vfs::{BufferedFile, Cd, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};

const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<";

-fn vfs() -> MountPoints {
+fn vfs() -> MemDir {
    let file1 = BufferedFile::new(HEWWO.to_vec());
    let file2 = BufferedFile::new(FWOOFEE.to_vec());
    let file3 = BufferedFile::new(BOOP.to_vec());

-    let mut innermost = MountPoints::new();
-    innermost.add(VPath::new("file3.txt"), Box::new(file3));
+    let mut innermost = MemDir::new();
+    innermost.add(VPath::new("file3.txt"), file3.to_dyn());

-    let mut inner = MountPoints::new();
-    inner.add(VPath::new("file1.txt"), Box::new(file1));
-    inner.add(VPath::new("file2.txt"), Box::new(file2));
-    inner.add(VPath::new("innermost"), Box::new(innermost));
+    let mut inner = MemDir::new();
+    inner.add(VPath::new("file1.txt"), file1.to_dyn());
+    inner.add(VPath::new("file2.txt"), file2.to_dyn());
+    inner.add(VPath::new("innermost"), innermost.to_dyn());

-    let mut vfs = MountPoints::new();
-    vfs.add(VPath::new("inner"), Box::new(inner));
+    let mut vfs = MemDir::new();
+    vfs.add(VPath::new("inner"), inner.to_dyn());
    vfs
}

#[test]
fn dir1() {
    let outer = vfs();
-    let inner = Cd::new(&outer, VPathBuf::new("inner"));
+    let inner = Cd::new(outer, VPathBuf::new("inner"));

    let mut dir = inner.dir(VPath::ROOT);
    dir.sort();

@@ -49,23 +47,6 @@ fn dir1() {

#[test]
-fn dir2() {
-    let outer = vfs();
-    let outer: &dyn ReadFilesystem = &outer;
-    let inner: &dyn ReadFilesystem = &outer.cd(VPathBuf::new("inner"));
-    let innermost = inner.cd(VPathBuf::new("innermost"));
-
-    let mut dir = innermost.dir(VPath::ROOT);
-    dir.sort();
-    assert_eq!(
-        dir,
-        vec![DirEntry {
-            path: VPathBuf::new("file3.txt"),
-        },]
-    );
-}
-
-#[test]
fn dir3() {
    let outer = vfs();
    let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost"));
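The deleted `dir2` exercised the old `CdExt::cd` extension method on `&dyn ReadFilesystem` values, which this commit removes; the same traversal survives either by nesting `Cd` wrappers or by passing a multi-segment path, as `dir3` does. A sketch of both forms, not part of the diff; it assumes `Cd::new` accepts any `Dir` (owned or borrowed), which the uses in `dir1` and `dir3` suggest, and it reuses the `vfs()` fixture from this test file.

```rust
use treehouse::vfs::{Cd, Dir, VPath, VPathBuf};

#[test]
fn nested_cd() {
    let outer = vfs(); // the MemDir fixture defined in this test file

    // Nesting Cd wrappers replaces the removed `.cd(...)` chaining...
    let innermost = Cd::new(
        Cd::new(&outer, VPathBuf::new("inner")),
        VPathBuf::new("innermost"),
    );
    assert_eq!(innermost.dir(VPath::ROOT).len(), 1);

    // ...and a multi-segment path gets there in one step, like `dir3`.
    let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost"));
    assert_eq!(innermost.dir(VPath::ROOT).len(), 1);
}
```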
@@ -1,16 +1,16 @@
-use treehouse::vfs::{EmptyFilesystem, ReadFilesystem, VPath};
+use treehouse::vfs::{Dir, EmptyEntry, VPath};

#[test]
fn dir() {
-    assert!(EmptyFilesystem.dir(VPath::ROOT).is_empty());
+    assert!(EmptyEntry.dir(VPath::ROOT).is_empty());
}

#[test]
fn content_version() {
-    assert!(EmptyFilesystem.content_version(VPath::ROOT).is_none());
+    assert!(EmptyEntry.content_version(VPath::ROOT).is_none());
}

#[test]
fn content() {
-    assert!(EmptyFilesystem.content(VPath::ROOT).is_none());
+    assert!(EmptyEntry.content(VPath::ROOT).is_none());
}
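Taken together, the renames in these test files show the old `ReadFilesystem` trait becoming `Dir`, exercised through `dir`, `content`, `content_version`, `anchor`, and `edit_path`. The following is a hypothetical reconstruction of that surface, inferred only from call sites visible in this diff: the return types of `content` and `content_version` are guesses, the `fmt::Debug` bound is inferred from `Overlay`'s Debug output, and the real trait (with whatever default methods `EmptyEntry` relies on) is defined elsewhere in this commit.

```rust
use std::fmt;

use treehouse::vfs::{DirEntry, EditPath, VPath, VPathBuf};

// Inferred sketch of the trait surface; not the actual definition.
pub trait Dir: fmt::Debug {
    fn dir(&self, path: &VPath) -> Vec<DirEntry>;
    fn content(&self, path: &VPath) -> Option<Vec<u8>>;
    fn content_version(&self, path: &VPath) -> Option<String>;
    fn anchor(&self, path: &VPath) -> Option<VPathBuf>;
    fn edit_path(&self, path: &VPath) -> Option<EditPath>;
}
```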
@@ -1,4 +1,4 @@
-use treehouse::vfs::{BufferedFile, ReadFilesystem, VPath};
+use treehouse::vfs::{BufferedFile, Dir, VPath};

fn vfs() -> BufferedFile {
    BufferedFile::new(b"hewwo :3".to_vec())
@@ -1,21 +1,23 @@
-use treehouse::vfs::{BufferedFile, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf};
+use std::sync::Arc;
+
+use treehouse::vfs::{BufferedFile, Dir, DirEntry, MemDir, ToDynDir, VPath, VPathBuf};

const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<";

-fn vfs() -> MountPoints {
+fn vfs() -> MemDir {
    let file1 = BufferedFile::new(HEWWO.to_vec());
    let file2 = BufferedFile::new(FWOOFEE.to_vec());
    let file3 = BufferedFile::new(BOOP.to_vec());

-    let mut inner = MountPoints::new();
-    inner.add(VPath::new("file3.txt"), Box::new(file3));
+    let mut inner = MemDir::new();
+    inner.add(VPath::new("file3.txt"), file3.to_dyn());

-    let mut vfs = MountPoints::new();
-    vfs.add(VPath::new("file1.txt"), Box::new(file1));
-    vfs.add(VPath::new("file2.txt"), Box::new(file2));
-    vfs.add(VPath::new("inner"), Box::new(inner));
+    let mut vfs = MemDir::new();
+    vfs.add(VPath::new("file1.txt"), file1.to_dyn());
+    vfs.add(VPath::new("file2.txt"), file2.to_dyn());
+    vfs.add(VPath::new("inner"), inner.to_dyn());
    vfs
}
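These tests build trees out of `to_dyn()` values, and the newly added `use std::sync::Arc` import suggests what that conversion does: wrap a concrete `Dir` in a shareable trait object. The following is a hypothetical reconstruction of `ToDynDir`, inferred purely from usage here; the real definition lives elsewhere in this commit and may differ in the alias name and bounds.

```rust
use std::sync::Arc;

use treehouse::vfs::Dir;

// Assumed alias: `MemDir::add` appears to store cheaply cloneable trait objects.
pub type DynDir = Arc<dyn Dir + Send + Sync>;

pub trait ToDynDir {
    fn to_dyn(self) -> DynDir;
}

impl<T> ToDynDir for T
where
    T: Dir + Send + Sync + 'static,
{
    fn to_dyn(self) -> DynDir {
        Arc::new(self)
    }
}
```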
@@ -1,6 +1,6 @@
use std::path::Path;

-use treehouse::vfs::{DirEntry, PhysicalDir, ReadFilesystem, VPath, VPathBuf};
+use treehouse::vfs::{Dir, DirEntry, PhysicalDir, VPath, VPathBuf};

fn vfs() -> PhysicalDir {
    let root = Path::new("tests/it/vfs_physical").to_path_buf();
@@ -57,7 +57,7 @@ description = "a place on the Internet I like to call home"

[build.javascript]
import_roots = [
-    { name = "treehouse", path = "static/js" },
-    { name = "tairu", path = "static/js/components/tairu" },
-    { name = "haku", path = "static/js/components/haku" },
+    { name = "treehouse", path = "" },
+    { name = "tairu", path = "components/tairu" },
+    { name = "haku", path = "components/haku" },
]