refactors: replacing config derived data with vfs, removing markdown
parent 1e1b8df457
commit db0329077e

Cargo.lock (generated) | 23
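This commit replaces the old `ConfigDerivedData`/`Path`/`WalkDir` helpers with lookups that go through a virtual filesystem. As orientation for the hunks below, here is a minimal sketch of the `ReadFilesystem` shape the new code appears to program against, inferred from the call sites in this diff (`content`, `cd` via `CdExt`, `vfs::walk_rec`, `vfs::url`); the names, signatures, and types are assumptions for illustration, not the crate's actual definitions.

use std::ops::ControlFlow;

// Hypothetical, simplified shapes inferred from the call sites in this diff;
// the real VPath/VPathBuf and ReadFilesystem in crates/treehouse are richer.
pub struct VPath(str);

pub trait ReadFilesystem {
    /// Returns the bytes of the file at `path`, or None if it does not exist.
    fn content(&self, path: &VPath) -> Option<Vec<u8>>;
}

/// Calls `f` for every file reachable from `root`; `f` may stop the walk early
/// by returning ControlFlow::Break(()).
pub fn walk_rec(
    fs: &dyn ReadFilesystem,
    root: &VPath,
    f: &mut dyn FnMut(&VPath) -> ControlFlow<()>,
) {
    // directory recursion elided in this sketch
    let _ = (fs, root, f);
}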
@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4

[[package]]
name = "addr2line"
@@ -1129,17 +1129,6 @@ dependencies = [
"unicode-ident",
]

[[package]]
name = "pulldown-cmark"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
dependencies = [
"bitflags 2.5.0",
"memchr",
"unicase",
]

[[package]]
name = "qoi"
version = "0.4.1"
@@ -1616,7 +1605,6 @@ dependencies = [
"indexmap",
"jotdown",
"log",
"pulldown-cmark",
"rand",
"regex",
"serde",
@@ -1660,15 +1648,6 @@ dependencies = [
"web-time",
]

[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]

[[package]]
name = "unicode-bidi"
version = "0.3.15"

@@ -46,7 +46,7 @@ enum AllowCodeBlocks {
    Yes,
}

impl<'a> Parser<'a> {
impl Parser<'_> {
    fn current(&self) -> Option<char> {
        self.input[self.position..].chars().next()
    }

@@ -33,6 +33,3 @@ tower-livereload = "0.9.2"
walkdir = "2.3.3"
ulid = "1.0.0"
url = "2.5.0"

# TODO djot: To remove once migration to Djot is complete.
pulldown-cmark = { version = "0.9.3", default-features = false }

@@ -14,9 +14,6 @@ pub struct ProgramArgs {

#[derive(Subcommand)]
pub enum Command {
    /// Regenerate the website.
    Generate(#[clap(flatten)] GenerateArgs),

    /// Populate missing metadata in blocks.
    Fix(#[clap(flatten)] FixArgs),

@@ -1,6 +1,7 @@
#[cfg(debug_assertions)]
mod live_reload;

use std::fmt::Write;
use std::{net::Ipv4Addr, path::PathBuf, sync::Arc};

use anyhow::Context;

@@ -15,12 +16,12 @@ use axum::{
    Router,
};
use log::{error, info};
use pulldown_cmark::escape::escape_html;
use serde::Deserialize;
use tokio::net::TcpListener;

use crate::{
    config::Config,
    html::EscapeHtml,
    state::{Source, Treehouse},
};

@@ -202,7 +203,8 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
    let branch_markdown_content = input[branch.content.clone()].trim();
    let mut per_page_metadata =
        String::from("<meta property=\"og:description\" content=\"");
    escape_html(&mut per_page_metadata, branch_markdown_content).unwrap();
    write!(per_page_metadata, "{}", EscapeHtml(branch_markdown_content))
        .unwrap();
    per_page_metadata.push_str("\">");

    const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";

@@ -1,8 +1,9 @@
use std::{collections::HashMap, ffi::OsStr, fs::File, io::BufReader, path::Path};
use std::{
    collections::HashMap, ffi::OsStr, fs::File, io::BufReader, ops::ControlFlow, path::Path,
};

use anyhow::Context;
use image::ImageError;
use log::{debug, warn};
use log::debug;
use serde::{Deserialize, Serialize};
use walkdir::WalkDir;

@@ -12,7 +13,7 @@ use crate::{
        Syntax,
    },
    import_map::ImportRoot,
    static_urls::StaticUrls,
    vfs::{self, ReadFilesystem, VPath, VPathBuf},
};

#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -22,10 +23,6 @@ pub struct Config {
    /// preferred way of setting this in production, so as not to clobber treehouse.toml.)
    pub site: String,

    /// Which markup to use when generating trees.
    /// TODO djot: Remove this once we transition to Djot fully.
    pub markup: Markup,

    /// This is used to generate a link in the footer that links to the page's source commit.
    /// The final URL is `{commit_base_url}/{commit}/content/{tree_path}.tree`.
    pub commit_base_url: String,
@@ -59,17 +56,17 @@ pub struct Config {
    /// How the treehouse should be built.
    pub build: Build,

    /// Overrides for emoji filenames. Useful for setting up aliases.
    /// Overrides for emoji names. Useful for setting up aliases.
    ///
    /// On top of this, emojis are autodiscovered by walking the `static/emoji` directory.
    /// Paths are anchored within `static/emoji` and must not contain parent directories.
    #[serde(default)]
    pub emoji: HashMap<String, String>,
    pub emoji: HashMap<String, VPathBuf>,

    /// Overrides for pic filenames. Useful for setting up aliases.
    ///
    /// On top of this, pics are autodiscovered by walking the `static/pic` directory.
    /// Only the part before the first dash is treated as the pic's id.
    pub pics: HashMap<String, String>,
    pub pics: HashMap<String, VPathBuf>,

    /// Syntax definitions.
    ///
@@ -105,72 +102,39 @@ pub enum Markup {
}

impl Config {
    pub fn load(path: &Path) -> anyhow::Result<Self> {
        let string = std::fs::read_to_string(path).context("cannot read config file")?;
        toml_edit::de::from_str(&string).context("error in config file")
    pub fn autopopulate_emoji(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> {
        vfs::walk_rec(dir, VPath::ROOT, &mut |path| {
            if path.extension().is_some_and(is_emoji_file) {
                if let Some(emoji_name) = path.file_stem() {
                    if !self.emoji.contains_key(emoji_name) {
                        self.emoji.insert(emoji_name.to_owned(), path.to_owned());
                    }
                }
            }

    fn is_emoji_file(path: &Path) -> bool {
        path.extension() == Some(OsStr::new("png")) || path.extension() == Some(OsStr::new("svg"))
    }
            ControlFlow::Continue(())
        });

    pub fn autopopulate_emoji(&mut self, dir: &Path) -> anyhow::Result<()> {
        for file in WalkDir::new(dir) {
            let entry = file?;
            if entry.file_type().is_file() && Self::is_emoji_file(entry.path()) {
                if let Some(emoji_name) = entry.path().file_stem() {
                    let emoji_name = emoji_name.to_string_lossy();
                    if !self.emoji.contains_key(emoji_name.as_ref()) {
                        self.emoji.insert(
                            emoji_name.into_owned(),
                            entry
                                .path()
                                .strip_prefix(dir)
                                .unwrap_or(entry.path())
                                .to_string_lossy()
                                .into_owned(),
                        );
                    }
                }
            }
        }
        Ok(())
    }

    fn is_pic_file(path: &Path) -> bool {
        path.extension() == Some(OsStr::new("png"))
            || path.extension() == Some(OsStr::new("svg"))
            || path.extension() == Some(OsStr::new("jpg"))
            || path.extension() == Some(OsStr::new("jpeg"))
            || path.extension() == Some(OsStr::new("webp"))
    }

    pub fn autopopulate_pics(&mut self, dir: &Path) -> anyhow::Result<()> {
        for file in WalkDir::new(dir) {
            let entry = file?;
            if entry.file_type().is_file() && Self::is_pic_file(entry.path()) {
                if let Some(pic_name) = entry.path().file_stem() {
                    let pic_name = pic_name.to_string_lossy();

    pub fn autopopulate_pics(&mut self, dir: &dyn ReadFilesystem) -> anyhow::Result<()> {
        vfs::walk_rec(dir, VPath::ROOT, &mut |path| {
            if path.extension().is_some_and(is_pic_file) {
                if let Some(pic_name) = path.file_stem() {
                    let pic_id = pic_name
                        .split_once('-')
                        .map(|(before_dash, _after_dash)| before_dash)
                        .unwrap_or(&pic_name);
                        .unwrap_or(pic_name);

                    if !self.pics.contains_key(pic_id) {
                        self.pics.insert(
                            pic_id.to_owned(),
                            entry
                                .path()
                                .strip_prefix(dir)
                                .unwrap_or(entry.path())
                                .to_string_lossy()
                                .into_owned(),
                        );
                    }
                    self.pics.insert(pic_id.to_owned(), path.to_owned());
                }
            }
        }

            ControlFlow::Continue(())
        });
        Ok(())
    }

@@ -178,11 +142,14 @@ impl Config {
        format!("{}/{}", self.site, page)
    }

    pub fn pic_url(&self, id: &str) -> String {
        format!(
            "{}/static/pic/{}",
            self.site,
            self.pics.get(id).map(|x| &**x).unwrap_or("404.png")
    pub fn pic_url(&self, pics_fs: &dyn ReadFilesystem, id: &str) -> String {
        vfs::url(
            &self.site,
            pics_fs,
            self.pics
                .get(id)
                .map(|x| &**x)
                .unwrap_or(VPath::new("404.png")),
        )
    }

@@ -211,46 +178,10 @@ impl Config {
    }
}

/// Data derived from the config.
pub struct ConfigDerivedData {
    pub image_sizes: HashMap<String, Option<ImageSize>>,
    pub static_urls: StaticUrls,
fn is_emoji_file(extension: &str) -> bool {
    matches!(extension, "png" | "svg")
}

/// Image size. This is useful for emitting <img> elements with a specific size to eliminate
/// layout shifting.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ImageSize {
    pub width: u32,
    pub height: u32,
}

impl ConfigDerivedData {
    fn read_image_size(filename: &str) -> Option<ImageSize> {
        let (width, height) = image::io::Reader::new(BufReader::new(File::open(filename).ok()?))
            .with_guessed_format()
            .map_err(ImageError::from)
            .and_then(|i| i.into_dimensions())
            // NOTE: Not being able to determine the image size is not the end of the world,
            // so just warn the user if we couldn't do it.
            // For example, currently SVG is not supported at all, which causes this to fail.
            .inspect_err(|e| warn!("cannot read image size of {filename}: {e}"))
            .ok()?;
        Some(ImageSize { width, height })
    }

    pub fn image_size(&mut self, filename: &str) -> Option<ImageSize> {
        if !self.image_sizes.contains_key(filename) {
            self.image_sizes
                .insert(filename.to_owned(), Self::read_image_size(filename));
        }
        self.image_sizes.get(filename).copied().flatten()
    }

    pub fn pic_size(&mut self, config: &Config, pic_id: &str) -> Option<ImageSize> {
        config
            .pics
            .get(pic_id)
            .and_then(|pic_filename| self.image_size(&format!("static/pic/{pic_filename}")))
    }
fn is_pic_file(extension: &str) -> bool {
    matches!(extension, "png" | "svg" | "jpg" | "jpeg" | "webp")
}

@ -5,7 +5,7 @@ use std::{
|
|||
time::Instant,
|
||||
};
|
||||
|
||||
use anyhow::{bail, Context};
|
||||
use anyhow::{anyhow, bail, Context};
|
||||
use codespan_reporting::{
|
||||
diagnostic::{Diagnostic, Label, LabelStyle, Severity},
|
||||
files::Files as _,
|
||||
|
@ -18,7 +18,7 @@ use walkdir::WalkDir;
|
|||
|
||||
use crate::{
|
||||
cli::Paths,
|
||||
config::{Config, ConfigDerivedData},
|
||||
config::Config,
|
||||
fun::seasons::Season,
|
||||
history::History,
|
||||
html::{
|
||||
|
@ -32,6 +32,7 @@ use crate::{
|
|||
state::{has_errors, report_diagnostics, RevisionInfo, Source},
|
||||
static_urls::StaticUrls,
|
||||
tree::SemaRoots,
|
||||
vfs::{CdExt, ReadFilesystem, VPath, VPathBuf},
|
||||
};
|
||||
|
||||
use crate::state::{FileId, Treehouse};
|
||||
|
@ -60,11 +61,6 @@ struct ParsedTree {
|
|||
target_path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Feed {
|
||||
branches: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Page {
|
||||
pub title: String,
|
||||
|
@ -105,24 +101,24 @@ pub struct Thumbnail {
|
|||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct StaticTemplateData<'a> {
|
||||
struct BaseTemplateData<'a> {
|
||||
config: &'a Config,
|
||||
import_map: String,
|
||||
season: Option<Season>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct PageTemplateData<'a> {
|
||||
config: &'a Config,
|
||||
#[serde(flatten)]
|
||||
base: &'a BaseTemplateData<'a>,
|
||||
page: Page,
|
||||
feeds: &'a HashMap<String, Feed>,
|
||||
season: Option<Season>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct HistoryTemplateData<'a> {
|
||||
config: &'a Config,
|
||||
#[serde(flatten)]
|
||||
base: &'a BaseTemplateData<'a>,
|
||||
page: HistoryPage,
|
||||
season: Option<Season>,
|
||||
}
|
||||
|
||||
impl Generator {
|
||||
|
@ -401,21 +397,13 @@ impl Generator {
|
|||
treehouse: &mut Treehouse,
|
||||
config: &Config,
|
||||
paths: &Paths<'_>,
|
||||
root_fs: &dyn ReadFilesystem,
|
||||
navigation_map: &NavigationMap,
|
||||
parsed_trees: Vec<ParsedTree>,
|
||||
) -> anyhow::Result<Vec<Diagnostic<FileId>>> {
|
||||
let mut global_diagnostics = vec![];
|
||||
|
||||
let mut config_derived_data = ConfigDerivedData {
|
||||
image_sizes: Default::default(),
|
||||
static_urls: StaticUrls::new(
|
||||
// NOTE: Allow referring to generated static assets here.
|
||||
paths.target_dir.join("static"),
|
||||
format!("{}/static", config.site),
|
||||
),
|
||||
};
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
let mut handlebars: Handlebars<'static> = Handlebars::new();
|
||||
Self::init_handlebars(&mut handlebars, paths, config);
|
||||
|
||||
let mut template_file_ids = HashMap::new();
|
||||
|
@ -439,17 +427,21 @@ impl Generator {
|
|||
}
|
||||
}
|
||||
|
||||
let import_map =
|
||||
ImportMap::generate(config.site.clone(), &config.build.javascript.import_roots);
|
||||
|
||||
let base_template_data = BaseTemplateData {
|
||||
config,
|
||||
import_map: serde_json::to_string_pretty(&import_map)
|
||||
.expect("import map should be serializable to JSON"),
|
||||
season: Season::current(),
|
||||
};
|
||||
|
||||
std::fs::create_dir_all(paths.template_target_dir)?;
|
||||
for (name, &file_id) in &template_file_ids {
|
||||
let filename = name.rsplit_once('/').unwrap_or(("", name)).1;
|
||||
if !filename.starts_with('_') {
|
||||
let templated_html = match handlebars.render(
|
||||
name,
|
||||
&StaticTemplateData {
|
||||
config,
|
||||
season: Season::current(),
|
||||
},
|
||||
) {
|
||||
let templated_html = match handlebars.render(name, &base_template_data) {
|
||||
Ok(html) => html,
|
||||
Err(error) => {
|
||||
Self::wrangle_handlebars_error_into_diagnostic(
|
||||
|
@ -470,23 +462,6 @@ impl Generator {
|
|||
}
|
||||
}
|
||||
|
||||
let mut feeds = HashMap::new();
|
||||
|
||||
for parsed_tree in &parsed_trees {
|
||||
let roots = &treehouse.roots[&parsed_tree.root_key];
|
||||
|
||||
if let Some(feed_name) = &roots.attributes.feed {
|
||||
let mut feed = Feed {
|
||||
branches: Vec::new(),
|
||||
};
|
||||
for &root in &roots.branches {
|
||||
let branch = treehouse.tree.branch(root);
|
||||
feed.branches.push(branch.attributes.id.clone());
|
||||
}
|
||||
feeds.insert(feed_name.to_owned(), feed);
|
||||
}
|
||||
}
|
||||
|
||||
for parsed_tree in parsed_trees {
|
||||
debug!("generating: {:?}", parsed_tree.target_path);
|
||||
|
||||
|
@ -502,7 +477,7 @@ impl Generator {
|
|||
&mut tree,
|
||||
treehouse,
|
||||
config,
|
||||
&mut config_derived_data,
|
||||
root_fs,
|
||||
paths,
|
||||
parsed_tree.file_id,
|
||||
&roots.branches,
|
||||
|
@ -512,7 +487,7 @@ impl Generator {
|
|||
.revision_info(parsed_tree.file_id)
|
||||
.expect(".tree files should have Tree sources");
|
||||
let template_data = PageTemplateData {
|
||||
config,
|
||||
base: &base_template_data,
|
||||
page: Page {
|
||||
title: roots.attributes.title.clone(),
|
||||
thumbnail: roots
|
||||
|
@ -520,7 +495,8 @@ impl Generator {
|
|||
.thumbnail
|
||||
.as_ref()
|
||||
.map(|thumbnail| Thumbnail {
|
||||
url: config.pic_url(&thumbnail.id),
|
||||
url: config
|
||||
.pic_url(&root_fs.cd(VPathBuf::new("static/pics")), &thumbnail.id),
|
||||
alt: thumbnail.alt.clone(),
|
||||
}),
|
||||
scripts: roots.attributes.scripts.clone(),
|
||||
|
@ -539,8 +515,6 @@ impl Generator {
|
|||
history_url: format!("{}/h/{}", config.site, parsed_tree.tree_path),
|
||||
revision: revision.clone(),
|
||||
},
|
||||
feeds: &feeds,
|
||||
season: Season::current(),
|
||||
};
|
||||
let mut template_name = roots
|
||||
.attributes
|
||||
|
@ -594,7 +568,7 @@ impl Generator {
|
|||
std::fs::create_dir_all(target_path.parent().unwrap())?;
|
||||
|
||||
let template_data = HistoryTemplateData {
|
||||
config,
|
||||
base: &base_template_data,
|
||||
page: HistoryPage {
|
||||
title: format!("page history: {tree_path}"),
|
||||
commits: page_history
|
||||
|
@ -624,7 +598,6 @@ impl Generator {
|
|||
tree_path: tree_path.to_owned(),
|
||||
is_history: true,
|
||||
},
|
||||
season: Season::current(),
|
||||
};
|
||||
let templated_html = match handlebars.render("_history.hbs", &template_data) {
|
||||
Ok(html) => html,
|
||||
|
@ -651,17 +624,25 @@ impl Generator {
|
|||
|
||||
pub fn generate(
|
||||
paths: &Paths<'_>,
|
||||
src: &dyn ReadFilesystem,
|
||||
latest_revision: LatestRevision,
|
||||
) -> anyhow::Result<(Config, Treehouse)> {
|
||||
let start = Instant::now();
|
||||
|
||||
info!("loading config");
|
||||
let mut config = Config::load(paths.config_file)?;
|
||||
let mut config: Config = toml_edit::de::from_str(
|
||||
&src.content(VPath::new("treehouse.toml"))
|
||||
.map(String::from_utf8)
|
||||
.ok_or_else(|| anyhow!("config file does not exist"))??,
|
||||
)
|
||||
.context("failed to deserialize config")?;
|
||||
config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
|
||||
config.autopopulate_emoji(&paths.static_dir.join("emoji"))?;
|
||||
config.autopopulate_pics(&paths.static_dir.join("pic"))?;
|
||||
config.autopopulate_emoji(&src.cd(VPathBuf::new("static/emoji")))?;
|
||||
config.autopopulate_pics(&src.cd(VPathBuf::new("static/pic")))?;
|
||||
config.load_syntaxes(&paths.static_dir.join("syntax"))?;
|
||||
|
||||
// TODO: WriteFilesystem, such that we can write into the target directory?
|
||||
|
||||
info!("cleaning target directory");
|
||||
let _ = std::fs::remove_dir_all(paths.target_dir);
|
||||
std::fs::create_dir_all(paths.target_dir)?;
|
||||
|
@ -669,9 +650,6 @@ pub fn generate(
|
|||
info!("copying static directory to target directory");
|
||||
copy_dir(paths.static_dir, paths.target_dir.join("static"))?;
|
||||
|
||||
info!("creating static/generated directory");
|
||||
std::fs::create_dir_all(paths.target_dir.join("static/generated"))?;
|
||||
|
||||
info!("getting history");
|
||||
let git = git2::Repository::open(".")?;
|
||||
let history = History::get(&git)?;
|
||||
|
@ -701,19 +679,12 @@ pub fn generate(
|
|||
navigation_map.to_javascript(),
|
||||
)?;
|
||||
|
||||
info!("generating import map");
|
||||
let import_map =
|
||||
ImportMap::generate(config.site.clone(), &config.build.javascript.import_roots);
|
||||
std::fs::write(
|
||||
paths.target_dir.join("static/generated/import-map.json"),
|
||||
serde_json::to_string_pretty(&import_map).context("could not serialize import map")?,
|
||||
)?;
|
||||
|
||||
info!("generating standalone pages");
|
||||
let diagnostics = generator.generate_all_files(
|
||||
&mut treehouse,
|
||||
&config,
|
||||
paths,
|
||||
src,
|
||||
&navigation_map,
|
||||
parsed_trees,
|
||||
)?;
|
||||
|
@ -733,11 +704,12 @@ pub fn generate(
|
|||
|
||||
pub fn regenerate_or_report_error(
|
||||
paths: &Paths<'_>,
|
||||
src: &dyn ReadFilesystem,
|
||||
latest_revision: LatestRevision,
|
||||
) -> anyhow::Result<(Config, Treehouse)> {
|
||||
info!("regenerating site content");
|
||||
|
||||
let result = generate(paths, latest_revision);
|
||||
let result = generate(paths, src, latest_revision);
|
||||
if let Err(e) = &result {
|
||||
error!("{e:?}");
|
||||
}
|
||||
|
|
|
@ -3,13 +3,12 @@ use std::fmt::{self, Display, Write};
|
|||
pub mod breadcrumbs;
|
||||
mod djot;
|
||||
pub mod highlight;
|
||||
mod markdown;
|
||||
pub mod navmap;
|
||||
pub mod tree;
|
||||
|
||||
pub struct EscapeAttribute<'a>(&'a str);
|
||||
pub struct EscapeAttribute<'a>(pub &'a str);
|
||||
|
||||
impl<'a> Display for EscapeAttribute<'a> {
|
||||
impl Display for EscapeAttribute<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for c in self.0.chars() {
|
||||
if c == '"' {
|
||||
|
@ -22,9 +21,9 @@ impl<'a> Display for EscapeAttribute<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct EscapeHtml<'a>(&'a str);
|
||||
pub struct EscapeHtml<'a>(pub &'a str);
|
||||
|
||||
impl<'a> Display for EscapeHtml<'a> {
|
||||
impl Display for EscapeHtml<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for c in self.0.chars() {
|
||||
match c {
|
||||
|
|
|
@ -17,22 +17,26 @@ use jotdown::OrderedListNumbering::*;
|
|||
use jotdown::SpanLinkType;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::config::ConfigDerivedData;
|
||||
use crate::state::FileId;
|
||||
use crate::state::Treehouse;
|
||||
use crate::vfs;
|
||||
use crate::vfs::ReadFilesystem;
|
||||
|
||||
use super::highlight::highlight;
|
||||
|
||||
/// [`Render`] implementor that writes HTML output.
|
||||
pub struct Renderer<'a> {
|
||||
pub config: &'a Config,
|
||||
pub config_derived_data: &'a mut ConfigDerivedData,
|
||||
|
||||
pub emoji_fs: &'a dyn ReadFilesystem,
|
||||
pub pics_fs: &'a dyn ReadFilesystem,
|
||||
|
||||
pub treehouse: &'a mut Treehouse,
|
||||
pub file_id: FileId,
|
||||
pub page_id: String,
|
||||
}
|
||||
|
||||
impl<'a> Renderer<'a> {
|
||||
impl Renderer<'_> {
|
||||
#[must_use]
|
||||
pub fn render(
|
||||
self,
|
||||
|
@ -369,31 +373,26 @@ impl<'a> Writer<'a> {
|
|||
r#"<img class="placeholder-image" loading="lazy" src=""#,
|
||||
);
|
||||
|
||||
let filename = self.renderer.config.pics.get(placeholder_pic_id);
|
||||
let pic_url = filename
|
||||
.and_then(|filename| {
|
||||
self.renderer
|
||||
.config_derived_data
|
||||
.static_urls
|
||||
.get(&format!("pic/{filename}"))
|
||||
.ok()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let pic_url = self
|
||||
.renderer
|
||||
.config
|
||||
.pic_url(self.renderer.pics_fs, placeholder_pic_id);
|
||||
write_attr(&pic_url, out);
|
||||
out.push('"');
|
||||
|
||||
let image_size = filename.and_then(|filename| {
|
||||
self.renderer
|
||||
.config_derived_data
|
||||
.image_size(&format!("static/pic/{filename}"))
|
||||
});
|
||||
if let Some(image_size) = image_size {
|
||||
write!(
|
||||
out,
|
||||
r#" width="{}" height="{}""#,
|
||||
image_size.width, image_size.height
|
||||
)?;
|
||||
}
|
||||
// TODO: Image size derivation.
|
||||
// let image_size = filename.and_then(|filename| {
|
||||
// self.renderer
|
||||
// .config_derived_data
|
||||
// .image_size(&format!("static/pic/{filename}"))
|
||||
// });
|
||||
// if let Some(image_size) = image_size {
|
||||
// write!(
|
||||
// out,
|
||||
// r#" width="{}" height="{}""#,
|
||||
// image_size.width, image_size.height
|
||||
// )?;
|
||||
// }
|
||||
|
||||
out.push('>');
|
||||
}
|
||||
|
@ -523,8 +522,7 @@ impl<'a> Writer<'a> {
|
|||
self.renderer.config.syntaxes.get(code_block.language)
|
||||
});
|
||||
if let Some(syntax) = syntax {
|
||||
// TODO djot: make highlight infallible
|
||||
highlight(out, syntax, s).map_err(|_| std::fmt::Error)?;
|
||||
highlight(out, syntax, s);
|
||||
} else {
|
||||
write_text(s, out);
|
||||
}
|
||||
|
@ -547,7 +545,7 @@ impl<'a> Writer<'a> {
|
|||
});
|
||||
}
|
||||
Event::Symbol(sym) => {
|
||||
if let Some(filename) = self.renderer.config.emoji.get(sym.as_ref()) {
|
||||
if let Some(vpath) = self.renderer.config.emoji.get(sym.as_ref()) {
|
||||
let branch_id = self
|
||||
.renderer
|
||||
.treehouse
|
||||
|
@ -565,12 +563,7 @@ impl<'a> Writer<'a> {
|
|||
out.push_str(r#"">"#)
|
||||
}
|
||||
|
||||
let url = self
|
||||
.renderer
|
||||
.config_derived_data
|
||||
.static_urls
|
||||
.get(&format!("emoji/{filename}"))
|
||||
.unwrap_or_default();
|
||||
let url = vfs::url(&self.renderer.config.site, self.renderer.emoji_fs, vpath);
|
||||
|
||||
// TODO: this could do with better alt text
|
||||
write!(
|
||||
|
@ -580,17 +573,18 @@ impl<'a> Writer<'a> {
|
|||
write_attr(&url, out);
|
||||
out.push('"');
|
||||
|
||||
if let Some(image_size) = self
|
||||
.renderer
|
||||
.config_derived_data
|
||||
.image_size(&format!("static/emoji/{filename}"))
|
||||
{
|
||||
write!(
|
||||
out,
|
||||
r#" width="{}" height="{}""#,
|
||||
image_size.width, image_size.height
|
||||
)?;
|
||||
}
|
||||
// TODO: Image size derivation.
|
||||
// if let Some(image_size) = self
|
||||
// .renderer
|
||||
// .config_derived_data
|
||||
// .image_size(&format!("static/emoji/{vpath}"))
|
||||
// {
|
||||
// write!(
|
||||
// out,
|
||||
// r#" width="{}" height="{}""#,
|
||||
// image_size.width, image_size.height
|
||||
// )?;
|
||||
// }
|
||||
|
||||
out.push('>');
|
||||
|
||||
|
@ -635,10 +629,7 @@ impl<'a> Writer<'a> {
|
|||
|
||||
fn resolve_link(&self, link: &str) -> Option<String> {
|
||||
let Renderer {
|
||||
config,
|
||||
config_derived_data,
|
||||
treehouse,
|
||||
..
|
||||
config, treehouse, ..
|
||||
} = &self.renderer;
|
||||
link.split_once(':').and_then(|(kind, linked)| match kind {
|
||||
"def" => config.defs.get(linked).cloned(),
|
||||
|
@ -653,12 +644,7 @@ impl<'a> Writer<'a> {
|
|||
)
|
||||
}),
|
||||
"page" => Some(config.page_url(linked)),
|
||||
"pic" => config.pics.get(linked).and_then(|filename| {
|
||||
config_derived_data
|
||||
.static_urls
|
||||
.get(&format!("pic/{filename}"))
|
||||
.ok()
|
||||
}),
|
||||
"pic" => Some(config.pic_url(self.renderer.pics_fs, linked)),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -11,13 +11,14 @@
|
|||
pub mod compiled;
|
||||
pub mod tokenize;
|
||||
|
||||
use std::{collections::HashMap, io};
|
||||
use std::{collections::HashMap, fmt::Write};
|
||||
|
||||
use pulldown_cmark::escape::{escape_html, StrWrite};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use self::compiled::CompiledSyntax;
|
||||
|
||||
use super::EscapeHtml;
|
||||
|
||||
/// Syntax definition.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct Syntax {
|
||||
|
@ -81,14 +82,13 @@ pub struct Keyword {
|
|||
pub only_replaces: Option<String>,
|
||||
}
|
||||
|
||||
pub fn highlight(mut w: impl StrWrite, syntax: &CompiledSyntax, code: &str) -> io::Result<()> {
|
||||
pub fn highlight(out: &mut String, syntax: &CompiledSyntax, code: &str) {
|
||||
let tokens = syntax.tokenize(code);
|
||||
for token in tokens {
|
||||
w.write_str("<span class=\"")?;
|
||||
escape_html(&mut w, &syntax.token_names[token.id])?;
|
||||
w.write_str("\">")?;
|
||||
escape_html(&mut w, &code[token.range])?;
|
||||
w.write_str("</span>")?;
|
||||
out.push_str("<span class=\"");
|
||||
_ = write!(out, "{}", EscapeHtml(&syntax.token_names[token.id]));
|
||||
out.push_str("\">");
|
||||
_ = write!(out, "{}", EscapeHtml(&code[token.range]));
|
||||
out.push_str("</span>");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -1,716 +0,0 @@
|
|||
// NOTE: This code is pasted pretty much verbatim from pulldown-cmark but tweaked to have my own
|
||||
// cool additions.
|
||||
|
||||
// Copyright 2015 Google Inc. All rights reserved.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
// THE SOFTWARE.
|
||||
|
||||
//! HTML renderer that takes an iterator of events as input.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::io;
|
||||
|
||||
use pulldown_cmark::escape::{escape_href, escape_html, StrWrite};
|
||||
use pulldown_cmark::{Alignment, CodeBlockKind, Event, LinkType, Tag};
|
||||
use pulldown_cmark::{CowStr, Event::*};
|
||||
|
||||
use crate::config::{Config, ConfigDerivedData, ImageSize};
|
||||
use crate::html::highlight::highlight;
|
||||
use crate::state::Treehouse;
|
||||
|
||||
enum TableState {
|
||||
Head,
|
||||
Body,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
enum CodeBlockState<'a> {
|
||||
NotInCodeBlock,
|
||||
InCodeBlock(Option<CowStr<'a>>),
|
||||
}
|
||||
|
||||
struct HtmlWriter<'a, I, W> {
|
||||
treehouse: &'a Treehouse,
|
||||
config: &'a Config,
|
||||
config_derived_data: &'a mut ConfigDerivedData,
|
||||
page_id: &'a str,
|
||||
|
||||
/// Iterator supplying events.
|
||||
iter: I,
|
||||
|
||||
/// Writer to write to.
|
||||
writer: W,
|
||||
|
||||
/// Whether or not the last write wrote a newline.
|
||||
end_newline: bool,
|
||||
|
||||
table_state: TableState,
|
||||
table_alignments: Vec<Alignment>,
|
||||
table_cell_index: usize,
|
||||
numbers: HashMap<CowStr<'a>, usize>,
|
||||
|
||||
code_block_state: CodeBlockState<'a>,
|
||||
}
|
||||
|
||||
impl<'a, I, W> HtmlWriter<'a, I, W>
|
||||
where
|
||||
I: Iterator<Item = Event<'a>>,
|
||||
W: StrWrite,
|
||||
{
|
||||
fn new(
|
||||
treehouse: &'a Treehouse,
|
||||
config: &'a Config,
|
||||
config_derived_data: &'a mut ConfigDerivedData,
|
||||
page_id: &'a str,
|
||||
iter: I,
|
||||
writer: W,
|
||||
) -> Self {
|
||||
Self {
|
||||
treehouse,
|
||||
config,
|
||||
config_derived_data,
|
||||
page_id,
|
||||
|
||||
iter,
|
||||
writer,
|
||||
end_newline: true,
|
||||
table_state: TableState::Head,
|
||||
table_alignments: vec![],
|
||||
table_cell_index: 0,
|
||||
numbers: HashMap::new(),
|
||||
code_block_state: CodeBlockState::NotInCodeBlock,
|
||||
}
|
||||
}
|
||||
|
||||
/// Writes a new line.
|
||||
fn write_newline(&mut self) -> io::Result<()> {
|
||||
self.end_newline = true;
|
||||
self.writer.write_str("\n")
|
||||
}
|
||||
|
||||
/// Writes a buffer, and tracks whether or not a newline was written.
|
||||
#[inline]
|
||||
fn write(&mut self, s: &str) -> io::Result<()> {
|
||||
self.writer.write_str(s)?;
|
||||
|
||||
if !s.is_empty() {
|
||||
self.end_newline = s.ends_with('\n');
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run(mut self) -> io::Result<()> {
|
||||
while let Some(event) = self.iter.next() {
|
||||
match event {
|
||||
Start(tag) => {
|
||||
self.start_tag(tag)?;
|
||||
}
|
||||
End(tag) => {
|
||||
self.end_tag(tag)?;
|
||||
}
|
||||
Text(text) => {
|
||||
self.run_text(&text)?;
|
||||
self.end_newline = text.ends_with('\n');
|
||||
}
|
||||
Code(text) => {
|
||||
self.write("<code>")?;
|
||||
escape_html(&mut self.writer, &text)?;
|
||||
self.write("</code>")?;
|
||||
}
|
||||
Html(html) => {
|
||||
self.write(&html)?;
|
||||
}
|
||||
SoftBreak => {
|
||||
self.write_newline()?;
|
||||
}
|
||||
HardBreak => {
|
||||
self.write("<br />\n")?;
|
||||
}
|
||||
Rule => {
|
||||
if self.end_newline {
|
||||
self.write("<hr />\n")?;
|
||||
} else {
|
||||
self.write("\n<hr />\n")?;
|
||||
}
|
||||
}
|
||||
FootnoteReference(name) => {
|
||||
let len = self.numbers.len() + 1;
|
||||
self.write("<sup class=\"footnote-reference\"><a href=\"#")?;
|
||||
escape_html(&mut self.writer, &name)?;
|
||||
self.write("\">")?;
|
||||
let number = *self.numbers.entry(name).or_insert(len);
|
||||
write!(&mut self.writer, "{}", number)?;
|
||||
self.write("</a></sup>")?;
|
||||
}
|
||||
TaskListMarker(true) => {
|
||||
self.write("<input disabled=\"\" type=\"checkbox\" checked=\"\"/>\n")?;
|
||||
}
|
||||
TaskListMarker(false) => {
|
||||
self.write("<input disabled=\"\" type=\"checkbox\"/>\n")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Writes the start of an HTML tag.
|
||||
fn start_tag(&mut self, tag: Tag<'a>) -> io::Result<()> {
|
||||
match tag {
|
||||
Tag::Paragraph => {
|
||||
if self.end_newline {
|
||||
self.write("<p>")
|
||||
} else {
|
||||
self.write("\n<p>")
|
||||
}
|
||||
}
|
||||
Tag::Heading(level, id, classes) => {
|
||||
if self.end_newline {
|
||||
self.end_newline = false;
|
||||
self.write("<")?;
|
||||
} else {
|
||||
self.write("\n<")?;
|
||||
}
|
||||
write!(&mut self.writer, "{}", level)?;
|
||||
if let Some(id) = id {
|
||||
self.write(" id=\"")?;
|
||||
escape_html(&mut self.writer, id)?;
|
||||
self.write("\"")?;
|
||||
}
|
||||
let mut classes = classes.iter();
|
||||
if let Some(class) = classes.next() {
|
||||
self.write(" class=\"")?;
|
||||
escape_html(&mut self.writer, class)?;
|
||||
for class in classes {
|
||||
self.write(" ")?;
|
||||
escape_html(&mut self.writer, class)?;
|
||||
}
|
||||
self.write("\"")?;
|
||||
}
|
||||
self.write(">")
|
||||
}
|
||||
Tag::Table(alignments) => {
|
||||
self.table_alignments = alignments;
|
||||
self.write("<table>")
|
||||
}
|
||||
Tag::TableHead => {
|
||||
self.table_state = TableState::Head;
|
||||
self.table_cell_index = 0;
|
||||
self.write("<thead><tr>")
|
||||
}
|
||||
Tag::TableRow => {
|
||||
self.table_cell_index = 0;
|
||||
self.write("<tr>")
|
||||
}
|
||||
Tag::TableCell => {
|
||||
match self.table_state {
|
||||
TableState::Head => {
|
||||
self.write("<th")?;
|
||||
}
|
||||
TableState::Body => {
|
||||
self.write("<td")?;
|
||||
}
|
||||
}
|
||||
match self.table_alignments.get(self.table_cell_index) {
|
||||
Some(&Alignment::Left) => self.write(" style=\"text-align: left\">"),
|
||||
Some(&Alignment::Center) => self.write(" style=\"text-align: center\">"),
|
||||
Some(&Alignment::Right) => self.write(" style=\"text-align: right\">"),
|
||||
_ => self.write(">"),
|
||||
}
|
||||
}
|
||||
Tag::BlockQuote => {
|
||||
if self.end_newline {
|
||||
self.write("<blockquote>\n")
|
||||
} else {
|
||||
self.write("\n<blockquote>\n")
|
||||
}
|
||||
}
|
||||
Tag::CodeBlock(info) => {
|
||||
self.code_block_state = CodeBlockState::InCodeBlock(None);
|
||||
if !self.end_newline {
|
||||
self.write_newline()?;
|
||||
}
|
||||
match info {
|
||||
CodeBlockKind::Fenced(language) => {
|
||||
self.code_block_state = CodeBlockState::InCodeBlock(Some(language.clone()));
|
||||
match CodeBlockMode::parse(&language) {
|
||||
CodeBlockMode::PlainText => self.write("<pre><code>"),
|
||||
CodeBlockMode::SyntaxHighlightOnly { language } => {
|
||||
self.write("<pre><code class=\"language-")?;
|
||||
escape_html(&mut self.writer, language)?;
|
||||
if self.config.syntaxes.contains_key(language) {
|
||||
self.write(" th-syntax-highlighting")?;
|
||||
}
|
||||
self.write("\">")
|
||||
}
|
||||
CodeBlockMode::LiterateProgram {
|
||||
language,
|
||||
kind,
|
||||
program_name,
|
||||
} => {
|
||||
self.write(match &kind {
|
||||
LiterateCodeKind::Input => {
|
||||
"<th-literate-program data-mode=\"input\" "
|
||||
}
|
||||
LiterateCodeKind::Output { .. } => {
|
||||
"<th-literate-program data-mode=\"output\" "
|
||||
}
|
||||
})?;
|
||||
self.write("data-program=\"")?;
|
||||
escape_href(&mut self.writer, self.page_id)?;
|
||||
self.write(":")?;
|
||||
escape_html(&mut self.writer, program_name)?;
|
||||
self.write("\" data-language=\"")?;
|
||||
escape_html(&mut self.writer, language)?;
|
||||
self.write("\" role=\"code\">")?;
|
||||
|
||||
if let LiterateCodeKind::Output { placeholder_pic_id } = kind {
|
||||
if !placeholder_pic_id.is_empty() {
|
||||
self.write("<img class=\"placeholder-image\" loading=\"lazy\" src=\"")?;
|
||||
escape_html(
|
||||
&mut self.writer,
|
||||
&self.config.pic_url(placeholder_pic_id),
|
||||
)?;
|
||||
self.write("\"")?;
|
||||
if let Some(ImageSize { width, height }) = self
|
||||
.config_derived_data
|
||||
.pic_size(self.config, placeholder_pic_id)
|
||||
{
|
||||
self.write(&format!(
|
||||
" width=\"{width}\" height=\"{height}\""
|
||||
))?;
|
||||
}
|
||||
self.write(">")?;
|
||||
}
|
||||
}
|
||||
|
||||
self.write("<pre class=\"placeholder-console\">")?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
CodeBlockKind::Indented => self.write("<pre><code>"),
|
||||
}
|
||||
}
|
||||
Tag::List(Some(1)) => {
|
||||
if self.end_newline {
|
||||
self.write("<ol>\n")
|
||||
} else {
|
||||
self.write("\n<ol>\n")
|
||||
}
|
||||
}
|
||||
Tag::List(Some(start)) => {
|
||||
if self.end_newline {
|
||||
self.write("<ol start=\"")?;
|
||||
} else {
|
||||
self.write("\n<ol start=\"")?;
|
||||
}
|
||||
write!(&mut self.writer, "{}", start)?;
|
||||
self.write("\">\n")
|
||||
}
|
||||
Tag::List(None) => {
|
||||
if self.end_newline {
|
||||
self.write("<ul>\n")
|
||||
} else {
|
||||
self.write("\n<ul>\n")
|
||||
}
|
||||
}
|
||||
Tag::Item => {
|
||||
if self.end_newline {
|
||||
self.write("<li>")
|
||||
} else {
|
||||
self.write("\n<li>")
|
||||
}
|
||||
}
|
||||
Tag::Emphasis => self.write("<em>"),
|
||||
Tag::Strong => self.write("<strong>"),
|
||||
Tag::Strikethrough => self.write("<del>"),
|
||||
Tag::Link(LinkType::Email, dest, title) => {
|
||||
self.write("<a href=\"mailto:")?;
|
||||
escape_href(&mut self.writer, &dest)?;
|
||||
if !title.is_empty() {
|
||||
self.write("\" title=\"")?;
|
||||
escape_html(&mut self.writer, &title)?;
|
||||
}
|
||||
self.write("\">")
|
||||
}
|
||||
Tag::Link(_link_type, dest, title) => {
|
||||
self.write("<a href=\"")?;
|
||||
escape_href(&mut self.writer, &dest)?;
|
||||
if !title.is_empty() {
|
||||
self.write("\" title=\"")?;
|
||||
escape_html(&mut self.writer, &title)?;
|
||||
}
|
||||
self.write("\">")
|
||||
}
|
||||
Tag::Image(_link_type, dest, title) => {
|
||||
self.write("<img class=\"pic\" src=\"")?;
|
||||
escape_href(&mut self.writer, &dest)?;
|
||||
self.write("\" alt=\"")?;
|
||||
self.raw_text()?;
|
||||
if !title.is_empty() {
|
||||
self.write("\" title=\"")?;
|
||||
escape_html(&mut self.writer, &title)?;
|
||||
}
|
||||
self.write("\" />")
|
||||
}
|
||||
Tag::FootnoteDefinition(name) => {
|
||||
if self.end_newline {
|
||||
self.write("<div class=\"footnote-definition\" id=\"")?;
|
||||
} else {
|
||||
self.write("\n<div class=\"footnote-definition\" id=\"")?;
|
||||
}
|
||||
escape_html(&mut self.writer, &name)?;
|
||||
self.write("\"><sup class=\"footnote-definition-label\">")?;
|
||||
let len = self.numbers.len() + 1;
|
||||
let number = *self.numbers.entry(name).or_insert(len);
|
||||
write!(&mut self.writer, "{}", number)?;
|
||||
self.write("</sup>")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn end_tag(&mut self, tag: Tag) -> io::Result<()> {
|
||||
match tag {
|
||||
Tag::Paragraph => {
|
||||
self.write("</p>\n")?;
|
||||
}
|
||||
Tag::Heading(level, _id, _classes) => {
|
||||
self.write("</")?;
|
||||
write!(&mut self.writer, "{}", level)?;
|
||||
self.write(">\n")?;
|
||||
}
|
||||
Tag::Table(_) => {
|
||||
self.write("</tbody></table>\n")?;
|
||||
}
|
||||
Tag::TableHead => {
|
||||
self.write("</tr></thead><tbody>\n")?;
|
||||
self.table_state = TableState::Body;
|
||||
}
|
||||
Tag::TableRow => {
|
||||
self.write("</tr>\n")?;
|
||||
}
|
||||
Tag::TableCell => {
|
||||
match self.table_state {
|
||||
TableState::Head => {
|
||||
self.write("</th>")?;
|
||||
}
|
||||
TableState::Body => {
|
||||
self.write("</td>")?;
|
||||
}
|
||||
}
|
||||
self.table_cell_index += 1;
|
||||
}
|
||||
Tag::BlockQuote => {
|
||||
self.write("</blockquote>\n")?;
|
||||
}
|
||||
Tag::CodeBlock(kind) => {
|
||||
self.write(match kind {
|
||||
CodeBlockKind::Fenced(language) => match CodeBlockMode::parse(&language) {
|
||||
CodeBlockMode::LiterateProgram { .. } => "</pre></th-literate-program>",
|
||||
_ => "</code></pre>",
|
||||
},
|
||||
_ => "</code></pre>\n",
|
||||
})?;
|
||||
self.code_block_state = CodeBlockState::NotInCodeBlock;
|
||||
}
|
||||
Tag::List(Some(_)) => {
|
||||
self.write("</ol>\n")?;
|
||||
}
|
||||
Tag::List(None) => {
|
||||
self.write("</ul>\n")?;
|
||||
}
|
||||
Tag::Item => {
|
||||
self.write("</li>\n")?;
|
||||
}
|
||||
Tag::Emphasis => {
|
||||
self.write("</em>")?;
|
||||
}
|
||||
Tag::Strong => {
|
||||
self.write("</strong>")?;
|
||||
}
|
||||
Tag::Strikethrough => {
|
||||
self.write("</del>")?;
|
||||
}
|
||||
Tag::Link(_, _, _) => {
|
||||
self.write("</a>")?;
|
||||
}
|
||||
Tag::Image(_, _, _) => (), // shouldn't happen, handled in start
|
||||
Tag::FootnoteDefinition(_) => {
|
||||
self.write("</div>\n")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_text(&mut self, text: &str) -> io::Result<()> {
|
||||
struct EmojiParser<'a> {
|
||||
text: &'a str,
|
||||
position: usize,
|
||||
}
|
||||
|
||||
enum Token<'a> {
|
||||
Text(&'a str),
|
||||
Emoji(&'a str),
|
||||
}
|
||||
|
||||
impl<'a> EmojiParser<'a> {
|
||||
fn current(&self) -> Option<char> {
|
||||
self.text[self.position..].chars().next()
|
||||
}
|
||||
|
||||
fn next_token(&mut self) -> Option<Token<'a>> {
|
||||
match self.current() {
|
||||
Some(':') => {
|
||||
let text_start = self.position;
|
||||
self.position += 1;
|
||||
if self.current().is_some_and(|c| c.is_alphabetic()) {
|
||||
let name_start = self.position;
|
||||
while let Some(c) = self.current() {
|
||||
if c.is_alphanumeric() || c == '_' {
|
||||
self.position += c.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if self.current() == Some(':') {
|
||||
let name_end = self.position;
|
||||
self.position += 1;
|
||||
Some(Token::Emoji(&self.text[name_start..name_end]))
|
||||
} else {
|
||||
Some(Token::Text(&self.text[text_start..self.position]))
|
||||
}
|
||||
} else {
|
||||
Some(Token::Text(&self.text[text_start..self.position]))
|
||||
}
|
||||
}
|
||||
Some(_) => {
|
||||
let start = self.position;
|
||||
while let Some(c) = self.current() {
|
||||
if c == ':' {
|
||||
break;
|
||||
} else {
|
||||
self.position += c.len_utf8();
|
||||
}
|
||||
}
|
||||
let end = self.position;
|
||||
Some(Token::Text(&self.text[start..end]))
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let CodeBlockState::InCodeBlock(language) = &self.code_block_state {
|
||||
let code_block_mode = language
|
||||
.as_ref()
|
||||
.map(|language| CodeBlockMode::parse(language));
|
||||
let highlighting_language = code_block_mode
|
||||
.as_ref()
|
||||
.and_then(|mode| mode.highlighting_language());
|
||||
let syntax =
|
||||
highlighting_language.and_then(|language| self.config.syntaxes.get(language));
|
||||
if let Some(syntax) = syntax {
|
||||
highlight(&mut self.writer, syntax, text)?;
|
||||
} else {
|
||||
escape_html(&mut self.writer, text)?;
|
||||
}
|
||||
} else {
|
||||
let mut parser = EmojiParser { text, position: 0 };
|
||||
while let Some(token) = parser.next_token() {
|
||||
match token {
|
||||
Token::Text(text) => escape_html(&mut self.writer, text)?,
|
||||
Token::Emoji(name) => {
|
||||
if let Some(filename) = self.config.emoji.get(name) {
|
||||
let branch_id = self
|
||||
.treehouse
|
||||
.branches_by_named_id
|
||||
.get(&format!("emoji/{name}"))
|
||||
.copied();
|
||||
if let Some(branch) = branch_id.map(|id| self.treehouse.tree.branch(id))
|
||||
{
|
||||
self.writer.write_str("<a href=\"")?;
|
||||
escape_html(&mut self.writer, &self.config.site)?;
|
||||
self.writer.write_str("/b?")?;
|
||||
escape_html(&mut self.writer, &branch.attributes.id)?;
|
||||
self.writer.write_str("\">")?;
|
||||
}
|
||||
|
||||
self.writer
|
||||
.write_str("<img data-cast=\"emoji\" title=\":")?;
|
||||
escape_html(&mut self.writer, name)?;
|
||||
self.writer.write_str(":\" src=\"")?;
|
||||
let url = self
|
||||
.config_derived_data
|
||||
.static_urls
|
||||
.get(&format!("emoji/{filename}"))
|
||||
.unwrap_or_default();
|
||||
escape_html(&mut self.writer, &url)?;
|
||||
self.writer.write_str("\" alt=\"")?;
|
||||
escape_html(&mut self.writer, name)?;
|
||||
if let Some(image_size) = self
|
||||
.config_derived_data
|
||||
.image_size(&format!("static/emoji/{filename}"))
|
||||
{
|
||||
write!(
|
||||
self.writer,
|
||||
"\" width=\"{}\" height=\"{}",
|
||||
image_size.width, image_size.height
|
||||
)?;
|
||||
}
|
||||
self.writer.write_str("\">")?;
|
||||
|
||||
if branch_id.is_some() {
|
||||
self.writer.write_str("</a>")?;
|
||||
}
|
||||
} else {
|
||||
self.writer.write_str(":")?;
|
||||
escape_html(&mut self.writer, name)?;
|
||||
self.writer.write_str(":")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// run raw text, consuming end tag
|
||||
fn raw_text(&mut self) -> io::Result<()> {
|
||||
let mut nest = 0;
|
||||
while let Some(event) = self.iter.next() {
|
||||
match event {
|
||||
Start(_) => nest += 1,
|
||||
End(_) => {
|
||||
if nest == 0 {
|
||||
break;
|
||||
}
|
||||
nest -= 1;
|
||||
}
|
||||
Html(text) | Code(text) | Text(text) => {
|
||||
escape_html(&mut self.writer, &text)?;
|
||||
self.end_newline = text.ends_with('\n');
|
||||
}
|
||||
SoftBreak | HardBreak | Rule => {
|
||||
self.write(" ")?;
|
||||
}
|
||||
FootnoteReference(name) => {
|
||||
let len = self.numbers.len() + 1;
|
||||
let number = *self.numbers.entry(name).or_insert(len);
|
||||
write!(&mut self.writer, "[{}]", number)?;
|
||||
}
|
||||
TaskListMarker(true) => self.write("[x]")?,
|
||||
TaskListMarker(false) => self.write("[ ]")?,
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum LiterateCodeKind<'a> {
|
||||
Input,
|
||||
Output { placeholder_pic_id: &'a str },
|
||||
}
|
||||
|
||||
enum CodeBlockMode<'a> {
|
||||
PlainText,
|
||||
SyntaxHighlightOnly {
|
||||
language: &'a str,
|
||||
},
|
||||
LiterateProgram {
|
||||
language: &'a str,
|
||||
kind: LiterateCodeKind<'a>,
|
||||
program_name: &'a str,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> CodeBlockMode<'a> {
|
||||
fn parse(language: &'a str) -> CodeBlockMode<'a> {
|
||||
if language.is_empty() {
|
||||
CodeBlockMode::PlainText
|
||||
} else if let Some((language, program_name)) = language.split_once(' ') {
|
||||
let (program_name, placeholder_pic_id) =
|
||||
program_name.split_once(' ').unwrap_or((program_name, ""));
|
||||
CodeBlockMode::LiterateProgram {
|
||||
language,
|
||||
kind: if language == "output" {
|
||||
LiterateCodeKind::Output { placeholder_pic_id }
|
||||
} else {
|
||||
LiterateCodeKind::Input
|
||||
},
|
||||
program_name: program_name.split(' ').next().unwrap(),
|
||||
}
|
||||
} else {
|
||||
CodeBlockMode::SyntaxHighlightOnly { language }
|
||||
}
|
||||
}
|
||||
|
||||
fn highlighting_language(&self) -> Option<&str> {
|
||||
if let CodeBlockMode::LiterateProgram { language, .. }
|
||||
| CodeBlockMode::SyntaxHighlightOnly { language } = self
|
||||
{
|
||||
Some(language)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over an `Iterator` of `Event`s, generate HTML for each `Event`, and
|
||||
/// push it to a `String`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use pulldown_cmark::{html, Parser};
|
||||
///
|
||||
/// let markdown_str = r#"
|
||||
/// hello
|
||||
/// =====
|
||||
///
|
||||
/// * alpha
|
||||
/// * beta
|
||||
/// "#;
|
||||
/// let parser = Parser::new(markdown_str);
|
||||
///
|
||||
/// let mut html_buf = String::new();
|
||||
/// html::push_html(&mut html_buf, parser);
|
||||
///
|
||||
/// assert_eq!(html_buf, r#"<h1>hello</h1>
|
||||
/// <ul>
|
||||
/// <li>alpha</li>
|
||||
/// <li>beta</li>
|
||||
/// </ul>
|
||||
/// "#);
|
||||
/// ```
|
||||
pub fn push_html<'a, I>(
|
||||
s: &mut String,
|
||||
treehouse: &'a Treehouse,
|
||||
config: &'a Config,
|
||||
config_derived_data: &'a mut ConfigDerivedData,
|
||||
page_id: &'a str,
|
||||
iter: I,
|
||||
) where
|
||||
I: Iterator<Item = Event<'a>>,
|
||||
{
|
||||
HtmlWriter::new(treehouse, config, config_derived_data, page_id, iter, s)
|
||||
.run()
|
||||
.unwrap();
|
||||
}
|
|
@ -1,26 +1,26 @@
|
|||
use std::{borrow::Cow, fmt::Write};
|
||||
|
||||
use pulldown_cmark::{BrokenLink, LinkType};
|
||||
use treehouse_format::pull::BranchKind;
|
||||
|
||||
use crate::{
|
||||
cli::Paths,
|
||||
config::{Config, ConfigDerivedData, Markup},
|
||||
config::Config,
|
||||
html::EscapeAttribute,
|
||||
state::{FileId, Treehouse},
|
||||
tree::{
|
||||
attributes::{Content, Stage},
|
||||
mini_template, SemaBranchId,
|
||||
},
|
||||
vfs::{CdExt, ReadFilesystem, VPathBuf},
|
||||
};
|
||||
|
||||
use super::{djot, markdown, EscapeHtml};
|
||||
use super::{djot, EscapeHtml};
|
||||
|
||||
pub fn branch_to_html(
|
||||
s: &mut String,
|
||||
treehouse: &mut Treehouse,
|
||||
config: &Config,
|
||||
config_derived_data: &mut ConfigDerivedData,
|
||||
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges
|
||||
paths: &Paths<'_>,
|
||||
file_id: FileId,
|
||||
branch_id: SemaBranchId,
|
||||
|
@ -115,73 +115,21 @@ pub fn branch_to_html(
|
|||
final_markup.push('\n');
|
||||
}
|
||||
|
||||
let broken_link_callback = &mut |broken_link: BrokenLink<'_>| {
|
||||
if let LinkType::Reference | LinkType::Shortcut = broken_link.link_type {
|
||||
broken_link
|
||||
.reference
|
||||
.split_once(':')
|
||||
.and_then(|(kind, linked)| match kind {
|
||||
"def" => config
|
||||
.defs
|
||||
.get(linked)
|
||||
.map(|link| (link.clone().into(), "".into())),
|
||||
"branch" => treehouse
|
||||
.branches_by_named_id
|
||||
.get(linked)
|
||||
.map(|&branch_id| {
|
||||
(
|
||||
format!(
|
||||
"{}/b?{}",
|
||||
config.site,
|
||||
treehouse.tree.branch(branch_id).attributes.id
|
||||
)
|
||||
.into(),
|
||||
"".into(),
|
||||
)
|
||||
}),
|
||||
"page" => Some((config.page_url(linked).into(), "".into())),
|
||||
"pic" => config.pics.get(linked).map(|filename| {
|
||||
(
|
||||
// NOTE: We can't generate a URL with a hash here yet, because we
|
||||
// cannot access ConfigDerivedData here due to it being borrowed
|
||||
// by the Markdown parser.
|
||||
format!("{}/static/pic/{}", config.site, &filename).into(),
|
||||
"".into(),
|
||||
)
|
||||
}),
|
||||
_ => None,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
if branch.attributes.template {
|
||||
final_markup = mini_template::render(config, treehouse, paths, &final_markup);
|
||||
final_markup = mini_template::render(
|
||||
config,
|
||||
treehouse,
|
||||
paths,
|
||||
&root_fs.cd(VPathBuf::new("static/pics")),
|
||||
&final_markup,
|
||||
);
|
||||
}
|
||||
s.push_str("<th-bc>");
|
||||
match config.markup {
|
||||
Markup::Markdown => {
|
||||
let markdown_parser = pulldown_cmark::Parser::new_with_broken_link_callback(
|
||||
&final_markup,
|
||||
{
|
||||
use pulldown_cmark::Options;
|
||||
Options::ENABLE_STRIKETHROUGH | Options::ENABLE_TABLES
|
||||
},
|
||||
Some(broken_link_callback),
|
||||
);
|
||||
markdown::push_html(
|
||||
s,
|
||||
treehouse,
|
||||
config,
|
||||
config_derived_data,
|
||||
treehouse.tree_path(file_id).expect(".tree file expected"),
|
||||
markdown_parser,
|
||||
)
|
||||
}
|
||||
Markup::Djot => {
|
||||
|
||||
let events: Vec<_> = jotdown::Parser::new(&final_markup)
|
||||
.into_offset_iter()
|
||||
.collect();
|
||||
// TODO: Report rendering diagnostics.
|
||||
let render_diagnostics = djot::Renderer {
|
||||
page_id: treehouse
|
||||
.tree_path(file_id)
|
||||
|
@ -189,13 +137,13 @@ pub fn branch_to_html(
|
|||
.to_owned(),
|
||||
|
||||
config,
|
||||
config_derived_data,
|
||||
emoji_fs: &root_fs.cd(VPathBuf::new("static/emoji")),
|
||||
pics_fs: &root_fs.cd(VPathBuf::new("static/pics")),
|
||||
|
||||
treehouse,
|
||||
file_id,
|
||||
}
|
||||
.render(&events, s);
|
||||
}
|
||||
};
|
||||
|
||||
let branch = treehouse.tree.branch(branch_id);
|
||||
if let Content::Link(link) = &branch.attributes.content {
|
||||
|
@ -247,15 +195,7 @@ pub fn branch_to_html(
|
|||
let num_children = branch.children.len();
|
||||
for i in 0..num_children {
|
||||
let child_id = treehouse.tree.branch(branch_id).children[i];
|
||||
branch_to_html(
|
||||
s,
|
||||
treehouse,
|
||||
config,
|
||||
config_derived_data,
|
||||
paths,
|
||||
file_id,
|
||||
child_id,
|
||||
);
|
||||
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child_id);
|
||||
}
|
||||
s.push_str("</ul>");
|
||||
}
|
||||
|
@ -271,22 +211,14 @@ pub fn branches_to_html(
|
|||
s: &mut String,
|
||||
treehouse: &mut Treehouse,
|
||||
config: &Config,
|
||||
config_derived_data: &mut ConfigDerivedData,
|
||||
root_fs: &dyn ReadFilesystem, // TODO: Lower privileges
|
||||
paths: &Paths<'_>,
|
||||
file_id: FileId,
|
||||
branches: &[SemaBranchId],
|
||||
) {
|
||||
s.push_str("<ul>");
|
||||
for &child in branches {
|
||||
branch_to_html(
|
||||
s,
|
||||
treehouse,
|
||||
config,
|
||||
config_derived_data,
|
||||
paths,
|
||||
file_id,
|
||||
child,
|
||||
);
|
||||
branch_to_html(s, treehouse, config, root_fs, paths, file_id, child);
|
||||
}
|
||||
s.push_str("</ul>");
|
||||
}
|
||||
|
|
|
@@ -10,5 +10,6 @@ pub mod parse;
pub mod paths;
pub mod state;
pub mod static_urls;
pub mod templater;
pub mod tree;
pub mod vfs;

@ -1,14 +1,43 @@
|
|||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::{fs, path::Path};
|
||||
|
||||
use clap::Parser;
|
||||
use log::{error, info, warn};
|
||||
use treehouse::cli::{
|
||||
use log::error;
|
||||
use treehouse::generate::{regenerate_or_report_error, LatestRevision};
|
||||
use treehouse::vfs::PhysicalDir;
|
||||
use treehouse::vfs::{AnchoredAtExt, VPathBuf};
|
||||
use treehouse::{
|
||||
cli::{
|
||||
fix::{fix_all_cli, fix_file_cli},
|
||||
serve::serve,
|
||||
wc::wc_cli,
|
||||
Command, Paths, ProgramArgs,
|
||||
},
|
||||
vfs::{BufferedFile, MountPoints, ReadFilesystem, VPath},
|
||||
};
|
||||
use treehouse::generate::{regenerate_or_report_error, LatestRevision};
|
||||
|
||||
fn vfs_sources() -> anyhow::Result<impl ReadFilesystem> {
|
||||
let mut root = MountPoints::new();
|
||||
|
||||
root.add(
|
||||
VPath::new("treehouse.toml"),
|
||||
Box::new(BufferedFile::new(fs::read("treehouse.toml")?)),
|
||||
);
|
||||
root.add(
|
||||
VPath::new("static"),
|
||||
Box::new(PhysicalDir::new(PathBuf::from("static")).anchored_at(VPathBuf::new("static"))),
|
||||
);
|
||||
root.add(
|
||||
VPath::new("template"),
|
||||
Box::new(PhysicalDir::new(PathBuf::from("template"))),
|
||||
);
|
||||
root.add(
|
||||
VPath::new("content"),
|
||||
Box::new(PhysicalDir::new(PathBuf::from("content"))),
|
||||
);
|
||||
|
||||
Ok(root)
|
||||
}
|
||||
|
||||
async fn fallible_main() -> anyhow::Result<()> {
|
||||
let args = ProgramArgs::parse();
|
||||
|
@@ -18,24 +47,14 @@ async fn fallible_main() -> anyhow::Result<()> {
        template_target_dir: Path::new("target/site/static/html"),

        config_file: Path::new("treehouse.toml"),

        // NOTE: These are intentionally left unconfigurable from within treehouse.toml
        // because this is one of those things that should be consistent between sites.
        static_dir: Path::new("static"),
        template_dir: Path::new("template"),
        content_dir: Path::new("content"),
    };

    let src = vfs_sources()?;

    match args.command {
        Command::Generate(generate_args) => {
            info!("regenerating using directories: {paths:#?}");
            let latest_revision = match generate_args.commits_only {
                true => LatestRevision::LatestCommit,
                false => LatestRevision::WorkingTree,
            };
            regenerate_or_report_error(&paths, latest_revision)?;
            warn!("`generate` is for debugging only and the files cannot be fully served using a static file server; use `treehouse serve` if you wish to start a treehouse server");
        }
        Command::Serve {
            generate: generate_args,
            serve: serve_args,

@@ -44,7 +63,7 @@ async fn fallible_main() -> anyhow::Result<()> {
            true => LatestRevision::LatestCommit,
            false => LatestRevision::WorkingTree,
        };
        let (config, treehouse) = regenerate_or_report_error(&paths, latest_revision)?;
        let (config, treehouse) = regenerate_or_report_error(&paths, &src, latest_revision)?;
        serve(config, treehouse, &paths, serve_args.port).await?;
    }

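For orientation, a minimal sketch of how the tree assembled by `vfs_sources` can be queried through the `ReadFilesystem` trait. This helper is illustrative only (it assumes it sits next to `vfs_sources` in main.rs) and is not part of the commit:

    use treehouse::vfs::{ReadFilesystem, VPath};

    fn peek_sources() -> anyhow::Result<()> {
        let src = vfs_sources()?;
        // treehouse.toml is mounted as a single buffered file at the root.
        let config_bytes = src.content(VPath::new("treehouse.toml"));
        println!("treehouse.toml is {} bytes", config_bytes.map_or(0, |b| b.len()));
        // Directories mounted from disk are listed through the same interface;
        // entries come back prefixed with the mount point ("content/...").
        for entry in src.dir(VPath::new("content")) {
            println!("{}", entry.path);
        }
        Ok(())
    }
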
crates/treehouse/src/templater.rs (new file, +13)
@@ -0,0 +1,13 @@
use handlebars::Handlebars;

pub struct Templater {
    handlebars: Handlebars<'static>,
}

impl Templater {
    pub fn new() -> Self {
        Self {
            handlebars: Handlebars::new(),
        }
    }
}

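`Templater` is only a thin wrapper around a `handlebars::Handlebars` registry so far. As a reminder of the underlying handlebars-rust calls it wraps, a small sketch; the template name and body here are made up for illustration and are not part of this commit:

    use std::collections::HashMap;

    use handlebars::Handlebars;

    fn render_example() -> anyhow::Result<String> {
        let mut handlebars = Handlebars::new();
        // Hypothetical template; the real templates live under template/ in the repository.
        handlebars.register_template_string("greeting", "hello, {{name}}!")?;
        let data = HashMap::from([("name", "treehouse")]);
        Ok(handlebars.render("greeting", &data)?)
    }
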
@@ -4,11 +4,10 @@
//! for injecting *custom, stateful* context into the renderer, which is important for things like
//! the `pic` template to work.

use std::fmt::Write;
use std::ops::Range;

use pulldown_cmark::escape::escape_html;

use crate::{cli::Paths, config::Config, state::Treehouse};
use crate::{cli::Paths, config::Config, html::EscapeHtml, state::Treehouse, vfs::ReadFilesystem};

struct Lexer<'a> {
    input: &'a str,

@@ -144,12 +143,18 @@ struct Renderer<'a> {

struct InvalidTemplate;

impl<'a> Renderer<'a> {
impl Renderer<'_> {
    fn emit_token_verbatim(&mut self, token: &Token) {
        self.output.push_str(&self.lexer.input[token.range.clone()]);
    }

    fn render(&mut self, config: &Config, treehouse: &Treehouse, paths: &Paths<'_>) {
    fn render(
        &mut self,
        config: &Config,
        treehouse: &Treehouse,
        paths: &Paths<'_>,
        pics_fs: &dyn ReadFilesystem,
    ) {
        let kind_of = |token: &Token| token.kind;

        while let Some(token) = self.lexer.next() {

@@ -166,12 +171,13 @@ impl<'a> Renderer<'a> {
                    match Self::render_template(
                        config,
                        treehouse,
                        pics_fs,
                        paths,
                        self.lexer.input[inside.as_ref().unwrap().range.clone()].trim(),
                    ) {
                        Ok(s) => match escaping {
                            EscapingMode::EscapeHtml => {
                                _ = escape_html(&mut self.output, &s);
                                _ = write!(self.output, "{}", EscapeHtml(&s));
                            }
                            EscapingMode::NoEscaping => self.output.push_str(&s),
                        },

@@ -193,12 +199,13 @@ impl<'a> Renderer<'a> {
    fn render_template(
        config: &Config,
        _treehouse: &Treehouse,
        pics_fs: &dyn ReadFilesystem,
        paths: &Paths<'_>,
        template: &str,
    ) -> Result<String, InvalidTemplate> {
        let (function, arguments) = template.split_once(' ').unwrap_or((template, ""));
        match function {
            "pic" => Ok(config.pic_url(arguments)),
            "pic" => Ok(config.pic_url(pics_fs, arguments)),
            "include_static" => std::fs::read_to_string(paths.static_dir.join(arguments))
                .map_err(|_| InvalidTemplate),
            _ => Err(InvalidTemplate),

@@ -206,11 +213,17 @@ impl<'a> Renderer<'a> {
    }
}

pub fn render(config: &Config, treehouse: &Treehouse, paths: &Paths<'_>, input: &str) -> String {
pub fn render(
    config: &Config,
    treehouse: &Treehouse,
    paths: &Paths<'_>,
    pics_fs: &dyn ReadFilesystem,
    input: &str,
) -> String {
    let mut renderer = Renderer {
        lexer: Lexer::new(input),
        output: String::new(),
    };
    renderer.render(config, treehouse, paths);
    renderer.render(config, treehouse, paths, pics_fs);
    renderer.output
}

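A hedged sketch of what a call site for the new `render` signature could look like, using the `CdExt::cd` combinator introduced later in this commit to scope the filesystem handed over as `pics_fs`. It is written as if it sat in the same module as `render`; the "static/pic" subpath is an assumption for illustration, not taken from the commit:

    use crate::vfs::{CdExt, ReadFilesystem, VPathBuf};

    fn render_with_pics(
        config: &Config,
        treehouse: &Treehouse,
        paths: &Paths<'_>,
        root_fs: &dyn ReadFilesystem,
        input: &str,
    ) -> String {
        // Hand the renderer only the subtree where pictures live.
        let pics_fs = root_fs.cd(VPathBuf::new("static/pic"));
        render(config, treehouse, paths, &pics_fs, input)
    }
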
@@ -1,11 +1,25 @@
use std::{borrow::Borrow, fmt, ops::Deref};
use std::{
    borrow::Borrow,
    fmt::{self, Debug},
    ops::{ControlFlow, Deref},
};

use anyhow::ensure;
use serde::{Deserialize, Serialize};

pub mod empty;
pub mod file;
pub mod mount_points;
pub mod physical;
mod anchored;
mod cd;
mod empty;
mod file;
mod mount_points;
mod physical;

pub use anchored::*;
pub use cd::*;
pub use empty::*;
pub use file::*;
pub use mount_points::*;
pub use physical::*;

#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPath {

@@ -19,12 +33,12 @@ impl VPath {
    pub fn try_new(s: &str) -> anyhow::Result<&Self> {
        ensure!(
            !s.ends_with(Self::SEPARATOR),
            "path must not end with '{}'",
            "path must not end with '{}' (got {s:?})",
            Self::SEPARATOR
        );
        ensure!(
            !s.starts_with(Self::SEPARATOR),
            "paths are always absolute and must not start with '{}'",
            "paths are always absolute and must not start with '{}' (got {s:?})",
            Self::SEPARATOR
        );

@@ -41,8 +55,11 @@ impl VPath {

    pub fn try_join(&self, sub: &str) -> anyhow::Result<VPathBuf> {
        let mut buf = VPathBuf::from(self);
        if !sub.is_empty() {
            let sub = VPath::try_new(sub)?;
            buf.path.push('/');
            buf.path.push_str(&sub.path);
        }
        Ok(buf)
    }

@@ -51,6 +68,9 @@ impl VPath {
    }

    pub fn strip_prefix(&self, prefix: &VPath) -> Option<&Self> {
        if self == prefix {
            Some(VPath::ROOT)
        } else {
            self.path
                .strip_prefix(&prefix.path)
                .and_then(|p| p.strip_prefix('/'))

@@ -58,11 +78,49 @@ impl VPath {
                // nor a leading slash.
                .map(|p| unsafe { VPath::new_unchecked(p) })
        }
    }

    pub fn depth(&self) -> usize {
        self.path.chars().filter(|&c| c == Self::SEPARATOR).count()
    }

    pub fn segments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().split(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn rsegments(&self) -> impl Iterator<Item = &Self> {
        self.as_str().rsplit(Self::SEPARATOR).map(|s| unsafe {
            // SAFETY: Since we're splitting on the separator, the path cannot start or end with it.
            Self::new_unchecked(s)
        })
    }

    pub fn file_name(&self) -> Option<&str> {
        self.rsegments().next().map(Self::as_str)
    }

    pub fn extension(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        let (left, right) = file_name.rsplit_once('.')?;
        if left.is_empty() {
            None
        } else {
            Some(right)
        }
    }

    pub fn file_stem(&self) -> Option<&str> {
        let file_name = self.file_name()?;
        if let Some(extension) = self.extension() {
            Some(&file_name[..file_name.len() - extension.len() - 1])
        } else {
            Some(file_name)
        }
    }

    pub fn as_str(&self) -> &str {
        &self.path
    }

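The expected behaviour of the new path helpers, spelled out as quick checks that follow directly from the definitions above (the path is made up; these are illustrative, not test cases from the commit):

    use treehouse::vfs::VPath;

    fn vpath_helper_examples() {
        let path = VPath::new("content/programming/treehouse.tree");
        assert_eq!(path.depth(), 2);
        assert_eq!(path.file_name(), Some("treehouse.tree"));
        assert_eq!(path.extension(), Some("tree"));
        assert_eq!(path.file_stem(), Some("treehouse"));
        // A leading dot alone does not count as an extension.
        assert_eq!(VPath::new(".gitignore").extension(), None);
        assert_eq!(VPath::new(".gitignore").file_stem(), Some(".gitignore"));
    }
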
@@ -82,6 +140,12 @@ impl fmt::Debug for VPath {
    }
}

impl fmt::Display for VPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VPathBuf {
    path: String,

@@ -119,6 +183,12 @@ impl fmt::Debug for VPathBuf {
    }
}

impl fmt::Display for VPathBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.path)
    }
}

impl From<&VPath> for VPathBuf {
    fn from(value: &VPath) -> Self {
        unsafe { Self::new_unchecked(value.path.to_owned()) }

@@ -131,21 +201,132 @@ impl Borrow<VPath> for VPathBuf {
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
impl<'de> Deserialize<'de> for VPathBuf {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de;

        struct Visitor;

        impl de::Visitor<'_> for Visitor {
            type Value = VPathBuf;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("virtual path")
            }

            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                VPathBuf::try_new(v).map_err(de::Error::custom)
            }
        }

        deserializer.deserialize_str(Visitor)
    }
}

impl Serialize for VPathBuf {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct DirEntry {
    pub path: VPathBuf,
}

pub trait ReadFilesystem {
pub trait ReadFilesystem: Debug {
    /// List all files under the provided path.
    fn dir(&self, path: &VPath) -> Vec<DirEntry>;

    /// Return the byte content of the entry at the given path.
    fn content(&self, path: &VPath) -> Option<Vec<u8>>;

    /// Get a string signifying the current version of the provided path's content.
    /// If the content changes, the version must also change.
    ///
    /// Returns None if there is no content or no version string is available.
    fn content_version(&self, path: &VPath) -> Option<String>;

    /// Return the byte content of the entry at the given path.
    fn content(&self, path: &VPath) -> Option<Vec<u8>>;
    /// Returns a path relative to `config.site` indicating where the file will be available
    /// once served.
    ///
    /// May return `None` if the file is not served.
    fn anchor(&self, _path: &VPath) -> Option<VPathBuf> {
        None
    }

    /// Optimization for [`ReadFilesystemCombinators::cd`] that allows for avoiding wrapping
    /// `Cd`s in `Cd`s.
    #[doc(hidden)]
    fn cd_optimization(&self, _subpath: &VPath) -> Option<Cd<'_>> {
        None
    }
}

pub trait AnchoredAtExt {
    fn anchored_at(self, at: VPathBuf) -> Anchored<Self>
    where
        Self: Sized;
}

impl<T> AnchoredAtExt for T
where
    T: ReadFilesystem,
{
    fn anchored_at(self, at: VPathBuf) -> Anchored<Self> {
        Anchored::new(self, at)
    }
}

pub trait CdExt {
    fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
    where
        Self: 'a;
}

impl CdExt for &dyn ReadFilesystem {
    fn cd<'a>(self, into: VPathBuf) -> Cd<'a>
    where
        Self: 'a,
    {
        if let Some(cd) = self.cd_optimization(&into) {
            cd
        } else {
            Cd::new(self, into)
        }
    }
}

pub fn walk_rec(
    fs: &dyn ReadFilesystem,
    path: &VPath,
    f: &mut dyn FnMut(&VPath) -> ControlFlow<(), ()>,
) {
    for entry in fs.dir(path) {
        match f(&entry.path) {
            ControlFlow::Continue(_) => (),
            ControlFlow::Break(_) => return,
        }
        walk_rec(fs, &entry.path, f);
    }
}

pub fn url(site: &str, fs: &dyn ReadFilesystem, path: &VPath) -> String {
    let Some(anchor) = fs.anchor(path) else {
        panic!("filesystem {fs:?} is not anchored anywhere and a URL of it cannot be produced")
    };
    if let Some(version) = fs.content_version(path) {
        format!("{}/{anchor}?v={version}", site)
    } else {
        format!("{}/{anchor}", site)
    }
}

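How `anchor`, `content_version`, and `url` fit together: mounting a directory with `anchored_at` is what lets `url` produce a site-relative, cache-busted link. A sketch under assumed inputs (the site string and asset path are placeholders, not taken from the commit):

    use std::path::PathBuf;

    use treehouse::vfs::{url, AnchoredAtExt, PhysicalDir, VPath, VPathBuf};

    fn static_asset_url() -> String {
        let static_fs =
            PhysicalDir::new(PathBuf::from("static")).anchored_at(VPathBuf::new("static"));
        // Produces "https://example.com/static/css/tree.css?v=<version>" when a content
        // version is available, and the same URL without "?v=..." otherwise.
        url("https://example.com", &static_fs, VPath::new("css/tree.css"))
    }
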
crates/treehouse/src/vfs/anchored.rs (new file, +44)
@@ -0,0 +1,44 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};

pub struct Anchored<T> {
    inner: T,
    at: VPathBuf,
}

impl<T> Anchored<T> {
    pub fn new(inner: T, at: VPathBuf) -> Self {
        Self { inner, at }
    }
}

impl<T> ReadFilesystem for Anchored<T>
where
    T: ReadFilesystem,
{
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.inner.dir(path)
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.inner.content(path)
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.inner.content_version(path)
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        Some(self.at.join(path.as_str()))
    }
}

impl<T> fmt::Debug for Anchored<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Anchored({:?}, {})", self.inner, self.at)
    }
}
crates/treehouse/src/vfs/cd.rs (new file, +52)
@@ -0,0 +1,52 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};

pub struct Cd<'fs> {
    parent: &'fs dyn ReadFilesystem,
    path: VPathBuf,
}

impl<'fs> Cd<'fs> {
    pub fn new(parent: &'fs dyn ReadFilesystem, path: VPathBuf) -> Self {
        Self { parent, path }
    }
}

impl ReadFilesystem for Cd<'_> {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
        self.parent
            .dir(&self.path.join(path.as_str()))
            .into_iter()
            .map(|entry| DirEntry {
                path: entry
                    .path
                    .strip_prefix(&self.path)
                    .expect("all entries must be anchored within `self.path`")
                    .to_owned(),
            })
            .collect()
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        self.parent.content_version(&self.path.join(path.as_str()))
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        self.parent.content(&self.path.join(path.as_str()))
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        self.parent.anchor(&self.path.join(path.as_str()))
    }

    fn cd_optimization(&self, subpath: &VPath) -> Option<Cd<'_>> {
        Some(Cd::new(self, subpath.to_owned()))
    }
}

impl fmt::Debug for Cd<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}/{:?}", self.parent, self.path)
    }
}

@@ -1,17 +1,18 @@
use super::{DirEntry, ReadFilesystem, VPath};

#[derive(Debug)]
pub struct EmptyFilesystem;

impl ReadFilesystem for EmptyFilesystem {
    fn dir(&self, path: &VPath) -> Vec<DirEntry> {
    fn dir(&self, _path: &VPath) -> Vec<DirEntry> {
        vec![]
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
    fn content_version(&self, _path: &VPath) -> Option<String> {
        None
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
    fn content(&self, _path: &VPath) -> Option<Vec<u8>> {
        None
    }
}

@@ -1,3 +1,5 @@
use std::fmt;

use super::{DirEntry, ReadFilesystem, VPath};

pub struct BufferedFile {

@@ -28,3 +30,9 @@ impl ReadFilesystem for BufferedFile {
        }
    }
}

impl fmt::Debug for BufferedFile {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "BufferedFile")
    }
}

@@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::{collections::HashMap, fmt};

use super::{DirEntry, ReadFilesystem, VPath, VPathBuf};

@@ -10,6 +10,7 @@ enum Resolved<'fs, 'path> {
    Root,
    MountPoint {
        fs: &'fs dyn ReadFilesystem,
        fs_path: &'path VPath,
        subpath: &'path VPath,
    },
    None,

@@ -44,6 +45,7 @@ impl MountPoints {
        if let Some(mount_point) = self.mount_points.get(mount_point_name) {
            return Resolved::MountPoint {
                fs: &**mount_point,
                fs_path: VPath::new(mount_point_name),
                subpath: path
                    .strip_prefix(VPath::new(mount_point_name))
                    .expect("path should have `mount_point_name` as its prefix"),

@@ -71,22 +73,57 @@ impl ReadFilesystem for MountPoints {
                    path: VPathBuf::new(name),
                })
                .collect(),
            Resolved::MountPoint { fs, subpath } => fs.dir(subpath),
            Resolved::MountPoint {
                fs,
                fs_path,
                subpath,
            } => fs
                .dir(subpath)
                .into_iter()
                .map(|entry| DirEntry {
                    path: fs_path.join(entry.path.as_str()),
                })
                .collect(),
            Resolved::None => vec![],
        }
    }

    fn content_version(&self, path: &VPath) -> Option<String> {
        match self.resolve(path) {
            Resolved::MountPoint { fs, subpath } => fs.content_version(subpath),
            Resolved::MountPoint {
                fs,
                fs_path: _,
                subpath,
            } => fs.content_version(subpath),
            Resolved::Root | Resolved::None => None,
        }
    }

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        match self.resolve(path) {
            Resolved::MountPoint { fs, subpath } => fs.content(subpath),
            Resolved::MountPoint {
                fs,
                fs_path: _,
                subpath,
            } => fs.content(subpath),
            Resolved::Root | Resolved::None => None,
        }
    }

    fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
        match self.resolve(path) {
            Resolved::MountPoint {
                fs,
                fs_path: _,
                subpath,
            } => fs.anchor(subpath),
            Resolved::Root | Resolved::None => None,
        }
    }
}

impl fmt::Debug for MountPoints {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("MountPoints")
    }
}

@@ -18,14 +18,17 @@ impl PhysicalDir {
impl ReadFilesystem for PhysicalDir {
    fn dir(&self, vpath: &VPath) -> Vec<DirEntry> {
        let physical = self.root.join(physical_path(vpath));
        if !physical.is_dir() {
            return vec![];
        }

        match std::fs::read_dir(physical) {
            Ok(read_dir) => read_dir
                .filter_map(|entry| {
                    entry
                        .inspect_err(|err| {
                            error!(
                                "PhysicalDir {:?} error while reading entries in vpath {vpath:?}: {err:?}",
                                self.root
                                "{self:?} error while reading entries: {err:?}",
                            )
                        })
                        .ok()

@@ -34,13 +37,13 @@ impl ReadFilesystem for PhysicalDir {
                    let path_str = match path.strip_prefix(&self.root).unwrap_or(&path).to_str() {
                        Some(p) => p,
                        None => {
                            error!("PhysicalDir {:?} entry {path:?} has invalid UTF-8 (while reading vpath {vpath:?})", self.root);
                            error!("{self:?} entry {path:?} has invalid UTF-8 (while reading vpath {vpath:?})");
                            return None;
                        },
                    };
                    let vpath_buf = VPathBuf::try_new(path_str.replace('\\', "/"))
                        .inspect_err(|err| {
                            error!("PhysicalDir {:?} error with vpath for {path_str:?}: {err:?}", self.root);
                            error!("{self:?} error with vpath for {path_str:?}: {err:?}");
                        })
                        .ok()?;
                    Some(DirEntry { path: vpath_buf })

@@ -49,8 +52,7 @@ impl ReadFilesystem for PhysicalDir {
                .collect(),
            Err(err) => {
                error!(
                    "PhysicalDir {:?} cannot read vpath {vpath:?}: {err:?}",
                    self.root
                    "{self:?} cannot read vpath {vpath:?}: {err:?}",
                );
                vec![]
            }

@@ -63,12 +65,7 @@ impl ReadFilesystem for PhysicalDir {

    fn content(&self, path: &VPath) -> Option<Vec<u8>> {
        std::fs::read(self.root.join(physical_path(path)))
            .inspect_err(|err| {
                error!(
                    "PhysicalDir {:?} cannot read file at vpath {path:?}: {err:?}",
                    self.root
                )
            })
            .inspect_err(|err| error!("{self:?} cannot read file at vpath {path:?}: {err:?}",))
            .ok()
    }
}

@@ -1,3 +1,4 @@
mod cd;
mod empty;
mod file;
mod mount_points;

crates/treehouse/tests/it/vfs/cd.rs (new file, +102)
@@ -0,0 +1,102 @@
use treehouse::vfs::{
    BufferedFile, Cd, CdExt, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf,
};

const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<";

fn vfs() -> MountPoints {
    let file1 = BufferedFile::new(HEWWO.to_vec());
    let file2 = BufferedFile::new(FWOOFEE.to_vec());
    let file3 = BufferedFile::new(BOOP.to_vec());

    let mut innermost = MountPoints::new();
    innermost.add(VPath::new("file3.txt"), Box::new(file3));

    let mut inner = MountPoints::new();
    inner.add(VPath::new("file1.txt"), Box::new(file1));
    inner.add(VPath::new("file2.txt"), Box::new(file2));
    inner.add(VPath::new("innermost"), Box::new(innermost));

    let mut vfs = MountPoints::new();
    vfs.add(VPath::new("inner"), Box::new(inner));
    vfs
}

#[test]
fn dir1() {
    let outer = vfs();
    let inner = Cd::new(&outer, VPathBuf::new("inner"));

    let mut dir = inner.dir(VPath::ROOT);
    dir.sort();
    assert_eq!(
        dir,
        vec![
            DirEntry {
                path: VPathBuf::new("file1.txt"),
            },
            DirEntry {
                path: VPathBuf::new("file2.txt"),
            },
            DirEntry {
                path: VPathBuf::new("innermost"),
            }
        ]
    );
}

#[test]
fn dir2() {
    let outer = vfs();
    let outer: &dyn ReadFilesystem = &outer;
    let inner: &dyn ReadFilesystem = &outer.cd(VPathBuf::new("inner"));
    let innermost = inner.cd(VPathBuf::new("innermost"));

    let mut dir = innermost.dir(VPath::ROOT);
    dir.sort();
    assert_eq!(
        dir,
        vec![DirEntry {
            path: VPathBuf::new("file3.txt"),
        },]
    );
}

#[test]
fn dir3() {
    let outer = vfs();
    let innermost = Cd::new(&outer, VPathBuf::new("inner/innermost"));

    let mut dir = innermost.dir(VPath::ROOT);
    dir.sort();
    assert_eq!(
        dir,
        vec![DirEntry {
            path: VPathBuf::new("file3.txt"),
        },]
    );
}

#[test]
fn content_version() {
    let outer = vfs();
    let inner = Cd::new(&outer, VPathBuf::new("inner"));

    assert_eq!(
        inner.content_version(VPath::new("test1.txt")),
        outer.content_version(VPath::new("inner/test1.txt"))
    );
}

#[test]
fn content() {
    let outer = vfs();
    let inner = Cd::new(&outer, VPathBuf::new("inner"));

    assert_eq!(
        inner.content(VPath::new("test1.txt")),
        outer.content(VPath::new("inner/test1.txt"))
    );
}

@@ -1,4 +1,4 @@
use treehouse::vfs::{empty::EmptyFilesystem, ReadFilesystem, VPath};
use treehouse::vfs::{EmptyFilesystem, ReadFilesystem, VPath};

#[test]
fn dir() {

@@ -1,4 +1,4 @@
use treehouse::vfs::{file::BufferedFile, ReadFilesystem, VPath};
use treehouse::vfs::{BufferedFile, ReadFilesystem, VPath};

fn vfs() -> BufferedFile {
    BufferedFile::new(b"hewwo :3".to_vec())

@@ -1,14 +1,13 @@
use std::path::Path;
use treehouse::vfs::{BufferedFile, DirEntry, MountPoints, ReadFilesystem, VPath, VPathBuf};

use treehouse::vfs::{
    file::BufferedFile, mount_points::MountPoints, physical::PhysicalDir, DirEntry, ReadFilesystem,
    VPath, VPathBuf,
};
const HEWWO: &[u8] = b"hewwo :3";
const FWOOFEE: &[u8] = b"fwoofee -w-";
const BOOP: &[u8] = b"boop >w<";

fn vfs() -> MountPoints {
    let file1 = BufferedFile::new(b"hewwo :3".to_vec());
    let file2 = BufferedFile::new(b"fwoofee -w-".to_vec());
    let file3 = BufferedFile::new(b"boop >w<".to_vec());
    let file1 = BufferedFile::new(HEWWO.to_vec());
    let file2 = BufferedFile::new(FWOOFEE.to_vec());
    let file3 = BufferedFile::new(BOOP.to_vec());

    let mut inner = MountPoints::new();
    inner.add(VPath::new("file3.txt"), Box::new(file3));

@@ -24,8 +23,10 @@ fn vfs() -> MountPoints {
fn dir() {
    let vfs = vfs();

    let mut dir = vfs.dir(VPath::new(""));
    dir.sort();
    assert_eq!(
        vfs.dir(VPath::new("")),
        dir,
        vec![
            DirEntry {
                path: VPathBuf::new("file1.txt"),

@@ -44,7 +45,44 @@ fn dir() {
    assert_eq!(
        vfs.dir(VPath::new("inner")),
        vec![DirEntry {
            path: VPathBuf::new("file3.txt")
            path: VPathBuf::new("inner/file3.txt")
        }]
    );
}

#[test]
fn content_version() {
    let vfs = vfs();

    let file1 = BufferedFile::new(HEWWO.to_vec());
    let file2 = BufferedFile::new(FWOOFEE.to_vec());
    let file3 = BufferedFile::new(BOOP.to_vec());

    assert_eq!(
        vfs.content_version(VPath::new("file1.txt")),
        file1.content_version(VPath::ROOT)
    );
    assert_eq!(
        vfs.content_version(VPath::new("file2.txt")),
        file2.content_version(VPath::ROOT)
    );
    assert_eq!(
        vfs.content_version(VPath::new("inner/file3.txt")),
        file3.content_version(VPath::ROOT)
    );
}

#[test]
fn content() {
    let vfs = vfs();

    assert_eq!(vfs.content(VPath::new("file1.txt")).as_deref(), Some(HEWWO));
    assert_eq!(
        vfs.content(VPath::new("file2.txt")).as_deref(),
        Some(FWOOFEE)
    );
    assert_eq!(
        vfs.content(VPath::new("inner/file3.txt")).as_deref(),
        Some(BOOP)
    );
}

@@ -1,6 +1,6 @@
use std::path::Path;

use treehouse::vfs::{physical::PhysicalDir, DirEntry, ReadFilesystem, VPath, VPathBuf};
use treehouse::vfs::{DirEntry, PhysicalDir, ReadFilesystem, VPath, VPathBuf};

fn vfs() -> PhysicalDir {
    let root = Path::new("tests/it/vfs_physical").to_path_buf();

@@ -11,10 +11,11 @@
    <link rel="stylesheet" href="{{ asset 'css/icons.css' }}">
    <link rel="stylesheet" href="{{ asset 'css/tree.css' }}">

    {{!-- Import maps currently don't support the src="" attribute. Unless we come up with something
    clever to do while browser vendors figure that out, we'll just have to do a cache-busting include_static. --}}
    {{!-- <script type="importmap" src="{{ asset 'generated/import-map.json' }}"></script> --}}
    <script type="importmap">{{{ include_static 'generated/import-map.json' }}}</script>
    {{!--
        Import maps currently don't support the src="" attribute. Unless we come up with something
        clever to do while browser vendors figure that out, we'll just have to do a cache-busting string substitution.
    --}}
    <script type="importmap">{{{ import_map }}}</script>

    <script>
        const TREEHOUSE_SITE = `{{ config.site }}`;

@@ -21,7 +21,7 @@
        }
    </style>

    <script type="importmap">{{{ include_static 'generated/import-map.json' }}}</script>
    <script type="importmap">{{{ import_map }}}</script>

    <script type="module">
        import { evaluate, domConsole, jsConsole } from "treehouse/components/literate-programming/eval.js";

@@ -5,9 +5,6 @@
# This variable can also be set using the TREEHOUSE_SITE environment variable.
site = ""

# TODO djot: Remove once transition is over.
markup = "Djot"

# This is used to generate a link in the footer that links to the page's source commit.
# The final URL is `{commit_base_url}/{commit}/content/{tree_path}.tree`.
commit_base_url = "https://src.liquidev.net/liquidex/treehouse/src/commit"