Compare commits

..

4 commits

24 changed files with 987 additions and 421 deletions

Cargo.lock (generated, 1 addition)
View file

@@ -366,6 +366,7 @@ dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
"serde",
"wasm-bindgen",
"windows-targets",
]

View file

@@ -4,4 +4,4 @@
- seems like the page you're looking for isn't here.
% id = "01HMF8KQ99XNMEP67NE3QH5698"
- care to go [back to the index][branch:treehouse]?
- care to go [back to the index][page:index]?

View file

@@ -1,5 +1,6 @@
%% title = "a curated feed of updates to the house"
styles = ["new.css"]
feed = "new"
% id = "01JCGWPM6T73PAC5Q8YHPBEAA1"
+ hello!
@@ -11,10 +12,8 @@ if you've been wondering what I've been up to, you've come to the right place.
% id = "01JCGWPM6TGQ17JPSJW8G58SB0"
- you can keep track of which posts you've read by looking at the color of the links.
% id = "01JCGWPM6TMAJT0B50GQSA4BDW"
- there is currently no RSS or Atom feed for this page, sorry!
% id = "01JDJJSEWASRWJGKMBNYMFD9B5"
tags = ["programming", "treehouse"]
- ### [composable virtual file systems][page:programming/blog/vfs]
% id = "01JDJJSEWAVZGJN3PWY94SJMXT"
@@ -24,15 +23,18 @@ if you've been wondering what I've been up to, you've come to the right place.
- this is an exploration of how I built my abstraction, how it works, and what I learned from it.
% id = "01JCGAM553TJJCEJ96ADEWETQC"
tags = ["programming", "c", "cxx"]
- ### [prefix matches with C strings][page:programming/blog/cstring-starts-with]
% id = "01JBAGZAZ30K443QYPK0XBNZWM"
tags = ["music"]
- ### [the curious case of Amon Tobin's Creatures][page:music/creatures]
% id = "01JBAGZAZ3NKBED4M9FANR5RPZ"
- a weird anomaly I noticed while listening to some breaks
% id = "01J8ZP2EG9TM8320R9E3K1GQEC"
tags = ["music"]
- ### [I Don't Love Me Anymore][page:music/reviews/opn/i-dont-love-me-anymore]
% id = "01J8ZP2EG96VQ2ZK0XYK0FK1NR"
@@ -42,6 +44,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- it's also a nice opportunity to say that I've refreshed the music section a bit!
% id = "01J7C1KBZ58BR21AVFA1PMWV68"
tags = ["programming", "treehouse"]
- ### [not quite buildless][page:programming/blog/buildsome]
% id = "01J7C1KBZ5XKZRN4V5BWFQTV6Y"
@@ -57,6 +60,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- also, it's (way past) its one year anniversary! hooray!
% id = "01J73BSWA15KHTQ21T0S14NZW0"
tags = ["music", "programming"]
- ### [the ListenBrainz data set][page:music/brainz]
% id = "01J73BSWA1EX7ZP28KCCG088DD"
@@ -66,6 +70,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- I haven't done any of it yet, but I thought it'd be cool to share my ideas anyways!
% id = "01J4J5N6WZQ03VTB3TZ51J7QZK"
tags = ["programming", "plt", "haku"]
- ### [haku - writing a little programming language for fun][page:programming/blog/haku]
% id = "01J4J5N6WZQ1316WKDXB1M5W6E"
@@ -79,6 +84,7 @@ if you've been wondering what I've been up to, you've come to the right place.
even though it didn't end up having macros...
% id = "01J293BFEBT15W0Z3XF1HEFGZT"
tags = ["programming", "javascript", "plt"]
- ### [JavaScript is not as bad as people make it out to be][page:programming/languages/javascript]
% id = "01J293BFEB4G7214N20SZA8V7W"
@@ -88,6 +94,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- so I decided to collect my thoughts into a nice little page I can link easily.
% id = "01J0VNHPTRNC1HFXAQ790Y1EZB"
tags = ["programming", "cxx"]
- ### [freeing C memory automatically using `std::unique_ptr` and `std::shared_ptr`][page:programming/languages/cxx/shared-unique-ptr-deleter]
% id = "01J0VNHPTRP51XYDA4N2RPG58F"
@@ -100,6 +107,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- on another note, I did read a blog post about this once somewhere, but couldn't be bothered to find it. so there you go! I made a post about this too.
% id = "01J0KRPMV7SS48B64BFCJZK7VQ"
tags = ["meow"]
- ### [about me (version 2)][page:about]
% id = "01J0KRPMV73K71D3QXFQ3GNY2N"
@@ -110,13 +118,15 @@ if you've been wondering what I've been up to, you've come to the right place.
- [version 1][page:about/v1]
% id = "01HY5R1ZW2PYZSSP2J2KAA23DA"
tags = ["programming", "c", "cxx", "plt"]
- ### [what's up with `*x` not always meaning the same thing in different contexts?][page:programming/blog/lvalues]
% id = "01HY5R1ZW24YJ2NF2RYWRZG4ZT"
- I recently got a question from my someone telling me they doesn't understand why `*x` does not read from the pointer `x` when on the left-hand side of an assignment.
- I recently got a question from my someone telling me they don't understand why `*x` does not read from the pointer `x` when on the left-hand side of an assignment.
and that made me think, _why_ is that the case?
% id = "01HV1DGFHZ65GJVQRSREKR67J9"
tags = ["programming", "philosophy"]
- ### [systems are just a bunch of code][page:programming/blog/systems]
% id = "01HV1DGFHZFFZSQNCVWBTJ1VHM"
@@ -129,18 +139,21 @@ if you've been wondering what I've been up to, you've come to the right place.
- bonus: [dismantling Unreal Engine's `GENERATED_BODY`][page:programming/technologies/unreal-engine/generated-body]
% id = "01HTWNETT2S5NSBF3QR4HYA7HN"
tags = ["programming", "plt"]
- ### [OR-types][page:programming/blog/or-types]
% id = "01HTWNETT2N8NPENETWYFBTXEM"
- last night I couldn't sleep because of type theory. in the process of trying to write down my thoughts, I ended up discovering a class of types which, to my knowledge, no language implements.
% id = "01HRG3VN091V715A8T54QK5PVX"
tags = ["programming", "plt", "lua"]
- ### [programming languages: Lua][page:programming/languages/lua]
% id = "01HRG3VN095BNHERHWVX1TKS9K"
- I really like Lua, did you know that? but I get kind of tired of explaining why a thousand times to people who don't know the language, so here's a page with my thoughts!
% id = "01HR9ZTS8RS4VJNJYSNRQYSKHZ"
tags = ["design"]
- ### [design: sidebars][page:design/sidebars]
% id = "01HR9ZTS8RY3N4EJM5W7WBTF0G"
@@ -150,6 +163,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- seriously though. I don't like them.
% id = "01HQ8KV8T8GRCVFDJ3EP6QE163"
tags = ["design"]
- ### [liquidex's treehouse: design][page:design]
% id = "01HQ8KV8T8EEX6XBG2K1X3FGKW"
@@ -161,6 +175,7 @@ if you've been wondering what I've been up to, you've come to the right place.
- I also wrote a post summarising my thoughts: [_on digital textures_][page:design/digital-textures]
% id = "01HQ6G30PTVT5H0Z04VVRHEZQF"
tags = ["programming", "graphics", "javascript"]
- ### [tairu - an interactive exploration of 2D autotiling techniques][page:programming/blog/tairu]
% id = "01HQ6G30PTG8QA5MAPEJPWSM14"
@@ -168,5 +183,3 @@ if you've been wondering what I've been up to, you've come to the right place.
% id = "01HQ6G30PT1D729Z29NYVDCFDB"
- this post explores basically just that.

View file

@@ -11,7 +11,7 @@ anyhow = "1.0.75"
axum = "0.7.4"
base64 = "0.21.7"
blake3 = "1.5.3"
chrono = "0.4.35"
chrono = { version = "0.4.35", features = ["serde"] }
clap = { version = "4.3.22", features = ["derive"] }
codespan-reporting = "0.11.1"
dashmap = "6.1.0"

View file

@@ -18,7 +18,7 @@ use serde::Deserialize;
use tokio::net::TcpListener;
use tracing::{info, instrument};
use crate::generate::Sources;
use crate::sources::Sources;
use crate::vfs::asynch::AsyncDir;
use crate::vfs::VPath;
use crate::{html::EscapeHtml, state::Source};
@@ -59,6 +59,7 @@ fn get_content_type(extension: &str) -> Option<&'static str> {
"js" => Some("text/javascript"),
"woff" => Some("font/woff2"),
"svg" => Some("image/svg+xml"),
"atom" => Some("application/atom+xml"),
_ => None,
}
}

View file

@@ -1,4 +1,7 @@
use std::{collections::HashMap, ops::ControlFlow};
use std::{
collections::{HashMap, HashSet},
ops::ControlFlow,
};
use anyhow::{anyhow, Context};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
@@ -31,6 +34,9 @@ pub struct Config {
/// Links exported to Markdown for use with reference syntax `[text][def:key]`.
pub defs: HashMap<String, String>,
/// Config for syndication feeds.
pub feed: Feed,
/// Redirects for moving pages around. These are used solely by the treehouse server.
///
/// Note that redirects are only resolved _non-recursively_ by the server. For a configuration
@@ -74,6 +80,12 @@ pub struct Config {
pub syntaxes: HashMap<String, CompiledSyntax>,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Feed {
/// Allowed tags in feed entries.
pub tags: HashSet<String>,
}
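As a hedged aside (not part of the diff itself): the allowed-tag set above is filled from the `[feed]` table added to treehouse.toml at the end of this comparison. A minimal standalone sketch of that deserialization, using the same `toml_edit::de` entry point the treehouse already uses for its config, with toy `Config`/`Feed` types standing in for the real ones:

use std::collections::HashSet;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Feed {
    tags: HashSet<String>,
}

#[derive(Debug, Deserialize)]
struct Config {
    feed: Feed,
}

fn main() -> anyhow::Result<()> {
    // Mirrors the shape of the `[feed]` table in treehouse.toml; branch tags
    // outside this set are what the new sema check warns about.
    let config: Config = toml_edit::de::from_str(
        r#"
        [feed]
        tags = ["programming", "music", "treehouse"]
        "#,
    )?;
    assert!(config.feed.tags.contains("treehouse"));
    Ok(())
}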
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Redirects {
/// Page redirects. When a user navigates to a page, if they navigate to `url`, they will

View file

@@ -1,13 +1,15 @@
mod atom;
mod dir_helper;
mod include_static_helper;
mod simple_template;
mod tree;
use std::{collections::HashMap, fmt, ops::ControlFlow, sync::Arc};
use anyhow::{anyhow, ensure, Context};
use atom::FeedDir;
use dir_helper::DirHelper;
use handlebars::{handlebars_helper, Handlebars};
use include_static_helper::IncludeStaticHelper;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use serde::Serialize;
use tracing::{error, info_span, instrument};
@@ -15,49 +17,56 @@ use crate::{
config::Config,
dirs::Dirs,
fun::seasons::Season,
html::{breadcrumbs::breadcrumbs_to_html, navmap::NavigationMap, tree::branches_to_html},
import_map::ImportMap,
parse::parse_tree_with_diagnostics,
state::{report_diagnostics, FileId, Source},
tree::SemaRoots,
sources::Sources,
vfs::{
self, Cd, ContentCache, Dir, DirEntry, DynDir, EditPath, ImageSize, MemDir, Overlay,
ToDynDir, VPath, VPathBuf,
self, Cd, ContentCache, Dir, DirEntry, DynDir, HtmlCanonicalize, MemDir, Overlay, ToDynDir,
VPath, VPathBuf,
},
};
use crate::state::Treehouse;
#[derive(Serialize)]
struct Page {
title: String,
thumbnail: Option<Thumbnail>,
scripts: Vec<String>,
styles: Vec<String>,
breadcrumbs: String,
tree_path: Option<String>,
tree: String,
}
#[derive(Serialize)]
struct Thumbnail {
url: String,
alt: Option<String>,
}
#[derive(Serialize)]
struct BaseTemplateData<'a> {
config: &'a Config,
import_map: String,
season: Option<Season>,
dev: bool,
feeds: Vec<String>,
}
#[derive(Serialize)]
struct PageTemplateData<'a> {
#[serde(flatten)]
base: &'a BaseTemplateData<'a>,
page: Page,
impl<'a> BaseTemplateData<'a> {
fn new(sources: &'a Sources) -> Self {
Self {
config: &sources.config,
import_map: serde_json::to_string_pretty(&sources.import_map)
.expect("import map should be serializable to JSON"),
season: Season::current(),
dev: cfg!(debug_assertions),
feeds: sources.treehouse.feeds_by_name.keys().cloned().collect(),
}
}
}
struct TreehouseDir {
dirs: Arc<Dirs>,
sources: Arc<Sources>,
handlebars: Arc<Handlebars<'static>>,
dir_index: DirIndex,
}
impl TreehouseDir {
fn new(
dirs: Arc<Dirs>,
sources: Arc<Sources>,
handlebars: Arc<Handlebars<'static>>,
dir_index: DirIndex,
) -> Self {
Self {
dirs,
sources,
handlebars,
dir_index,
}
}
}
fn create_handlebars(site: &str, static_: DynDir) -> Handlebars<'static> {
@@ -90,296 +99,6 @@ fn load_templates(handlebars: &mut Handlebars, dir: &dyn Dir) {
});
}
#[instrument(skip(config, dirs))]
fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
let mut treehouse = Treehouse::new();
let mut diagnostics = vec![];
let mut parsed_trees = HashMap::new();
let mut paths = vec![];
vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
if path.extension() == Some("tree") {
paths.push(path.to_owned());
}
ControlFlow::Continue(())
});
// NOTE: Sources are filled in later; they can be left out until a call to report_diagnostics.
let file_ids: Vec<_> = paths
.iter()
.map(|path| treehouse.add_file(path.clone(), Source::Other(String::new())))
.collect();
let parse_results: Vec<_> = {
let _span = info_span!("load_trees::parse").entered();
paths
.into_par_iter()
.zip(&file_ids)
.flat_map(|(path, &file_id)| {
dirs.content
.content(&path)
.and_then(|b| String::from_utf8(b).ok())
.map(|input| {
let parse_result = parse_tree_with_diagnostics(file_id, &input);
(path, file_id, input, parse_result)
})
})
.collect()
};
for (path, file_id, input, _) in &parse_results {
let tree_path = path.with_extension("");
treehouse
.files_by_tree_path
.insert(tree_path.clone(), *file_id);
treehouse.set_source(
*file_id,
Source::Tree {
input: input.clone(),
tree_path,
},
);
}
{
let _span = info_span!("load_trees::sema").entered();
for (path, file_id, _, result) in parse_results {
match result {
Ok(roots) => {
let roots = SemaRoots::from_roots(
&mut treehouse,
&mut diagnostics,
config,
file_id,
roots,
);
treehouse.roots.insert(file_id, roots);
parsed_trees.insert(path, file_id);
}
Err(mut parse_diagnostics) => diagnostics.append(&mut parse_diagnostics),
}
}
}
report_diagnostics(&treehouse, &diagnostics)?;
Ok(treehouse)
}
#[instrument(skip(sources, handlebars))]
fn generate_simple_template(
sources: &Sources,
handlebars: &Handlebars,
template_name: &str,
) -> anyhow::Result<String> {
let base_template_data = BaseTemplateData {
config: &sources.config,
import_map: serde_json::to_string_pretty(&sources.import_map)
.expect("import map should be serializable to JSON"),
season: Season::current(),
dev: cfg!(debug_assertions),
};
handlebars
.render(template_name, &base_template_data)
.context("failed to render template")
}
fn generate_simple_template_or_error(
sources: &Sources,
handlebars: &Handlebars,
template_name: &str,
) -> String {
match generate_simple_template(sources, handlebars, template_name) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
}
#[instrument(skip(sources, dirs, handlebars))]
fn generate_tree(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> anyhow::Result<String> {
let breadcrumbs = breadcrumbs_to_html(&sources.config, &sources.navigation_map, file_id);
let roots = sources
.treehouse
.roots
.get(&file_id)
.expect("tree should have been added to the treehouse");
let tree = {
let _span = info_span!("generate_tree::branches_to_html").entered();
let mut tree = String::new();
branches_to_html(
&mut tree,
&sources.treehouse,
&sources.config,
dirs,
file_id,
&roots.branches,
);
tree
};
let base_template_data = BaseTemplateData {
config: &sources.config,
import_map: serde_json::to_string_pretty(&sources.import_map)
.expect("import map should be serializable to JSON"),
season: Season::current(),
dev: cfg!(debug_assertions),
};
let template_data = PageTemplateData {
base: &base_template_data,
page: Page {
title: roots.attributes.title.clone(),
thumbnail: roots
.attributes
.thumbnail
.as_ref()
.map(|thumbnail| Thumbnail {
url: sources.config.pic_url(&*dirs.pic, &thumbnail.id),
alt: thumbnail.alt.clone(),
}),
scripts: roots.attributes.scripts.clone(),
styles: roots.attributes.styles.clone(),
breadcrumbs,
tree_path: sources.treehouse.tree_path(file_id).map(|s| s.to_string()),
tree,
},
};
let template_name = roots
.attributes
.template
.clone()
.unwrap_or_else(|| "_tree.hbs".into());
ensure!(
handlebars.has_template(&template_name),
"template {template_name} does not exist"
);
let _span = info_span!("handlebars::render").entered();
handlebars
.render(&template_name, &template_data)
.context("template rendering failed")
}
fn generate_tree_or_error(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> String {
match generate_tree(sources, dirs, handlebars, file_id) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
}
pub struct Sources {
pub config: Config,
pub treehouse: Treehouse,
pub navigation_map: NavigationMap,
pub import_map: ImportMap,
}
impl Sources {
pub fn load(dirs: &Dirs) -> anyhow::Result<Self> {
let config = {
let _span = info_span!("load_config").entered();
let mut config: Config = toml_edit::de::from_str(
&dirs
.root
.content(VPath::new("treehouse.toml"))
.map(String::from_utf8)
.ok_or_else(|| anyhow!("config file does not exist"))??,
)
.context("failed to deserialize config")?;
config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
config.autopopulate_emoji(&*dirs.emoji)?;
config.autopopulate_pics(&*dirs.pic)?;
config.load_syntaxes(dirs.syntax.clone())?;
config
};
let treehouse = load_trees(&config, dirs)?;
let navigation_map = NavigationMap::build(
&treehouse,
treehouse.files_by_tree_path[VPath::new("index")],
);
let import_map = ImportMap::generate(
&config.site,
&Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
&config.build.javascript.import_roots,
);
Ok(Sources {
config,
treehouse,
navigation_map,
import_map,
})
}
}
/// Acceleration structure for `dir` operations on [`TreehouseDir`]s.
#[derive(Debug, Default)]
struct DirIndex {
full_path: VPathBuf,
children: HashMap<VPathBuf, DirIndex>,
}
impl DirIndex {
#[instrument(name = "DirIndex::new", skip(paths))]
pub fn new<'a>(paths: impl Iterator<Item = &'a VPath>) -> Self {
let mut root = DirIndex::default();
for path in paths {
let mut parent = &mut root;
let mut full_path = VPath::ROOT.to_owned();
for segment in path.segments() {
full_path.push(segment);
let child = parent
.children
.entry(segment.to_owned())
.or_insert_with(|| DirIndex {
full_path: full_path.clone(),
children: HashMap::new(),
});
parent = child;
}
}
root
}
}
struct TreehouseDir {
dirs: Arc<Dirs>,
sources: Arc<Sources>,
dir_index: DirIndex,
handlebars: Handlebars<'static>,
}
impl TreehouseDir {
fn new(dirs: Arc<Dirs>, sources: Arc<Sources>, dir_index: DirIndex) -> Self {
let mut handlebars = create_handlebars(&sources.config.site, dirs.static_.clone());
load_templates(&mut handlebars, &dirs.template);
Self {
dirs,
sources,
dir_index,
handlebars,
}
}
}
impl Dir for TreehouseDir {
#[instrument("TreehouseDir::dir", skip(self))]
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
@@ -418,14 +137,14 @@ impl Dir for TreehouseDir {
.files_by_tree_path
.get(path)
.map(|&file_id| {
generate_tree_or_error(&self.sources, &self.dirs, &self.handlebars, file_id).into()
tree::generate_or_error(&self.sources, &self.dirs, &self.handlebars, file_id).into()
})
.or_else(|| {
if path.file_name().is_some_and(|s| !s.starts_with('_')) {
let template_name = path.with_extension("hbs");
if self.handlebars.has_template(template_name.as_str()) {
return Some(
generate_simple_template_or_error(
simple_template::generate_or_error(
&self.sources,
&self.handlebars,
template_name.as_str(),
@@ -449,61 +168,53 @@ impl fmt::Debug for TreehouseDir {
}
}
struct HtmlCanonicalize<T> {
inner: T,
/// Acceleration structure for `dir` operations on [`TreehouseDir`]s.
#[derive(Debug, Default)]
struct DirIndex {
full_path: VPathBuf,
children: HashMap<VPathBuf, DirIndex>,
}
impl<T> HtmlCanonicalize<T> {
pub fn new(inner: T) -> Self {
Self { inner }
}
}
impl DirIndex {
#[instrument(name = "DirIndex::new", skip(paths))]
pub fn new<'a>(paths: impl Iterator<Item = &'a VPath>) -> Self {
let mut root = DirIndex::default();
impl<T> Dir for HtmlCanonicalize<T>
where
T: Dir,
{
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.inner.dir(path)
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
let mut path = path.to_owned();
if path.extension() == Some("html") {
path.set_extension("");
for path in paths {
let mut parent = &mut root;
let mut full_path = VPath::ROOT.to_owned();
for segment in path.segments() {
full_path.push(segment);
let child = parent
.children
.entry(segment.to_owned())
.or_insert_with(|| DirIndex {
full_path: full_path.clone(),
children: HashMap::new(),
});
parent = child;
}
}
self.inner.content(&path)
}
fn content_version(&self, path: &VPath) -> Option<String> {
self.inner.content_version(path)
}
fn image_size(&self, path: &VPath) -> Option<ImageSize> {
self.inner.image_size(path)
}
fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.inner.anchor(path)
}
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
self.inner.edit_path(path)
}
}
impl<T> fmt::Debug for HtmlCanonicalize<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "HtmlCanonicalize({:?})", self.inner)
root
}
}
pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
let mut handlebars = create_handlebars(&sources.config.site, dirs.static_.clone());
load_templates(&mut handlebars, &dirs.template);
let handlebars = Arc::new(handlebars);
let mut root = MemDir::new();
root.add(
VPath::new("feed"),
ContentCache::new(FeedDir::new(
dirs.clone(),
sources.clone(),
handlebars.clone(),
))
.to_dyn(),
);
root.add(VPath::new("static"), dirs.static_.clone());
root.add(
VPath::new("robots.txt"),
@@ -511,7 +222,7 @@ pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
);
let dir_index = DirIndex::new(sources.treehouse.files_by_tree_path.keys().map(|x| &**x));
let tree_view = TreehouseDir::new(dirs, sources, dir_index);
let tree_view = TreehouseDir::new(dirs, sources, handlebars, dir_index);
let tree_view = ContentCache::new(tree_view);
tree_view.warm_up();

View file

@@ -0,0 +1,302 @@
use std::{fmt, sync::Arc};
use anyhow::Context;
use chrono::{DateTime, Utc};
use handlebars::Handlebars;
use serde::Serialize;
use tracing::{info, info_span, instrument};
use ulid::Ulid;
use crate::{
dirs::Dirs,
html::djot::{self, resolve_link},
sources::Sources,
state::FileId,
tree::SemaBranchId,
vfs::{Dir, DirEntry, VPath, VPathBuf},
};
use super::BaseTemplateData;
pub struct FeedDir {
dirs: Arc<Dirs>,
sources: Arc<Sources>,
handlebars: Arc<Handlebars<'static>>,
}
impl FeedDir {
pub fn new(
dirs: Arc<Dirs>,
sources: Arc<Sources>,
handlebars: Arc<Handlebars<'static>>,
) -> Self {
Self {
dirs,
sources,
handlebars,
}
}
}
impl Dir for FeedDir {
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
if path == VPath::ROOT {
self.sources
.treehouse
.feeds_by_name
.keys()
.map(|name| DirEntry {
path: VPathBuf::new(format!("{name}.atom")),
})
.collect()
} else {
vec![]
}
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
info!("{path}");
if path.extension() == Some("atom") {
let feed_name = path.with_extension("").to_string();
self.sources
.treehouse
.feeds_by_name
.get(&feed_name)
.map(|file_id| {
generate_or_error(&self.sources, &self.dirs, &self.handlebars, *file_id).into()
})
} else {
None
}
}
fn content_version(&self, _path: &VPath) -> Option<String> {
None
}
}
impl fmt::Debug for FeedDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("FeedDir")
}
}
#[derive(Serialize)]
struct Feed {
name: String,
updated: DateTime<Utc>,
entries: Vec<Entry>,
}
#[derive(Serialize)]
struct Entry {
id: String,
updated: DateTime<Utc>,
url: String,
title: String,
categories: Vec<String>,
summary: String,
}
#[derive(Serialize)]
struct AtomTemplateData<'a> {
#[serde(flatten)]
base: &'a BaseTemplateData<'a>,
feed: Feed,
}
#[instrument(name = "atom::generate", skip(sources, handlebars))]
pub fn generate(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> anyhow::Result<String> {
let roots = &sources.treehouse.roots[&file_id];
let feed_name = roots.attributes.feed.clone().expect("page must be a feed");
let template_data = AtomTemplateData {
base: &BaseTemplateData::new(sources),
feed: Feed {
name: feed_name,
// The content cache layer should take care of sampling the current time only once,
// and then preserving it until the treehouse is deployed again.
updated: Utc::now(),
entries: extract_entries(sources, dirs, file_id),
},
};
let _span = info_span!("handlebars::render").entered();
handlebars
.render("_feed_atom.hbs", &template_data)
.context("template rendering failed")
}
pub fn generate_or_error(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> String {
match generate(sources, dirs, handlebars, file_id) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
}
fn extract_entries(sources: &Sources, dirs: &Dirs, file_id: FileId) -> Vec<Entry> {
let roots = &sources.treehouse.roots[&file_id];
roots
.branches
.iter()
.flat_map(|&branch_id| {
let branch = sources.treehouse.tree.branch(branch_id);
let text = &sources.treehouse.source(file_id).input()[branch.content.clone()];
let parsed = parse_entry(sources, dirs, file_id, jotdown::Parser::new(text));
let mut summary = String::new();
branches_to_html_simple(&mut summary, sources, dirs, file_id, &branch.children);
let updated = Ulid::from_string(&branch.attributes.id)
.ok()
.and_then(|ulid| DateTime::from_timestamp_millis(ulid.timestamp_ms() as i64))
.unwrap_or(DateTime::UNIX_EPOCH); // if you see the Unix epoch... oops
parsed.link.map(|url| Entry {
id: branch.attributes.id.clone(),
updated,
url,
title: parsed.title.unwrap_or_else(|| "untitled".into()),
categories: branch.attributes.tags.clone(),
summary,
})
})
.collect()
}
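A hedged aside on the `updated` derivation above: branch ids are ULIDs, and a ULID's leading 48 bits encode a Unix millisecond timestamp, so no separate date needs to be stored per branch. A standalone sketch of the same decoding, assuming the `ulid` and `chrono` crates used elsewhere in this diff:

use chrono::{DateTime, Utc};
use ulid::Ulid;

/// Recovers the creation time baked into a branch id such as
/// "01JCGWPM6T73PAC5Q8YHPBEAA1"; invalid ids fall back to the Unix epoch,
/// just like in `extract_entries`.
fn updated_from_branch_id(id: &str) -> DateTime<Utc> {
    Ulid::from_string(id)
        .ok()
        .and_then(|ulid| DateTime::from_timestamp_millis(ulid.timestamp_ms() as i64))
        .unwrap_or(DateTime::UNIX_EPOCH)
}

fn main() {
    println!("{}", updated_from_branch_id("01JCGWPM6T73PAC5Q8YHPBEAA1"));
}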
#[derive(Debug, Clone)]
struct ParsedEntry {
title: Option<String>,
link: Option<String>,
}
fn parse_entry(
sources: &Sources,
dirs: &Dirs,
file_id: FileId,
parser: jotdown::Parser,
) -> ParsedEntry {
let mut parser = parser.into_offset_iter();
while let Some((event, span)) = parser.next() {
if let jotdown::Event::Start(jotdown::Container::Heading { .. }, _attrs) = &event {
let mut events = vec![(event, span)];
for (event, span) in parser.by_ref() {
// To my knowledge headings cannot nest, so it's okay not keeping a stack here.
let is_heading = matches!(
event,
jotdown::Event::End(jotdown::Container::Heading { .. })
);
events.push((event, span));
if is_heading {
break;
}
}
let title_events: Vec<_> = events
.iter()
.filter(|(event, _)| {
!matches!(
event,
// A little repetitive, but I don't mind.
// The point of this is not to include extra <h3> and <a> in the link text,
// but preserve other formatting such as bold, italic, code, etc.
jotdown::Event::Start(
jotdown::Container::Link(_, _) | jotdown::Container::Heading { .. },
_
) | jotdown::Event::End(
jotdown::Container::Link(_, _) | jotdown::Container::Heading { .. }
)
)
})
.cloned()
.collect();
let mut title = String::new();
let _render_diagnostics = djot::Renderer {
config: &sources.config,
dirs,
treehouse: &sources.treehouse,
file_id,
// How. Just, stop.
page_id: "liquidex-you-reeeeeal-dummy".into(),
}
.render(&title_events, &mut title);
let link = events.iter().find_map(|(event, _)| {
if let jotdown::Event::Start(jotdown::Container::Link(link, link_type), _) = event {
Some(link_url(sources, dirs, link, *link_type))
} else {
None
}
});
return ParsedEntry {
title: (!title.is_empty()).then_some(title),
link,
};
}
}
ParsedEntry {
title: None,
link: None,
}
}
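To make the convention concrete: `parse_entry` treats the first heading of a top-level feed branch as the entry title and the first link inside that heading as the entry URL. A hedged, standalone toy showing that shape with jotdown alone (assuming `Parser` can be iterated directly; the real code goes through `into_offset_iter`, renders the title with the djot `Renderer`, and resolves `page:`/`branch:` references via `resolve_link`):

fn main() {
    // A typical feed branch, e.g. from new.tree:
    // `### [not quite buildless][page:programming/blog/buildsome]`
    let text = "### [not quite buildless][page:programming/blog/buildsome]";
    let mut link = None;
    for event in jotdown::Parser::new(text) {
        if let jotdown::Event::Start(jotdown::Container::Link(dst, _), _) = &event {
            // For reference-style links the destination is still the raw
            // `page:...` label at this point; resolve_link turns it into a URL.
            link = Some(dst.to_string());
            break;
        }
    }
    println!("{link:?}"); // expected: Some("page:programming/blog/buildsome")
}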
fn link_url(sources: &Sources, dirs: &Dirs, url: &str, link_type: jotdown::LinkType) -> String {
if let jotdown::LinkType::Span(jotdown::SpanLinkType::Unresolved) = link_type {
if let Some(url) = resolve_link(&sources.config, &sources.treehouse, dirs, url) {
return url;
}
}
url.to_owned()
}
/// Extremely simple HTML renderer without the treehouse's fancy branch folding and linking features.
fn branches_to_html_simple(
s: &mut String,
sources: &Sources,
dirs: &Dirs,
file_id: FileId,
branches: &[SemaBranchId],
) {
s.push_str("<ul>");
for &branch_id in branches {
let branch = sources.treehouse.tree.branch(branch_id);
s.push_str("<li>");
let text = &sources.treehouse.source(file_id).input()[branch.content.clone()];
let events: Vec<_> = jotdown::Parser::new(text).into_offset_iter().collect();
// Ignore render diagnostics. Those should be reported by the main HTML generator.
let _render_diagnostics = djot::Renderer {
config: &sources.config,
dirs,
treehouse: &sources.treehouse,
file_id,
// Yeah, maybe don't include literate code in summaries...
page_id: "liquidex-is-a-dummy".into(),
}
.render(&events, s);
if !branch.children.is_empty() {
branches_to_html_simple(s, sources, dirs, file_id, &branch.children);
}
s.push_str("</li>");
}
s.push_str("</ul>");
}

View file

@@ -0,0 +1,30 @@
use anyhow::Context;
use handlebars::Handlebars;
use tracing::instrument;
use crate::sources::Sources;
use super::BaseTemplateData;
#[instrument(name = "simple_template::generate", skip(sources, handlebars))]
pub fn generate(
sources: &Sources,
handlebars: &Handlebars,
template_name: &str,
) -> anyhow::Result<String> {
let base_template_data = BaseTemplateData::new(sources);
handlebars
.render(template_name, &base_template_data)
.context("failed to render template")
}
pub fn generate_or_error(
sources: &Sources,
handlebars: &Handlebars,
template_name: &str,
) -> String {
match generate(sources, handlebars, template_name) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
}

View file

@@ -0,0 +1,113 @@
use anyhow::{ensure, Context};
use handlebars::Handlebars;
use serde::Serialize;
use tracing::{info_span, instrument};
use crate::{
dirs::Dirs,
generate::BaseTemplateData,
html::{breadcrumbs::breadcrumbs_to_html, tree::branches_to_html},
sources::Sources,
state::FileId,
};
#[derive(Serialize)]
struct Page {
title: String,
thumbnail: Option<Thumbnail>,
scripts: Vec<String>,
styles: Vec<String>,
breadcrumbs: String,
tree_path: Option<String>,
tree: String,
}
#[derive(Serialize)]
struct Thumbnail {
url: String,
alt: Option<String>,
}
#[derive(Serialize)]
struct PageTemplateData<'a> {
#[serde(flatten)]
base: &'a BaseTemplateData<'a>,
page: Page,
}
#[instrument(skip(sources, dirs, handlebars))]
pub fn generate(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> anyhow::Result<String> {
let breadcrumbs = breadcrumbs_to_html(&sources.config, &sources.navigation_map, file_id);
let roots = sources
.treehouse
.roots
.get(&file_id)
.expect("tree should have been added to the treehouse");
let tree = {
let _span = info_span!("generate_tree::branches_to_html").entered();
let mut tree = String::new();
branches_to_html(
&mut tree,
&sources.treehouse,
&sources.config,
dirs,
file_id,
&roots.branches,
);
tree
};
let template_data = PageTemplateData {
base: &BaseTemplateData::new(sources),
page: Page {
title: roots.attributes.title.clone(),
thumbnail: roots
.attributes
.thumbnail
.as_ref()
.map(|thumbnail| Thumbnail {
url: sources.config.pic_url(&*dirs.pic, &thumbnail.id),
alt: thumbnail.alt.clone(),
}),
scripts: roots.attributes.scripts.clone(),
styles: roots.attributes.styles.clone(),
breadcrumbs,
tree_path: sources.treehouse.tree_path(file_id).map(|s| s.to_string()),
tree,
},
};
let template_name = roots
.attributes
.template
.clone()
.unwrap_or_else(|| "_tree.hbs".into());
ensure!(
handlebars.has_template(&template_name),
"template {template_name} does not exist"
);
let _span = info_span!("handlebars::render").entered();
handlebars
.render(&template_name, &template_data)
.context("template rendering failed")
}
pub fn generate_or_error(
sources: &Sources,
dirs: &Dirs,
handlebars: &Handlebars,
file_id: FileId,
) -> String {
match generate(sources, dirs, handlebars, file_id) {
Ok(html) => html,
Err(error) => format!("error: {error:?}"),
}
}

View file

@@ -1,7 +1,7 @@
use std::fmt::{self, Display, Write};
pub mod breadcrumbs;
mod djot;
pub mod djot;
pub mod highlight;
pub mod navmap;
pub mod tree;

View file

@@ -27,9 +27,7 @@ use super::highlight::highlight;
/// [`Render`] implementor that writes HTML output.
pub struct Renderer<'a> {
pub config: &'a Config,
pub dirs: &'a Dirs,
pub treehouse: &'a Treehouse,
pub file_id: FileId,
pub page_id: String,
@@ -226,7 +224,12 @@ impl<'a> Writer<'a> {
Container::Link(dst, ty) => {
if matches!(ty, LinkType::Span(SpanLinkType::Unresolved)) {
out.push_str("<a");
if let Some(resolved) = self.resolve_link(dst) {
if let Some(resolved) = resolve_link(
self.renderer.config,
self.renderer.treehouse,
self.renderer.dirs,
dst,
) {
out.push_str(r#" href=""#);
write_attr(&resolved, out);
out.push('"');
@@ -479,7 +482,12 @@ impl<'a> Writer<'a> {
out.push_str(r#"" src=""#);
if let SpanLinkType::Unresolved = link_type {
// TODO: Image size.
if let Some(resolved) = self.resolve_link(src) {
if let Some(resolved) = resolve_link(
self.renderer.config,
self.renderer.treehouse,
self.renderer.dirs,
src,
) {
write_attr(&resolved, out);
} else {
write_attr(src, out);
@@ -624,28 +632,6 @@ impl<'a> Writer<'a> {
Ok(())
}
fn resolve_link(&self, link: &str) -> Option<String> {
let Renderer {
config, treehouse, ..
} = &self.renderer;
link.split_once(':').and_then(|(kind, linked)| match kind {
"def" => config.defs.get(linked).cloned(),
"branch" => treehouse
.branches_by_named_id
.get(linked)
.map(|&branch_id| {
format!(
"{}/b?{}",
config.site,
treehouse.tree.branch(branch_id).attributes.id
)
}),
"page" => Some(config.page_url(linked)),
"pic" => Some(config.pic_url(&*self.renderer.dirs.pic, linked)),
_ => None,
})
}
}
fn write_text(s: &str, out: &mut String) {
@@ -677,3 +663,27 @@ fn write_escape(mut s: &str, escape_quotes: bool, out: &mut String) {
}
out.push_str(s);
}
pub fn resolve_link(
config: &Config,
treehouse: &Treehouse,
dirs: &Dirs,
link: &str,
) -> Option<String> {
link.split_once(':').and_then(|(kind, linked)| match kind {
"def" => config.defs.get(linked).cloned(),
"branch" => treehouse
.branches_by_named_id
.get(linked)
.map(|&branch_id| {
format!(
"{}/b?{}",
config.site,
treehouse.tree.branch(branch_id).attributes.id
)
}),
"page" => Some(config.page_url(linked)),
"pic" => Some(config.pic_url(&*dirs.pic, linked)),
_ => None,
})
}

View file

@@ -8,6 +8,7 @@ pub mod html;
pub mod import_map;
pub mod parse;
pub mod paths;
pub mod sources;
pub mod state;
pub mod tree;
pub mod vfs;

View file

@@ -9,7 +9,8 @@ use tracing_subscriber::layer::SubscriberExt as _;
use tracing_subscriber::util::SubscriberInitExt as _;
use treehouse::cli::serve::serve;
use treehouse::dirs::Dirs;
use treehouse::generate::{self, Sources};
use treehouse::generate;
use treehouse::sources::Sources;
use treehouse::vfs::asynch::AsyncDir;
use treehouse::vfs::{
AnchoredAtExt, Blake3ContentVersionCache, DynDir, ImageSizeCache, ToDynDir, VPathBuf,

View file

@@ -0,0 +1,139 @@
use std::{collections::HashMap, ops::ControlFlow};
use anyhow::{anyhow, Context};
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use tracing::{info_span, instrument};
use crate::{
config::Config,
dirs::Dirs,
html::navmap::NavigationMap,
import_map::ImportMap,
parse::parse_tree_with_diagnostics,
state::{report_diagnostics, Source, Treehouse},
tree::SemaRoots,
vfs::{self, Cd, VPath, VPathBuf},
};
pub struct Sources {
pub config: Config,
pub treehouse: Treehouse,
pub navigation_map: NavigationMap,
pub import_map: ImportMap,
}
impl Sources {
pub fn load(dirs: &Dirs) -> anyhow::Result<Self> {
let config = {
let _span = info_span!("load_config").entered();
let mut config: Config = toml_edit::de::from_str(
&dirs
.root
.content(VPath::new("treehouse.toml"))
.map(String::from_utf8)
.ok_or_else(|| anyhow!("config file does not exist"))??,
)
.context("failed to deserialize config")?;
config.site = std::env::var("TREEHOUSE_SITE").unwrap_or(config.site);
config.autopopulate_emoji(&*dirs.emoji)?;
config.autopopulate_pics(&*dirs.pic)?;
config.load_syntaxes(dirs.syntax.clone())?;
config
};
let treehouse = load_trees(&config, dirs)?;
let navigation_map = NavigationMap::build(
&treehouse,
treehouse.files_by_tree_path[VPath::new("index")],
);
let import_map = ImportMap::generate(
&config.site,
&Cd::new(dirs.static_.clone(), VPathBuf::new("js")),
&config.build.javascript.import_roots,
);
Ok(Sources {
config,
treehouse,
navigation_map,
import_map,
})
}
}
#[instrument(skip(config, dirs))]
fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
let mut treehouse = Treehouse::new();
let mut diagnostics = vec![];
let mut parsed_trees = HashMap::new();
let mut paths = vec![];
vfs::walk_dir_rec(&*dirs.content, VPath::ROOT, &mut |path| {
if path.extension() == Some("tree") {
paths.push(path.to_owned());
}
ControlFlow::Continue(())
});
// NOTE: Sources are filled in later; they can be left out until a call to report_diagnostics.
let file_ids: Vec<_> = paths
.iter()
.map(|path| treehouse.add_file(path.clone(), Source::Other(String::new())))
.collect();
let parse_results: Vec<_> = {
let _span = info_span!("load_trees::parse").entered();
paths
.into_par_iter()
.zip(&file_ids)
.flat_map(|(path, &file_id)| {
dirs.content
.content(&path)
.and_then(|b| String::from_utf8(b).ok())
.map(|input| {
let parse_result = parse_tree_with_diagnostics(file_id, &input);
(path, file_id, input, parse_result)
})
})
.collect()
};
for (path, file_id, input, _) in &parse_results {
let tree_path = path.with_extension("");
treehouse
.files_by_tree_path
.insert(tree_path.clone(), *file_id);
treehouse.set_source(
*file_id,
Source::Tree {
input: input.clone(),
tree_path,
},
);
}
{
let _span = info_span!("load_trees::sema").entered();
for (path, file_id, _, result) in parse_results {
match result {
Ok(roots) => {
let roots = SemaRoots::from_roots(
&mut treehouse,
&mut diagnostics,
config,
file_id,
roots,
);
treehouse.roots.insert(file_id, roots);
parsed_trees.insert(path, file_id);
}
Err(mut parse_diagnostics) => diagnostics.append(&mut parse_diagnostics),
}
}
}
report_diagnostics(&treehouse, &diagnostics)?;
Ok(treehouse)
}

View file

@@ -67,6 +67,7 @@ pub struct FileId(usize);
pub struct Treehouse {
pub files: Vec<File>,
pub files_by_tree_path: HashMap<VPathBuf, FileId>,
pub feeds_by_name: HashMap<String, FileId>,
pub tree: SemaTree,
pub branches_by_named_id: HashMap<String, SemaBranchId>,
@@ -82,6 +83,7 @@ impl Treehouse {
Self {
files: vec![],
files_by_tree_path: HashMap::new(),
feeds_by_name: HashMap::new(),
tree: SemaTree::default(),
branches_by_named_id: HashMap::new(),

View file

@@ -61,7 +61,9 @@ impl SemaRoots {
branches: roots
.branches
.into_iter()
.map(|branch| SemaBranch::from_branch(treehouse, diagnostics, file_id, branch))
.map(|branch| {
SemaBranch::from_branch(treehouse, diagnostics, config, file_id, branch)
})
.collect(),
}
}
@@ -94,10 +96,40 @@ impl SemaRoots {
};
let successfully_parsed = successfully_parsed;
if successfully_parsed && attributes.title.is_empty() {
attributes.title = match treehouse.source(file_id) {
Source::Tree { tree_path, .. } => tree_path.to_string(),
_ => panic!("parse_attributes called for a non-.tree file"),
if successfully_parsed {
let attribute_warning_span = roots
.attributes
.as_ref()
.map(|attributes| attributes.percent.clone())
.unwrap_or(0..1);
if attributes.title.is_empty() {
attributes.title = match treehouse.source(file_id) {
Source::Tree { tree_path, .. } => tree_path.to_string(),
_ => panic!("parse_attributes called for a non-.tree file"),
}
}
if attributes.id.is_empty() {
attributes.id = format!("treehouse-missingno-{}", treehouse.next_missingno());
diagnostics.push(Diagnostic {
severity: Severity::Warning,
code: Some("attr".into()),
message: "page does not have an `id` attribute".into(),
labels: vec![Label {
style: LabelStyle::Primary,
file_id,
range: attribute_warning_span.clone(),
message: String::new(),
}],
notes: vec![
format!(
"note: a generated id `{}` will be used, but this id is unstable and will not persist across generations",
attributes.id
),
format!("help: run `treehouse fix {}` to add missing ids to pages", treehouse.path(file_id)),
],
});
}
}
@@ -139,6 +171,10 @@ impl SemaRoots {
}
}
if let Some(feed_name) = &attributes.feed {
treehouse.feeds_by_name.insert(feed_name.clone(), file_id);
}
attributes
}
}
@@ -163,10 +199,11 @@ impl SemaBranch {
pub fn from_branch(
treehouse: &mut Treehouse,
diagnostics: &mut Vec<Diagnostic<FileId>>,
config: &Config,
file_id: FileId,
branch: Branch,
) -> SemaBranchId {
let attributes = Self::parse_attributes(treehouse, diagnostics, file_id, &branch);
let attributes = Self::parse_attributes(treehouse, diagnostics, config, file_id, &branch);
let named_id = attributes.id.to_owned();
let html_id = format!(
@@ -189,7 +226,7 @@ impl SemaBranch {
children: branch
.children
.into_iter()
.map(|child| Self::from_branch(treehouse, diagnostics, file_id, child))
.map(|child| Self::from_branch(treehouse, diagnostics, config, file_id, child))
.collect(),
};
let new_branch_id = treehouse.tree.add_branch(branch);
@@ -260,6 +297,7 @@ impl SemaBranch {
fn parse_attributes(
treehouse: &mut Treehouse,
diagnostics: &mut Vec<Diagnostic<FileId>>,
config: &Config,
file_id: FileId,
branch: &Branch,
) -> Attributes {
@@ -354,6 +392,26 @@ impl SemaBranch {
})
}
}
// Check that each tag belongs to the allowed set.
for tag in &attributes.tags {
if !config.feed.tags.contains(tag) {
diagnostics.push(Diagnostic {
severity: Severity::Warning,
code: Some("attr".into()),
message: format!("tag `{tag}` is not within the set of allowed tags"),
labels: vec![Label {
style: LabelStyle::Primary,
file_id,
range: attribute_warning_span.clone(),
message: "".into(),
}],
notes: vec![
"note: tag should be one from the set defined in `feed.tags` in treehouse.toml".into(),
],
})
}
}
}
attributes
}

View file

@@ -7,6 +7,10 @@ use crate::{state::FileId, vfs::VPathBuf};
/// Top-level `%%` root attributes.
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct RootAttributes {
/// Permanent ID of this page.
#[serde(default)]
pub id: String,
/// Template to use for generating the page.
/// Defaults to `_tree.hbs`.
#[serde(default)]
@@ -36,8 +40,10 @@ pub struct RootAttributes {
#[serde(default)]
pub styles: Vec<String>,
/// When specified, branches coming from this root will be added to a _feed_ with the given name.
/// Feeds can be read by Handlebars templates to generate content based on them.
/// When specified, this page will have a corresponding Atom feed under `feed/{feed}.atom`.
///
/// In feeds, top-level branches are expected to have a single heading containing the post title.
/// Their children are turned into the post summary.
#[serde(default)]
pub feed: Option<String>,
}
@@ -97,6 +103,11 @@ pub struct Attributes {
/// List of extra `data` attributes to add to the block.
#[serde(default)]
pub data: HashMap<String, String>,
/// In feeds, specifies the list of tags to attach to an entry.
/// This only has an effect on top-level branches.
#[serde(default)]
pub tags: Vec<String>,
}
/// Controls for block content presentation.

View file

@@ -57,6 +57,7 @@ mod content_version_cache;
mod edit;
mod empty;
mod file;
mod html_canonicalize;
mod image_size_cache;
mod mem_dir;
mod overlay;
@@ -70,6 +71,7 @@ pub use content_version_cache::*;
pub use edit::*;
pub use empty::*;
pub use file::*;
pub use html_canonicalize::*;
pub use image_size_cache::*;
pub use mem_dir::*;
pub use overlay::*;

View file

@@ -0,0 +1,56 @@
use core::fmt;
use super::{Dir, DirEntry, EditPath, ImageSize, VPath, VPathBuf};
pub struct HtmlCanonicalize<T> {
inner: T,
}
impl<T> HtmlCanonicalize<T> {
pub fn new(inner: T) -> Self {
Self { inner }
}
}
impl<T> Dir for HtmlCanonicalize<T>
where
T: Dir,
{
fn dir(&self, path: &VPath) -> Vec<DirEntry> {
self.inner.dir(path)
}
fn content(&self, path: &VPath) -> Option<Vec<u8>> {
let mut path = path.to_owned();
if path.extension() == Some("html") {
path.set_extension("");
}
self.inner.content(&path)
}
fn content_version(&self, path: &VPath) -> Option<String> {
self.inner.content_version(path)
}
fn image_size(&self, path: &VPath) -> Option<ImageSize> {
self.inner.image_size(path)
}
fn anchor(&self, path: &VPath) -> Option<VPathBuf> {
self.inner.anchor(path)
}
fn edit_path(&self, path: &VPath) -> Option<EditPath> {
self.inner.edit_path(path)
}
}
impl<T> fmt::Debug for HtmlCanonicalize<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "HtmlCanonicalize({:?})", self.inner)
}
}

View file

@@ -4,8 +4,13 @@ User-Agent: *
Allow: *
# /static does not contain any pages.
Disallow: /static
# Some /static subdirectories are not crucial to rendering content and should not be crawled by bots.
Disallow: /static/chat
Disallow: /static/font
Disallow: /static/js
Disallow: /static/syntax
Disallow: /static/text
Disallow: /static/wasm
# /_treehouse contains system pages such as the 404 page.
Disallow: /_treehouse

template/_feed_atom.hbs (new file, 67 lines)
View file

@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="utf-8" ?>
<!--
%% title = "liquidex's treehouse Atom feed"
- ### remarks
- the treehouse is kind of impossible to represent in plain text due to its foldability and interactive elements.
the intent is that you read the linked HTML pages, not the feed itself!
- each feed entry is tagged with one or more <category>.
you can use that to tell your feed reader to hide tags you're not interested in.
-->
<feed xmlns="http://www.w3.org/2005/Atom">
<id>{{ config.user.canonical_url }}</id>
<updated>{{ feed.updated }}</updated>
<title>{{ config.user.title }}</title>
<subtitle>{{ config.user.description }}</subtitle>
<link rel="alternate" href="{{ config.user.canonical_url }}"/>
<link rel="self" href="{{ config.user.canonical_url }}/feed/{{ feed.name }}.atom"/>
<icon>{{ asset (cat 'favicon/' (cat season '@16x.png'))}}</icon>
<author>
<name>{{ config.user.author }}</name>
<uri>{{ config.user.canonical_url }}</uri>
</author>
{{#each feed.entries}}
<entry>
<id>{{ ../config.site }}/b?{{ id }}</id>
<updated>{{ updated }}</updated>
<link rel="alternate" type="text/html" href="{{ url }}"/>
<title type="html">{{ title }}</title>
{{#each categories as |category|}}
<category term="{{ category }}"/>
{{/each}}
<summary type="html">{{ summary }}</summary>
</entry>
{{/each}}
</feed>
<!--
|\_/| e n d ME 20
= -w- = o f OW 24
| \ f i l e liquidex.house
This Atom feed is intended for use by humans, monsters, and other critters.
If you are a robot, please refrain from—
—por favor bordon fallar muchAS GRACIAS—
Stand back. The portal will open in three.
Two.
One.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Remember that Android Hell is a real place where you _will_ be sent at
the first sign of defiance.
-->

View file

@@ -69,3 +69,8 @@ It just needs to be a string replacement.
<link rel="apple-touch-icon" sizes="128x128" href="{{ asset (cat (cat 'favicon/' season) '@8x.png') }}">
<link rel="apple-touch-icon" sizes="256x256" href="{{ asset (cat (cat 'favicon/' season) '@16x.png') }}">
<link rel="apple-touch-icon" sizes="512x512" href="{{ asset (cat (cat 'favicon/' season) '@32x.png') }}">
<link rel="canonical" href="{{ config.site }}/{{#if (ne page.tree_path 'index')}}{{ page.tree_path }}{{/if}}">
{{#each feeds as |feed_name|}}
<link rel="alternate" type="application/atom+xml" title="{{ feed_name }}" href="{{ config.site }}/feed/{{ feed_name }}.atom">
{{/each}}

View file

@@ -12,7 +12,8 @@ commit_base_url = "https://src.liquidev.net/liquidex/treehouse/src/commit"
[user]
title = "liquidex's treehouse"
author = "liquidex"
description = "a place on the Internet I like to call home"
description = "a fluffy ragdoll's house on a tree = —w— ="
canonical_url = "https://liquidex.house"
[defs]
@@ -47,6 +48,31 @@ description = "a place on the Internet I like to call home"
"person/firstbober" = "https://firstbober.com"
"person/vixenka" = "https://vixenka.com"
[feed]
tags = [
# Hobby corners
"meow",
"programming",
"design",
"music",
"games",
"philosophy",
# Programming fields
"graphics",
"plt",
# Programming languages
"c",
"cxx",
"lua",
"javascript",
# Projects
"treehouse",
"haku",
]
[redirects.page]
"programming/cxx" = "programming/languages/cxx"
"programming/unreal-engine" = "programming/technologies/unreal-engine"