code housekeeping

りき萌 2025-08-26 12:46:50 +02:00
parent e1b6578b2a
commit d3c3ff8e4e
10 changed files with 108 additions and 100 deletions
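Most of the churn below is mechanical: `use` lists are reordered to rustfmt's 2024 style (plain ASCII order, so uppercase names such as `Router` now sort before lowercase modules), and nested `if let` blocks are flattened into let chains, which need the Rust 2024 edition. A minimal, self-contained sketch of the let-chain rewrite, with illustrative names rather than code from this repository:

use std::collections::HashMap;

// Pre-2024 shape: every additional condition adds a level of nesting.
fn insert_nested(map: &mut HashMap<String, String>, path: &str) {
    if let Some((stem, ext)) = path.rsplit_once('.') {
        if ext == "png" {
            if !map.contains_key(stem) {
                map.insert(stem.to_owned(), path.to_owned());
            }
        }
    }
}

// Let-chain shape used throughout this commit: `if let` patterns and plain
// boolean tests joined with `&&`, sharing one block and one closing brace.
fn insert_chained(map: &mut HashMap<String, String>, path: &str) {
    if let Some((stem, ext)) = path.rsplit_once('.')
        && ext == "png"
        && !map.contains_key(stem)
    {
        map.insert(stem.to_owned(), path.to_owned());
    }
}

fn main() {
    let (mut a, mut b) = (HashMap::new(), HashMap::new());
    insert_nested(&mut a, "blobcat.png");
    insert_chained(&mut b, "blobcat.png");
    assert_eq!(a, b); // both forms behave identically
}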


@@ -6,14 +6,14 @@ use std::{net::Ipv4Addr, sync::Arc};
 use axum::http::header::LOCATION;
 use axum::{
+    Router,
     extract::{Path, Query, RawQuery, State},
     http::{
-        header::{CACHE_CONTROL, CONTENT_TYPE},
         HeaderValue, StatusCode,
+        header::{CACHE_CONTROL, CONTENT_TYPE},
     },
     response::{Html, IntoResponse, Response},
     routing::get,
-    Router,
 };
 
 use serde::Deserialize;
 use tokio::net::TcpListener;
@@ -149,13 +149,12 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
     });
     if let Some(branch_id) = branch_id {
         let branch = state.sources.treehouse.tree.branch(branch_id);
-        if let Source::Tree { tree_path, .. } = state.sources.treehouse.source(branch.file_id) {
-            if let Some(url) =
+        if let Source::Tree { tree_path, .. } = state.sources.treehouse.source(branch.file_id)
+            && let Some(url) =
                 vfs::url(&state.sources.config.site, &state.target.sync(), tree_path)
-            {
-                let url = format!("{url}#{}", branch.html_id);
-                return (StatusCode::FOUND, [(LOCATION, url)]).into_response();
-            }
+        {
+            let url = format!("{url}#{}", branch.html_id);
+            return (StatusCode::FOUND, [(LOCATION, url)]).into_response();
         }
     }


@@ -3,15 +3,15 @@ use std::{
     ops::ControlFlow,
 };
 
-use anyhow::{anyhow, Context};
+use anyhow::{Context, anyhow};
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
 use tracing::{error, info_span, instrument};
 
 use crate::{
     html::highlight::{
-        compiled::{compile_syntax, CompiledSyntax},
         Syntax,
+        compiled::{CompiledSyntax, compile_syntax},
     },
     import_map::ImportRoot,
     vfs::{self, Content, Dir, DynDir, ImageSize, VPath, VPathBuf},
@@ -116,12 +116,11 @@ impl Config {
     #[instrument(name = "Config::autopopulate_emoji", skip(self))]
     pub fn autopopulate_emoji(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
         vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
-            if path.extension().is_some_and(is_image_file) {
-                if let Some(emoji_name) = path.file_stem() {
-                    if !self.emoji.contains_key(emoji_name) {
-                        self.emoji.insert(emoji_name.to_owned(), path.to_owned());
-                    }
-                }
+            if path.extension().is_some_and(is_image_file)
+                && let Some(emoji_name) = path.file_stem()
+                && !self.emoji.contains_key(emoji_name)
+            {
+                self.emoji.insert(emoji_name.to_owned(), path.to_owned());
             }
 
             ControlFlow::Continue(())
@@ -133,16 +132,16 @@ impl Config {
     #[instrument(name = "Config::autopopulate_pics", skip(self))]
     pub fn autopopulate_pics(&mut self, dir: &dyn Dir) -> anyhow::Result<()> {
         vfs::walk_dir_rec(dir, VPath::ROOT, &mut |path| {
-            if path.extension().is_some_and(is_image_file) {
-                if let Some(pic_name) = path.file_stem() {
-                    let pic_id = pic_name
-                        .split_once('-')
-                        .map(|(before_dash, _after_dash)| before_dash)
-                        .unwrap_or(pic_name);
+            if path.extension().is_some_and(is_image_file)
+                && let Some(pic_name) = path.file_stem()
+            {
+                let pic_id = pic_name
+                    .split_once('-')
+                    .map(|(before_dash, _after_dash)| before_dash)
+                    .unwrap_or(pic_name);
 
-                    if !self.pics.contains_key(pic_id) {
-                        self.pics.insert(pic_id.to_owned(), path.to_owned());
-                    }
+                if !self.pics.contains_key(pic_id) {
+                    self.pics.insert(pic_id.to_owned(), path.to_owned());
                 }
             }


@@ -3,6 +3,7 @@ use codespan_reporting::diagnostic::{Diagnostic, Label};
 use serde::Deserialize;
 
 use crate::{
+    config::Config,
     state::{FileId, TomlError, Treehouse, toml_error_to_diagnostic},
     tree::attributes::{Picture, timestamp_from_id},
 };
@@ -77,7 +78,11 @@ pub struct IncludeFeed {
 }
 
 impl Doc {
-    pub fn parse(treehouse: &mut Treehouse, file_id: FileId) -> (Doc, Vec<Diagnostic<FileId>>) {
+    pub fn parse(
+        treehouse: &mut Treehouse,
+        config: &Config,
+        file_id: FileId,
+    ) -> (Doc, Vec<Diagnostic<FileId>>) {
         let mut diagnostics = vec![];
 
         let source = treehouse.source(file_id).input();
@@ -125,6 +130,17 @@ impl Doc {
                         ]),
                 );
             }
+
+            for tag in &attributes.tags {
+                if !config.feed.tags.contains(tag) {
+                    diagnostics.push(
+                        Diagnostic::warning()
+                            .with_code("attr")
+                            .with_message(format!("doc has unregistered tag `{tag}`"))
+                            .with_labels(vec![Label::primary(file_id, attributes_span.clone())]),
+                    );
+                }
+            }
         }
 
         (


@@ -1,13 +1,13 @@
 use std::{collections::HashMap, fmt, sync::Arc};
 
-use anyhow::{ensure, Context};
+use anyhow::{Context, ensure};
 use handlebars::Handlebars;
 use serde::Serialize;
 use tracing::{info_span, instrument};
 
 use crate::{
     dirs::Dirs,
-    generate::{simple_template, BaseTemplateData},
+    generate::BaseTemplateData,
     html::{breadcrumbs::breadcrumbs_to_html, tree},
     sources::Sources,
     state::FileId,


@@ -5,9 +5,6 @@ use std::fmt::Write;
 use std::ops::Range;
 
 use codespan_reporting::diagnostic::Diagnostic;
 use codespan_reporting::diagnostic::Label;
-use codespan_reporting::diagnostic::LabelStyle;
-use codespan_reporting::diagnostic::Severity;
 use jotdown::Alignment;
 use jotdown::Container;
 use jotdown::Event;
@@ -103,7 +100,7 @@ impl<'a> Writer<'a> {
     fn render_event(
         &mut self,
         e: &Event<'a>,
-        range: Range<usize>,
+        #[expect(unused)] range: Range<usize>,
         out: &mut String,
     ) -> std::fmt::Result {
         if matches!(&e, Event::Start(Container::LinkDefinition { .. }, ..)) {
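For context on the attribute introduced above: `#[expect(unused)]` behaves like `#[allow(unused)]`, except the compiler reports an unfulfilled expectation if the lint would not have fired, so the suppression cannot silently go stale once `range` is used again. A small sketch with a hypothetical function, not taken from this crate:

use std::ops::Range;

// The lint expectation on the parameter silences the unused-variable warning,
// and is itself flagged (unfulfilled_lint_expectations) if `range` ever
// becomes used, prompting removal of the attribute.
fn render_stub(#[expect(unused)] range: Range<usize>, out: &mut String) {
    out.push_str("<span>...</span>");
}

fn main() {
    let mut out = String::new();
    render_stub(0..4, &mut out);
    println!("{out}");
}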
@@ -302,13 +299,13 @@ impl<'a> Writer<'a> {
                 class.parts().for_each(|part| write_attr(part, out));
             }
             // div class goes after classes from attrs
-            if let Container::Div { class } = c {
-                if !class.is_empty() {
-                    if first_written {
-                        out.push(' ');
-                    }
-                    out.push_str(class);
-                }
+            if let Container::Div { class } = c
+                && !class.is_empty()
+            {
+                if first_written {
+                    out.push(' ');
+                }
+                out.push_str(class);
             }
             out.push('"');
         }


@@ -1,6 +1,6 @@
 use std::ops::Range;
 
-use super::compiled::{CompiledSyntax, CompiledTokenTypes, TokenId, TOKEN_ID_DEFAULT};
+use super::compiled::{CompiledSyntax, CompiledTokenTypes, TOKEN_ID_DEFAULT, TokenId};
 
 pub struct Token {
     pub id: TokenId,
@@ -71,10 +71,10 @@ impl CompiledSyntax {
         }
 
         for token in &mut tokens {
-            if let Some(keyword) = self.keywords.get(&text[token.range.clone()]) {
-                if keyword.only_replaces.is_none() || Some(token.id) == keyword.only_replaces {
-                    token.id = keyword.into;
-                }
+            if let Some(keyword) = self.keywords.get(&text[token.range.clone()])
+                && (keyword.only_replaces.is_none() || Some(token.id) == keyword.only_replaces)
+            {
+                token.id = keyword.into;
             }
         }
 
@@ -87,11 +87,11 @@ fn push_token(tokens: &mut Vec<Token>, id: TokenId, range: Range<usize>) {
         return;
     }
 
-    if let Some(previous_token) = tokens.last_mut() {
-        if previous_token.id == id {
-            previous_token.range.end = range.end;
-            return;
-        }
+    if let Some(previous_token) = tokens.last_mut()
+        && previous_token.id == id
+    {
+        previous_token.range.end = range.end;
+        return;
     }
 
     tokens.push(Token { id, range });
 }


@@ -11,7 +11,7 @@ use crate::{
     html::navmap::NavigationMap,
     import_map::ImportMap,
     parse::parse_tree_with_diagnostics,
-    state::{Source, Tag, Treehouse, report_diagnostics},
+    state::{Source, Treehouse, report_diagnostics},
     tree::SemaRoots,
     vfs::{self, Cd, Content, VPath, VPathBuf},
 };
@@ -142,7 +142,7 @@ fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
     for path in &doc_paths {
         if let Some(input) =
-            vfs::query::<Content>(&dirs.content, &path).and_then(|c| c.string().ok())
+            vfs::query::<Content>(&dirs.content, path).and_then(|c| c.string().ok())
         {
             let file_id = treehouse.add_file(path.clone(), Source::Other(input));
             treehouse.files_by_doc_path.insert(path.clone(), file_id);
@@ -155,31 +155,25 @@ fn load_trees(config: &Config, dirs: &Dirs) -> anyhow::Result<Treehouse> {
     }
 
     for file_id in doc_file_ids {
-        let (doc, mut doc_diagnostics) = Doc::parse(&mut treehouse, file_id);
+        let (doc, mut doc_diagnostics) = Doc::parse(&mut treehouse, config, file_id);
         treehouse.docs.insert(file_id, doc);
         diagnostics.append(&mut doc_diagnostics);
     }
 
     // Tags
-    for (_, file_id) in &treehouse.files_by_tree_path {
+    for file_id in treehouse.files_by_tree_path.values() {
         let roots = &treehouse.roots[file_id];
         for tag_name in &roots.attributes.tags {
-            let tag = treehouse
-                .tags
-                .entry(tag_name.clone())
-                .or_insert_with(Tag::default);
+            let tag = treehouse.tags.entry(tag_name.clone()).or_default();
            tag.files.push(*file_id);
         }
     }
 
-    for (_, file_id) in &treehouse.files_by_doc_path {
+    for file_id in treehouse.files_by_doc_path.values() {
         let doc = &treehouse.docs[file_id];
         for tag_name in &doc.attributes.tags {
-            let tag = treehouse
-                .tags
-                .entry(tag_name.clone())
-                .or_insert_with(Tag::default);
+            let tag = treehouse.tags.entry(tag_name.clone()).or_default();
             tag.files.push(*file_id);
         }
     }
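Two of the rewrites above are plain API substitutions: iterating `map.values()` instead of destructuring `for (_, v) in &map`, and `Entry::or_default()` instead of `.or_insert_with(Tag::default)` (which is also why the `Tag` import could be dropped). A self-contained sketch using stand-in types; only the `files` field is taken from the diff, the rest is illustrative:

use std::collections::HashMap;

type FileId = u32; // stand-in for the crate's FileId

#[derive(Default)]
struct Tag {
    files: Vec<FileId>,
}

fn main() {
    let files_by_tree_path: HashMap<String, FileId> =
        HashMap::from([("a.tree".to_owned(), 1), ("b.tree".to_owned(), 2)]);
    let mut tags: HashMap<String, Tag> = HashMap::new();

    // `.values()` replaces `for (_, file_id) in &map` when the key is unused.
    for file_id in files_by_tree_path.values() {
        // `.or_default()` inserts `Tag::default()` only when the key is missing,
        // exactly like the longer `.or_insert_with(Tag::default)` spelling.
        let tag = tags.entry("example".to_owned()).or_default();
        tag.files.push(*file_id);
    }

    assert_eq!(tags["example"].files.len(), 2);
}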


@@ -232,7 +232,7 @@ impl SemaBranch {
             diagnostics.push(
                 Diagnostic::warning()
                     .with_code("sema")
-                    .with_message(format!("two branches share the same id `{}`", named_id))
+                    .with_message(format!("two branches share the same id `{named_id}`"))
                     .with_labels(vec![
                         Label {
                             style: LabelStyle::Primary,
@@ -345,23 +345,23 @@ impl SemaBranch {
         }
 
         // Check that link-type blocks are `+`-type to facilitate lazy loading.
-        if let Content::Link(_) = &attributes.content {
-            if branch.kind == BranchKind::Expanded {
-                diagnostics.push(Diagnostic {
-                    severity: Severity::Warning,
-                    code: Some("attr".into()),
-                    message: "`content.link` branch is expanded by default".into(),
-                    labels: vec![Label {
-                        style: LabelStyle::Primary,
-                        file_id,
-                        range: branch.kind_span.clone(),
-                        message: String::new(),
-                    }],
-                    notes: vec![
-                        "note: `content.link` branches should normally be collapsed to allow for lazy loading".into(),
-                    ],
-                });
-            }
+        if let Content::Link(_) = &attributes.content
+            && branch.kind == BranchKind::Expanded
+        {
+            diagnostics.push(Diagnostic {
+                severity: Severity::Warning,
+                code: Some("attr".into()),
+                message: "`content.link` branch is expanded by default".into(),
+                labels: vec![Label {
+                    style: LabelStyle::Primary,
+                    file_id,
+                    range: branch.kind_span.clone(),
+                    message: String::new(),
+                }],
+                notes: vec![
+                    "note: `content.link` branches should normally be collapsed to allow for lazy loading".into(),
+                ],
+            });
         }
 
         // Resolve content.links.


@@ -6,7 +6,7 @@ use tracing::{info_span, instrument, warn};
 
 use crate::config;
 
-use super::{query, Content, Dir, ImageSize, Query, VPath, VPathBuf};
+use super::{Content, Dir, ImageSize, Query, VPath, VPathBuf, query};
 
 pub struct ImageSizeCache<T> {
     inner: T,
@@ -27,18 +27,18 @@ where
     T: Dir,
 {
     fn compute_image_size(&self, path: &VPath) -> anyhow::Result<Option<ImageSize>> {
-        if path.extension().is_some_and(config::is_image_file) {
-            if let Some(content) = query::<Content>(&self.inner, path) {
-                if path.extension() == Some("svg") {
-                    return Ok(svg_size(&content.string()?));
-                } else {
-                    let _span = info_span!("raster_image_size").entered();
-                    let reader = image::ImageReader::new(Cursor::new(content.bytes()))
-                        .with_guessed_format()
-                        .context("cannot guess image format")?;
-                    let (width, height) = reader.into_dimensions()?;
-                    return Ok(Some(ImageSize { width, height }));
-                }
-            }
+        if path.extension().is_some_and(config::is_image_file)
+            && let Some(content) = query::<Content>(&self.inner, path)
+        {
+            if path.extension() == Some("svg") {
+                return Ok(svg_size(&content.string()?));
+            } else {
+                let _span = info_span!("raster_image_size").entered();
+                let reader = image::ImageReader::new(Cursor::new(content.bytes()))
+                    .with_guessed_format()
+                    .context("cannot guess image format")?;
+                let (width, height) = reader.into_dimensions()?;
+                return Ok(Some(ImageSize { width, height }));
+            }
         }
@@ -105,11 +105,11 @@ fn svg_size(svg: &str) -> Option<ImageSize> {
     let mut height: Option<u32> = None;
     let mut view_box: Option<[u32; 4]> = None;
     while let Some(Ok(token)) = tokenizer.next() {
-        if let xmlparser::Token::ElementStart { local, .. } = &token {
-            if local == "svg" {
-                in_svg = true;
-                continue;
-            }
+        if let xmlparser::Token::ElementStart { local, .. } = &token
+            && local == "svg"
+        {
+            in_svg = true;
+            continue;
         }
 
         if in_svg {


@@ -57,14 +57,15 @@ main.doc {
 
     & ul,
     & ol {
         /* Is there a better way to add spacing to the marker, other than adding whitespace? */
         margin-top: 0;
         margin-bottom: 0;
         padding-top: 0.5lh;
         padding-bottom: 0.5lh;
         padding-left: 3.2em;
     }
 
     & ul {
+        /* Is there a better way to add spacing to the marker, other than adding whitespace? */
         list-style: "- ";
     }
@@ -88,7 +89,7 @@ main.doc {
     }
 
     & section.feed {
-        width: 40ch;
+        max-width: 40ch;
         flex-shrink: 0;
         padding: 0.8rem;
         padding-top: 3.2rem;
@@ -103,6 +104,7 @@ main.doc {
     & .vertical-center {
         --article-padding: 3.2rem;
 
         min-height: 0;
+        flex-grow: 1;
     }
@@ -110,7 +112,8 @@ main.doc {
     & footer {
     }
 
     & section.feed {
-        width: var(--doc-text-width);
+        max-width: var(--doc-text-width);
+        flex-grow: 1;
         margin-top: 2.4em;
         padding: 1.6rem;