rudimentary search engine optimisation (robots.txt, <meta name="robots"> in /b)

りき萌 2024-11-24 14:28:34 +01:00
parent 107a9a4c39
commit 41fa245150
3 changed files with 16 additions and 0 deletions

@@ -156,6 +156,7 @@ async fn branch(RawQuery(named_id): RawQuery, State(state): State<Arc<Server>>)
String::from("<meta property=\"og:description\" content=\"");
write!(per_page_metadata, "{}", EscapeHtml(branch_markup)).unwrap();
per_page_metadata.push_str("\">");
per_page_metadata.push_str(r#"<meta name="robots" content="noindex">"#);
const PER_PAGE_METADATA_REPLACEMENT_STRING: &str = "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";
return Html(content.replacen(
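
On its own the hunk above is only the tail of the /b branch handler, so here is a minimal, self-contained sketch of the pattern it relies on: the per-branch metadata is assembled as a string, now ending with the noindex hint, and spliced into the pre-rendered page by replacing a unique placeholder comment exactly once. The function name render_branch_page and the branch_description parameter are hypothetical; the real handler pulls its inputs from the axum request and server state and HTML-escapes the description with an EscapeHtml wrapper.

use std::fmt::Write;

// The placeholder comment baked into the page template, copied from the hunk above.
const PER_PAGE_METADATA_REPLACEMENT_STRING: &str =
    "<!-- treehouse-ca37057a-cff5-45b3-8415-3b02dbf6c799-per-branch-metadata -->";

// Hypothetical, simplified version of the per-branch metadata splicing.
fn render_branch_page(content: &str, branch_description: &str) -> String {
    // Build the per-branch <head> metadata: an og:description plus the
    // noindex hint added by this commit.
    let mut per_page_metadata =
        String::from("<meta property=\"og:description\" content=\"");
    // The real code escapes the description as HTML here; escaping is omitted
    // to keep the sketch dependency-free.
    write!(per_page_metadata, "{branch_description}").unwrap();
    per_page_metadata.push_str("\">");
    per_page_metadata.push_str(r#"<meta name="robots" content="noindex">"#);

    // Splice the metadata into the pre-rendered page by replacing the
    // placeholder exactly once.
    content.replacen(PER_PAGE_METADATA_REPLACEMENT_STRING, &per_page_metadata, 1)
}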

@@ -511,6 +511,10 @@ where
pub fn target(dirs: Arc<Dirs>, sources: Arc<Sources>) -> DynDir {
let mut root = MemDir::new();
root.add(VPath::new("static"), dirs.static_.clone());
root.add(
VPath::new("robots.txt"),
Cd::new(dirs.static_.clone(), VPathBuf::new("robots.txt")).to_dyn(),
);
let dir_index = DirIndex::new(sources.parsed_trees.keys().map(|x| &**x));
let tree_view = TreehouseDir::new(dirs, sources, dir_index);
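
The second hunk mounts static/robots.txt at the root of the site's virtual filesystem, so crawlers can fetch it at /robots.txt (the only path they check) even though the file lives in the static/ directory, which the line above already serves under /static. The contents of robots.txt itself are not shown in this excerpt; purely as a hypothetical illustration of the format, a rudimentary file that permits all crawling while still giving crawlers a well-formed file to fetch could look like the snippet below. The real file may well contain more specific rules.

# Hypothetical contents for illustration only; the actual static/robots.txt is not shown in this diff.
User-agent: *
Disallow: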