diff --git a/Cargo.lock b/Cargo.lock
index 5e5f55e..244c87d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -313,6 +313,7 @@ dependencies = [
  "futures",
  "handlebars",
  "include_dir",
+ "indexmap 2.7.0",
  "mime_guess",
  "notify-debouncer-full",
  "rss",
@@ -978,7 +979,7 @@ dependencies = [
  "futures-sink",
  "futures-util",
  "http 0.2.12",
- "indexmap 2.3.0",
+ "indexmap 2.7.0",
  "slab",
  "tokio",
  "tokio-util",
@@ -1007,9 +1008,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 
 [[package]]
 name = "hashbrown"
-version = "0.14.5"
+version = "0.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
 
 [[package]]
 name = "hdrhistogram"
@@ -1239,12 +1240,13 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
 dependencies = [
  "equivalent",
- "hashbrown 0.14.5",
+ "hashbrown 0.15.2",
+ "serde",
 ]
 
 [[package]]
@@ -1682,7 +1684,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016"
 dependencies = [
  "base64 0.22.1",
- "indexmap 2.3.0",
+ "indexmap 2.7.0",
  "quick-xml 0.32.0",
  "serde",
  "time",
@@ -1967,7 +1969,7 @@ version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
 dependencies = [
- "indexmap 2.3.0",
+ "indexmap 2.7.0",
  "itoa",
  "memchr",
  "ryu",
@@ -2316,7 +2318,7 @@ version = "0.22.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
 dependencies = [
- "indexmap 2.3.0",
+ "indexmap 2.7.0",
  "serde",
  "serde_spanned",
  "toml_datetime",
diff --git a/Cargo.toml b/Cargo.toml
index 55907a1..96785a3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -42,6 +42,7 @@ fronma = "0.2.0"
 futures = "0.3.31"
 handlebars = "6.0.0"
 include_dir = "0.7.4"
+indexmap = { version = "2.7.0", features = ["serde"] }
 mime_guess = "2.0.5"
 notify-debouncer-full = { version = "0.3.1", default-features = false }
 rss = "2.0.7"
diff --git a/src/app.rs b/src/app.rs
index 4a4a4a2..34e3ae5 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -10,6 +10,7 @@ use axum::routing::get;
 use axum::{Json, Router};
 use handlebars::Handlebars;
 use include_dir::{include_dir, Dir};
+use indexmap::IndexMap;
 use rss::{Category, ChannelBuilder, ItemBuilder};
 use serde::{Deserialize, Serialize};
 use serde_json::Map;
@@ -80,7 +81,7 @@ struct QueryParams {
     #[serde(rename = "n")]
     num_posts: Option,
     #[serde(flatten)]
-    other: HashMap,
+    other: IndexMap,
 }
 
 fn collect_tags(posts: &Vec) -> Map {
diff --git a/src/post/blag.rs b/src/post/blag.rs
index 59b34d9..e1923f6 100644
--- a/src/post/blag.rs
+++ b/src/post/blag.rs
@@ -1,40 +1,129 @@
-use std::collections::HashMap;
+use std::collections::BTreeSet;
+use std::hash::{DefaultHasher, Hash, Hasher};
 use std::path::Path;
 use std::process::Stdio;
 use std::sync::Arc;
+use std::time::Duration;
 
 use axum::async_trait;
 use axum::http::HeaderValue;
+use chrono::{DateTime, Utc};
 use futures::stream::FuturesUnordered;
 use futures::StreamExt;
+use indexmap::IndexMap;
+use serde::Deserialize;
 use serde_value::Value;
 use tokio::fs::OpenOptions;
 use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
 use tokio::time::Instant;
-use tracing::{debug, error};
+use tracing::{debug, error, info, instrument, warn};
 
 use crate::error::PostError;
 use crate::post::Filter;
+use crate::systemtime_as_secs::as_secs;
 
-use super::cache::CacheGuard;
+use super::cache::{CacheGuard, CacheValue};
 use super::{ApplyFilters, PostManager, PostMetadata, RenderStats, ReturnedPost};
 
+#[derive(Deserialize, Debug)]
+struct BlagMetadata {
+    pub title: String,
+    pub description: String,
+    pub author: String,
+    pub icon: Option,
+    pub icon_alt: Option,
+    pub color: Option,
+    pub created_at: Option>,
+    pub modified_at: Option>,
+    #[serde(default)]
+    pub tags: BTreeSet,
+    pub dont_cache: bool,
+}
+
+impl BlagMetadata {
+    pub fn into_full(self, name: String) -> (PostMetadata, bool) {
+        (
+            PostMetadata {
+                name,
+                title: self.title,
+                description: self.description,
+                author: self.author,
+                icon: self.icon,
+                icon_alt: self.icon_alt,
+                color: self.color,
+                created_at: self.created_at,
+                modified_at: self.modified_at,
+                tags: self.tags.into_iter().collect(),
+            },
+            self.dont_cache,
+        )
+    }
+}
+
 pub struct Blag {
     root: Arc,
     blag_bin: Arc,
-    _cache: Option>,
+    cache: Option>,
     _fastblag: bool,
 }
 
 impl Blag {
-    pub fn new(root: Arc, blag_bin: Arc, _cache: Option>) -> Blag {
+    pub fn new(root: Arc, blag_bin: Arc, cache: Option>) -> Blag {
         Self {
             root,
             blag_bin,
-            _cache,
+            cache,
             _fastblag: false,
         }
     }
+
+    async fn render(
+        &self,
+        name: &str,
+        path: impl AsRef,
+        query_json: String,
+    ) -> Result<(PostMetadata, String, (Duration, Duration), bool), PostError> {
+        let start = Instant::now();
+
+        debug!(%name, "rendering");
+
+        let mut cmd = tokio::process::Command::new(&*self.blag_bin)
+            .arg(path.as_ref())
+            .env("BLAG_QUERY", query_json)
+            .stdout(Stdio::piped())
+            .spawn()
+            .map_err(|err| {
+                error!("failed to spawn {:?}: {err}", self.blag_bin);
+                err
+            })?;
+
+        let stdout = cmd.stdout.take().unwrap();
+
+        let mut reader = BufReader::new(stdout);
+        let mut buf = String::new();
+        reader.read_line(&mut buf).await?;
+
+        let blag_meta: BlagMetadata = serde_json::from_str(&buf)?;
+        debug!("blag meta: {blag_meta:?}");
+        let (meta, dont_cache) = blag_meta.into_full(name.to_string());
+        let parsed = start.elapsed();
+
+        let rendering = Instant::now();
+        buf.clear();
+        reader.read_to_string(&mut buf).await?;
+
+        debug!("read output: {} bytes", buf.len());
+
+        let exit_status = cmd.wait().await?;
+        debug!("exited: {exit_status}");
+        if !exit_status.success() {
+            return Err(PostError::RenderError(exit_status.to_string()));
+        }
+
+        let rendered = rendering.elapsed();
+
+        Ok((meta, buf, (parsed, rendered), dont_cache))
+    }
 }
 
 #[async_trait]
@@ -42,10 +131,10 @@ impl PostManager for Blag {
     async fn get_all_posts(
         &self,
         filters: &[Filter<'_>],
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result, PostError> {
         let mut set = FuturesUnordered::new();
-        let mut meow = Vec::new();
+        let mut posts = Vec::new();
         let mut files = tokio::fs::read_dir(&self.root).await?;
 
         loop {
@@ -88,19 +177,20 @@ impl PostManager for Blag {
             };
 
             if post.0.apply_filters(filters) {
-                meow.push(post);
+                posts.push(post);
             }
         }
 
         debug!("collected posts");
 
-        Ok(meow)
+        Ok(posts)
     }
 
+    #[instrument(level = "info", skip(self))]
     async fn get_post(
         &self,
         name: &str,
-        _query: &HashMap,
+        query: &IndexMap,
     ) -> Result {
         let start = Instant::now();
         let mut path = self.root.join(name);
@@ -137,49 +227,68 @@
             return Err(PostError::NotFound(name.to_string()));
         }
 
-        let mut cmd = tokio::process::Command::new(&*self.blag_bin)
-            .arg(path)
-            .stdout(Stdio::piped())
-            .spawn()
-            .map_err(|err| {
-                error!("failed to spawn {:?}: {err}", self.blag_bin);
-                err
-            })?;
+        let mtime = as_secs(&stat.modified()?);
 
-        let stdout = cmd.stdout.take().unwrap();
+        let query_json = serde_json::to_string(&query).expect("this should not fail");
+        let mut hasher = DefaultHasher::new();
+        query_json.hash(&mut hasher);
+        let query_hash = hasher.finish();
 
-        let mut reader = BufReader::new(stdout);
-        let mut buf = String::new();
-        reader.read_line(&mut buf).await?;
+        let post = if let Some(cache) = &self.cache {
+            if let Some(CacheValue {
+                metadata, rendered, ..
+            }) = cache.lookup(name, mtime, query_hash).await
+            {
+                ReturnedPost::Rendered(metadata, rendered, RenderStats::Cached(start.elapsed()))
+            } else {
+                let (meta, content, (parsed, rendered), dont_cache) =
+                    self.render(name, path, query_json).await?;
 
-        let mut meta: PostMetadata = serde_json::from_str(&buf)?;
-        meta.name = name.to_string();
-        let parsed = start.elapsed();
+                if !dont_cache {
+                    cache
+                        .insert(
+                            name.to_string(),
+                            meta.clone(),
+                            mtime,
+                            content.clone(),
+                            query_hash,
+                        )
+                        .await
+                        .unwrap_or_else(|err| warn!("failed to insert {:?} into cache", err.0));
+                }
 
-        let rendering = Instant::now();
-        buf.clear();
-        reader.read_to_string(&mut buf).await?;
+                let total = start.elapsed();
+                ReturnedPost::Rendered(
+                    meta,
+                    content,
+                    RenderStats::Rendered {
+                        total,
+                        parsed,
+                        rendered,
+                    },
+                )
+            }
+        } else {
+            let (meta, content, (parsed, rendered), ..) =
+                self.render(name, path, query_json).await?;
 
-        debug!("read output: {} bytes", buf.len());
+            let total = start.elapsed();
+            ReturnedPost::Rendered(
+                meta,
+                content,
+                RenderStats::Rendered {
+                    total,
+                    parsed,
+                    rendered,
+                },
+            )
+        };
 
-        let exit_status = cmd.wait().await?;
-        debug!("exited: {exit_status}");
-        if !exit_status.success() {
-            return Err(PostError::RenderError(exit_status.to_string()));
+        if let ReturnedPost::Rendered(.., stats) = &post {
+            info!("rendered blagpost in {:?}", stats);
         }
 
-        let rendered = rendering.elapsed();
-        let total = start.elapsed();
-
-        Ok(ReturnedPost::Rendered(
-            meta,
-            buf,
-            RenderStats::Rendered {
-                parsed,
-                rendered,
-                total,
-            },
-        ))
+        Ok(post)
     }
 
     async fn as_raw(&self, name: &str) -> Result, PostError> {
diff --git a/src/post/cache.rs b/src/post/cache.rs
index 250a073..400fc1a 100644
--- a/src/post/cache.rs
+++ b/src/post/cache.rs
@@ -17,7 +17,7 @@ pub struct CacheValue {
     pub metadata: PostMetadata,
     pub rendered: String,
     pub mtime: u64,
-    extra: u64,
+    pub extra: u64,
 }
 
 #[derive(Serialize, Deserialize, Clone)]
diff --git a/src/post/markdown_posts.rs b/src/post/markdown_posts.rs
index cda4010..616fe9c 100644
--- a/src/post/markdown_posts.rs
+++ b/src/post/markdown_posts.rs
@@ -1,4 +1,4 @@
-use std::collections::{BTreeSet, HashMap};
+use std::collections::BTreeSet;
 use std::hash::{DefaultHasher, Hash, Hasher};
 use std::io;
 use std::path::Path;
@@ -13,11 +13,12 @@ use chrono::{DateTime, Utc};
 use color_eyre::eyre::{self, Context};
 use comrak::plugins::syntect::SyntectAdapter;
 use fronma::parser::{parse, ParsedData};
+use indexmap::IndexMap;
 use serde::Deserialize;
 use serde_value::Value;
 use tokio::fs;
 use tokio::io::AsyncReadExt;
-use tracing::warn;
+use tracing::{info, instrument, warn};
 
 use crate::config::Config;
 use crate::markdown_render::{build_syntect, render};
@@ -141,7 +142,7 @@ impl PostManager for MarkdownPosts {
     async fn get_all_posts(
         &self,
         filters: &[Filter<'_>],
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result, PostError> {
         let mut posts = Vec::new();
 
@@ -173,7 +174,7 @@ impl PostManager for MarkdownPosts {
     async fn get_all_post_metadata(
         &self,
         filters: &[Filter<'_>],
-        _query: &HashMap,
+        _query: &IndexMap,
     ) -> Result, PostError> {
         let mut posts = Vec::new();
 
@@ -214,12 +215,13 @@ impl PostManager for MarkdownPosts {
         Ok(posts)
     }
 
+    #[instrument(level = "info", skip(self))]
     async fn get_post(
         &self,
         name: &str,
-        _query: &HashMap,
+        _query: &IndexMap,
     ) -> Result {
-        if self.config.markdown_access && name.ends_with(".md") {
+        let post = if self.config.markdown_access && name.ends_with(".md") {
             let path = self.config.dirs.posts.join(name);
 
             let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
@@ -239,10 +241,7 @@ impl PostManager for MarkdownPosts {
 
             file.read_to_end(&mut buf).await?;
 
-            Ok(ReturnedPost::Raw(
-                buf,
-                HeaderValue::from_static("text/plain"),
-            ))
+            ReturnedPost::Raw(buf, HeaderValue::from_static("text/plain"))
         } else {
             let start = Instant::now();
             let path = self.config.dirs.posts.join(name.to_owned() + ".md");
@@ -264,15 +263,15 @@ impl PostManager for MarkdownPosts {
             if let Some(cache) = &self.cache
                 && let Some(hit) = cache.lookup(name, mtime, self.render_hash).await
             {
-                Ok(ReturnedPost::Rendered(
+                ReturnedPost::Rendered(
                     hit.metadata,
                     hit.rendered,
                     RenderStats::Cached(start.elapsed()),
-                ))
+                )
             } else {
                 let (metadata, rendered, stats) =
                     self.parse_and_render(name.to_string(), path).await?;
-                Ok(ReturnedPost::Rendered(
+                ReturnedPost::Rendered(
                     metadata,
                     rendered,
                     RenderStats::Rendered {
@@ -280,9 +279,15 @@ impl PostManager for MarkdownPosts {
                         parsed: stats.0,
                         rendered: stats.1,
                     },
-                ))
+                )
             }
+        };
+
+        if let ReturnedPost::Rendered(.., stats) = &post {
+            info!("rendered post in {:?}", stats);
         }
+
+        Ok(post)
     }
 
     async fn cleanup(&self) {
diff --git a/src/post/mod.rs b/src/post/mod.rs
index 2b40fb3..fc1541c 100644
--- a/src/post/mod.rs
+++ b/src/post/mod.rs
@@ -2,10 +2,11 @@ pub mod blag;
 pub mod cache;
 pub mod markdown_posts;
 
-use std::{collections::HashMap, time::Duration};
+use std::time::Duration;
 
 use axum::{async_trait, http::HeaderValue};
 use chrono::{DateTime, Utc};
+use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};
 use serde_value::Value;
 
@@ -81,7 +82,7 @@ pub trait PostManager {
     async fn get_all_post_metadata(
         &self,
         filters: &[Filter<'_>],
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result, PostError> {
         self.get_all_posts(filters, query)
             .await
@@ -91,14 +92,14 @@ pub trait PostManager {
     async fn get_all_posts(
         &self,
         filters: &[Filter<'_>],
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result, PostError>;
 
     async fn get_max_n_post_metadata_with_optional_tag_sorted(
         &self,
         n: Option,
         tag: Option<&str>,
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result, PostError> {
         let filters = tag.and(Some(Filter::Tags(tag.as_slice())));
         let mut posts = self
@@ -119,7 +120,7 @@ pub trait PostManager {
     async fn get_post_metadata(
         &self,
         name: &str,
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result {
         match self.get_post(name, query).await? {
             ReturnedPost::Rendered(metadata, ..) => Ok(metadata),
@@ -130,7 +131,7 @@ pub trait PostManager {
     async fn get_post(
         &self,
         name: &str,
-        query: &HashMap,
+        query: &IndexMap,
     ) -> Result;
 
     async fn cleanup(&self) {}