implement config hot reloading

This commit is contained in:
slonkazoid 2025-01-29 17:25:12 +03:00
parent 5c8aea9277
commit fade130b8d
Signed by: slonk
SSH key fingerprint: SHA256:tbZfJX4IOvZ0LGWOWu5Ijo8jfMPi78TU7x1VoEeCIjM
13 changed files with 470 additions and 287 deletions

View file

@ -3,17 +3,36 @@
the configuration format, with defaults, is documented below:
```toml
title = "bingus-blog" # title of the blog
# description of the blog
description = "blazingly fast blog software written in rust memory safe"
raw_access = true # allow users to see the raw source of a post
js_enable = true # enable javascript (required for sorting and dates)
engine = "markdown" # choose which post engine to use
[engine]
mode = "markdown" # choose which post engine to use
# options: "markdown", "blag"
# absolutely do not use "blag" unless you know exactly
# what you are getting yourself into.
[engine.markdown] # options for the `markdown` engine
root = "posts" # where posts are served from
raw_access = true # allow visitors to see the raw markdown of a post
[engine.markdown.render]
escape = false # escape HTML in the markdown source instead of
# clobbering it (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.escape)
unsafe = false # allow HTML and dangerous links (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.unsafe_)
[engine.markdown.render.syntect]
load_defaults = false # include default syntect themes
themes_dir = "themes" # directory to include themes from
theme = "Catppuccin Mocha" # theme file name (without `.tmTheme`)
[engine.blag]
root = "posts" # where posts are served from
bin = "blag" # path to the `blag` binary
raw_access = true # allow visitors to see the raw bash of a post
[style]
title = "bingus-blog" # title of the blog
# description of the blog
description = "blazingly fast blog software written in rust memory safe"
js_enable = true # enable javascript (required for sorting and dates)
date_format = "RFC3339" # format string used to format dates in the backend
# it's highly recommended to leave this as default,
# so the date can be formatted by the browser.
@ -31,7 +50,6 @@ enable = false # serve an rss feed under /feed.xml
link = "https://..." # public url of the blog, required if rss is enabled
[dirs]
posts = "posts" # where posts are stored
media = "media" # directory served under /media/
custom_templates = "templates" # custom templates dir
custom_static = "static" # custom static dir
@ -54,19 +72,6 @@ persistence = true # save the cache to disk on shutdown and load it on startup
file = "cache" # file to save the cache to
compress = true # compress the cache file
compression_level = 3 # zstd compression level, 3 is recommended
[render]
escape = false # escape HTML in the markdown source instead of
# clobbering it (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.escape)
unsafe = false # allow HTML and dangerous links (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.unsafe_)
[render.syntect]
load_defaults = false # include default syntect themes
themes_dir = "themes" # directory to include themes from
theme = "Catppuccin Mocha" # theme file name (without `.tmTheme`)
[blag]
bin = "blag" # path to blag binary
```
configuration is done in [TOML](https://toml.io/)

17
Cargo.lock generated
View file

@ -38,6 +38,15 @@ version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "arc-swap"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
dependencies = [
"serde",
]
[[package]]
name = "askama"
version = "0.12.1"
@ -266,6 +275,7 @@ dependencies = [
name = "bingus-blog"
version = "0.1.0"
dependencies = [
"arc-swap",
"askama",
"askama_axum",
"axum",
@ -274,6 +284,7 @@ dependencies = [
"color-eyre",
"comrak",
"console-subscriber",
"const-str",
"fronma",
"futures",
"handlebars",
@ -515,6 +526,12 @@ dependencies = [
"tracing-subscriber",
]
[[package]]
name = "const-str"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3618cccc083bb987a415d85c02ca6c9994ea5b44731ec28b9ecf09658655fba9"
[[package]]
name = "cpufeatures"
version = "0.2.16"

View file

@ -10,11 +10,12 @@ default = []
tokio-console = ["dep:console-subscriber"]
[profile.release]
lto = "fat"
lto = "thin"
opt-level = 3
strip = true
[dependencies]
arc-swap = { version = "1.7.1", features = ["serde"] }
askama = { version = "0.12.1", features = [
"with-axum",
], default-features = false }
@ -37,6 +38,7 @@ comrak = { version = "0.32.0", features = [
"syntect",
], default-features = false }
console-subscriber = { version = "0.4.1", optional = true }
const-str = "0.5.7"
fronma = "0.2.0"
futures = "0.3.31"
handlebars = "6.0.0"
@ -57,6 +59,7 @@ tokio = { version = "1.37.0", features = [
"rt-multi-thread",
"signal",
"process",
"sync",
] }
tokio-util = { version = "0.7.10", default-features = false }
toml = "0.8.12"

View file

@ -1,6 +1,7 @@
use std::sync::Arc;
use std::time::Duration;
use arc_swap::access::DynAccess;
use axum::extract::{Path, Query, State};
use axum::http::header::CONTENT_TYPE;
use axum::http::Request;
@ -19,7 +20,7 @@ use tower_http::services::ServeDir;
use tower_http::trace::TraceLayer;
use tracing::{info, info_span, Span};
use crate::config::{Config, StyleConfig};
use crate::config::{DirsConfig, RssConfig, StyleConfig};
use crate::error::{AppError, AppResult};
use crate::post::{Filter, PostManager, PostMetadata, RenderStats, ReturnedPost};
use crate::serve_dir_included::handle;
@ -42,7 +43,8 @@ const BINGUS_INFO: BingusInfo = BingusInfo {
#[derive(Clone)]
#[non_exhaustive]
pub struct AppState {
pub config: Arc<Config>,
pub rss: Arc<dyn DynAccess<RssConfig> + Send + Sync>,
pub style: Arc<dyn DynAccess<StyleConfig> + Send + Sync>,
pub posts: Arc<dyn PostManager + Send + Sync>,
pub templates: Arc<RwLock<Handlebars<'static>>>,
}
@ -50,8 +52,6 @@ pub struct AppState {
#[derive(Serialize)]
struct IndexTemplate<'a> {
bingus_info: &'a BingusInfo,
title: &'a str,
description: &'a str,
posts: Vec<PostMetadata>,
rss: bool,
js: bool,
@ -64,8 +64,8 @@ struct IndexTemplate<'a> {
struct PostTemplate<'a> {
bingus_info: &'a BingusInfo,
meta: &'a PostMetadata,
rendered: Arc<str>,
rendered_in: RenderStats,
body: Arc<str>,
perf: RenderStats,
js: bool,
color: Option<&'a str>,
joined_tags: String,
@ -116,7 +116,8 @@ fn join_tags_for_meta(tags: &IndexMap<Arc<str>, u64>, delim: &str) -> String {
async fn index(
State(AppState {
config,
rss,
style,
posts,
templates: reg,
..
@ -135,21 +136,21 @@ async fn index(
let joined_tags = join_tags_for_meta(&tags, ", ");
let reg = reg.read().await;
let style = style.load();
let rendered = reg.render(
"index",
&IndexTemplate {
title: &config.title,
description: &config.description,
bingus_info: &BINGUS_INFO,
posts,
rss: config.rss.enable,
js: config.js_enable,
rss: rss.load().enable,
js: style.js_enable,
tags,
joined_tags,
style: &config.style,
style: &style,
},
);
drop(reg);
drop((style, reg));
Ok(Html(rendered?))
}
@ -169,10 +170,12 @@ async fn all_posts(
}
async fn rss(
State(AppState { config, posts, .. }): State<AppState>,
State(AppState {
rss, style, posts, ..
}): State<AppState>,
Query(query): Query<QueryParams>,
) -> AppResult<Response> {
if !config.rss.enable {
if !rss.load().enable {
return Err(AppError::RssDisabled);
}
@ -187,11 +190,13 @@ async fn rss(
)
.await?;
let rss = rss.load();
let style = style.load();
let mut channel = ChannelBuilder::default();
channel
.title(&config.title)
.link(config.rss.link.to_string())
.description(&config.description);
.title(&*style.title)
.link(rss.link.to_string())
.description(&*style.description);
//TODO: .language()
for (metadata, content, _) in posts {
@ -213,15 +218,14 @@ async fn rss(
.pub_date(metadata.written_at.map(|date| date.to_rfc2822()))
.content(content.to_string())
.link(
config
.rss
.link
rss.link
.join(&format!("/posts/{}", metadata.name))?
.to_string(),
)
.build(),
);
}
drop((style, rss));
let body = channel.build().to_string();
drop(channel);
@ -231,7 +235,7 @@ async fn rss(
async fn post(
State(AppState {
config,
style,
posts,
templates: reg,
..
@ -242,33 +246,30 @@ async fn post(
match posts.get_post(name.clone(), &query.other).await? {
ReturnedPost::Rendered {
ref meta,
body: rendered,
perf: rendered_in,
body,
perf,
raw_name,
} => {
let joined_tags = meta.tags.join(", ");
let reg = reg.read().await;
let style = style.load();
let rendered = reg.render(
"post",
&PostTemplate {
bingus_info: &BINGUS_INFO,
meta,
rendered,
rendered_in,
js: config.js_enable,
color: meta
.color
.as_deref()
.or(config.style.default_color.as_deref()),
body,
perf,
js: style.js_enable,
color: meta.color.as_deref().or(style.default_color.as_deref()),
joined_tags,
style: &config.style,
raw_name: config
.markdown_access
.then(|| posts.as_raw(&meta.name))
.unwrap_or(None),
style: &style,
raw_name,
},
);
drop(reg);
drop((style, reg));
Ok(Html(rendered?).into_response())
}
ReturnedPost::Raw {
@ -278,7 +279,7 @@ async fn post(
}
}
pub fn new(config: &Config) -> Router<AppState> {
pub fn new(dirs: &DirsConfig) -> Router<AppState> {
Router::new()
.route("/", get(index))
.route(
@ -292,11 +293,11 @@ pub fn new(config: &Config) -> Router<AppState> {
.route("/feed.xml", get(rss))
.nest_service(
"/static",
ServeDir::new(&config.dirs.custom_static)
ServeDir::new(&dirs.static_)
.precompressed_gzip()
.fallback(service_fn(|req| handle(req, &STATIC))),
)
.nest_service("/media", ServeDir::new(&config.dirs.media))
.nest_service("/media", ServeDir::new(&dirs.media))
.layer(
TraceLayer::new_for_http()
.make_span_with(|request: &Request<_>| {

View file

@ -1,34 +1,34 @@
use std::borrow::Cow;
use std::env;
use std::net::{IpAddr, Ipv6Addr};
use std::num::NonZeroU64;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
use arc_swap::ArcSwap;
use color_eyre::eyre::{self, bail, Context};
use const_str::{concat, convert_ascii_case};
use notify_debouncer_full::notify::RecursiveMode;
use notify_debouncer_full::{new_debouncer, DebouncedEvent};
use serde::{Deserialize, Serialize};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing::{error, info, instrument};
use tokio::select;
use tokio_util::sync::CancellationToken;
use tracing::{error, info, instrument, trace};
use url::Url;
use crate::de::*;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash)]
#[serde(default)]
pub struct SyntectConfig {
pub load_defaults: bool,
pub themes_dir: Option<PathBuf>,
pub theme: Option<String>,
pub themes_dir: Option<Box<Path>>,
pub theme: Option<Box<str>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, Default)]
#[serde(default)]
pub struct RenderConfig {
pub syntect: SyntectConfig,
pub escape: bool,
#[serde(rename = "unsafe")]
pub unsafe_: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct CacheConfig {
pub enable: bool,
@ -38,43 +38,43 @@ pub struct CacheConfig {
#[serde(deserialize_with = "check_millis")]
pub cleanup_interval: Option<NonZeroU64>,
pub persistence: bool,
pub file: PathBuf,
pub file: Box<Path>,
pub compress: bool,
#[serde(deserialize_with = "check_zstd_level_bounds")]
pub compression_level: i32,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct HttpConfig {
pub host: IpAddr,
pub port: u16,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct DirsConfig {
pub posts: PathBuf,
pub media: PathBuf,
pub custom_static: PathBuf,
pub custom_templates: PathBuf,
pub media: Box<Path>,
#[serde(rename = "static")]
pub static_: Box<Path>,
pub templates: Box<Path>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
pub struct RssConfig {
pub enable: bool,
pub link: Url,
}
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[derive(Serialize, Deserialize, Debug, Default)]
pub enum DateFormat {
#[default]
RFC3339,
#[serde(untagged)]
Strftime(String),
Strftime(Box<str>),
}
#[derive(Serialize, Deserialize, Debug, Clone, Default, Copy, PartialEq, Eq)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Default)]
#[serde(rename_all = "lowercase")]
#[repr(u8)]
pub enum Sort {
@ -83,61 +83,104 @@ pub enum Sort {
Name,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
#[derive(Default)]
pub struct StyleConfig {
pub title: Box<str>,
pub description: Box<str>,
pub js_enable: bool,
pub display_dates: DisplayDates,
pub date_format: DateFormat,
pub default_sort: Sort,
pub default_color: Option<String>,
pub default_color: Option<Box<str>>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
impl Default for StyleConfig {
fn default() -> Self {
Self {
title: "bingus-blog".into(),
description: "blazingly fast markdown blog software written in rust memory safe".into(),
js_enable: true,
display_dates: Default::default(),
date_format: Default::default(),
default_sort: Default::default(),
default_color: Default::default(),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
#[serde(default)]
pub struct DisplayDates {
pub creation: bool,
pub modification: bool,
}
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Default)]
#[serde(default)]
pub struct MarkdownRenderConfig {
pub syntect: SyntectConfig,
pub escape: bool,
#[serde(rename = "unsafe")]
pub unsafe_: bool,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MarkdownConfig {
pub root: Box<Path>,
pub render: MarkdownRenderConfig,
pub raw_access: bool,
}
impl Default for MarkdownConfig {
fn default() -> Self {
Self {
root: PathBuf::from("posts").into(),
render: Default::default(),
raw_access: true,
}
}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct BlagConfig {
pub root: Box<Path>,
pub bin: Box<Path>,
pub raw_access: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default)]
#[serde(rename_all = "lowercase")]
pub enum Engine {
pub enum EngineMode {
#[default]
Markdown,
Blag,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)]
pub struct BlagConfig {
pub bin: PathBuf,
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default, rename_all = "lowercase")]
pub struct Engine {
pub mode: EngineMode,
pub markdown: MarkdownConfig,
pub blag: BlagConfig,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct Config {
pub title: String,
pub description: String,
pub markdown_access: bool,
pub js_enable: bool,
pub engine: Engine,
pub style: StyleConfig,
pub rss: RssConfig,
#[serde(rename = "custom")]
pub dirs: DirsConfig,
pub http: HttpConfig,
pub render: RenderConfig,
pub cache: CacheConfig,
pub blag: BlagConfig,
}
impl Default for Config {
fn default() -> Self {
Self {
title: "bingus-blog".into(),
description: "blazingly fast markdown blog software written in rust memory safe".into(),
markdown_access: true,
js_enable: true,
engine: Default::default(),
style: Default::default(),
// i have a love-hate relationship with serde
@ -152,9 +195,7 @@ impl Default for Config {
},
dirs: Default::default(),
http: Default::default(),
render: Default::default(),
cache: Default::default(),
blag: Default::default(),
}
}
}
@ -171,10 +212,9 @@ impl Default for DisplayDates {
impl Default for DirsConfig {
fn default() -> Self {
Self {
posts: "posts".into(),
media: "media".into(),
custom_static: "static".into(),
custom_templates: "templates".into(),
media: PathBuf::from("media").into_boxed_path(),
static_: PathBuf::from("static").into_boxed_path(),
templates: PathBuf::from("templates").into_boxed_path(),
}
}
}
@ -192,7 +232,7 @@ impl Default for SyntectConfig {
fn default() -> Self {
Self {
load_defaults: false,
themes_dir: Some("themes".into()),
themes_dir: Some(PathBuf::from("themes").into_boxed_path()),
theme: Some("Catppuccin Mocha".into()),
}
}
@ -206,7 +246,7 @@ impl Default for CacheConfig {
cleanup: true,
cleanup_interval: None,
persistence: true,
file: "cache".into(),
file: PathBuf::from("cache").into(),
compress: true,
compression_level: 3,
}
@ -215,22 +255,25 @@ impl Default for CacheConfig {
impl Default for BlagConfig {
fn default() -> Self {
Self { bin: "blag".into() }
Self {
root: PathBuf::from("posts").into(),
bin: PathBuf::from("blag").into(),
raw_access: true,
}
}
}
#[instrument(name = "config")]
pub async fn load() -> eyre::Result<Config> {
let config_file = env::var(format!(
"{}_CONFIG",
env!("CARGO_BIN_NAME").to_uppercase().replace('-', "_")
fn config_path() -> Cow<'static, str> {
env::var(concat!(
convert_ascii_case!(upper_camel, env!("CARGO_BIN_NAME")),
"_CONFIG"
))
.unwrap_or_else(|_| String::from("config.toml"));
match tokio::fs::OpenOptions::new()
.read(true)
.open(&config_file)
.await
{
.map(Into::into)
.unwrap_or("config.toml".into())
}
pub async fn load_from(path: (impl AsRef<Path> + std::fmt::Debug)) -> eyre::Result<Config> {
match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(mut file) => {
let mut buf = String::new();
file.read_to_string(&mut buf)
@ -246,7 +289,7 @@ pub async fn load() -> eyre::Result<Config> {
.write(true)
.create(true)
.truncate(true)
.open(&config_file)
.open(&path)
.await
{
Ok(mut file) => file
@ -256,18 +299,93 @@ pub async fn load() -> eyre::Result<Config> {
.as_bytes(),
)
.await
.unwrap_or_else(|err| error!("couldn't write configuration: {}", err)),
Err(err) => {
error!("couldn't open file {:?} for writing: {}", &config_file, err)
}
.unwrap_or_else(|err| error!("couldn't write configuration: {err}")),
Err(err) => error!("couldn't open file {path:?} for writing: {err}"),
}
Ok(config)
}
_ => bail!("couldn't open config file: {}", err),
_ => bail!("couldn't open config file: {err}"),
},
}
}
/// Loads the configuration from the resolved config path, returning both the
/// parsed config and the path it was read from (needed later by the watcher).
#[instrument]
pub async fn load() -> eyre::Result<(Config, Cow<'static, str>)> {
    let path = config_path();
    let config = load_from(path.as_ref()).await?;
    Ok((config, path))
}
/// Reacts to a single debounced filesystem event: if it is a create/modify
/// touching `config_file`, reload the config and atomically swap it in.
async fn process_event(
    event: DebouncedEvent,
    config_file: &Path,
    swapper: &ArcSwap<Config>,
) -> eyre::Result<()> {
    // Only create/modify events that actually touch the config file matter.
    let relevant = (event.kind.is_modify() || event.kind.is_create())
        && event.paths.iter().any(|p| p == config_file);
    if !relevant {
        trace!("not interested: {event:?}");
        return Ok(());
    }

    let config = load_from(config_file).await?;
    info!("reloaded config from {config_file:?}");
    swapper.store(Arc::new(config));
    Ok(())
}
/// Watches the directory containing `config_file` and hot-swaps a freshly
/// parsed config into `swapper` whenever the file is created or modified.
/// Runs until `watcher_token` is cancelled.
#[instrument(skip_all)]
pub async fn watcher(
    config_file: impl AsRef<str>,
    watcher_token: CancellationToken,
    swapper: Arc<ArcSwap<Config>>,
) -> eyre::Result<()> {
    let config_file = tokio::fs::canonicalize(config_file.as_ref())
        .await
        .context("failed to canonicalize path")?;

    let (tx, mut rx) = tokio::sync::mpsc::channel(1);
    // The debouncer invokes this callback from its own thread, hence blocking_send.
    let mut debouncer = new_debouncer(Duration::from_millis(100), None, move |events| {
        tx.blocking_send(events)
            .expect("failed to send message over channel")
    })?;

    // Watch the parent directory rather than the file itself: many editors
    // replace the file on save (rename-over), which would orphan a file watch.
    let dir = config_file
        .as_path()
        .parent()
        .expect("absolute path to have parent");
    debouncer
        .watch(&dir, RecursiveMode::NonRecursive)
        .with_context(|| format!("failed to watch {dir:?}"))?;

    'event_loop: while let Some(ev) = select! {
        _ = watcher_token.cancelled() => {
            // FIX: replaced leftover debug log `info!("2")` with a real message.
            info!("config watcher cancelled, shutting down");
            break 'event_loop;
        },
        ev = rx.recv() => ev,
    } {
        let events = match ev {
            Ok(events) => events,
            Err(err) => {
                error!("error getting events: {err:?}");
                continue;
            }
        };

        for event in events {
            if let Err(err) = process_event(event, &config_file, &swapper).await {
                error!("error while processing event: {err}");
            }
        }
    }

    Ok(())
}
fn check_zstd_level_bounds<'de, D>(d: D) -> Result<i32, D::Error>
where
D: serde::Deserializer<'de>,

View file

@ -1,4 +1,4 @@
#![feature(let_chains, pattern, path_add_extension)]
#![feature(let_chains, pattern, path_add_extension, if_let_guard)]
mod app;
mod config;
@ -18,8 +18,10 @@ use std::process::exit;
use std::sync::Arc;
use std::time::Duration;
use arc_swap::access::Map;
use arc_swap::ArcSwap;
use color_eyre::eyre::{self, Context};
use config::Engine;
use config::{Config, EngineMode};
use tokio::net::TcpListener;
use tokio::sync::RwLock;
use tokio::task::JoinSet;
@ -27,7 +29,7 @@ use tokio::time::Instant;
use tokio::{select, signal};
use tokio_util::sync::CancellationToken;
use tracing::level_filters::LevelFilter;
use tracing::{debug, error, info, info_span, warn, Instrument};
use tracing::{debug, error, info, warn};
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::{util::SubscriberInitExt, EnvFilter};
@ -51,21 +53,25 @@ async fn main() -> eyre::Result<()> {
);
reg.with(tracing_subscriber::fmt::layer()).init();
let config = Arc::new(
config::load()
.await
.context("couldn't load configuration")?,
);
let socket_addr = SocketAddr::new(config.http.host, config.http.port);
let mut tasks = JoinSet::new();
let cancellation_token = CancellationToken::new();
let (config, config_file) = config::load()
.await
.context("couldn't load configuration")?;
let config = Arc::new(config);
let swapper = Arc::new(ArcSwap::from(config.clone()));
let config_cache_access: crate::post::cache::ConfigAccess =
Box::new(arc_swap::access::Map::new(swapper.clone(), |c: &Config| {
&c.cache
}));
info!("loaded config from {config_file:?}");
let start = Instant::now();
// NOTE: use tokio::task::spawn_blocking if this ever turns into a concurrent task
let mut reg = new_registry(&config.dirs.custom_templates)
.context("failed to create handlebars registry")?;
let mut reg =
new_registry(&config.dirs.templates).context("failed to create handlebars registry")?;
reg.register_helper("date", Box::new(helpers::date));
reg.register_helper("duration", Box::new(helpers::duration));
debug!(duration = ?start.elapsed(), "registered all templates");
@ -74,14 +80,11 @@ async fn main() -> eyre::Result<()> {
debug!("setting up watcher");
let watcher_token = cancellation_token.child_token();
tasks.spawn(
watch_templates(
config.dirs.custom_templates.clone(),
watcher_token.clone(),
registry.clone(),
)
.instrument(info_span!("custom_template_watcher")),
);
tasks.spawn(watch_templates(
config.dirs.templates.clone(),
watcher_token.clone(),
registry.clone(),
));
let cache = if config.cache.enable {
if config.cache.persistence && tokio::fs::try_exists(&config.cache.file).await? {
@ -104,18 +107,26 @@ async fn main() -> eyre::Result<()> {
} else {
None
}
.map(|cache| CacheGuard::new(cache, config.cache.clone()))
.map(|cache| CacheGuard::new(cache, config_cache_access))
.map(Arc::new);
let posts: Arc<dyn PostManager + Send + Sync> = match config.engine {
Engine::Markdown => Arc::new(MarkdownPosts::new(Arc::clone(&config), cache.clone()).await?),
Engine::Blag => Arc::new(Blag::new(
config.dirs.posts.clone().into(),
config.blag.bin.clone().into(),
cache.clone(),
)),
let posts: Arc<dyn PostManager + Send + Sync> = match config.engine.mode {
EngineMode::Markdown => {
let access = Map::new(swapper.clone(), |c: &Config| &c.engine.markdown);
Arc::new(MarkdownPosts::new(access, cache.clone()).await?)
}
EngineMode::Blag => {
let access = Map::new(swapper.clone(), |c: &Config| &c.engine.blag);
Arc::new(Blag::new(access, cache.clone()))
}
};
debug!("setting up config watcher");
let token = cancellation_token.child_token();
tasks.spawn(config::watcher(config_file, token, swapper.clone()));
if config.cache.enable && config.cache.cleanup {
if let Some(millis) = config.cache.cleanup_interval {
let posts = Arc::clone(&posts);
@ -138,12 +149,14 @@ async fn main() -> eyre::Result<()> {
}
let state = AppState {
config: Arc::clone(&config),
rss: Arc::new(Map::new(swapper.clone(), |c: &Config| &c.rss)),
style: Arc::new(Map::new(swapper.clone(), |c: &Config| &c.style)),
posts,
templates: registry,
};
let app = app::new(&config).with_state(state.clone());
let app = app::new(&config.dirs).with_state(state.clone());
let socket_addr = SocketAddr::new(config.http.host, config.http.port);
let listener = TcpListener::bind(socket_addr)
.await
.with_context(|| format!("couldn't listen on {}", socket_addr))?;

View file

@ -6,9 +6,9 @@ use comrak::ComrakOptions;
use comrak::RenderPlugins;
use syntect::highlighting::ThemeSet;
use crate::config::RenderConfig;
use crate::config::MarkdownRenderConfig;
pub fn build_syntect(config: &RenderConfig) -> eyre::Result<SyntectAdapter> {
pub fn build_syntect(config: &MarkdownRenderConfig) -> eyre::Result<SyntectAdapter> {
let mut theme_set = if config.syntect.load_defaults {
ThemeSet::load_defaults()
} else {
@ -28,7 +28,7 @@ pub fn build_syntect(config: &RenderConfig) -> eyre::Result<SyntectAdapter> {
pub fn render(
markdown: &str,
config: &RenderConfig,
config: &MarkdownRenderConfig,
syntect: Option<&dyn SyntaxHighlighterAdapter>,
) -> String {
let mut options = ComrakOptions::default();

View file

@ -5,6 +5,7 @@ use std::process::Stdio;
use std::sync::Arc;
use std::time::Duration;
use arc_swap::access::Access;
use axum::async_trait;
use axum::http::HeaderValue;
use chrono::{DateTime, Utc};
@ -18,6 +19,7 @@ use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::time::Instant;
use tracing::{debug, error, info, instrument};
use crate::config::BlagConfig;
use crate::error::PostError;
use crate::post::Filter;
use crate::systemtime_as_secs::as_secs;
@ -63,9 +65,8 @@ impl BlagMetadata {
}
}
pub struct Blag {
root: Arc<Path>,
blag_bin: Arc<Path>,
pub struct Blag<A> {
config: A,
cache: Option<Arc<CacheGuard>>,
_fastblag: bool,
}
@ -75,11 +76,15 @@ enum RenderResult {
Raw(Vec<u8>, Arc<str>),
}
impl Blag {
pub fn new(root: Arc<Path>, blag_bin: Arc<Path>, cache: Option<Arc<CacheGuard>>) -> Blag {
impl<A> Blag<A>
where
A: Access<BlagConfig>,
A: Sync,
A::Guard: Send,
{
pub fn new(config: A, cache: Option<Arc<CacheGuard>>) -> Self {
Self {
root,
blag_bin,
config,
cache,
_fastblag: false,
}
@ -92,10 +97,11 @@ impl Blag {
query_json: String,
) -> Result<RenderResult, PostError> {
let start = Instant::now();
let bin = self.config.load().bin.clone();
debug!(%name, "rendering");
let mut cmd = tokio::process::Command::new(&*self.blag_bin)
let mut cmd = tokio::process::Command::new(&*bin)
.arg(path.as_ref())
.env("BLAG_QUERY", query_json)
.stdout(Stdio::piped())
@ -103,7 +109,7 @@ impl Blag {
.stdin(Stdio::null())
.spawn()
.map_err(|err| {
error!("failed to spawn {:?}: {err}", self.blag_bin);
error!("failed to spawn {bin:?}: {err}");
err
})?;
@ -145,18 +151,37 @@ impl Blag {
dont_cache,
))
}
/// Maps a post name to its on-disk script filename by appending `.sh`.
fn as_raw(name: &str) -> String {
    [name, ".sh"].concat()
}
/// Returns true if `name` refers to the raw shell source (has a `.sh` suffix).
fn is_raw(name: &str) -> bool {
    name.ends_with(".sh")
}
}
#[async_trait]
impl PostManager for Blag {
impl<A> PostManager for Blag<A>
where
A: Access<BlagConfig>,
A: Sync,
A::Guard: Send,
{
async fn get_all_posts(
&self,
filters: &[Filter<'_>],
query: &IndexMap<String, Value>,
) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> {
let root = &self.config.load().root;
let mut set = FuturesUnordered::new();
let mut posts = Vec::new();
let mut files = tokio::fs::read_dir(&self.root).await?;
let mut files = tokio::fs::read_dir(&root).await?;
loop {
let entry = match files.next_entry().await {
@ -178,7 +203,7 @@ impl PostManager for Blag {
}
};
if self.is_raw(&name) {
if Self::is_raw(&name) {
name.truncate(name.len() - 3);
let name = name.into();
set.push(self.get_post(Arc::clone(&name), query).map(|v| (name, v)));
@ -188,18 +213,19 @@ impl PostManager for Blag {
while let Some((name, result)) = set.next().await {
let post = match result {
Ok(v) => match v {
ReturnedPost::Rendered { meta, body, perf } => (meta, body, perf),
ReturnedPost::Raw { .. } => unreachable!(),
},
Ok(v) => v,
Err(err) => {
error!("error while rendering blagpost {name:?}: {err}");
continue;
}
};
if post.0.apply_filters(filters) {
posts.push(post);
if let ReturnedPost::Rendered {
meta, body, perf, ..
} = post
&& meta.apply_filters(filters)
{
posts.push((meta, body, perf));
}
}
@ -215,29 +241,34 @@ impl PostManager for Blag {
query: &IndexMap<String, Value>,
) -> Result<ReturnedPost, PostError> {
let start = Instant::now();
let mut path = self.root.join(&*name);
let BlagConfig {
ref root,
ref raw_access,
..
} = &*self.config.load();
if self.is_raw(&name) {
if Self::is_raw(&name) {
let mut buffer = Vec::new();
let mut file =
OpenOptions::new()
.read(true)
.open(&path)
.await
.map_err(|err| match err.kind() {
std::io::ErrorKind::NotFound => PostError::NotFound(name),
_ => PostError::IoError(err),
})?;
let mut file = OpenOptions::new()
.read(true)
.open(root.join(&*name))
.await
.map_err(|err| match err.kind() {
std::io::ErrorKind::NotFound => PostError::NotFound(name),
_ => PostError::IoError(err),
})?;
file.read_to_end(&mut buffer).await?;
return Ok(ReturnedPost::Raw {
buffer,
content_type: HeaderValue::from_static("text/x-shellscript"),
});
} else {
path.add_extension("sh");
}
let raw_name = Self::as_raw(&name);
let path = root.join(&raw_name);
let raw_name = raw_access.then_some(raw_name);
let stat = tokio::fs::metadata(&path)
.await
.map_err(|err| match err.kind() {
@ -264,6 +295,7 @@ impl PostManager for Blag {
meta,
body,
perf: RenderStats::Cached(start.elapsed()),
raw_name,
}
} else {
let (meta, content, (parsed, rendered), dont_cache) =
@ -295,6 +327,7 @@ impl PostManager for Blag {
parsed,
rendered,
},
raw_name,
}
};
@ -307,15 +340,13 @@ impl PostManager for Blag {
async fn cleanup(&self) {
if let Some(cache) = &self.cache {
let root = &self.config.load().root;
cache
.cleanup(|key, value| {
let mtime = std::fs::metadata(
self.root
.join(self.as_raw(&key.name).unwrap_or_else(|| unreachable!())),
)
.ok()
.and_then(|metadata| metadata.modified().ok())
.map(as_secs);
let mtime = std::fs::metadata(root.join(Self::as_raw(&key.name)))
.ok()
.and_then(|metadata| metadata.modified().ok())
.map(as_secs);
match mtime {
Some(mtime) => mtime <= value.mtime,
@ -325,16 +356,4 @@ impl PostManager for Blag {
.await
}
}
fn is_raw(&self, name: &str) -> bool {
name.ends_with(".sh")
}
fn as_raw(&self, name: &str) -> Option<String> {
let mut buf = String::with_capacity(name.len() + 3);
buf += name;
buf += ".sh";
Some(buf)
}
}

View file

@ -7,6 +7,7 @@ use std::time::SystemTime;
use crate::config::CacheConfig;
use crate::post::PostMetadata;
use arc_swap::access::DynAccess;
use color_eyre::eyre::{self, Context};
use scc::HashMap;
use serde::{Deserialize, Serialize};
@ -201,13 +202,15 @@ impl Cache {
}
}
pub type ConfigAccess = Box<dyn DynAccess<CacheConfig> + Send + Sync>;
pub struct CacheGuard {
inner: Cache,
config: CacheConfig,
config: ConfigAccess,
}
impl CacheGuard {
pub fn new(cache: Cache, config: CacheConfig) -> Self {
pub fn new(cache: Cache, config: ConfigAccess) -> Self {
Self {
inner: cache,
config,
@ -215,13 +218,14 @@ impl CacheGuard {
}
fn try_drop(&mut self) -> Result<(), eyre::Report> {
let config = self.config.load();
// write cache to file
let path = &self.config.file;
let path = &*config.file;
let serialized = bitcode::serialize(&self.inner).context("failed to serialize cache")?;
let mut cache_file = std::fs::File::create(path)
.with_context(|| format!("failed to open cache at {}", path.display()))?;
let compression_level = self.config.compression_level;
if self.config.compress {
let compression_level = config.compression_level;
if config.compress {
std::io::Write::write_all(
&mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
.auto_finish(),
@ -231,7 +235,7 @@ impl CacheGuard {
cache_file.write_all(&serialized)
}
.context("failed to write cache to file")?;
info!("wrote cache to {}", path.display());
info!("wrote cache to {path:?}");
Ok(())
}
}

View file

@ -7,6 +7,7 @@ use std::time::Duration;
use std::time::Instant;
use std::time::SystemTime;
use arc_swap::access::Access;
use axum::async_trait;
use axum::http::HeaderValue;
use chrono::{DateTime, Utc};
@ -20,7 +21,7 @@ use tokio::fs;
use tokio::io::AsyncReadExt;
use tracing::{info, instrument, warn};
use crate::config::Config;
use crate::config::MarkdownConfig;
use crate::markdown_render::{build_syntect, render};
use crate::systemtime_as_secs::as_secs;
@ -66,23 +67,25 @@ impl FrontMatter {
}
}
/// Post manager for the `markdown` engine.
/// NOTE(review): this span is a diff rendering — both the old (non-generic)
/// and new (generic over a config accessor `A`) struct headers and `config`
/// fields are shown below.
pub struct MarkdownPosts {
pub struct MarkdownPosts<A> {
    // Optional shared handle to the persistent render cache.
    cache: Option<Arc<CacheGuard>>,
    config: Arc<Config>,
    // Hot-reloadable accessor yielding the current markdown engine config.
    config: A,
    // Hash of the render options used to invalidate cache entries when
    // rendering settings change.
    render_hash: u64,
    // Syntax-highlighting adapter built from the render config at startup.
    syntect: SyntectAdapter,
}
impl MarkdownPosts {
pub async fn new(
config: Arc<Config>,
cache: Option<Arc<CacheGuard>>,
) -> eyre::Result<MarkdownPosts> {
let syntect =
build_syntect(&config.render).context("failed to create syntax highlighting engine")?;
impl<A> MarkdownPosts<A>
where
A: Access<MarkdownConfig>,
A: Sync,
A::Guard: Send,
{
pub async fn new(config: A, cache: Option<Arc<CacheGuard>>) -> eyre::Result<Self> {
let syntect = build_syntect(&config.load().render)
.context("failed to create syntax highlighting engine")?;
let mut hasher = DefaultHasher::new();
config.render.hash(&mut hasher);
config.load().render.hash(&mut hasher);
let render_hash = hasher.finish();
Ok(Self {
@ -118,7 +121,7 @@ impl MarkdownPosts {
let parsing = parsing_start.elapsed();
let before_render = Instant::now();
let post = render(body, &self.config.render, Some(&self.syntect)).into();
let post = render(body, &self.config.load().render, Some(&self.syntect)).into();
let rendering = before_render.elapsed();
if let Some(cache) = &self.cache {
@ -135,10 +138,27 @@ impl MarkdownPosts {
Ok((metadata, post, (parsing, rendering)))
}
/// A post name addresses the raw markdown source when it ends in `.md`.
fn is_raw(name: &str) -> bool {
    name.strip_suffix(".md").is_some()
}
/// Map a post name to the markdown file name it is rendered from.
/// Always succeeds for this engine: every post is backed by a `.md` file.
fn as_raw(name: &str) -> Option<String> {
    Some(format!("{name}.md"))
}
}
#[async_trait]
impl PostManager for MarkdownPosts {
impl<A> PostManager for MarkdownPosts<A>
where
A: Access<MarkdownConfig>,
A: Sync,
A::Guard: Send,
{
async fn get_all_posts(
&self,
filters: &[Filter<'_>],
@ -146,7 +166,7 @@ impl PostManager for MarkdownPosts {
) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> {
let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
let mut read_dir = fs::read_dir(&self.config.load().root).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
@ -161,7 +181,9 @@ impl PostManager for MarkdownPosts {
.into();
let post = self.get_post(Arc::clone(&name), query).await?;
if let ReturnedPost::Rendered { meta, body, perf } = post
if let ReturnedPost::Rendered {
meta, body, perf, ..
} = post
&& meta.apply_filters(filters)
{
posts.push((meta, body, perf));
@ -179,7 +201,7 @@ impl PostManager for MarkdownPosts {
) -> Result<Vec<PostMetadata>, PostError> {
let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
let mut read_dir = fs::read_dir(&self.config.load().root).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
@ -225,8 +247,9 @@ impl PostManager for MarkdownPosts {
name: Arc<str>,
_query: &IndexMap<String, Value>,
) -> Result<ReturnedPost, PostError> {
let post = if self.config.markdown_access && self.is_raw(&name) {
let path = self.config.dirs.posts.join(&*name);
let config = self.config.load();
let post = if config.raw_access && Self::is_raw(&name) {
let path = config.root.join(&*name);
let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(value) => value,
@ -248,11 +271,8 @@ impl PostManager for MarkdownPosts {
}
} else {
let start = Instant::now();
let path = self
.config
.dirs
.posts
.join(self.as_raw(&name).unwrap_or_else(|| unreachable!()));
let raw_name = Self::as_raw(&name).unwrap_or_else(|| unreachable!());
let path = config.root.join(&raw_name);
let stat = match tokio::fs::metadata(&path).await {
Ok(value) => value,
@ -265,26 +285,29 @@ impl PostManager for MarkdownPosts {
};
let mtime = as_secs(stat.modified()?);
if let Some(cache) = &self.cache
let (meta, body, perf) = if let Some(cache) = &self.cache
&& let Some(CacheValue { meta, body, .. }) =
cache.lookup(name.clone(), mtime, self.render_hash).await
{
ReturnedPost::Rendered {
meta,
body,
perf: RenderStats::Cached(start.elapsed()),
}
(meta, body, RenderStats::Cached(start.elapsed()))
} else {
let (meta, body, stats) = self.parse_and_render(name, path).await?;
ReturnedPost::Rendered {
(
meta,
body,
perf: RenderStats::Rendered {
RenderStats::Rendered {
total: start.elapsed(),
parsed: stats.0,
rendered: stats.1,
},
}
)
};
ReturnedPost::Rendered {
meta,
body,
perf,
raw_name: config.raw_access.then_some(raw_name),
}
};
@ -306,9 +329,9 @@ impl PostManager for MarkdownPosts {
let mtime = std::fs::metadata(
self.config
.dirs
.posts
.join(self.as_raw(name).unwrap_or_else(|| unreachable!())),
.load()
.root
.join(Self::as_raw(name).unwrap_or_else(|| unreachable!())),
)
.ok()
.and_then(|metadata| metadata.modified().ok())
@ -322,16 +345,4 @@ impl PostManager for MarkdownPosts {
.await
}
}
/// A post name addresses the raw markdown source when it carries `.md`.
fn is_raw(&self, name: &str) -> bool {
    name.strip_suffix(".md").is_some()
}
/// Map a post name to the on-disk markdown file name it is served from.
/// Always succeeds for this engine: every post is backed by a `.md` file.
fn as_raw(&self, name: &str) -> Option<String> {
    Some(format!("{name}.md"))
}
}

View file

@ -55,6 +55,7 @@ pub enum ReturnedPost {
meta: PostMetadata,
body: Arc<str>,
perf: RenderStats,
raw_name: Option<String>,
},
Raw {
buffer: Vec<u8>,
@ -149,14 +150,4 @@ pub trait PostManager {
) -> Result<ReturnedPost, PostError>;
async fn cleanup(&self) {}
/// Whether `name` refers to the raw (unrendered) source of a post.
/// Default implementation: engines expose no raw sources; engines that do
/// (markdown, blag) override this with a suffix check.
#[allow(unused)]
fn is_raw(&self, name: &str) -> bool {
    false
}
/// Map a post name to the name of its raw source, if the engine has one.
/// Default implementation: no raw counterpart exists, so return `None`.
#[allow(unused)]
fn as_raw(&self, name: &str) -> Option<String> {
    None
}
}

View file

@ -8,7 +8,7 @@ use notify_debouncer_full::{new_debouncer, DebouncedEvent};
use tokio::select;
use tokio::sync::RwLock;
use tokio_util::sync::CancellationToken;
use tracing::{debug, debug_span, error, info, trace};
use tracing::{debug, debug_span, error, info, instrument, trace};
use crate::templates::*;
@ -71,11 +71,12 @@ async fn process_event(
Ok(())
}
#[instrument(skip_all)]
pub async fn watch_templates<'a>(
path: impl AsRef<Path>,
watcher_token: CancellationToken,
reg: Arc<RwLock<Handlebars<'a>>>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
) -> Result<(), color_eyre::eyre::Report> {
let path = path.as_ref();
let (tx, mut rx) = tokio::sync::mpsc::channel(1);

View file

@ -144,14 +144,14 @@ body > main > h1:first-child {
}
@media (min-width: 956px) {
:root {
:root:has(body.cool) {
--target-ratio: 0.7; /* 669px - 1344x */
--width: min(100% * var(--target-ratio), 1920px * var(--target-ratio));
--padding: 4em;
--padded-width: calc(var(--width) - var(--padding) * 2);
}
body {
body.cool {
padding: 4em 0;
min-height: calc(100vh - 8em);
@ -165,13 +165,13 @@ body > main > h1:first-child {
);
}
body > * {
body.cool > * {
margin: auto;
padding: var(--padding);
width: var(--padded-width);
}
body > footer {
body.cool > footer {
padding: initial;
width: var(--width);
}