implement config hot reloading

slonkazoid 2025-01-29 17:25:12 +03:00
parent 5c8aea9277
commit fade130b8d
Signed by: slonk
SSH key fingerprint: SHA256:tbZfJX4IOvZ0LGWOWu5Ijo8jfMPi78TU7x1VoEeCIjM
13 changed files with 470 additions and 287 deletions
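
The hot-reload mechanism is built on the newly added `arc-swap` dependency: the parsed `Config` lives behind an `ArcSwap`, request handlers take cheap snapshots of it, and the file watcher publishes a freshly parsed config by swapping in a new `Arc`. A minimal sketch of that pattern, using a stand-in `Config` struct rather than the crate's real one:

```rust
use std::sync::Arc;

use arc_swap::ArcSwap;

// Stand-in for the crate's real Config; only here to keep the sketch self-contained.
#[derive(Debug, Default)]
struct Config {
    title: String,
}

fn main() {
    // Shared handle: readers and the reload task both hold clones of this Arc.
    let swapper = Arc::new(ArcSwap::from_pointee(Config::default()));

    // A reader takes a snapshot; it stays valid even if a reload happens
    // while the snapshot is still in use.
    let snapshot = swapper.load();
    println!("before reload: {:?}", snapshot.title);

    // The reload path parses a new Config and publishes it atomically.
    swapper.store(Arc::new(Config {
        title: "reloaded".into(),
    }));
    println!("after reload:  {:?}", swapper.load().title);
}
```

Readers never hold a lock, so a reload can land at any time without blocking in-flight requests.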


@@ -3,17 +3,36 @@
 the configuration format, with defaults, is documented below:
 ```toml
-title = "bingus-blog" # title of the blog
-# description of the blog
-description = "blazingly fast blog software written in rust memory safe"
-raw_access = true # allow users to see the raw source of a post
-js_enable = true # enable javascript (required for sorting and dates)
-engine = "markdown" # choose which post engine to use
+[engine]
+mode = "markdown" # choose which post engine to use
 # options: "markdown", "blag"
 # absolutely do not use "blag" unless you know exactly
 # what you are getting yourself into.
+[engine.markdown] # options for the `markdown` engine
+root = "posts" # where posts are served from
+raw_access = true # allow visitors to see the raw markdown of a post
+[engine.markdown.render]
+escape = false # escape HTML in the markdown source instead of
+# clobbering it (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.escape)
+unsafe = false # allow HTML and dangerous links (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.unsafe_)
+[engine.markdown.render.syntect]
+load_defaults = false # include default syntect themes
+themes_dir = "themes" # directory to include themes from
+theme = "Catppuccin Mocha" # theme file name (without `.tmTheme`)
+[engine.blag]
+root = "posts" # where posts are served from
+bin = "blag" # path to the `blag` binary
+raw_access = true # allow visitors to see the raw bash of a post
 [style]
+title = "bingus-blog" # title of the blog
+# description of the blog
+description = "blazingly fast blog software written in rust memory safe"
+js_enable = true # enable javascript (required for sorting and dates)
 date_format = "RFC3339" # format string used to format dates in the backend
 # it's highly recommended to leave this as default,
 # so the date can be formatted by the browser.
@@ -31,7 +50,6 @@ enable = false # serve an rss feed under /feed.xml
 link = "https://..." # public url of the blog, required if rss is enabled
 [dirs]
-posts = "posts" # where posts are stored
 media = "media" # directory served under /media/
 custom_templates = "templates" # custom templates dir
 custom_static = "static" # custom static dir
@@ -54,19 +72,6 @@ persistence = true # save the cache on shutdown and load on startup
 file = "cache" # file to save the cache to
 compress = true # compress the cache file
 compression_level = 3 # zstd compression level, 3 is recommended
-[render]
-escape = false # escape HTML in the markdown source instead of
-# clobbering it (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.escape)
-unsafe = false # allow HTML and dangerous links (https://docs.rs/comrak/latest/comrak/struct.RenderOptions.html#structfield.unsafe_)
-[render.syntect]
-load_defaults = false # include default syntect themes
-themes_dir = "themes" # directory to include themes from
-theme = "Catppuccin Mocha" # theme file name (without `.tmTheme`)
-[blag]
-bin = "blag" # path to blag binary
 ```
 configuration is done in [TOML](https://toml.io/)
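
For illustration, this is roughly how the new nested `[engine]` layout maps onto `serde` + `toml` deserialization; the struct names echo the config types introduced in this commit, but the fields shown here are trimmed down and purely illustrative:

```rust
use std::path::PathBuf;

use serde::Deserialize;

#[derive(Deserialize, Debug, Default)]
#[serde(default)]
struct MarkdownConfig {
    root: PathBuf,
    raw_access: bool,
}

#[derive(Deserialize, Debug, Default)]
#[serde(default, rename_all = "lowercase")]
struct Engine {
    mode: String,
    markdown: MarkdownConfig,
}

#[derive(Deserialize, Debug, Default)]
#[serde(default)]
struct Config {
    engine: Engine,
}

fn main() -> Result<(), toml::de::Error> {
    // Everything is optional thanks to #[serde(default)], matching the
    // "configuration format, with defaults" documented above.
    let cfg: Config = toml::from_str(
        r#"
        [engine]
        mode = "markdown"

        [engine.markdown]
        root = "posts"
        raw_access = true
        "#,
    )?;
    println!("{cfg:#?}");
    Ok(())
}
```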

Cargo.lock generated

@@ -38,6 +38,15 @@ version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
 
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "askama"
 version = "0.12.1"
@@ -266,6 +275,7 @@ dependencies = [
 name = "bingus-blog"
 version = "0.1.0"
 dependencies = [
+ "arc-swap",
  "askama",
  "askama_axum",
  "axum",
@@ -274,6 +284,7 @@ dependencies = [
  "color-eyre",
  "comrak",
  "console-subscriber",
+ "const-str",
  "fronma",
  "futures",
  "handlebars",
@@ -515,6 +526,12 @@ dependencies = [
  "tracing-subscriber",
 ]
 
+[[package]]
+name = "const-str"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3618cccc083bb987a415d85c02ca6c9994ea5b44731ec28b9ecf09658655fba9"
+
 [[package]]
 name = "cpufeatures"
 version = "0.2.16"


@@ -10,11 +10,12 @@ default = []
 tokio-console = ["dep:console-subscriber"]
 
 [profile.release]
-lto = "fat"
+lto = "thin"
 opt-level = 3
 strip = true
 
 [dependencies]
+arc-swap = { version = "1.7.1", features = ["serde"] }
 askama = { version = "0.12.1", features = [
     "with-axum",
 ], default-features = false }
@@ -37,6 +38,7 @@ comrak = { version = "0.32.0", features = [
     "syntect",
 ], default-features = false }
 console-subscriber = { version = "0.4.1", optional = true }
+const-str = "0.5.7"
 fronma = "0.2.0"
 futures = "0.3.31"
 handlebars = "6.0.0"
@@ -57,6 +59,7 @@ tokio = { version = "1.37.0", features = [
     "rt-multi-thread",
     "signal",
     "process",
+    "sync",
 ] }
 tokio-util = { version = "0.7.10", default-features = false }
 toml = "0.8.12"


@ -1,6 +1,7 @@
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use arc_swap::access::DynAccess;
use axum::extract::{Path, Query, State}; use axum::extract::{Path, Query, State};
use axum::http::header::CONTENT_TYPE; use axum::http::header::CONTENT_TYPE;
use axum::http::Request; use axum::http::Request;
@ -19,7 +20,7 @@ use tower_http::services::ServeDir;
use tower_http::trace::TraceLayer; use tower_http::trace::TraceLayer;
use tracing::{info, info_span, Span}; use tracing::{info, info_span, Span};
use crate::config::{Config, StyleConfig}; use crate::config::{DirsConfig, RssConfig, StyleConfig};
use crate::error::{AppError, AppResult}; use crate::error::{AppError, AppResult};
use crate::post::{Filter, PostManager, PostMetadata, RenderStats, ReturnedPost}; use crate::post::{Filter, PostManager, PostMetadata, RenderStats, ReturnedPost};
use crate::serve_dir_included::handle; use crate::serve_dir_included::handle;
@ -42,7 +43,8 @@ const BINGUS_INFO: BingusInfo = BingusInfo {
#[derive(Clone)] #[derive(Clone)]
#[non_exhaustive] #[non_exhaustive]
pub struct AppState { pub struct AppState {
pub config: Arc<Config>, pub rss: Arc<dyn DynAccess<RssConfig> + Send + Sync>,
pub style: Arc<dyn DynAccess<StyleConfig> + Send + Sync>,
pub posts: Arc<dyn PostManager + Send + Sync>, pub posts: Arc<dyn PostManager + Send + Sync>,
pub templates: Arc<RwLock<Handlebars<'static>>>, pub templates: Arc<RwLock<Handlebars<'static>>>,
} }
@ -50,8 +52,6 @@ pub struct AppState {
#[derive(Serialize)] #[derive(Serialize)]
struct IndexTemplate<'a> { struct IndexTemplate<'a> {
bingus_info: &'a BingusInfo, bingus_info: &'a BingusInfo,
title: &'a str,
description: &'a str,
posts: Vec<PostMetadata>, posts: Vec<PostMetadata>,
rss: bool, rss: bool,
js: bool, js: bool,
@ -64,8 +64,8 @@ struct IndexTemplate<'a> {
struct PostTemplate<'a> { struct PostTemplate<'a> {
bingus_info: &'a BingusInfo, bingus_info: &'a BingusInfo,
meta: &'a PostMetadata, meta: &'a PostMetadata,
rendered: Arc<str>, body: Arc<str>,
rendered_in: RenderStats, perf: RenderStats,
js: bool, js: bool,
color: Option<&'a str>, color: Option<&'a str>,
joined_tags: String, joined_tags: String,
@ -116,7 +116,8 @@ fn join_tags_for_meta(tags: &IndexMap<Arc<str>, u64>, delim: &str) -> String {
async fn index( async fn index(
State(AppState { State(AppState {
config, rss,
style,
posts, posts,
templates: reg, templates: reg,
.. ..
@ -135,21 +136,21 @@ async fn index(
let joined_tags = join_tags_for_meta(&tags, ", "); let joined_tags = join_tags_for_meta(&tags, ", ");
let reg = reg.read().await; let reg = reg.read().await;
let style = style.load();
let rendered = reg.render( let rendered = reg.render(
"index", "index",
&IndexTemplate { &IndexTemplate {
title: &config.title,
description: &config.description,
bingus_info: &BINGUS_INFO, bingus_info: &BINGUS_INFO,
posts, posts,
rss: config.rss.enable, rss: rss.load().enable,
js: config.js_enable, js: style.js_enable,
tags, tags,
joined_tags, joined_tags,
style: &config.style, style: &style,
}, },
); );
drop(reg); drop((style, reg));
Ok(Html(rendered?)) Ok(Html(rendered?))
} }
@ -169,10 +170,12 @@ async fn all_posts(
} }
async fn rss( async fn rss(
State(AppState { config, posts, .. }): State<AppState>, State(AppState {
rss, style, posts, ..
}): State<AppState>,
Query(query): Query<QueryParams>, Query(query): Query<QueryParams>,
) -> AppResult<Response> { ) -> AppResult<Response> {
if !config.rss.enable { if !rss.load().enable {
return Err(AppError::RssDisabled); return Err(AppError::RssDisabled);
} }
@ -187,11 +190,13 @@ async fn rss(
) )
.await?; .await?;
let rss = rss.load();
let style = style.load();
let mut channel = ChannelBuilder::default(); let mut channel = ChannelBuilder::default();
channel channel
.title(&config.title) .title(&*style.title)
.link(config.rss.link.to_string()) .link(rss.link.to_string())
.description(&config.description); .description(&*style.description);
//TODO: .language() //TODO: .language()
for (metadata, content, _) in posts { for (metadata, content, _) in posts {
@ -213,15 +218,14 @@ async fn rss(
.pub_date(metadata.written_at.map(|date| date.to_rfc2822())) .pub_date(metadata.written_at.map(|date| date.to_rfc2822()))
.content(content.to_string()) .content(content.to_string())
.link( .link(
config rss.link
.rss
.link
.join(&format!("/posts/{}", metadata.name))? .join(&format!("/posts/{}", metadata.name))?
.to_string(), .to_string(),
) )
.build(), .build(),
); );
} }
drop((style, rss));
let body = channel.build().to_string(); let body = channel.build().to_string();
drop(channel); drop(channel);
@ -231,7 +235,7 @@ async fn rss(
async fn post( async fn post(
State(AppState { State(AppState {
config, style,
posts, posts,
templates: reg, templates: reg,
.. ..
@ -242,33 +246,30 @@ async fn post(
match posts.get_post(name.clone(), &query.other).await? { match posts.get_post(name.clone(), &query.other).await? {
ReturnedPost::Rendered { ReturnedPost::Rendered {
ref meta, ref meta,
body: rendered, body,
perf: rendered_in, perf,
raw_name,
} => { } => {
let joined_tags = meta.tags.join(", "); let joined_tags = meta.tags.join(", ");
let reg = reg.read().await; let reg = reg.read().await;
let style = style.load();
let rendered = reg.render( let rendered = reg.render(
"post", "post",
&PostTemplate { &PostTemplate {
bingus_info: &BINGUS_INFO, bingus_info: &BINGUS_INFO,
meta, meta,
rendered, body,
rendered_in, perf,
js: config.js_enable, js: style.js_enable,
color: meta color: meta.color.as_deref().or(style.default_color.as_deref()),
.color
.as_deref()
.or(config.style.default_color.as_deref()),
joined_tags, joined_tags,
style: &config.style, style: &style,
raw_name: config raw_name,
.markdown_access
.then(|| posts.as_raw(&meta.name))
.unwrap_or(None),
}, },
); );
drop(reg); drop((style, reg));
Ok(Html(rendered?).into_response()) Ok(Html(rendered?).into_response())
} }
ReturnedPost::Raw { ReturnedPost::Raw {
@ -278,7 +279,7 @@ async fn post(
} }
} }
pub fn new(config: &Config) -> Router<AppState> { pub fn new(dirs: &DirsConfig) -> Router<AppState> {
Router::new() Router::new()
.route("/", get(index)) .route("/", get(index))
.route( .route(
@ -292,11 +293,11 @@ pub fn new(config: &Config) -> Router<AppState> {
.route("/feed.xml", get(rss)) .route("/feed.xml", get(rss))
.nest_service( .nest_service(
"/static", "/static",
ServeDir::new(&config.dirs.custom_static) ServeDir::new(&dirs.static_)
.precompressed_gzip() .precompressed_gzip()
.fallback(service_fn(|req| handle(req, &STATIC))), .fallback(service_fn(|req| handle(req, &STATIC))),
) )
.nest_service("/media", ServeDir::new(&config.dirs.media)) .nest_service("/media", ServeDir::new(&dirs.media))
.layer( .layer(
TraceLayer::new_for_http() TraceLayer::new_for_http()
.make_span_with(|request: &Request<_>| { .make_span_with(|request: &Request<_>| {
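
The `AppState` change above stops carrying a whole `Arc<Config>` and instead holds type-erased accessors (`Arc<dyn DynAccess<...>>`) for just the sections each handler needs, so handlers always see the latest reload. A sketch of that projection using `arc_swap::access::Map`, with trimmed-down stand-in structs:

```rust
use std::sync::Arc;

use arc_swap::access::{DynAccess, Map};
use arc_swap::ArcSwap;

#[derive(Debug, Default)]
struct StyleConfig {
    js_enable: bool,
}

#[derive(Debug, Default)]
struct Config {
    style: StyleConfig,
}

fn main() {
    let swapper = Arc::new(ArcSwap::from_pointee(Config::default()));

    // Project the shared Config down to its [style] section and erase the
    // concrete access type, so handlers depend only on "something that
    // yields a StyleConfig".
    let style: Arc<dyn DynAccess<StyleConfig> + Send + Sync> =
        Arc::new(Map::new(swapper.clone(), |c: &Config| &c.style));

    // Inside a handler: load the *current* style for this request.
    println!("js_enable = {}", style.load().js_enable);

    // A reload elsewhere is picked up by the next load().
    swapper.store(Arc::new(Config {
        style: StyleConfig { js_enable: true },
    }));
    println!("js_enable = {}", style.load().js_enable);
}
```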


@ -1,34 +1,34 @@
use std::borrow::Cow;
use std::env; use std::env;
use std::net::{IpAddr, Ipv6Addr}; use std::net::{IpAddr, Ipv6Addr};
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::path::PathBuf; use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
use arc_swap::ArcSwap;
use color_eyre::eyre::{self, bail, Context}; use color_eyre::eyre::{self, bail, Context};
use const_str::{concat, convert_ascii_case};
use notify_debouncer_full::notify::RecursiveMode;
use notify_debouncer_full::{new_debouncer, DebouncedEvent};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing::{error, info, instrument}; use tokio::select;
use tokio_util::sync::CancellationToken;
use tracing::{error, info, instrument, trace};
use url::Url; use url::Url;
use crate::de::*; use crate::de::*;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash)]
#[serde(default)] #[serde(default)]
pub struct SyntectConfig { pub struct SyntectConfig {
pub load_defaults: bool, pub load_defaults: bool,
pub themes_dir: Option<PathBuf>, pub themes_dir: Option<Box<Path>>,
pub theme: Option<String>, pub theme: Option<Box<str>>,
} }
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, Default)] #[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct RenderConfig {
pub syntect: SyntectConfig,
pub escape: bool,
#[serde(rename = "unsafe")]
pub unsafe_: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)] #[serde(default)]
pub struct CacheConfig { pub struct CacheConfig {
pub enable: bool, pub enable: bool,
@ -38,43 +38,43 @@ pub struct CacheConfig {
#[serde(deserialize_with = "check_millis")] #[serde(deserialize_with = "check_millis")]
pub cleanup_interval: Option<NonZeroU64>, pub cleanup_interval: Option<NonZeroU64>,
pub persistence: bool, pub persistence: bool,
pub file: PathBuf, pub file: Box<Path>,
pub compress: bool, pub compress: bool,
#[serde(deserialize_with = "check_zstd_level_bounds")] #[serde(deserialize_with = "check_zstd_level_bounds")]
pub compression_level: i32, pub compression_level: i32,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug)]
#[serde(default)] #[serde(default)]
pub struct HttpConfig { pub struct HttpConfig {
pub host: IpAddr, pub host: IpAddr,
pub port: u16, pub port: u16,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug)]
#[serde(default)] #[serde(default)]
pub struct DirsConfig { pub struct DirsConfig {
pub posts: PathBuf, pub media: Box<Path>,
pub media: PathBuf, #[serde(rename = "static")]
pub custom_static: PathBuf, pub static_: Box<Path>,
pub custom_templates: PathBuf, pub templates: Box<Path>,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug)]
pub struct RssConfig { pub struct RssConfig {
pub enable: bool, pub enable: bool,
pub link: Url, pub link: Url,
} }
#[derive(Serialize, Deserialize, Debug, Clone, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
pub enum DateFormat { pub enum DateFormat {
#[default] #[default]
RFC3339, RFC3339,
#[serde(untagged)] #[serde(untagged)]
Strftime(String), Strftime(Box<str>),
} }
#[derive(Serialize, Deserialize, Debug, Clone, Default, Copy, PartialEq, Eq)] #[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[repr(u8)] #[repr(u8)]
pub enum Sort { pub enum Sort {
@ -83,61 +83,104 @@ pub enum Sort {
Name, Name,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug)]
#[serde(default)] #[serde(default)]
#[derive(Default)]
pub struct StyleConfig { pub struct StyleConfig {
pub title: Box<str>,
pub description: Box<str>,
pub js_enable: bool,
pub display_dates: DisplayDates, pub display_dates: DisplayDates,
pub date_format: DateFormat, pub date_format: DateFormat,
pub default_sort: Sort, pub default_sort: Sort,
pub default_color: Option<String>, pub default_color: Option<Box<str>>,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] impl Default for StyleConfig {
fn default() -> Self {
Self {
title: "bingus-blog".into(),
description: "blazingly fast markdown blog software written in rust memory safe".into(),
js_enable: true,
display_dates: Default::default(),
date_format: Default::default(),
default_sort: Default::default(),
default_color: Default::default(),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
#[serde(default)] #[serde(default)]
pub struct DisplayDates { pub struct DisplayDates {
pub creation: bool, pub creation: bool,
pub modification: bool, pub modification: bool,
} }
#[derive(Serialize, Deserialize, Default, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Default)]
#[serde(default)]
pub struct MarkdownRenderConfig {
pub syntect: SyntectConfig,
pub escape: bool,
#[serde(rename = "unsafe")]
pub unsafe_: bool,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MarkdownConfig {
pub root: Box<Path>,
pub render: MarkdownRenderConfig,
pub raw_access: bool,
}
impl Default for MarkdownConfig {
fn default() -> Self {
Self {
root: PathBuf::from("posts").into(),
render: Default::default(),
raw_access: true,
}
}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(default)]
pub struct BlagConfig {
pub root: Box<Path>,
pub bin: Box<Path>,
pub raw_access: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum Engine { pub enum EngineMode {
#[default] #[default]
Markdown, Markdown,
Blag, Blag,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default)] #[serde(default, rename_all = "lowercase")]
pub struct BlagConfig { pub struct Engine {
pub bin: PathBuf, pub mode: EngineMode,
pub markdown: MarkdownConfig,
pub blag: BlagConfig,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug)]
#[serde(default)] #[serde(default)]
pub struct Config { pub struct Config {
pub title: String,
pub description: String,
pub markdown_access: bool,
pub js_enable: bool,
pub engine: Engine, pub engine: Engine,
pub style: StyleConfig, pub style: StyleConfig,
pub rss: RssConfig, pub rss: RssConfig,
#[serde(rename = "custom")]
pub dirs: DirsConfig, pub dirs: DirsConfig,
pub http: HttpConfig, pub http: HttpConfig,
pub render: RenderConfig,
pub cache: CacheConfig, pub cache: CacheConfig,
pub blag: BlagConfig,
} }
impl Default for Config { impl Default for Config {
fn default() -> Self { fn default() -> Self {
Self { Self {
title: "bingus-blog".into(),
description: "blazingly fast markdown blog software written in rust memory safe".into(),
markdown_access: true,
js_enable: true,
engine: Default::default(), engine: Default::default(),
style: Default::default(), style: Default::default(),
// i have a love-hate relationship with serde // i have a love-hate relationship with serde
@ -152,9 +195,7 @@ impl Default for Config {
}, },
dirs: Default::default(), dirs: Default::default(),
http: Default::default(), http: Default::default(),
render: Default::default(),
cache: Default::default(), cache: Default::default(),
blag: Default::default(),
} }
} }
} }
@ -171,10 +212,9 @@ impl Default for DisplayDates {
impl Default for DirsConfig { impl Default for DirsConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
posts: "posts".into(), media: PathBuf::from("media").into_boxed_path(),
media: "media".into(), static_: PathBuf::from("static").into_boxed_path(),
custom_static: "static".into(), templates: PathBuf::from("templates").into_boxed_path(),
custom_templates: "templates".into(),
} }
} }
} }
@ -192,7 +232,7 @@ impl Default for SyntectConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
load_defaults: false, load_defaults: false,
themes_dir: Some("themes".into()), themes_dir: Some(PathBuf::from("themes").into_boxed_path()),
theme: Some("Catppuccin Mocha".into()), theme: Some("Catppuccin Mocha".into()),
} }
} }
@ -206,7 +246,7 @@ impl Default for CacheConfig {
cleanup: true, cleanup: true,
cleanup_interval: None, cleanup_interval: None,
persistence: true, persistence: true,
file: "cache".into(), file: PathBuf::from("cache").into(),
compress: true, compress: true,
compression_level: 3, compression_level: 3,
} }
@ -215,22 +255,25 @@ impl Default for CacheConfig {
impl Default for BlagConfig { impl Default for BlagConfig {
fn default() -> Self { fn default() -> Self {
Self { bin: "blag".into() } Self {
root: PathBuf::from("posts").into(),
bin: PathBuf::from("blag").into(),
raw_access: true,
}
} }
} }
#[instrument(name = "config")] fn config_path() -> Cow<'static, str> {
pub async fn load() -> eyre::Result<Config> { env::var(concat!(
let config_file = env::var(format!( convert_ascii_case!(upper_camel, env!("CARGO_BIN_NAME")),
"{}_CONFIG", "_CONFIG"
env!("CARGO_BIN_NAME").to_uppercase().replace('-', "_")
)) ))
.unwrap_or_else(|_| String::from("config.toml")); .map(Into::into)
match tokio::fs::OpenOptions::new() .unwrap_or("config.toml".into())
.read(true) }
.open(&config_file)
.await pub async fn load_from(path: (impl AsRef<Path> + std::fmt::Debug)) -> eyre::Result<Config> {
{ match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(mut file) => { Ok(mut file) => {
let mut buf = String::new(); let mut buf = String::new();
file.read_to_string(&mut buf) file.read_to_string(&mut buf)
@ -246,7 +289,7 @@ pub async fn load() -> eyre::Result<Config> {
.write(true) .write(true)
.create(true) .create(true)
.truncate(true) .truncate(true)
.open(&config_file) .open(&path)
.await .await
{ {
Ok(mut file) => file Ok(mut file) => file
@ -256,18 +299,93 @@ pub async fn load() -> eyre::Result<Config> {
.as_bytes(), .as_bytes(),
) )
.await .await
.unwrap_or_else(|err| error!("couldn't write configuration: {}", err)), .unwrap_or_else(|err| error!("couldn't write configuration: {err}")),
Err(err) => { Err(err) => error!("couldn't open file {path:?} for writing: {err}"),
error!("couldn't open file {:?} for writing: {}", &config_file, err)
}
} }
Ok(config) Ok(config)
} }
_ => bail!("couldn't open config file: {}", err), _ => bail!("couldn't open config file: {err}"),
}, },
} }
} }
#[instrument]
pub async fn load() -> eyre::Result<(Config, Cow<'static, str>)> {
let config_file = config_path();
let config = load_from(&*config_file).await?;
Ok((config, config_file))
}
async fn process_event(
event: DebouncedEvent,
config_file: &Path,
swapper: &ArcSwap<Config>,
) -> eyre::Result<()> {
if !event.kind.is_modify() && !event.kind.is_create()
|| !event.paths.iter().any(|p| p == config_file)
{
trace!("not interested: {event:?}");
return Ok(());
}
let config = load_from(config_file).await?;
info!("reloaded config from {config_file:?}");
swapper.store(Arc::new(config));
Ok(())
}
#[instrument(skip_all)]
pub async fn watcher(
config_file: impl AsRef<str>,
watcher_token: CancellationToken,
swapper: Arc<ArcSwap<Config>>,
) -> eyre::Result<()> {
let config_file = tokio::fs::canonicalize(config_file.as_ref())
.await
.context("failed to canonicalize path")?;
let (tx, mut rx) = tokio::sync::mpsc::channel(1);
let mut debouncer = new_debouncer(Duration::from_millis(100), None, move |events| {
tx.blocking_send(events)
.expect("failed to send message over channel")
})?;
let dir = config_file
.as_path()
.parent()
.expect("absolute path to have parent");
debouncer
.watch(&dir, RecursiveMode::NonRecursive)
.with_context(|| format!("failed to watch {dir:?}"))?;
'event_loop: while let Some(ev) = select! {
_ = watcher_token.cancelled() => {
info!("2");
break 'event_loop;
},
ev = rx.recv() => ev,
} {
let events = match ev {
Ok(events) => events,
Err(err) => {
error!("error getting events: {err:?}");
continue;
}
};
for event in events {
if let Err(err) = process_event(event, &config_file, &swapper).await {
error!("error while processing event: {err}");
}
}
}
Ok(())
}
fn check_zstd_level_bounds<'de, D>(d: D) -> Result<i32, D::Error> fn check_zstd_level_bounds<'de, D>(d: D) -> Result<i32, D::Error>
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
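
The new `watcher` function above debounces filesystem events, re-parses the file, and publishes the result through the shared `ArcSwap`. A simplified, synchronous sketch of the same debounced-watch loop (no tokio, no cancellation token), assuming a `notify-debouncer-full` version where `Debouncer::watch` is available, as used in this commit:

```rust
use std::path::Path;
use std::time::Duration;

use notify_debouncer_full::{new_debouncer, notify::RecursiveMode, DebounceEventResult};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let (tx, rx) = std::sync::mpsc::channel();

    // Coalesce bursts of events; editors often write, rename, and chmod in
    // quick succession when saving a file.
    let mut debouncer = new_debouncer(
        Duration::from_millis(100),
        None,
        move |result: DebounceEventResult| {
            let _ = tx.send(result);
        },
    )?;

    // Watch the parent directory rather than the file itself, since some
    // editors replace the file instead of modifying it in place.
    debouncer.watch(Path::new("."), RecursiveMode::NonRecursive)?;

    for result in rx {
        match result {
            // A real reload would re-parse the config here and store() it
            // into the shared ArcSwap, as the watcher above does.
            Ok(events) => println!("{} event(s); reload config here", events.len()),
            Err(errors) => eprintln!("watch error: {errors:?}"),
        }
    }
    Ok(())
}
```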


@ -1,4 +1,4 @@
#![feature(let_chains, pattern, path_add_extension)] #![feature(let_chains, pattern, path_add_extension, if_let_guard)]
mod app; mod app;
mod config; mod config;
@ -18,8 +18,10 @@ use std::process::exit;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use arc_swap::access::Map;
use arc_swap::ArcSwap;
use color_eyre::eyre::{self, Context}; use color_eyre::eyre::{self, Context};
use config::Engine; use config::{Config, EngineMode};
use tokio::net::TcpListener; use tokio::net::TcpListener;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tokio::task::JoinSet; use tokio::task::JoinSet;
@ -27,7 +29,7 @@ use tokio::time::Instant;
use tokio::{select, signal}; use tokio::{select, signal};
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
use tracing::level_filters::LevelFilter; use tracing::level_filters::LevelFilter;
use tracing::{debug, error, info, info_span, warn, Instrument}; use tracing::{debug, error, info, warn};
use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::{util::SubscriberInitExt, EnvFilter}; use tracing_subscriber::{util::SubscriberInitExt, EnvFilter};
@ -51,21 +53,25 @@ async fn main() -> eyre::Result<()> {
); );
reg.with(tracing_subscriber::fmt::layer()).init(); reg.with(tracing_subscriber::fmt::layer()).init();
let config = Arc::new(
config::load()
.await
.context("couldn't load configuration")?,
);
let socket_addr = SocketAddr::new(config.http.host, config.http.port);
let mut tasks = JoinSet::new(); let mut tasks = JoinSet::new();
let cancellation_token = CancellationToken::new(); let cancellation_token = CancellationToken::new();
let (config, config_file) = config::load()
.await
.context("couldn't load configuration")?;
let config = Arc::new(config);
let swapper = Arc::new(ArcSwap::from(config.clone()));
let config_cache_access: crate::post::cache::ConfigAccess =
Box::new(arc_swap::access::Map::new(swapper.clone(), |c: &Config| {
&c.cache
}));
info!("loaded config from {config_file:?}");
let start = Instant::now(); let start = Instant::now();
// NOTE: use tokio::task::spawn_blocking if this ever turns into a concurrent task // NOTE: use tokio::task::spawn_blocking if this ever turns into a concurrent task
let mut reg = new_registry(&config.dirs.custom_templates) let mut reg =
.context("failed to create handlebars registry")?; new_registry(&config.dirs.templates).context("failed to create handlebars registry")?;
reg.register_helper("date", Box::new(helpers::date)); reg.register_helper("date", Box::new(helpers::date));
reg.register_helper("duration", Box::new(helpers::duration)); reg.register_helper("duration", Box::new(helpers::duration));
debug!(duration = ?start.elapsed(), "registered all templates"); debug!(duration = ?start.elapsed(), "registered all templates");
@ -74,14 +80,11 @@ async fn main() -> eyre::Result<()> {
debug!("setting up watcher"); debug!("setting up watcher");
let watcher_token = cancellation_token.child_token(); let watcher_token = cancellation_token.child_token();
tasks.spawn( tasks.spawn(watch_templates(
watch_templates( config.dirs.templates.clone(),
config.dirs.custom_templates.clone(), watcher_token.clone(),
watcher_token.clone(), registry.clone(),
registry.clone(), ));
)
.instrument(info_span!("custom_template_watcher")),
);
let cache = if config.cache.enable { let cache = if config.cache.enable {
if config.cache.persistence && tokio::fs::try_exists(&config.cache.file).await? { if config.cache.persistence && tokio::fs::try_exists(&config.cache.file).await? {
@ -104,18 +107,26 @@ async fn main() -> eyre::Result<()> {
} else { } else {
None None
} }
.map(|cache| CacheGuard::new(cache, config.cache.clone())) .map(|cache| CacheGuard::new(cache, config_cache_access))
.map(Arc::new); .map(Arc::new);
let posts: Arc<dyn PostManager + Send + Sync> = match config.engine { let posts: Arc<dyn PostManager + Send + Sync> = match config.engine.mode {
Engine::Markdown => Arc::new(MarkdownPosts::new(Arc::clone(&config), cache.clone()).await?), EngineMode::Markdown => {
Engine::Blag => Arc::new(Blag::new( let access = Map::new(swapper.clone(), |c: &Config| &c.engine.markdown);
config.dirs.posts.clone().into(), Arc::new(MarkdownPosts::new(access, cache.clone()).await?)
config.blag.bin.clone().into(), }
cache.clone(), EngineMode::Blag => {
)), let access = Map::new(swapper.clone(), |c: &Config| &c.engine.blag);
Arc::new(Blag::new(access, cache.clone()))
}
}; };
debug!("setting up config watcher");
let token = cancellation_token.child_token();
tasks.spawn(config::watcher(config_file, token, swapper.clone()));
if config.cache.enable && config.cache.cleanup { if config.cache.enable && config.cache.cleanup {
if let Some(millis) = config.cache.cleanup_interval { if let Some(millis) = config.cache.cleanup_interval {
let posts = Arc::clone(&posts); let posts = Arc::clone(&posts);
@ -138,12 +149,14 @@ async fn main() -> eyre::Result<()> {
} }
let state = AppState { let state = AppState {
config: Arc::clone(&config), rss: Arc::new(Map::new(swapper.clone(), |c: &Config| &c.rss)),
style: Arc::new(Map::new(swapper.clone(), |c: &Config| &c.style)),
posts, posts,
templates: registry, templates: registry,
}; };
let app = app::new(&config).with_state(state.clone()); let app = app::new(&config.dirs).with_state(state.clone());
let socket_addr = SocketAddr::new(config.http.host, config.http.port);
let listener = TcpListener::bind(socket_addr) let listener = TcpListener::bind(socket_addr)
.await .await
.with_context(|| format!("couldn't listen on {}", socket_addr))?; .with_context(|| format!("couldn't listen on {}", socket_addr))?;
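
The `main` function now spawns the config watcher as another background task alongside the template watcher, each tied to a child token of a single `CancellationToken`. A small sketch of that spawn/cancel/drain pattern; the `watcher` body here is a placeholder, not the real reload loop:

```rust
use tokio::task::JoinSet;
use tokio_util::sync::CancellationToken;

// Placeholder task standing in for config::watcher / watch_templates.
async fn watcher(name: &'static str, token: CancellationToken) {
    token.cancelled().await;
    println!("{name} watcher shutting down");
}

#[tokio::main]
async fn main() {
    let mut tasks = JoinSet::new();
    let root = CancellationToken::new();

    // Each background task gets its own child token.
    tasks.spawn(watcher("template", root.child_token()));
    tasks.spawn(watcher("config", root.child_token()));

    // On shutdown (normally triggered by a signal), cancel everything and
    // wait for the tasks to drain.
    root.cancel();
    while tasks.join_next().await.is_some() {}
}
```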


@@ -6,9 +6,9 @@ use comrak::ComrakOptions;
 use comrak::RenderPlugins;
 use syntect::highlighting::ThemeSet;
 
-use crate::config::RenderConfig;
+use crate::config::MarkdownRenderConfig;
 
-pub fn build_syntect(config: &RenderConfig) -> eyre::Result<SyntectAdapter> {
+pub fn build_syntect(config: &MarkdownRenderConfig) -> eyre::Result<SyntectAdapter> {
     let mut theme_set = if config.syntect.load_defaults {
         ThemeSet::load_defaults()
     } else {
@@ -28,7 +28,7 @@ pub fn build_syntect(config: &MarkdownRenderConfig) -> eyre::Result<SyntectAdapter> {
 
 pub fn render(
     markdown: &str,
-    config: &RenderConfig,
+    config: &MarkdownRenderConfig,
     syntect: Option<&dyn SyntaxHighlighterAdapter>,
 ) -> String {
     let mut options = ComrakOptions::default();


@ -5,6 +5,7 @@ use std::process::Stdio;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use arc_swap::access::Access;
use axum::async_trait; use axum::async_trait;
use axum::http::HeaderValue; use axum::http::HeaderValue;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
@ -18,6 +19,7 @@ use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::time::Instant; use tokio::time::Instant;
use tracing::{debug, error, info, instrument}; use tracing::{debug, error, info, instrument};
use crate::config::BlagConfig;
use crate::error::PostError; use crate::error::PostError;
use crate::post::Filter; use crate::post::Filter;
use crate::systemtime_as_secs::as_secs; use crate::systemtime_as_secs::as_secs;
@ -63,9 +65,8 @@ impl BlagMetadata {
} }
} }
pub struct Blag { pub struct Blag<A> {
root: Arc<Path>, config: A,
blag_bin: Arc<Path>,
cache: Option<Arc<CacheGuard>>, cache: Option<Arc<CacheGuard>>,
_fastblag: bool, _fastblag: bool,
} }
@ -75,11 +76,15 @@ enum RenderResult {
Raw(Vec<u8>, Arc<str>), Raw(Vec<u8>, Arc<str>),
} }
impl Blag { impl<A> Blag<A>
pub fn new(root: Arc<Path>, blag_bin: Arc<Path>, cache: Option<Arc<CacheGuard>>) -> Blag { where
A: Access<BlagConfig>,
A: Sync,
A::Guard: Send,
{
pub fn new(config: A, cache: Option<Arc<CacheGuard>>) -> Self {
Self { Self {
root, config,
blag_bin,
cache, cache,
_fastblag: false, _fastblag: false,
} }
@ -92,10 +97,11 @@ impl Blag {
query_json: String, query_json: String,
) -> Result<RenderResult, PostError> { ) -> Result<RenderResult, PostError> {
let start = Instant::now(); let start = Instant::now();
let bin = self.config.load().bin.clone();
debug!(%name, "rendering"); debug!(%name, "rendering");
let mut cmd = tokio::process::Command::new(&*self.blag_bin) let mut cmd = tokio::process::Command::new(&*bin)
.arg(path.as_ref()) .arg(path.as_ref())
.env("BLAG_QUERY", query_json) .env("BLAG_QUERY", query_json)
.stdout(Stdio::piped()) .stdout(Stdio::piped())
@ -103,7 +109,7 @@ impl Blag {
.stdin(Stdio::null()) .stdin(Stdio::null())
.spawn() .spawn()
.map_err(|err| { .map_err(|err| {
error!("failed to spawn {:?}: {err}", self.blag_bin); error!("failed to spawn {bin:?}: {err}");
err err
})?; })?;
@ -145,18 +151,37 @@ impl Blag {
dont_cache, dont_cache,
)) ))
} }
fn as_raw(name: &str) -> String {
let mut buf = String::with_capacity(name.len() + 3);
buf += name;
buf += ".sh";
buf
}
fn is_raw(name: &str) -> bool {
name.ends_with(".sh")
}
} }
#[async_trait] #[async_trait]
impl PostManager for Blag { impl<A> PostManager for Blag<A>
where
A: Access<BlagConfig>,
A: Sync,
A::Guard: Send,
{
async fn get_all_posts( async fn get_all_posts(
&self, &self,
filters: &[Filter<'_>], filters: &[Filter<'_>],
query: &IndexMap<String, Value>, query: &IndexMap<String, Value>,
) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> { ) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> {
let root = &self.config.load().root;
let mut set = FuturesUnordered::new(); let mut set = FuturesUnordered::new();
let mut posts = Vec::new(); let mut posts = Vec::new();
let mut files = tokio::fs::read_dir(&self.root).await?; let mut files = tokio::fs::read_dir(&root).await?;
loop { loop {
let entry = match files.next_entry().await { let entry = match files.next_entry().await {
@ -178,7 +203,7 @@ impl PostManager for Blag {
} }
}; };
if self.is_raw(&name) { if Self::is_raw(&name) {
name.truncate(name.len() - 3); name.truncate(name.len() - 3);
let name = name.into(); let name = name.into();
set.push(self.get_post(Arc::clone(&name), query).map(|v| (name, v))); set.push(self.get_post(Arc::clone(&name), query).map(|v| (name, v)));
@ -188,18 +213,19 @@ impl PostManager for Blag {
while let Some((name, result)) = set.next().await { while let Some((name, result)) = set.next().await {
let post = match result { let post = match result {
Ok(v) => match v { Ok(v) => v,
ReturnedPost::Rendered { meta, body, perf } => (meta, body, perf),
ReturnedPost::Raw { .. } => unreachable!(),
},
Err(err) => { Err(err) => {
error!("error while rendering blagpost {name:?}: {err}"); error!("error while rendering blagpost {name:?}: {err}");
continue; continue;
} }
}; };
if post.0.apply_filters(filters) { if let ReturnedPost::Rendered {
posts.push(post); meta, body, perf, ..
} = post
&& meta.apply_filters(filters)
{
posts.push((meta, body, perf));
} }
} }
@ -215,29 +241,34 @@ impl PostManager for Blag {
query: &IndexMap<String, Value>, query: &IndexMap<String, Value>,
) -> Result<ReturnedPost, PostError> { ) -> Result<ReturnedPost, PostError> {
let start = Instant::now(); let start = Instant::now();
let mut path = self.root.join(&*name); let BlagConfig {
ref root,
ref raw_access,
..
} = &*self.config.load();
if self.is_raw(&name) { if Self::is_raw(&name) {
let mut buffer = Vec::new(); let mut buffer = Vec::new();
let mut file = let mut file = OpenOptions::new()
OpenOptions::new() .read(true)
.read(true) .open(root.join(&*name))
.open(&path) .await
.await .map_err(|err| match err.kind() {
.map_err(|err| match err.kind() { std::io::ErrorKind::NotFound => PostError::NotFound(name),
std::io::ErrorKind::NotFound => PostError::NotFound(name), _ => PostError::IoError(err),
_ => PostError::IoError(err), })?;
})?;
file.read_to_end(&mut buffer).await?; file.read_to_end(&mut buffer).await?;
return Ok(ReturnedPost::Raw { return Ok(ReturnedPost::Raw {
buffer, buffer,
content_type: HeaderValue::from_static("text/x-shellscript"), content_type: HeaderValue::from_static("text/x-shellscript"),
}); });
} else {
path.add_extension("sh");
} }
let raw_name = Self::as_raw(&name);
let path = root.join(&raw_name);
let raw_name = raw_access.then_some(raw_name);
let stat = tokio::fs::metadata(&path) let stat = tokio::fs::metadata(&path)
.await .await
.map_err(|err| match err.kind() { .map_err(|err| match err.kind() {
@ -264,6 +295,7 @@ impl PostManager for Blag {
meta, meta,
body, body,
perf: RenderStats::Cached(start.elapsed()), perf: RenderStats::Cached(start.elapsed()),
raw_name,
} }
} else { } else {
let (meta, content, (parsed, rendered), dont_cache) = let (meta, content, (parsed, rendered), dont_cache) =
@ -295,6 +327,7 @@ impl PostManager for Blag {
parsed, parsed,
rendered, rendered,
}, },
raw_name,
} }
}; };
@ -307,15 +340,13 @@ impl PostManager for Blag {
async fn cleanup(&self) { async fn cleanup(&self) {
if let Some(cache) = &self.cache { if let Some(cache) = &self.cache {
let root = &self.config.load().root;
cache cache
.cleanup(|key, value| { .cleanup(|key, value| {
let mtime = std::fs::metadata( let mtime = std::fs::metadata(root.join(Self::as_raw(&key.name)))
self.root .ok()
.join(self.as_raw(&key.name).unwrap_or_else(|| unreachable!())), .and_then(|metadata| metadata.modified().ok())
) .map(as_secs);
.ok()
.and_then(|metadata| metadata.modified().ok())
.map(as_secs);
match mtime { match mtime {
Some(mtime) => mtime <= value.mtime, Some(mtime) => mtime <= value.mtime,
@ -325,16 +356,4 @@ impl PostManager for Blag {
.await .await
} }
} }
fn is_raw(&self, name: &str) -> bool {
name.ends_with(".sh")
}
fn as_raw(&self, name: &str) -> Option<String> {
let mut buf = String::with_capacity(name.len() + 3);
buf += name;
buf += ".sh";
Some(buf)
}
} }
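
`Blag` is now generic over how it obtains its `BlagConfig` (`A: Access<BlagConfig>`), so production code can hand it a `Map` over the shared `ArcSwap` while tests or simpler setups can pass a constant. A trimmed-down sketch of that shape, with a hypothetical `BlagCfg` standing in for the real config type:

```rust
use arc_swap::access::{Access, Constant};

#[derive(Clone, Debug)]
struct BlagCfg {
    bin: String,
}

// Mirrors the commit's Blag<A>: the manager never stores a config value,
// only something it can load() a fresh one from.
struct Blag<A> {
    config: A,
}

impl<A: Access<BlagCfg>> Blag<A> {
    fn bin(&self) -> String {
        // Every call sees the current config, so a hot reload takes effect
        // on the next render without restarting the manager.
        self.config.load().bin.clone()
    }
}

fn main() {
    // Constant satisfies Access too, which keeps simple setups free of ArcSwap wiring.
    let blag = Blag {
        config: Constant(BlagCfg { bin: "blag".into() }),
    };
    println!("spawning {}", blag.bin());
}
```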


@@ -7,6 +7,7 @@ use std::time::SystemTime;
 use crate::config::CacheConfig;
 use crate::post::PostMetadata;
+use arc_swap::access::DynAccess;
 use color_eyre::eyre::{self, Context};
 use scc::HashMap;
 use serde::{Deserialize, Serialize};
@@ -201,13 +202,15 @@ impl Cache {
     }
 }
 
+pub type ConfigAccess = Box<dyn DynAccess<CacheConfig> + Send + Sync>;
+
 pub struct CacheGuard {
     inner: Cache,
-    config: CacheConfig,
+    config: ConfigAccess,
 }
 
 impl CacheGuard {
-    pub fn new(cache: Cache, config: CacheConfig) -> Self {
+    pub fn new(cache: Cache, config: ConfigAccess) -> Self {
         Self {
             inner: cache,
             config,
@@ -215,13 +218,14 @@ impl CacheGuard {
     }
 
     fn try_drop(&mut self) -> Result<(), eyre::Report> {
+        let config = self.config.load();
         // write cache to file
-        let path = &self.config.file;
+        let path = &*config.file;
         let serialized = bitcode::serialize(&self.inner).context("failed to serialize cache")?;
         let mut cache_file = std::fs::File::create(path)
             .with_context(|| format!("failed to open cache at {}", path.display()))?;
-        let compression_level = self.config.compression_level;
-        if self.config.compress {
+        let compression_level = config.compression_level;
+        if config.compress {
             std::io::Write::write_all(
                 &mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
                     .auto_finish(),
@@ -231,7 +235,7 @@ impl CacheGuard {
             cache_file.write_all(&serialized)
         }
         .context("failed to write cache to file")?;
-        info!("wrote cache to {}", path.display());
+        info!("wrote cache to {path:?}");
         Ok(())
     }
 }


@ -7,6 +7,7 @@ use std::time::Duration;
use std::time::Instant; use std::time::Instant;
use std::time::SystemTime; use std::time::SystemTime;
use arc_swap::access::Access;
use axum::async_trait; use axum::async_trait;
use axum::http::HeaderValue; use axum::http::HeaderValue;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
@ -20,7 +21,7 @@ use tokio::fs;
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
use tracing::{info, instrument, warn}; use tracing::{info, instrument, warn};
use crate::config::Config; use crate::config::MarkdownConfig;
use crate::markdown_render::{build_syntect, render}; use crate::markdown_render::{build_syntect, render};
use crate::systemtime_as_secs::as_secs; use crate::systemtime_as_secs::as_secs;
@ -66,23 +67,25 @@ impl FrontMatter {
} }
} }
pub struct MarkdownPosts { pub struct MarkdownPosts<A> {
cache: Option<Arc<CacheGuard>>, cache: Option<Arc<CacheGuard>>,
config: Arc<Config>, config: A,
render_hash: u64, render_hash: u64,
syntect: SyntectAdapter, syntect: SyntectAdapter,
} }
impl MarkdownPosts { impl<A> MarkdownPosts<A>
pub async fn new( where
config: Arc<Config>, A: Access<MarkdownConfig>,
cache: Option<Arc<CacheGuard>>, A: Sync,
) -> eyre::Result<MarkdownPosts> { A::Guard: Send,
let syntect = {
build_syntect(&config.render).context("failed to create syntax highlighting engine")?; pub async fn new(config: A, cache: Option<Arc<CacheGuard>>) -> eyre::Result<Self> {
let syntect = build_syntect(&config.load().render)
.context("failed to create syntax highlighting engine")?;
let mut hasher = DefaultHasher::new(); let mut hasher = DefaultHasher::new();
config.render.hash(&mut hasher); config.load().render.hash(&mut hasher);
let render_hash = hasher.finish(); let render_hash = hasher.finish();
Ok(Self { Ok(Self {
@ -118,7 +121,7 @@ impl MarkdownPosts {
let parsing = parsing_start.elapsed(); let parsing = parsing_start.elapsed();
let before_render = Instant::now(); let before_render = Instant::now();
let post = render(body, &self.config.render, Some(&self.syntect)).into(); let post = render(body, &self.config.load().render, Some(&self.syntect)).into();
let rendering = before_render.elapsed(); let rendering = before_render.elapsed();
if let Some(cache) = &self.cache { if let Some(cache) = &self.cache {
@ -135,10 +138,27 @@ impl MarkdownPosts {
Ok((metadata, post, (parsing, rendering))) Ok((metadata, post, (parsing, rendering)))
} }
fn is_raw(name: &str) -> bool {
name.ends_with(".md")
}
fn as_raw(name: &str) -> Option<String> {
let mut buf = String::with_capacity(name.len() + 3);
buf += name;
buf += ".md";
Some(buf)
}
} }
#[async_trait] #[async_trait]
impl PostManager for MarkdownPosts { impl<A> PostManager for MarkdownPosts<A>
where
A: Access<MarkdownConfig>,
A: Sync,
A::Guard: Send,
{
async fn get_all_posts( async fn get_all_posts(
&self, &self,
filters: &[Filter<'_>], filters: &[Filter<'_>],
@ -146,7 +166,7 @@ impl PostManager for MarkdownPosts {
) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> { ) -> Result<Vec<(PostMetadata, Arc<str>, RenderStats)>, PostError> {
let mut posts = Vec::new(); let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?; let mut read_dir = fs::read_dir(&self.config.load().root).await?;
while let Some(entry) = read_dir.next_entry().await? { while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path(); let path = entry.path();
let stat = fs::metadata(&path).await?; let stat = fs::metadata(&path).await?;
@ -161,7 +181,9 @@ impl PostManager for MarkdownPosts {
.into(); .into();
let post = self.get_post(Arc::clone(&name), query).await?; let post = self.get_post(Arc::clone(&name), query).await?;
if let ReturnedPost::Rendered { meta, body, perf } = post if let ReturnedPost::Rendered {
meta, body, perf, ..
} = post
&& meta.apply_filters(filters) && meta.apply_filters(filters)
{ {
posts.push((meta, body, perf)); posts.push((meta, body, perf));
@ -179,7 +201,7 @@ impl PostManager for MarkdownPosts {
) -> Result<Vec<PostMetadata>, PostError> { ) -> Result<Vec<PostMetadata>, PostError> {
let mut posts = Vec::new(); let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?; let mut read_dir = fs::read_dir(&self.config.load().root).await?;
while let Some(entry) = read_dir.next_entry().await? { while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path(); let path = entry.path();
let stat = fs::metadata(&path).await?; let stat = fs::metadata(&path).await?;
@ -225,8 +247,9 @@ impl PostManager for MarkdownPosts {
name: Arc<str>, name: Arc<str>,
_query: &IndexMap<String, Value>, _query: &IndexMap<String, Value>,
) -> Result<ReturnedPost, PostError> { ) -> Result<ReturnedPost, PostError> {
let post = if self.config.markdown_access && self.is_raw(&name) { let config = self.config.load();
let path = self.config.dirs.posts.join(&*name); let post = if config.raw_access && Self::is_raw(&name) {
let path = config.root.join(&*name);
let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await { let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(value) => value, Ok(value) => value,
@ -248,11 +271,8 @@ impl PostManager for MarkdownPosts {
} }
} else { } else {
let start = Instant::now(); let start = Instant::now();
let path = self let raw_name = Self::as_raw(&name).unwrap_or_else(|| unreachable!());
.config let path = config.root.join(&raw_name);
.dirs
.posts
.join(self.as_raw(&name).unwrap_or_else(|| unreachable!()));
let stat = match tokio::fs::metadata(&path).await { let stat = match tokio::fs::metadata(&path).await {
Ok(value) => value, Ok(value) => value,
@ -265,26 +285,29 @@ impl PostManager for MarkdownPosts {
}; };
let mtime = as_secs(stat.modified()?); let mtime = as_secs(stat.modified()?);
if let Some(cache) = &self.cache let (meta, body, perf) = if let Some(cache) = &self.cache
&& let Some(CacheValue { meta, body, .. }) = && let Some(CacheValue { meta, body, .. }) =
cache.lookup(name.clone(), mtime, self.render_hash).await cache.lookup(name.clone(), mtime, self.render_hash).await
{ {
ReturnedPost::Rendered { (meta, body, RenderStats::Cached(start.elapsed()))
meta,
body,
perf: RenderStats::Cached(start.elapsed()),
}
} else { } else {
let (meta, body, stats) = self.parse_and_render(name, path).await?; let (meta, body, stats) = self.parse_and_render(name, path).await?;
ReturnedPost::Rendered { (
meta, meta,
body, body,
perf: RenderStats::Rendered { RenderStats::Rendered {
total: start.elapsed(), total: start.elapsed(),
parsed: stats.0, parsed: stats.0,
rendered: stats.1, rendered: stats.1,
}, },
} )
};
ReturnedPost::Rendered {
meta,
body,
perf,
raw_name: config.raw_access.then_some(raw_name),
} }
}; };
@ -306,9 +329,9 @@ impl PostManager for MarkdownPosts {
let mtime = std::fs::metadata( let mtime = std::fs::metadata(
self.config self.config
.dirs .load()
.posts .root
.join(self.as_raw(name).unwrap_or_else(|| unreachable!())), .join(Self::as_raw(name).unwrap_or_else(|| unreachable!())),
) )
.ok() .ok()
.and_then(|metadata| metadata.modified().ok()) .and_then(|metadata| metadata.modified().ok())
@ -322,16 +345,4 @@ impl PostManager for MarkdownPosts {
.await .await
} }
} }
fn is_raw(&self, name: &str) -> bool {
name.ends_with(".md")
}
fn as_raw(&self, name: &str) -> Option<String> {
let mut buf = String::with_capacity(name.len() + 3);
buf += name;
buf += ".md";
Some(buf)
}
} }
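
`MarkdownPosts` hashes its render config (at construction) and keys cache lookups on that hash, so posts rendered under different render settings never collide in the cache. A small illustration of the hashing itself, with a stand-in config struct:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for MarkdownRenderConfig; anything deterministic and Hash works.
#[derive(Hash)]
struct RenderCfg {
    escape: bool,
    unsafe_: bool,
}

fn render_hash(cfg: &RenderCfg) -> u64 {
    let mut hasher = DefaultHasher::new();
    cfg.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let old = render_hash(&RenderCfg { escape: false, unsafe_: false });
    let new = render_hash(&RenderCfg { escape: true, unsafe_: false });

    // Cache lookups include the hash, so entries rendered under the old
    // settings are treated as misses once the render config changes.
    println!("old = {old:#x}, new = {new:#x}, invalidated = {}", old != new);
}
```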


@@ -55,6 +55,7 @@ pub enum ReturnedPost {
         meta: PostMetadata,
         body: Arc<str>,
         perf: RenderStats,
+        raw_name: Option<String>,
     },
     Raw {
         buffer: Vec<u8>,
@@ -149,14 +150,4 @@ pub trait PostManager {
     ) -> Result<ReturnedPost, PostError>;
 
     async fn cleanup(&self) {}
-
-    #[allow(unused)]
-    fn is_raw(&self, name: &str) -> bool {
-        false
-    }
-
-    #[allow(unused)]
-    fn as_raw(&self, name: &str) -> Option<String> {
-        None
-    }
 }


@@ -8,7 +8,7 @@ use notify_debouncer_full::{new_debouncer, DebouncedEvent};
 use tokio::select;
 use tokio::sync::RwLock;
 use tokio_util::sync::CancellationToken;
-use tracing::{debug, debug_span, error, info, trace};
+use tracing::{debug, debug_span, error, info, instrument, trace};
 
 use crate::templates::*;
@@ -71,11 +71,12 @@ async fn process_event(
     Ok(())
 }
 
+#[instrument(skip_all)]
 pub async fn watch_templates<'a>(
     path: impl AsRef<Path>,
     watcher_token: CancellationToken,
     reg: Arc<RwLock<Handlebars<'a>>>,
-) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+) -> Result<(), color_eyre::eyre::Report> {
     let path = path.as_ref();
 
     let (tx, mut rx) = tokio::sync::mpsc::channel(1);


@@ -144,14 +144,14 @@ body > main > h1:first-child {
 }
 
 @media (min-width: 956px) {
-  :root {
+  :root:has(body.cool) {
     --target-ratio: 0.7; /* 669px - 1344px */
     --width: min(100% * var(--target-ratio), 1920px * var(--target-ratio));
     --padding: 4em;
     --padded-width: calc(var(--width) - var(--padding) * 2);
   }
 
-  body {
+  body.cool {
     padding: 4em 0;
     min-height: calc(100vh - 8em);
@@ -165,13 +165,13 @@
     );
   }
 
-  body > * {
+  body.cool > * {
     margin: auto;
     padding: var(--padding);
     width: var(--padded-width);
   }
 
-  body > footer {
+  body.cool > footer {
     padding: initial;
     width: var(--width);
   }