revamp config and add tags support

slonkazoid 2024-05-01 18:25:01 +03:00
parent 2fa22a2752
commit 573ea75167
Signed by: slonk
SSH key fingerprint: SHA256:tbZfJX4IOvZ0LGWOWu5Ijo8jfMPi78TU7x1VoEeCIjM
5 changed files with 194 additions and 106 deletions

README.md

@@ -26,7 +26,8 @@ blazingly fast markdown blog software written in rust memory safe
 - [ ] date formatting respects user timezone
 - [ ] clean up imports and require less features
 - [ ] improve home page
-- [ ] tags
+- [x] tags (backend)
+- [ ] tags (frontend)
 - [x] be blazingly fast
 - [x] 100+ MiB binary size
@@ -35,26 +36,30 @@ blazingly fast markdown blog software written in rust memory safe
 the default configuration with comments looks like this
 
 ```toml
-# main settings
-host = "0.0.0.0" # ip to listen on
-port = 3000 # port to listen on
 title = "bingus-blog" # title of the website
 description = "blazingly fast markdown blog software written in rust memory safe" # description of the website
-posts_dir = "posts" # where posts are stored
-markdown_access = true # allow users to see the raw markdown of a post
+raw_access = true # allow users to see the raw markdown of a post
 
-[cache] # cache settings
+[dirs]
+posts = "posts" # where posts are stored
+media = "media" # directory served under /media/
+
+[http]
+host = "0.0.0.0" # ip to listen on
+port = 3000 # port to listen on
+
+[cache]
 enable = true # save metadata and rendered posts into RAM
 # highly recommended, only turn off if absolutely necessary
 cleanup = true # clean cache, highly recommended
-#cleanup_interval = 86400000 # clean the cache regularly instead of just at startu
+#cleanup_interval = 86400000 # clean the cache regularly instead of just at startup
 # uncomment to enable
 persistence = true # save the cache to on shutdown and load on startup
 file = "cache" # file to save the cache to
 compress = true # compress the cache file
 compression_level = 3 # zstd compression level, 3 is recommended
 
-[render] # post rendering settings
+[render]
 syntect.load_defaults = false # include default syntect themes
 syntect.themes_dir = "themes" # directory to include themes from
 syntect.theme = "Catppuccin Mocha" # theme file name (without `.tmTheme`)

src/config.rs

@@ -1,8 +1,6 @@
-use std::{
-    env,
-    net::{IpAddr, Ipv4Addr},
-    path::PathBuf,
-};
+use std::env;
+use std::net::{IpAddr, Ipv4Addr};
+use std::path::PathBuf;
 
 use color_eyre::eyre::{bail, Context, Result};
 use serde::{Deserialize, Serialize};
@@ -40,28 +38,60 @@ pub struct CacheConfig {
 #[derive(Serialize, Deserialize, Debug, Clone)]
 #[serde(default)]
-pub struct Config {
+pub struct HttpConfig {
     pub host: IpAddr,
     pub port: u16,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(default)]
+pub struct DirsConfig {
+    pub posts: PathBuf,
+    pub media: PathBuf,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(default)]
+pub struct Config {
     pub title: String,
     pub description: String,
-    pub posts_dir: PathBuf,
+    pub raw_access: bool,
+    pub num_posts: usize,
+    pub dirs: DirsConfig,
+    pub http: HttpConfig,
     pub render: RenderConfig,
     pub cache: CacheConfig,
-    pub markdown_access: bool,
 }
 
 impl Default for Config {
     fn default() -> Self {
         Self {
-            host: IpAddr::V4(Ipv4Addr::UNSPECIFIED),
-            port: 3000,
             title: "bingus-blog".into(),
             description: "blazingly fast markdown blog software written in rust memory safe".into(),
+            raw_access: true,
+            num_posts: 5,
+            dirs: Default::default(),
+            http: Default::default(),
             render: Default::default(),
-            posts_dir: "posts".into(),
             cache: Default::default(),
-            markdown_access: true,
+        }
+    }
+}
+
+impl Default for DirsConfig {
+    fn default() -> Self {
+        Self {
+            posts: "posts".into(),
+            media: "media".into(),
+        }
+    }
+}
+
+impl Default for HttpConfig {
+    fn default() -> Self {
+        Self {
+            host: IpAddr::V4(Ipv4Addr::UNSPECIFIED),
+            port: 3000,
         }
     }
 }
 
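Not part of the diff: a minimal, self-contained sketch of how the new `[dirs]` and `[http]` tables from the README example would deserialize, assuming the `toml` crate plus serde derive. The real `Config` also carries `title`, `description`, `raw_access`, `num_posts`, `render`, and `cache`, omitted here to keep the sketch short.

```rust
// Sketch only: trimmed-down stand-ins for DirsConfig/HttpConfig/Config.
use std::net::{IpAddr, Ipv4Addr};
use std::path::PathBuf;

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(default)]
struct DirsConfig {
    posts: PathBuf,
    media: PathBuf,
}

impl Default for DirsConfig {
    fn default() -> Self {
        Self { posts: "posts".into(), media: "media".into() }
    }
}

#[derive(Deserialize, Debug)]
#[serde(default)]
struct HttpConfig {
    host: IpAddr,
    port: u16,
}

impl Default for HttpConfig {
    fn default() -> Self {
        Self { host: IpAddr::V4(Ipv4Addr::UNSPECIFIED), port: 3000 }
    }
}

#[derive(Deserialize, Debug, Default)]
#[serde(default)]
struct Config {
    dirs: DirsConfig,
    http: HttpConfig,
}

fn main() {
    // Only [http].port is set; everything else falls back to the Default impls
    // because of #[serde(default)] on each struct.
    let cfg: Config = toml::from_str("[http]\nport = 8080\n").expect("valid TOML");
    assert_eq!(cfg.http.port, 8080);
    assert_eq!(cfg.dirs.posts, PathBuf::from("posts"));
    println!("{cfg:?}");
}
```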

src/error.rs

@@ -1,9 +1,10 @@
 use std::fmt::Display;
 
-use axum::{http::StatusCode, response::IntoResponse};
+use askama_axum::Template;
+use axum::http::StatusCode;
+use axum::response::{IntoResponse, Response};
 use thiserror::Error;
 
-// fronma is too lazy to implement std::error::Error for their own types
 #[derive(Debug)]
 #[repr(transparent)]
 pub struct FronmaError(fronma::error::Error);
@@ -45,3 +46,43 @@ impl IntoResponse for PostError {
         (StatusCode::INTERNAL_SERVER_ERROR, self.to_string()).into_response()
     }
 }
+
+pub type AppResult<T> = Result<T, AppError>;
+
+#[derive(Error, Debug)]
+pub enum AppError {
+    #[error("failed to fetch post: {0}")]
+    PostError(#[from] PostError),
+}
+
+impl From<std::io::Error> for AppError {
+    #[inline(always)]
+    fn from(value: std::io::Error) -> Self {
+        Self::PostError(PostError::IoError(value))
+    }
+}
+
+#[derive(Template)]
+#[template(path = "error.html")]
+struct ErrorTemplate {
+    error: String,
+}
+
+impl IntoResponse for AppError {
+    fn into_response(self) -> Response {
+        let status_code = match &self {
+            AppError::PostError(err) => match err {
+                PostError::NotFound(_) => StatusCode::NOT_FOUND,
+                _ => StatusCode::INTERNAL_SERVER_ERROR,
+            },
+            //_ => StatusCode::INTERNAL_SERVER_ERROR,
+        };
+        (
+            status_code,
+            ErrorTemplate {
+                error: self.to_string(),
+            },
+        )
+            .into_response()
+    }
+}
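Not from the commit: a self-contained sketch of the conversion chain the new `AppError` enables, using simplified stand-ins for `PostError` and the status mapping (the real types live in this file and render through the Askama `ErrorTemplate`).

```rust
// Sketch with simplified stand-ins; variant shapes differ from the real crate.
use thiserror::Error;

#[derive(Error, Debug)]
enum PostError {
    #[error("post not found: {0}")]
    NotFound(String),
    #[error(transparent)]
    IoError(#[from] std::io::Error),
}

#[derive(Error, Debug)]
enum AppError {
    #[error("failed to fetch post: {0}")]
    PostError(#[from] PostError),
}

// Mirrors the manual impl in the diff: lets handlers use `?` directly on
// std::io::Error results without wrapping them in PostError first.
impl From<std::io::Error> for AppError {
    fn from(value: std::io::Error) -> Self {
        Self::PostError(PostError::IoError(value))
    }
}

fn read_raw_post(name: &str) -> Result<String, AppError> {
    // The io::Error from read_to_string converts straight into AppError via From.
    Ok(std::fs::read_to_string(format!("posts/{name}.md"))?)
}

fn main() {
    match read_raw_post("does-not-exist") {
        Ok(body) => println!("{body}"),
        // In the real IntoResponse impl, PostError::NotFound maps to 404 and
        // everything else to 500.
        Err(err) => eprintln!("{err}"),
    }
}
```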

src/main.rs

@@ -17,13 +17,13 @@ use std::sync::Arc;
 use std::time::Duration;
 
 use askama_axum::Template;
-use axum::extract::{MatchedPath, Path, State};
-use axum::http::{Request, StatusCode};
+use axum::extract::{Path, Query, State};
+use axum::http::Request;
 use axum::response::{IntoResponse, Redirect, Response};
 use axum::routing::{get, Router};
 use axum::Json;
 use color_eyre::eyre::{self, Context};
-use thiserror::Error;
+use serde::Deserialize;
 use tokio::io::{AsyncReadExt, AsyncWriteExt};
 use tokio::net::TcpListener;
 use tokio::task::JoinSet;
@@ -36,7 +36,7 @@ use tracing::{debug, error, info, info_span, warn, Span};
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
 
 use crate::config::Config;
-use crate::error::PostError;
+use crate::error::{AppResult, PostError};
 use crate::post::{PostManager, PostMetadata, RenderStats};
 
 type ArcState = Arc<AppState>;
@@ -64,59 +64,46 @@ struct ViewPostTemplate {
     markdown_access: bool,
 }
 
-type AppResult<T> = Result<T, AppError>;
-
-#[derive(Error, Debug)]
-enum AppError {
-    #[error("failed to fetch post: {0}")]
-    PostError(#[from] PostError),
-}
-
-impl From<std::io::Error> for AppError {
-    #[inline(always)]
-    fn from(value: std::io::Error) -> Self {
-        Self::PostError(PostError::IoError(value))
-    }
-}
-
-#[derive(Template)]
-#[template(path = "error.html")]
-struct ErrorTemplate {
-    error: String,
-}
-
-impl IntoResponse for AppError {
-    fn into_response(self) -> Response {
-        let status_code = match &self {
-            AppError::PostError(err) => match err {
-                PostError::NotFound(_) => StatusCode::NOT_FOUND,
-                _ => StatusCode::INTERNAL_SERVER_ERROR,
-            },
-            //_ => StatusCode::INTERNAL_SERVER_ERROR,
-        };
-        (
-            status_code,
-            ErrorTemplate {
-                error: self.to_string(),
-            },
-        )
-            .into_response()
-    }
-}
-
-async fn index(State(state): State<ArcState>) -> AppResult<IndexTemplate> {
+#[derive(Deserialize)]
+struct QueryParams {
+    tag: Option<String>,
+    #[serde(rename = "n")]
+    num_posts: Option<usize>,
+}
+
+async fn index(
+    State(state): State<ArcState>,
+    Query(query): Query<QueryParams>,
+) -> AppResult<IndexTemplate> {
+    let posts = state
+        .posts
+        .get_max_n_posts_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
+        .await?;
+
     Ok(IndexTemplate {
         title: state.config.title.clone(),
         description: state.config.description.clone(),
-        posts: state.posts.list_posts().await?,
+        posts,
     })
 }
 
+async fn all_posts(
+    State(state): State<ArcState>,
+    Query(query): Query<QueryParams>,
+) -> AppResult<Json<Vec<PostMetadata>>> {
+    let posts = state
+        .posts
+        .get_max_n_posts_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
+        .await?;
+
+    Ok(Json(posts))
+}
+
 async fn post(State(state): State<ArcState>, Path(name): Path<String>) -> AppResult<Response> {
-    if name.ends_with(".md") && state.config.markdown_access {
+    if name.ends_with(".md") && state.config.raw_access {
         let mut file = tokio::fs::OpenOptions::new()
             .read(true)
-            .open(state.config.posts_dir.join(&name))
+            .open(state.config.dirs.posts.join(&name))
             .await?;
 
         let mut buf = Vec::new();
@@ -129,18 +116,13 @@ async fn post(State(state): State<ArcState>, Path(name): Path<String>) -> AppRes
             meta: post.0,
             rendered: post.1,
             rendered_in: post.2,
-            markdown_access: state.config.markdown_access,
+            markdown_access: state.config.raw_access,
         };
 
         Ok(page.into_response())
     }
 }
 
-async fn all_posts(State(state): State<ArcState>) -> AppResult<Json<Vec<PostMetadata>>> {
-    let posts = state.posts.list_posts().await?;
-    Ok(Json(posts))
-}
-
 #[tokio::main]
 async fn main() -> eyre::Result<()> {
     #[cfg(feature = "tokio-console")]
@@ -160,6 +142,8 @@ async fn main() -> eyre::Result<()> {
         .await
         .context("couldn't load configuration")?;
 
+    let socket_addr = SocketAddr::new(config.http.host, config.http.port);
+
     let mut tasks = JoinSet::new();
     let cancellation_token = CancellationToken::new();
 
@@ -198,7 +182,7 @@ async fn main() -> eyre::Result<()> {
                 let cache =
                     bitcode::deserialize(serialized.as_slice()).context("failed to parse cache")?;
                 Ok::<PostManager, color_eyre::Report>(PostManager::new_with_cache(
-                    config.posts_dir.clone(),
+                    config.dirs.posts.clone(),
                     config.render.clone(),
                     cache,
                 ))
@@ -210,7 +194,7 @@ async fn main() -> eyre::Result<()> {
                 error!("failed to load cache: {}", err);
                 info!("using empty cache");
                 PostManager::new_with_cache(
-                    config.posts_dir.clone(),
+                    config.dirs.posts.clone(),
                     config.render.clone(),
                     Default::default(),
                 )
@@ -218,13 +202,13 @@ async fn main() -> eyre::Result<()> {
             }
         } else {
             PostManager::new_with_cache(
-                config.posts_dir.clone(),
+                config.dirs.posts.clone(),
                 config.render.clone(),
                 Default::default(),
             )
         }
     } else {
-        PostManager::new(config.posts_dir.clone(), config.render.clone())
+        PostManager::new(config.dirs.posts.clone(), config.render.clone())
     };
 
     let state = Arc::new(AppState { config, posts });
@@ -265,16 +249,10 @@ async fn main() -> eyre::Result<()> {
         .layer(
             TraceLayer::new_for_http()
                 .make_span_with(|request: &Request<_>| {
-                    let matched_path = request
-                        .extensions()
-                        .get::<MatchedPath>()
-                        .map(MatchedPath::as_str);
-
                     info_span!(
                         "request",
                         method = ?request.method(),
                         path = ?request.uri().path(),
-                        matched_path,
                     )
                 })
                 .on_response(|response: &Response<_>, duration: Duration, span: &Span| {
@@ -285,14 +263,9 @@ async fn main() -> eyre::Result<()> {
         )
         .with_state(state.clone());
 
-    let listener = TcpListener::bind((state.config.host, state.config.port))
+    let listener = TcpListener::bind(socket_addr)
         .await
-        .with_context(|| {
-            format!(
-                "couldn't listen on {}",
-                SocketAddr::new(state.config.host, state.config.port)
-            )
-        })?;
+        .with_context(|| format!("couldn't listen on {}", socket_addr))?;
     let local_addr = listener
         .local_addr()
        .context("couldn't get socket address")?;
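Not from the commit: a quick sketch of what the new `QueryParams` accepts. axum's `Query` extractor deserializes the URL query string with serde (via `serde_urlencoded` under the hood), so `/?tag=rust&n=3` filters the listing to the `rust` tag and caps it at 3 posts; both parameters are optional.

```rust
// Sketch only; assumes serde and serde_urlencoded as dependencies.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct QueryParams {
    tag: Option<String>,
    #[serde(rename = "n")]
    num_posts: Option<usize>,
}

fn main() {
    // GET /?tag=rust&n=3 -> filter to the "rust" tag, return at most 3 posts
    let q: QueryParams = serde_urlencoded::from_str("tag=rust&n=3").unwrap();
    assert_eq!(q.tag.as_deref(), Some("rust"));
    assert_eq!(q.num_posts, Some(3));

    // GET / -> both parameters are optional and default to None
    let q: QueryParams = serde_urlencoded::from_str("").unwrap();
    assert!(q.tag.is_none() && q.num_posts.is_none());
}
```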

src/post.rs

@@ -26,6 +26,8 @@ struct FrontMatter {
     pub icon: Option<String>,
     pub created_at: Option<DateTime<Utc>>,
     pub modified_at: Option<DateTime<Utc>>,
+    #[serde(default)]
+    pub tags: Vec<String>,
 }
 
 impl FrontMatter {
@@ -43,11 +45,12 @@ impl FrontMatter {
             icon: self.icon,
             created_at: self.created_at.or_else(|| created.map(|t| t.into())),
             modified_at: self.modified_at.or_else(|| modified.map(|t| t.into())),
+            tags: self.tags,
         }
     }
 }
 
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct PostMetadata {
     pub name: String,
     pub title: String,
@@ -56,6 +59,7 @@ pub struct PostMetadata {
     pub icon: Option<String>,
     pub created_at: Option<DateTime<Utc>>,
     pub modified_at: Option<DateTime<Utc>>,
+    pub tags: Vec<String>,
 }
 
 use crate::filters;
@@ -66,10 +70,10 @@ struct Post<'a> {
     pub rendered_markdown: String,
 }
 
-// format: TOTAL OP1 OP2
 #[allow(unused)]
 pub enum RenderStats {
     Cached(Duration),
+    // format: Total, Parsed in, Rendered in
     ParsedAndRendered(Duration, Duration, Duration),
 }
 
@@ -146,7 +150,10 @@ impl PostManager {
         Ok((metadata, post, (parsing, rendering)))
     }
 
-    pub async fn list_posts(&self) -> Result<Vec<PostMetadata>, PostError> {
+    pub async fn list_posts(
+        &self,
+        filter: impl Fn(&PostMetadata) -> bool,
+    ) -> Result<Vec<PostMetadata>, PostError> {
         let mut posts = Vec::new();
 
         let mut read_dir = fs::read_dir(&self.dir).await?;
@@ -166,18 +173,50 @@ impl PostManager {
                     if let Some(cache) = self.cache.as_ref()
                         && let Some(hit) = cache.lookup_metadata(&name, mtime).await
+                        && filter(&hit)
                     {
-                        posts.push(hit)
-                    } else if let Ok((metadata, ..)) = self.parse_and_render(name, path).await {
-                        posts.push(metadata);
+                        posts.push(hit);
+                    } else {
+                        match self.parse_and_render(name, path).await {
+                            Ok((metadata, ..)) => {
+                                if filter(&metadata) {
+                                    posts.push(metadata);
+                                }
+                            }
+                            Err(err) => match err {
+                                PostError::IoError(ref io_err)
+                                    if matches!(io_err.kind(), io::ErrorKind::NotFound) =>
+                                {
+                                    warn!("TOCTOU: {}", err)
+                                }
+                                _ => return Err(err),
+                            },
+                        }
                     }
                 }
             }
         }
 
         Ok(posts)
     }
 
-    // third entry in the tuple is whether it got rendered and if so, how long did it take
+    pub async fn get_max_n_posts_with_optional_tag_sorted(
+        &self,
+        n: Option<usize>,
+        tag: Option<&String>,
+    ) -> Result<Vec<PostMetadata>, PostError> {
+        let mut posts = self
+            .list_posts(|metadata| !tag.is_some_and(|tag| !metadata.tags.contains(tag)))
+            .await?;
+
+        posts.sort_unstable_by_key(|metadata| metadata.created_at.unwrap_or_default());
+        if let Some(n) = n {
+            posts = Vec::from(&posts[posts.len().saturating_sub(n)..]);
+        }
+        posts.reverse();
+
+        Ok(posts)
+    }
+
     pub async fn get_post(
         &self,
         name: &str,
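Not from the diff: a self-contained check of the filter predicate and the sort/truncate/reverse pipeline in `get_max_n_posts_with_optional_tag_sorted`, using a simplified stand-in for `PostMetadata` and a plain integer in place of the `created_at` timestamp.

```rust
// `Meta` is a stand-in for PostMetadata; only the fields needed here are included.
#[derive(Debug, Clone)]
struct Meta {
    name: &'static str,
    created_at: u64, // stand-in for the Option<DateTime<Utc>> sort key
    tags: Vec<String>,
}

fn main() {
    let tag = Some("rust".to_string());
    // The closure passed to list_posts: keep a post unless a tag was requested
    // and the post doesn't carry it. Equivalent to
    // `tag.as_ref().map_or(true, |t| m.tags.contains(t))`.
    let filter = |m: &Meta| !tag.as_ref().is_some_and(|tag| !m.tags.contains(tag));

    let mut posts: Vec<Meta> = vec![
        Meta { name: "a", created_at: 1, tags: vec!["rust".into()] },
        Meta { name: "b", created_at: 3, tags: vec![] },
        Meta { name: "c", created_at: 2, tags: vec!["rust".into()] },
    ]
    .into_iter()
    .filter(filter)
    .collect();

    // Oldest-first sort, keep the newest `n`, then reverse to newest-first.
    let n = Some(1);
    posts.sort_unstable_by_key(|m| m.created_at);
    if let Some(n) = n {
        posts = Vec::from(&posts[posts.len().saturating_sub(n)..]);
    }
    posts.reverse();

    assert_eq!(posts.len(), 1);
    assert_eq!(posts[0].name, "c"); // newest post tagged "rust"
    println!("{posts:?}");
}
```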