forked from slonk/bingus-blog

refactor part 1: move code

parent c1e1670db3
commit a19c576275

5 changed files with 373 additions and 327 deletions
Cargo.lock (generated), 35 lines changed
@@ -302,6 +302,7 @@ dependencies = [
  "color-eyre",
  "comrak",
  "console-subscriber",
+ "derive_more",
  "fronma",
  "rss",
  "scc",
@@ -486,6 +487,12 @@ dependencies = [
  "tracing-subscriber",
 ]

+[[package]]
+name = "convert_case"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+
 [[package]]
 name = "crc32fast"
 version = "1.4.0"
@@ -585,6 +592,19 @@ dependencies = [
  "syn 1.0.109",
 ]

+[[package]]
+name = "derive_more"
+version = "0.99.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
+dependencies = [
+ "convert_case",
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "deunicode"
 version = "1.4.4"
@@ -1435,6 +1455,15 @@ version = "0.1.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"

+[[package]]
+name = "rustc_version"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
+dependencies = [
+ "semver",
+]
+
 [[package]]
 name = "rustversion"
 version = "1.0.15"
@@ -1472,6 +1501,12 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b84345e4c9bd703274a082fb80caaa99b7612be48dfaa1dd9266577ec412309d"

+[[package]]
+name = "semver"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+
 [[package]]
 name = "serde"
 version = "1.0.198"
Cargo.toml

@@ -36,6 +36,7 @@ comrak = { version = "0.22.0", features = [
     "syntect",
 ], default-features = false }
 console-subscriber = { version = "0.2.0", optional = true }
+derive_more = "0.99.17"
 fronma = "0.2.0"
 rss = "2.0.7"
 scc = { version = "2.1.0", features = ["serde"] }
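The only manifest change is the new derive_more dependency (its transitive convert_case, rustc_version, and semver entries account for the Cargo.lock additions above). As an editor's illustration only, not code from this commit, this is the kind of boilerplate derive_more 0.99 can generate; PostReadError is a made-up type:

// Standalone sketch, not part of this commit: derive_more 0.99 generates
// Display and From impls from attributes. `PostReadError` is a made-up type.
use derive_more::{Display, From};

#[derive(Debug, Display, From)]
#[display(fmt = "failed to read post: {}", _0)]
struct PostReadError(std::io::Error);

fn main() {
    let err: PostReadError =
        std::io::Error::new(std::io::ErrorKind::NotFound, "missing").into();
    println!("{err}"); // failed to read post: missing
}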
src/app.rs (new file), 193 lines added
use std::sync::Arc;
use std::time::Duration;

use askama_axum::Template;
use axum::extract::{Path, Query, State};
use axum::http::{header, Request};
use axum::response::{IntoResponse, Redirect, Response};
use axum::routing::get;
use axum::{Json, Router};
use rss::{Category, ChannelBuilder, ItemBuilder};
use serde::Deserialize;
use tokio::io::AsyncReadExt;
use tower_http::services::ServeDir;
use tower_http::trace::TraceLayer;
use tracing::{info, info_span, Span};

use crate::config::Config;
use crate::error::{AppError, AppResult};
use crate::filters;
use crate::post::{PostManager, PostMetadata, RenderStats};

#[derive(Clone)]
pub struct AppState {
    pub config: Arc<Config>,
    pub posts: Arc<PostManager<Arc<Config>>>,
}

#[derive(Template)]
#[template(path = "index.html")]
struct IndexTemplate {
    title: String,
    description: String,
    posts: Vec<PostMetadata>,
}

#[derive(Template)]
#[template(path = "post.html")]
struct PostTemplate {
    meta: PostMetadata,
    rendered: String,
    rendered_in: RenderStats,
    markdown_access: bool,
}

#[derive(Deserialize)]
struct QueryParams {
    tag: Option<String>,
    #[serde(rename = "n")]
    num_posts: Option<usize>,
}

async fn index(
    State(AppState { config, posts }): State<AppState>,
    Query(query): Query<QueryParams>,
) -> AppResult<IndexTemplate> {
    let posts = posts
        .get_max_n_post_metadata_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
        .await?;

    Ok(IndexTemplate {
        title: config.title.clone(),
        description: config.description.clone(),
        posts,
    })
}

async fn all_posts(
    State(AppState { posts, .. }): State<AppState>,
    Query(query): Query<QueryParams>,
) -> AppResult<Json<Vec<PostMetadata>>> {
    let posts = posts
        .get_max_n_post_metadata_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
        .await?;

    Ok(Json(posts))
}

async fn rss(
    State(AppState { config, posts }): State<AppState>,
    Query(query): Query<QueryParams>,
) -> AppResult<Response> {
    if !config.rss.enable {
        return Err(AppError::RssDisabled);
    }

    let posts = posts
        .get_all_posts_filtered(|metadata, _| {
            !query
                .tag
                .as_ref()
                .is_some_and(|tag| !metadata.tags.contains(tag))
        })
        .await?;

    let mut channel = ChannelBuilder::default();
    channel
        .title(&config.title)
        .link(config.rss.link.to_string())
        .description(&config.description);
    //TODO: .language()

    for (metadata, content, _) in posts {
        channel.item(
            ItemBuilder::default()
                .title(metadata.title)
                .description(metadata.description)
                .author(metadata.author)
                .categories(
                    metadata
                        .tags
                        .into_iter()
                        .map(|tag| Category {
                            name: tag,
                            domain: None,
                        })
                        .collect::<Vec<Category>>(),
                )
                .pub_date(metadata.created_at.map(|date| date.to_rfc2822()))
                .content(content)
                .link(
                    config
                        .rss
                        .link
                        .join(&format!("/posts/{}", metadata.name))?
                        .to_string(),
                )
                .build(),
        );
    }

    let body = channel.build().to_string();
    drop(channel);

    Ok(([(header::CONTENT_TYPE, "text/xml")], body).into_response())
}

async fn post(
    State(AppState { config, posts }): State<AppState>,
    Path(name): Path<String>,
) -> AppResult<Response> {
    if name.ends_with(".md") && config.raw_access {
        let mut file = tokio::fs::OpenOptions::new()
            .read(true)
            .open(config.dirs.posts.join(&name))
            .await?;

        let mut buf = Vec::new();
        file.read_to_end(&mut buf).await?;

        Ok(([("content-type", "text/plain")], buf).into_response())
    } else {
        let post = posts.get_post(&name).await?;
        let page = PostTemplate {
            meta: post.0,
            rendered: post.1,
            rendered_in: post.2,
            markdown_access: config.raw_access,
        };

        Ok(page.into_response())
    }
}

pub fn new() -> Router<AppState> {
    Router::new()
        .route("/", get(index))
        .route(
            "/post/:name",
            get(
                |Path(name): Path<String>| async move { Redirect::to(&format!("/posts/{}", name)) },
            ),
        )
        .route("/posts/:name", get(post))
        .route("/posts", get(all_posts))
        .route("/feed.xml", get(rss))
        .nest_service("/static", ServeDir::new("static").precompressed_gzip())
        .nest_service("/media", ServeDir::new("media"))
        .layer(
            TraceLayer::new_for_http()
                .make_span_with(|request: &Request<_>| {
                    info_span!(
                        "request",
                        method = ?request.method(),
                        path = ?request.uri().path(),
                    )
                })
                .on_response(|response: &Response<_>, duration: Duration, span: &Span| {
                    let _ = span.enter();
                    let status = response.status();
                    info!(?status, ?duration, "response");
                }),
        )
}
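index, all_posts, and rss all share the QueryParams extractor, so a request like GET /posts?n=5&tag=rust limits the listing to five posts tagged "rust". A standalone sketch of how that query string maps onto the struct; serde_urlencoded is used here only for illustration (axum's Query extractor is built on it) and is not something this commit adds:

// Standalone sketch, not code from this commit: how "?tag=rust&n=5" maps
// onto the QueryParams struct used by the handlers above.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct QueryParams {
    tag: Option<String>,
    #[serde(rename = "n")]
    num_posts: Option<usize>,
}

fn main() {
    let params: QueryParams =
        serde_urlencoded::from_str("tag=rust&n=5").expect("failed to parse query string");
    assert_eq!(params.tag.as_deref(), Some("rust"));
    assert_eq!(params.num_posts, Some(5));
    println!("{params:?}");
}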
src/main.rs, 316 lines changed
@@ -1,5 +1,6 @@
 #![feature(let_chains)]
 
+mod app;
 mod config;
 mod error;
 mod filters;
@@ -10,182 +11,23 @@ mod ranged_i128_visitor;
 mod systemtime_as_secs;
 
 use std::future::IntoFuture;
-use std::io::Read;
 use std::net::SocketAddr;
 use std::process::exit;
 use std::sync::Arc;
 use std::time::Duration;
 
-use askama_axum::Template;
-use axum::extract::{Path, Query, State};
-use axum::http::{header, Request};
-use axum::response::{IntoResponse, Redirect, Response};
-use axum::routing::{get, Router};
-use axum::Json;
 use color_eyre::eyre::{self, Context};
-use error::AppError;
-use rss::{Category, ChannelBuilder, ItemBuilder};
-use serde::Deserialize;
-use tokio::io::{AsyncReadExt, AsyncWriteExt};
 use tokio::net::TcpListener;
 use tokio::task::JoinSet;
 use tokio::{select, signal};
 use tokio_util::sync::CancellationToken;
-use tower_http::services::ServeDir;
-use tower_http::trace::TraceLayer;
 use tracing::level_filters::LevelFilter;
-use tracing::{debug, error, info, info_span, warn, Span};
+use tracing::{debug, info, warn};
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
+use tracing_subscriber::layer::SubscriberExt;
+use tracing_subscriber::{util::SubscriberInitExt, EnvFilter};
 
-use crate::config::Config;
-use crate::error::{AppResult, PostError};
-use crate::post::cache::{Cache, CACHE_VERSION};
-use crate::post::{PostManager, PostMetadata, RenderStats};
+use crate::app::AppState;
+use crate::post::PostManager;
 
-type ArcState = Arc<AppState>;
-
-#[derive(Clone)]
-struct AppState {
-    pub config: Config,
-    pub posts: PostManager,
-}
-
-#[derive(Template)]
-#[template(path = "index.html")]
-struct IndexTemplate {
-    title: String,
-    description: String,
-    posts: Vec<PostMetadata>,
-}
-
-#[derive(Template)]
-#[template(path = "post.html")]
-struct PostTemplate {
-    meta: PostMetadata,
-    rendered: String,
-    rendered_in: RenderStats,
-    markdown_access: bool,
-}
-
-#[derive(Deserialize)]
-struct QueryParams {
-    tag: Option<String>,
-    #[serde(rename = "n")]
-    num_posts: Option<usize>,
-}
-
-async fn index(
-    State(state): State<ArcState>,
-    Query(query): Query<QueryParams>,
-) -> AppResult<IndexTemplate> {
-    let posts = state
-        .posts
-        .get_max_n_post_metadata_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
-        .await?;
-
-    Ok(IndexTemplate {
-        title: state.config.title.clone(),
-        description: state.config.description.clone(),
-        posts,
-    })
-}
-
-async fn all_posts(
-    State(state): State<ArcState>,
-    Query(query): Query<QueryParams>,
-) -> AppResult<Json<Vec<PostMetadata>>> {
-    let posts = state
-        .posts
-        .get_max_n_post_metadata_with_optional_tag_sorted(query.num_posts, query.tag.as_ref())
-        .await?;
-
-    Ok(Json(posts))
-}
-
-async fn rss(
-    State(state): State<ArcState>,
-    Query(query): Query<QueryParams>,
-) -> AppResult<Response> {
-    if !state.config.rss.enable {
-        return Err(AppError::RssDisabled);
-    }
-
-    let posts = state
-        .posts
-        .get_all_posts_filtered(|metadata, _| {
-            !query
-                .tag
-                .as_ref()
-                .is_some_and(|tag| !metadata.tags.contains(tag))
-        })
-        .await?;
-
-    let mut channel = ChannelBuilder::default();
-    channel
-        .title(&state.config.title)
-        .link(state.config.rss.link.to_string())
-        .description(&state.config.description);
-    //TODO: .language()
-
-    for (metadata, content, _) in posts {
-        channel.item(
-            ItemBuilder::default()
-                .title(metadata.title)
-                .description(metadata.description)
-                .author(metadata.author)
-                .categories(
-                    metadata
-                        .tags
-                        .into_iter()
-                        .map(|tag| Category {
-                            name: tag,
-                            domain: None,
-                        })
-                        .collect::<Vec<Category>>(),
-                )
-                .pub_date(metadata.created_at.map(|date| date.to_rfc2822()))
-                .content(content)
-                .link(
-                    state
-                        .config
-                        .rss
-                        .link
-                        .join(&format!("/posts/{}", metadata.name))?
-                        .to_string(),
-                )
-                .build(),
-        );
-    }
-
-    let body = channel.build().to_string();
-    drop(channel);
-
-    Ok(([(header::CONTENT_TYPE, "text/xml")], body).into_response())
-}
-
-async fn post(State(state): State<ArcState>, Path(name): Path<String>) -> AppResult<Response> {
-    if name.ends_with(".md") && state.config.raw_access {
-        let mut file = tokio::fs::OpenOptions::new()
-            .read(true)
-            .open(state.config.dirs.posts.join(&name))
-            .await?;
-
-        let mut buf = Vec::new();
-        file.read_to_end(&mut buf).await?;
-
-        Ok(([("content-type", "text/plain")], buf).into_response())
-    } else {
-        let post = state.posts.get_post(&name).await?;
-        let page = PostTemplate {
-            meta: post.0,
-            rendered: post.1,
-            rendered_in: post.2,
-            markdown_access: state.config.raw_access,
-        };
-
-        Ok(page.into_response())
-    }
-}
-
 #[tokio::main]
 async fn main() -> eyre::Result<()> {
@@ -202,89 +44,26 @@ async fn main() -> eyre::Result<()> {
         .with(tracing_subscriber::fmt::layer())
         .init();
 
-    let config = config::load()
-        .await
-        .context("couldn't load configuration")?;
+    let config = Arc::new(
+        config::load()
+            .await
+            .context("couldn't load configuration")?,
+    );
 
     let socket_addr = SocketAddr::new(config.http.host, config.http.port);
 
     let mut tasks = JoinSet::new();
     let cancellation_token = CancellationToken::new();
 
-    let posts = if config.cache.enable {
-        if config.cache.persistence
-            && tokio::fs::try_exists(&config.cache.file)
-                .await
-                .with_context(|| {
-                    format!("failed to check if {} exists", config.cache.file.display())
-                })?
-        {
-            info!("loading cache from file");
-            let path = &config.cache.file;
-            let load_cache = async {
-                let mut cache_file = tokio::fs::File::open(&path)
-                    .await
-                    .context("failed to open cache file")?;
-                let serialized = if config.cache.compress {
-                    let cache_file = cache_file.into_std().await;
-                    tokio::task::spawn_blocking(move || {
-                        let mut buf = Vec::with_capacity(4096);
-                        zstd::stream::read::Decoder::new(cache_file)?.read_to_end(&mut buf)?;
-                        Ok::<_, std::io::Error>(buf)
-                    })
-                    .await
-                    .context("failed to join blocking thread")?
-                    .context("failed to read cache file")?
-                } else {
-                    let mut buf = Vec::with_capacity(4096);
-                    cache_file
-                        .read_to_end(&mut buf)
-                        .await
-                        .context("failed to read cache file")?;
-                    buf
-                };
-                let mut cache: Cache =
-                    bitcode::deserialize(serialized.as_slice()).context("failed to parse cache")?;
-                if cache.version() < CACHE_VERSION {
-                    warn!("cache version changed, clearing cache");
-                    cache = Cache::default();
-                };
-
-                Ok::<PostManager, color_eyre::Report>(PostManager::new_with_cache(
-                    config.dirs.posts.clone(),
-                    config.render.clone(),
-                    cache,
-                ))
-            }
-            .await;
-            match load_cache {
-                Ok(posts) => posts,
-                Err(err) => {
-                    error!("failed to load cache: {}", err);
-                    info!("using empty cache");
-                    PostManager::new_with_cache(
-                        config.dirs.posts.clone(),
-                        config.render.clone(),
-                        Default::default(),
-                    )
-                }
-            }
-        } else {
-            PostManager::new_with_cache(
-                config.dirs.posts.clone(),
-                config.render.clone(),
-                Default::default(),
-            )
-        }
-    } else {
-        PostManager::new(config.dirs.posts.clone(), config.render.clone())
-    };
-
-    let state = Arc::new(AppState { config, posts });
-
-    if state.config.cache.enable && state.config.cache.cleanup {
-        if let Some(t) = state.config.cache.cleanup_interval {
-            let state = Arc::clone(&state);
+    let posts = Arc::new(PostManager::new(Arc::clone(&config)).await?);
+    let state = AppState {
+        config: Arc::clone(&config),
+        posts,
+    };
+
+    if config.cache.enable && config.cache.cleanup {
+        if let Some(t) = config.cache.cleanup_interval {
+            let state = state.clone();
             let token = cancellation_token.child_token();
             debug!("setting up cleanup task");
             tasks.spawn(async move {
@@ -303,35 +82,7 @@ async fn main() -> eyre::Result<()> {
         }
     }
 
-    let app = Router::new()
-        .route("/", get(index))
-        .route(
-            "/post/:name",
-            get(
-                |Path(name): Path<String>| async move { Redirect::to(&format!("/posts/{}", name)) },
-            ),
-        )
-        .route("/posts/:name", get(post))
-        .route("/posts", get(all_posts))
-        .route("/feed.xml", get(rss))
-        .nest_service("/static", ServeDir::new("static").precompressed_gzip())
-        .nest_service("/media", ServeDir::new("media"))
-        .layer(
-            TraceLayer::new_for_http()
-                .make_span_with(|request: &Request<_>| {
-                    info_span!(
-                        "request",
-                        method = ?request.method(),
-                        path = ?request.uri().path(),
-                    )
-                })
-                .on_response(|response: &Response<_>, duration: Duration, span: &Span| {
-                    let _ = span.enter();
-                    let status = response.status();
-                    info!(?status, ?duration, "response");
-                }),
-        )
-        .with_state(state.clone());
+    let app = app::new().with_state(state.clone());
 
     let listener = TcpListener::bind(socket_addr)
         .await
@@ -379,36 +130,7 @@ async fn main() -> eyre::Result<()> {
             task.context("failed to join task")?;
         }
 
-        // write cache to file
-        let config = &state.config;
-        let posts = &state.posts;
-        if config.cache.enable
-            && config.cache.persistence
-            && let Some(cache) = posts.cache()
-        {
-            let path = &config.cache.file;
-            let serialized = bitcode::serialize(cache).context("failed to serialize cache")?;
-            let mut cache_file = tokio::fs::File::create(path)
-                .await
-                .with_context(|| format!("failed to open cache at {}", path.display()))?;
-            let compression_level = config.cache.compression_level;
-            if config.cache.compress {
-                let cache_file = cache_file.into_std().await;
-                tokio::task::spawn_blocking(move || {
-                    std::io::Write::write_all(
-                        &mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
-                            .auto_finish(),
-                        &serialized,
-                    )
-                })
-                .await
-                .context("failed to join blocking thread")?
-            } else {
-                cache_file.write_all(&serialized).await
-            }
-            .context("failed to write cache to file")?;
-            info!("wrote cache to {}", path.display());
-        }
+        drop(state);
         Ok::<(), color_eyre::Report>(())
     };
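main keeps its existing shutdown machinery: background work goes into a JoinSet and is stopped through a CancellationToken before the tasks are joined. A standalone sketch of that pattern, not code from this commit; the interval and the cleanup body are placeholders:

// Standalone sketch of the JoinSet + CancellationToken shutdown pattern
// main() keeps using after the refactor. Interval and cleanup body are
// placeholders, not the real cleanup task.
use std::time::Duration;

use tokio::task::JoinSet;
use tokio_util::sync::CancellationToken;

#[tokio::main]
async fn main() {
    let mut tasks = JoinSet::new();
    let cancellation_token = CancellationToken::new();

    let token = cancellation_token.child_token();
    tasks.spawn(async move {
        let mut interval = tokio::time::interval(Duration::from_secs(60));
        loop {
            tokio::select! {
                _ = token.cancelled() => break,
                _ = interval.tick() => {
                    // placeholder for e.g. posts.cleanup().await
                }
            }
        }
    });

    // ... serve requests here; on shutdown, cancel and join every task
    cancellation_token.cancel();
    while let Some(task) = tasks.join_next().await {
        task.expect("failed to join task");
    }
}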
src/post/mod.rs, 151 lines changed
@@ -1,20 +1,22 @@
 pub mod cache;
 
 use std::collections::BTreeSet;
-use std::io;
+use std::io::{self, Read, Write};
-use std::path::{Path, PathBuf};
+use std::ops::Deref;
+use std::path::Path;
 use std::time::{Duration, Instant, SystemTime};
 
 use chrono::{DateTime, Utc};
+use color_eyre::eyre::{self, Context};
 use fronma::parser::{parse, ParsedData};
 use serde::{Deserialize, Serialize};
 use tokio::fs;
 use tokio::io::AsyncReadExt;
-use tracing::warn;
+use tracing::{error, info, warn};
 
-use crate::config::RenderConfig;
+use crate::config::Config;
 use crate::markdown_render::render;
-use crate::post::cache::Cache;
+use crate::post::cache::{Cache, CACHE_VERSION};
 use crate::systemtime_as_secs::as_secs;
 use crate::PostError;
@@ -69,27 +71,84 @@ pub enum RenderStats {
     ParsedAndRendered(Duration, Duration, Duration),
 }
 
-#[derive(Clone)]
-pub struct PostManager {
-    dir: PathBuf,
+pub struct PostManager<C>
+where
+    C: Deref<Target = Config>,
+{
     cache: Option<Cache>,
-    config: RenderConfig,
+    config: C,
 }
 
-impl PostManager {
-    pub fn new(dir: PathBuf, config: RenderConfig) -> PostManager {
-        PostManager {
-            dir,
-            cache: None,
-            config,
-        }
-    }
-
-    pub fn new_with_cache(dir: PathBuf, config: RenderConfig, cache: Cache) -> PostManager {
-        PostManager {
-            dir,
-            cache: Some(cache),
-            config,
-        }
-    }
+impl<C> PostManager<C>
+where
+    C: Deref<Target = Config>,
+{
+    pub async fn new(config: C) -> eyre::Result<PostManager<C>> {
+        if config.cache.enable {
+            if config.cache.persistence
+                && tokio::fs::try_exists(&config.cache.file)
+                    .await
+                    .with_context(|| {
+                        format!("failed to check if {} exists", config.cache.file.display())
+                    })?
+            {
+                info!("loading cache from file");
+                let path = &config.cache.file;
+                let load_cache = async {
+                    let mut cache_file = tokio::fs::File::open(&path)
+                        .await
+                        .context("failed to open cache file")?;
+                    let serialized = if config.cache.compress {
+                        let cache_file = cache_file.into_std().await;
+                        tokio::task::spawn_blocking(move || {
+                            let mut buf = Vec::with_capacity(4096);
+                            zstd::stream::read::Decoder::new(cache_file)?.read_to_end(&mut buf)?;
+                            Ok::<_, std::io::Error>(buf)
+                        })
+                        .await
+                        .context("failed to join blocking thread")?
+                        .context("failed to read cache file")?
+                    } else {
+                        let mut buf = Vec::with_capacity(4096);
+                        cache_file
+                            .read_to_end(&mut buf)
+                            .await
+                            .context("failed to read cache file")?;
+                        buf
+                    };
+                    let mut cache: Cache = bitcode::deserialize(serialized.as_slice())
+                        .context("failed to parse cache")?;
+                    if cache.version() < CACHE_VERSION {
+                        warn!("cache version changed, clearing cache");
+                        cache = Cache::default();
+                    };
+
+                    Ok::<Cache, eyre::Report>(cache)
+                }
+                .await;
+
+                Ok(Self {
+                    cache: Some(match load_cache {
+                        Ok(cache) => cache,
+                        Err(err) => {
+                            error!("failed to load cache: {}", err);
+                            info!("using empty cache");
+                            Default::default()
+                        }
+                    }),
+                    config,
+                })
+            } else {
+                Ok(Self {
+                    cache: Some(Default::default()),
+                    config,
+                })
+            }
+        } else {
+            Ok(Self {
+                cache: None,
+                config,
+            })
         }
     }
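The loading path above mirrors the new try_drop method at the end of this file: the cache is serialized with bitcode and, when config.cache.compress is set, wrapped in zstd. A standalone round-trip sketch, not code from this commit; CacheEntry is a stand-in for the real cache type:

// Standalone sketch of the bitcode + zstd persistence round trip.
// `CacheEntry` is a made-up stand-in for the real cache structure.
use std::io::{Read, Write};

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct CacheEntry {
    mtime: u64,
    rendered: String,
}

fn main() -> std::io::Result<()> {
    let entry = CacheEntry { mtime: 1, rendered: "<p>hi</p>".into() };

    // persist: serialize with bitcode, then compress with zstd (level 3)
    let serialized = bitcode::serialize(&entry).expect("failed to serialize cache");
    let mut compressed = Vec::new();
    {
        let mut encoder = zstd::stream::write::Encoder::new(&mut compressed, 3)?.auto_finish();
        encoder.write_all(&serialized)?;
    }

    // load: decompress, then deserialize
    let mut raw = Vec::new();
    zstd::stream::read::Decoder::new(compressed.as_slice())?.read_to_end(&mut raw)?;
    let loaded: CacheEntry = bitcode::deserialize(&raw).expect("failed to parse cache");
    println!("{loaded:?}");
    Ok(())
}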
@@ -118,7 +177,7 @@ impl PostManager {
         let parsing = parsing_start.elapsed();
 
         let before_render = Instant::now();
-        let post = render(body, &self.config);
+        let post = render(body, &self.config.render);
         let rendering = before_render.elapsed();
 
         if let Some(cache) = self.cache.as_ref() {
@@ -128,7 +187,7 @@ impl PostManager {
                     metadata.clone(),
                     as_secs(&modified),
                     post.clone(),
-                    &self.config,
+                    &self.config.render,
                 )
                 .await
                 .unwrap_or_else(|err| warn!("failed to insert {:?} into cache", err.0))
@@ -143,7 +202,7 @@ impl PostManager {
     ) -> Result<Vec<PostMetadata>, PostError> {
         let mut posts = Vec::new();
 
-        let mut read_dir = fs::read_dir(&self.dir).await?;
+        let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
         while let Some(entry) = read_dir.next_entry().await? {
             let path = entry.path();
             let stat = fs::metadata(&path).await?;
@@ -192,7 +251,7 @@ impl PostManager {
     ) -> Result<Vec<(PostMetadata, String, RenderStats)>, PostError> {
         let mut posts = Vec::new();
 
-        let mut read_dir = fs::read_dir(&self.dir).await?;
+        let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
         while let Some(entry) = read_dir.next_entry().await? {
             let path = entry.path();
             let stat = fs::metadata(&path).await?;
@@ -241,7 +300,7 @@ impl PostManager {
         name: &str,
     ) -> Result<(PostMetadata, String, RenderStats), PostError> {
         let start = Instant::now();
-        let path = self.dir.join(name.to_owned() + ".md");
+        let path = self.config.dirs.posts.join(name.to_owned() + ".md");
 
         let stat = match tokio::fs::metadata(&path).await {
             Ok(value) => value,
@@ -258,7 +317,7 @@ impl PostManager {
         let mtime = as_secs(&stat.modified()?);
 
         if let Some(cache) = self.cache.as_ref()
-            && let Some(hit) = cache.lookup(name, mtime, &self.config).await
+            && let Some(hit) = cache.lookup(name, mtime, &self.config.render).await
         {
             Ok((
                 hit.metadata,
@@ -283,7 +342,7 @@ impl PostManager {
         if let Some(cache) = self.cache.as_ref() {
             cache
                 .cleanup(|name| {
-                    std::fs::metadata(self.dir.join(name.to_owned() + ".md"))
+                    std::fs::metadata(self.config.dirs.posts.join(name.to_owned() + ".md"))
                        .ok()
                        .and_then(|metadata| metadata.modified().ok())
                        .map(|mtime| as_secs(&mtime))
@@ -291,4 +350,40 @@ impl PostManager {
                 .await
         }
     }
+
+    fn try_drop(&mut self) -> Result<(), eyre::Report> {
+        // write cache to file
+        let config = &self.config.cache;
+        if config.enable
+            && config.persistence
+            && let Some(cache) = self.cache()
+        {
+            let path = &config.file;
+            let serialized = bitcode::serialize(cache).context("failed to serialize cache")?;
+            let mut cache_file = std::fs::File::create(path)
+                .with_context(|| format!("failed to open cache at {}", path.display()))?;
+            let compression_level = config.compression_level;
+            if config.compress {
+                std::io::Write::write_all(
+                    &mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
+                        .auto_finish(),
+                    &serialized,
+                )
+            } else {
+                cache_file.write_all(&serialized)
+            }
+            .context("failed to write cache to file")?;
+            info!("wrote cache to {}", path.display());
+        }
+        Ok(())
+    }
 }
+
+impl<C> Drop for PostManager<C>
+where
+    C: Deref<Target = Config>,
+{
+    fn drop(&mut self) {
+        self.try_drop().unwrap()
+    }
+}
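The central signature change in this file is PostManager<C> with C: Deref<Target = Config>, which is what lets app.rs hold the manager as PostManager<Arc<Config>> while the methods keep reading plain Config fields. A standalone sketch of that pattern with stand-in types, not the real bingus-blog code:

// Standalone sketch of the Deref-based generic. `Config` and `Manager`
// are stand-ins; the point is that any owner of a Config
// (Arc<Config>, Box<Config>, &Config, ...) can back the manager.
use std::ops::Deref;
use std::sync::Arc;

struct Config {
    title: String,
}

struct Manager<C>
where
    C: Deref<Target = Config>,
{
    config: C,
}

impl<C> Manager<C>
where
    C: Deref<Target = Config>,
{
    fn title(&self) -> &str {
        // Deref coercion: `self.config.title` works for Arc<Config>, &Config, ...
        &self.config.title
    }
}

fn main() {
    let shared = Arc::new(Config { title: "bingus-blog".into() });
    let manager = Manager { config: Arc::clone(&shared) };
    println!("{}", manager.title());
}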