initial slonkmit

slonkazoid 2024-04-18 04:05:38 +03:00
commit 3e7fbea3bb
Signed by: slonk
SSH key fingerprint: SHA256:tbZfJX4IOvZ0LGWOWu5Ijo8jfMPi78TU7x1VoEeCIjM
26 changed files with 6276 additions and 0 deletions

7
.gitignore vendored Normal file

@@ -0,0 +1,7 @@
/target
/static/**/*.gz
/media/*
/posts/*
!/posts/README.md
/.slbg-cache
/config.toml

2639
Cargo.lock generated Normal file

File diff suppressed because it is too large

50
Cargo.toml Normal file

@@ -0,0 +1,50 @@
[package]
name = "silly-blog"
version = "0.1.0"
edition = "2021"
default-run = "silly-blog"

[[bin]]
name = "syntect-to-css"
required-features = ["clap"]

[features]
default = ["precompression"]
tokio-console = ["dep:console-subscriber"]
clap = ["dep:clap"]
precompression = ["dep:async-compression"]

[profile.release]
lto = "fat"
opt-level = 3
codegen-units = 1
strip = true

[dependencies]
askama = { version = "0.12.1", features = ["with-axum"] }
askama_axum = "0.4.0"
async-compression = { version = "0.4.8", optional = true }
axum = { version = "0.7.5", features = ["macros"] }
bitcode = { version = "0.6.0", features = ["serde"] }
chrono = { version = "0.4.37", features = ["serde"] }
clap = { version = "4.5.4", features = ["derive"], optional = true }
color-eyre = "0.6.3"
comrak = { version = "0.22.0", features = ["syntect"] }
console-subscriber = { version = "0.2.0", optional = true }
fronma = { version = "0.2.0", features = ["toml"] }
futures-util = "0.3.30"
notify = "6.1.1"
scc = "2.1.0"
serde = { version = "1.0.197", features = ["derive"] }
syntect = "5.2.0"
thiserror = "1.0.58"
tokio = { version = "1.37.0", features = ["full"] }
tokio-util = "0.7.10"
toml = "0.8.12"
tower-http = { version = "0.5.2", features = [
    "compression-gzip",
    "fs",
    "trace",
], default-features = false }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

19
README.md Normal file

@@ -0,0 +1,19 @@
---
title = "README"
description = "the README.md file of this project"
author = "slonkazoid"
---

# silly-blog

blazingly fast markdown blog software written in rust memory safe

## TODO

- [ ] finish writing this document
- [ ] document config
- [ ] extend syntect options
- [ ] general cleanup of code
- [x] be blazingly fast
- [x] 100+ MiB binary size

1
posts/README.md Symbolic link

@@ -0,0 +1 @@
../README.md

20
src/append_path.rs Normal file

@@ -0,0 +1,20 @@
use std::{
    ffi::{OsStr, OsString},
    path::{Path, PathBuf},
};

// i will kill you rust stdlib
pub trait Append<T>
where
    Self: Into<OsString>,
    T: From<OsString>,
{
    fn append(self, ext: impl AsRef<OsStr>) -> T {
        let mut buffer: OsString = self.into();
        buffer.push(ext.as_ref());
        T::from(buffer)
    }
}

impl Append<PathBuf> for PathBuf {}
impl Append<PathBuf> for &Path {}
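A quick sketch of what Append buys (illustrative, not part of the commit): unlike Path::set_extension, which would replace .css entirely, append keeps the whole file name and tacks the suffix on.

    use std::path::{Path, PathBuf};

    // assumes the Append trait above is in scope
    fn demo() {
        let gz: PathBuf = Path::new("static/style.css").append(".gz");
        assert_eq!(gz, PathBuf::from("static/style.css.gz"));
    }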

76
src/bin/syntect-to-css.rs Normal file

@@ -0,0 +1,76 @@
use std::fs::File;
use std::io::BufReader;
use std::path::PathBuf;
use clap::Parser;
use color_eyre::eyre::{self, Context, Ok, OptionExt};
use syntect::highlighting::{Theme, ThemeSet};
use syntect::html::{css_for_theme_with_class_style, ClassStyle};
#[derive(Parser, Debug)]
#[command(about = "generate CSS from a syntect theme")]
struct Args {
#[command(subcommand)]
command: Command,
#[arg(
short,
long,
help = "prefix for generated classes",
default_value = "syntect-"
)]
prefix: String,
#[arg(
long,
help = "don't add a prefix to generated classes",
default_value_t = false
)]
no_prefix: bool,
}
#[derive(Parser, Debug)]
enum Command {
#[command(about = "generate CSS from a theme in the default theme set")]
Default {
#[arg(help = "name of theme (no .tmTheme)")]
theme_name: String,
},
#[command(about = "generate CSS from a .tmTheme file")]
File {
#[arg(help = "path to theme (including .tmTheme)")]
path: PathBuf,
},
}
fn main() -> eyre::Result<()> {
let args = Args::parse();
color_eyre::install()?;
let theme = match args.command {
Command::Default { theme_name } => {
let ts = ThemeSet::load_defaults();
ts.themes
.get(&theme_name)
.ok_or_eyre(format!("theme {:?} doesn't exist", theme_name))?
.to_owned()
}
Command::File { path } => {
let mut file = BufReader::new(
File::open(&path).with_context(|| format!("failed to open {:?}", path))?,
);
ThemeSet::load_from_reader(&mut file).with_context(|| "failed to parse theme")?
}
};
let class_style = if args.no_prefix {
ClassStyle::Spaced
} else {
ClassStyle::SpacedPrefixed {
prefix: args.prefix.leak(),
}
};
let css = css_for_theme_with_class_style(&theme, class_style)
.with_context(|| "failed to generate css")?;
println!("{css}");
Ok(())
}
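Usage sketch (hypothetical invocations; "base16-ocean.dark" is one of syntect's bundled default themes, and the .tmTheme path and output file are made up for illustration):

    cargo run --bin syntect-to-css --features clap -- default "base16-ocean.dark" > static/code.css
    cargo run --bin syntect-to-css --features clap -- --no-prefix file themes/example.tmTheme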

60
src/compress.rs Normal file

@@ -0,0 +1,60 @@
// TODO: make this bearable
use std::{
fs::{self, Metadata},
io::{self, Result},
path::Path,
process::{Child, Command},
sync::Mutex,
};
fn compress_file(path: &Path, metadata: Metadata, handles: &Mutex<Vec<Child>>) -> Result<()> {
let compressed_file = format!("{}.gz", path.to_str().unwrap());
if match fs::metadata(compressed_file) {
Ok(existing_metadata) => metadata.modified()? > existing_metadata.modified()?,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => true,
_ => return Err(err),
},
} {
let mut handles_guard = handles.lock().unwrap();
handles_guard.push(Command::new("gzip").arg("-kf5").arg(path).spawn()?);
}
Ok(())
}
fn compress_recursively(path: &Path, handles: &Mutex<Vec<Child>>) -> Result<()> {
let metadata = fs::metadata(path)?;
if metadata.is_dir() {
for entry in fs::read_dir(path)? {
compress_recursively(&entry?.path(), handles)?
}
Ok(())
} else if match path.extension() {
Some(ext) => ext == "gz",
None => false,
} || metadata.is_symlink()
{
Ok(())
} else {
compress_file(path, metadata, handles)
}
}
pub fn compress_epicly<P: AsRef<Path>>(path: P) -> Result<u64> {
let mut i = 0;
let handles = Mutex::new(Vec::new());
compress_recursively(AsRef::<Path>::as_ref(&path), &handles)?;
let handles = handles.into_inner().unwrap();
for mut handle in handles {
assert!(handle.wait().unwrap().success());
i += 1;
}
Ok(i)
}
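In short: walk a tree, skip .gz files and symlinks, spawn gzip -kf5 for every file whose .gz sibling is missing or older, then wait on all the children. A minimal sketch of calling it (assumes gzip is on PATH, which the code requires anyway):

    let compressed: u64 = compress::compress_epicly("static")?;
    println!("compressed {compressed} files");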

121
src/config.rs Normal file

@@ -0,0 +1,121 @@
use std::{
env,
net::{IpAddr, Ipv4Addr},
path::PathBuf,
};
use color_eyre::eyre::{bail, Context, Result};
use serde::{Deserialize, Serialize};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing::{error, info};
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(default)]
pub struct RenderConfig {
pub syntect_load_defaults: bool,
pub syntect_themes_dir: Option<PathBuf>,
pub syntect_theme: Option<String>,
}
#[cfg(feature = "precompression")]
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)]
pub struct PrecompressionConfig {
pub enable: bool,
pub watch: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)]
pub struct Config {
pub host: IpAddr,
pub port: u16,
pub title: String,
pub description: String,
pub posts_dir: PathBuf,
pub render: RenderConfig,
#[cfg(feature = "precompression")]
pub precompression: PrecompressionConfig,
pub cache_file: Option<PathBuf>,
}
impl Default for Config {
fn default() -> Self {
Self {
host: IpAddr::V4(Ipv4Addr::UNSPECIFIED),
port: 3000,
title: "silly-blog".into(),
description: "blazingly fast markdown blog software written in rust memory safe".into(),
render: Default::default(),
posts_dir: "posts".into(),
#[cfg(feature = "precompression")]
precompression: Default::default(),
cache_file: None,
}
}
}
impl Default for RenderConfig {
fn default() -> Self {
Self {
syntect_load_defaults: false,
syntect_themes_dir: Some("themes".into()),
syntect_theme: Some("Catppuccin Mocha".into()),
}
}
}
#[cfg(feature = "precompression")]
impl Default for PrecompressionConfig {
fn default() -> Self {
Self {
enable: false,
watch: true,
}
}
}
pub async fn load() -> Result<Config> {
let config_file = env::var(format!("{}_CONFIG", env!("CARGO_BIN_NAME")))
.unwrap_or(String::from("config.toml"));
match tokio::fs::OpenOptions::new()
.read(true)
.open(&config_file)
.await
{
Ok(mut file) => {
let mut buf = String::new();
file.read_to_string(&mut buf)
.await
.with_context(|| "couldn't read configuration file")?;
toml::from_str(&buf).with_context(|| "couldn't parse configuration")
}
Err(err) => match err.kind() {
std::io::ErrorKind::NotFound => {
let config = Config::default();
info!("configuration file doesn't exist, creating");
match tokio::fs::OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&config_file)
.await
{
Ok(mut file) => file
.write_all(
toml::to_string_pretty(&config)
.with_context(|| "couldn't serialize configuration")?
.as_bytes(),
)
.await
.unwrap_or_else(|err| error!("couldn't write configuration: {}", err)),
Err(err) => {
error!("couldn't open file {:?} for writing: {}", &config_file, err)
}
}
Ok(config)
}
_ => bail!("couldn't open config file: {}", err),
},
}
}
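For reference, a config.toml matching the defaults might look like this (reconstructed from the Default impls above; the path comes from the silly-blog_CONFIG environment variable, falling back to config.toml):

    host = "0.0.0.0"
    port = 3000
    title = "silly-blog"
    description = "blazingly fast markdown blog software written in rust memory safe"
    posts_dir = "posts"
    # cache_file is optional and unset by default

    [render]
    syntect_load_defaults = false
    syntect_themes_dir = "themes"
    syntect_theme = "Catppuccin Mocha"

    [precompression]
    enable = false
    watch = true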

50
src/error.rs Normal file

@@ -0,0 +1,50 @@
use std::fmt::Display;
use axum::{http::StatusCode, response::IntoResponse};
use thiserror::Error;
// fronma is too lazy to implement std::error::Error for their own types
#[derive(Debug)]
#[repr(transparent)]
pub struct FronmaBalls(fronma::error::Error);
impl std::error::Error for FronmaBalls {}
impl Display for FronmaBalls {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("failed to parse front matter: ")?;
match &self.0 {
fronma::error::Error::MissingBeginningLine => f.write_str("missing beginning line"),
fronma::error::Error::MissingEndingLine => f.write_str("missing ending line"),
fronma::error::Error::SerdeYaml(_) => {
unimplemented!("no yaml allowed in this household")
}
fronma::error::Error::Toml(toml_error) => write!(f, "{}", toml_error),
}
}
}
#[derive(Error, Debug)]
#[allow(clippy::enum_variant_names)]
pub enum PostError {
#[error(transparent)]
IoError(#[from] std::io::Error),
#[error(transparent)]
AskamaError(#[from] askama::Error),
#[error(transparent)]
ParseError(#[from] FronmaBalls),
#[error("post {0:?} not found")]
NotFound(String),
}
impl From<fronma::error::Error> for PostError {
fn from(value: fronma::error::Error) -> Self {
Self::ParseError(FronmaBalls(value))
}
}
impl IntoResponse for PostError {
fn into_response(self) -> axum::response::Response {
(StatusCode::INTERNAL_SERVER_ERROR, self.to_string()).into_response()
}
}

11
src/filters.rs Normal file

@@ -0,0 +1,11 @@
use std::time::Duration;
use chrono::{DateTime, TimeZone};
pub fn date<T: TimeZone>(date: &DateTime<T>) -> Result<String, askama::Error> {
Ok(date.to_rfc3339_opts(chrono::SecondsFormat::Secs, true))
}
pub fn duration(duration: &&Duration) -> Result<String, askama::Error> {
Ok(format!("{:?}", duration))
}
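These are askama filters, applied in templates with a pipe; date renders an RFC 3339 timestamp (the value below is an illustrative example, not from the commit):

    {{ post.created_at|date }}   <!-- e.g. 2024-04-18T01:05:38Z -->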

51
src/hash_arc_store.rs Normal file

@@ -0,0 +1,51 @@
use std::hash::{DefaultHasher, Hash, Hasher};
use std::marker::PhantomData;
use std::sync::Arc;
pub struct HashArcStore<T, Lookup>
where
Lookup: Hash,
{
inner: Option<Arc<T>>,
hash: Option<u64>,
_phantom: PhantomData<Lookup>,
}
impl<T, Lookup> HashArcStore<T, Lookup>
where
Lookup: Hash,
{
pub fn new() -> Self {
Self {
inner: None,
hash: None,
_phantom: PhantomData,
}
}
/*pub fn get(&self, key: &Lookup) -> Option<Arc<T>> {
self.hash.and_then(|hash| {
let mut h = DefaultHasher::new();
key.hash(&mut h);
if hash == h.finish() {
self.inner.clone()
} else {
None
}
})
}*/
pub fn get_or_init(&mut self, key: &Lookup, init: impl Fn(&Lookup) -> Arc<T>) -> Arc<T> {
let mut h = DefaultHasher::new();
key.hash(&mut h);
let hash = h.finish();
if !self.hash.is_some_and(|inner_hash| inner_hash == hash) {
// reuse the hash computed above instead of hashing the key a second time
self.inner = Some(init(key));
self.hash = Some(hash);
}
// safety: please.
unsafe { self.inner.as_ref().unwrap_unchecked().clone() }
}
}
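In other words: a one-slot cache that remembers the hash of the key that built the current value, and rebuilds only when a different key shows up. Illustrative usage (types picked arbitrarily):

    let mut store: HashArcStore<String, u32> = HashArcStore::new();
    let a = store.get_or_init(&1, |n| Arc::new(n.to_string()));
    let b = store.get_or_init(&1, |n| Arc::new(n.to_string())); // same key hash: same Arc back
    assert!(Arc::ptr_eq(&a, &b));
    let _c = store.get_or_init(&2, |n| Arc::new(n.to_string())); // different key: rebuilt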

352
src/main.rs Normal file

@@ -0,0 +1,352 @@
#![feature(let_chains, stmt_expr_attributes, proc_macro_hygiene)]
mod append_path;
mod compress;
mod config;
mod error;
mod filters;
mod hash_arc_store;
mod markdown_render;
mod post;
mod watcher;
use std::future::IntoFuture;
use std::net::SocketAddr;
use std::process::exit;
use std::sync::Arc;
use std::time::Duration;
use askama_axum::Template;
use axum::extract::{MatchedPath, Path, State};
use axum::http::{Request, StatusCode};
use axum::response::{IntoResponse, Redirect, Response};
use axum::routing::{get, Router};
use axum::Json;
use color_eyre::eyre::{self, Context};
use thiserror::Error;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpListener;
use tokio::signal;
use tokio::task::JoinSet;
use tokio_util::sync::CancellationToken;
use tower_http::services::ServeDir;
use tower_http::trace::TraceLayer;
use tracing::level_filters::LevelFilter;
use tracing::{error, info, info_span, warn, Span};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
use crate::compress::compress_epicly;
use crate::config::Config;
use crate::error::PostError;
use crate::post::{PostManager, PostMetadata, RenderStats};
use crate::watcher::watch;
type ArcState = Arc<AppState>;
#[derive(Clone)]
struct AppState {
pub config: Config,
pub posts: PostManager,
}
#[derive(Template)]
#[template(path = "index.html")]
struct IndexTemplate {
title: String,
description: String,
posts: Vec<PostMetadata>,
}
#[derive(Template)]
#[template(path = "view_post.html")]
struct ViewPostTemplate {
meta: PostMetadata,
rendered: String,
rendered_in: RenderStats,
}
type AppResult<T> = Result<T, AppError>;
#[derive(Error, Debug)]
enum AppError {
#[error("failed to fetch post: {0}")]
PostError(#[from] PostError),
}
#[derive(Template)]
#[template(path = "error.html")]
struct ErrorTemplate {
error: String,
}
impl IntoResponse for AppError {
fn into_response(self) -> Response {
let status_code = match &self {
AppError::PostError(err) => match err {
PostError::NotFound(_) => StatusCode::NOT_FOUND,
_ => StatusCode::INTERNAL_SERVER_ERROR,
},
//_ => StatusCode::INTERNAL_SERVER_ERROR,
};
(
status_code,
ErrorTemplate {
error: self.to_string(),
},
)
.into_response()
}
}
async fn index(State(state): State<ArcState>) -> AppResult<IndexTemplate> {
Ok(IndexTemplate {
title: state.config.title.clone(),
description: state.config.description.clone(),
posts: state.posts.list_posts().await?,
})
}
async fn post(State(state): State<ArcState>, Path(name): Path<String>) -> AppResult<Response> {
let post = state.posts.get_post(&name).await?;
let post = ViewPostTemplate {
meta: post.0,
rendered: post.1,
rendered_in: post.2,
}
.into_response();
Ok(post)
}
async fn all_posts(State(state): State<ArcState>) -> AppResult<Json<Vec<PostMetadata>>> {
let posts = state.posts.list_posts().await?;
Ok(Json(posts))
}
#[tokio::main]
async fn main() -> eyre::Result<()> {
#[cfg(feature = "tokio-console")]
console_subscriber::init();
color_eyre::install()?;
#[cfg(not(feature = "tokio-console"))]
tracing_subscriber::registry()
.with(
EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy(),
)
.with(tracing_subscriber::fmt::layer())
.init();
let config = config::load()
.await
.with_context(|| "couldn't load configuration")?;
let mut tasks = JoinSet::new();
let mut cancellation_tokens = Vec::new();
#[cfg(feature = "precompression")]
if config.precompression.enable {
let span = info_span!("compression");
info!(parent: span.clone(), "compressing static");
let compressed = tokio::task::spawn_blocking(|| compress_epicly("static"))
.await
.unwrap()
.with_context(|| "couldn't compress static")?;
let _handle = span.enter();
if compressed > 0 {
info!(compressed_files=%compressed, "compressed {compressed} files");
}
if config.precompression.watch {
info!("starting compressor task");
let span = span.clone();
let token = CancellationToken::new();
let passed_token = token.clone();
tasks.spawn(async move {
watch(span, passed_token, Default::default())
.await
.with_context(|| "failed to watch static")
.unwrap()
});
cancellation_tokens.push(token);
}
}
let posts = if let Some(path) = config.cache_file.as_ref()
&& tokio::fs::try_exists(&path)
.await
.with_context(|| format!("failed to check if {} exists", path.display()))?
{
info!("loading cache from file");
let load_cache = async {
let mut cache_file = tokio::fs::File::open(&path)
.await
.with_context(|| "failed to open cache file")?;
let mut serialized = Vec::with_capacity(4096);
cache_file
.read_to_end(&mut serialized)
.await
.with_context(|| "failed to read cache file")?;
let cache = bitcode::deserialize(serialized.as_slice())
.with_context(|| "failed to parse cache")?;
Ok::<PostManager, color_eyre::Report>(PostManager::new_with_cache(
config.posts_dir.clone(),
config.render.clone(),
cache,
))
}
.await;
match load_cache {
Ok(posts) => posts,
Err(err) => {
error!("failed to load cache: {}", err);
info!("using empty cache");
PostManager::new(config.posts_dir.clone(), config.render.clone())
}
}
} else {
PostManager::new(config.posts_dir.clone(), config.render.clone())
};
let state = Arc::new(AppState { config, posts });
let app = Router::new()
.route("/", get(index))
.route(
"/post/:name",
get(
|Path(name): Path<String>| async move { Redirect::to(&format!("/posts/{}", name)) },
),
)
.route("/posts/:name", get(post))
.route("/posts", get(all_posts))
.nest_service("/static", ServeDir::new("static").precompressed_gzip())
.nest_service("/media", ServeDir::new("media"))
.layer(
TraceLayer::new_for_http()
.make_span_with(|request: &Request<_>| {
let matched_path = request
.extensions()
.get::<MatchedPath>()
.map(MatchedPath::as_str);
info_span!(
"request",
method = ?request.method(),
path = ?request.uri().path(),
matched_path,
)
})
.on_response(|response: &Response<_>, duration: Duration, span: &Span| {
let _guard = span.enter(); // binding to `_` would drop the guard immediately and never enter the span
let status = response.status();
info!(?status, ?duration, "response");
}),
)
.with_state(state.clone());
let listener = TcpListener::bind((state.config.host, state.config.port))
.await
.with_context(|| {
format!(
"couldn't listen on {}",
SocketAddr::new(state.config.host, state.config.port)
)
})?;
let local_addr = listener
.local_addr()
.with_context(|| "couldn't get socket address")?;
info!("listening on http://{}", local_addr);
let sigint = signal::ctrl_c();
#[cfg(unix)]
let mut sigterm_handler =
tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())?;
#[cfg(unix)]
let sigterm = sigterm_handler.recv();
#[cfg(not(unix))] // TODO: kill all windows server users
let sigterm = std::future::pending::<()>();
let axum_token = CancellationToken::new();
cancellation_tokens.push(axum_token.clone());
let mut server = axum::serve(
listener,
app.into_make_service_with_connect_info::<SocketAddr>(),
)
.with_graceful_shutdown(async move { axum_token.cancelled().await })
.into_future();
tokio::select! {
result = &mut server => {
result.with_context(|| "failed to serve app")?;
},
_ = sigint => {
info!("received SIGINT, exiting gracefully");
},
_ = sigterm => {
info!("received SIGTERM, exiting gracefully");
}
};
let cleanup = async move {
// stop tasks
for token in cancellation_tokens {
token.cancel();
}
server.await.with_context(|| "failed to serve app")?;
while let Some(task) = tasks.join_next().await {
task.with_context(|| "failed to join task")?;
}
// write cache to file
let AppState { config, posts } = Arc::<AppState>::try_unwrap(state).unwrap_or_else(|state| {
warn!("couldn't unwrap Arc over AppState, more than one strong reference exists for Arc. cloning instead");
AppState::clone(state.as_ref())
});
if let Some(path) = config.cache_file.as_ref() {
let cache = posts.into_cache();
let mut serialized =
bitcode::serialize(&cache).with_context(|| "failed to serialize cache")?;
let mut cache_file = tokio::fs::File::create(path)
.await
.with_context(|| format!("failed to open cache at {}", path.display()))?;
cache_file
.write_all(serialized.as_mut_slice())
.await
.with_context(|| "failed to write cache to file")?;
info!("wrote cache to {}", path.display());
}
Ok::<(), color_eyre::Report>(())
};
let sigint = signal::ctrl_c();
#[cfg(unix)]
let mut sigterm_handler =
tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())?;
#[cfg(unix)]
let sigterm = sigterm_handler.recv();
#[cfg(not(unix))]
let sigterm = std::future::pending::<()>();
tokio::select! {
result = cleanup => {
result.with_context(|| "cleanup failed, oh well")?;
},
_ = sigint => {
warn!("received second signal, exiting");
exit(1);
},
_ = sigterm => {
warn!("received second signal, exiting");
exit(1);
}
}
Ok(())
}

63
src/markdown_render.rs Normal file

@@ -0,0 +1,63 @@
use std::sync::{Arc, OnceLock, RwLock};
use comrak::markdown_to_html_with_plugins;
use comrak::plugins::syntect::{SyntectAdapter, SyntectAdapterBuilder};
use comrak::ComrakOptions;
use comrak::Plugins;
use comrak::RenderPlugins;
use syntect::highlighting::ThemeSet;
use crate::config::RenderConfig;
use crate::hash_arc_store::HashArcStore;
fn syntect_adapter(config: &RenderConfig) -> Arc<SyntectAdapter> {
static STATE: OnceLock<RwLock<HashArcStore<SyntectAdapter, RenderConfig>>> = OnceLock::new();
let lock = STATE.get_or_init(|| RwLock::new(HashArcStore::new()));
let mut guard = lock.write().unwrap();
guard.get_or_init(config, build_syntect)
}
fn build_syntect(config: &RenderConfig) -> Arc<SyntectAdapter> {
let mut theme_set = if config.syntect_load_defaults {
ThemeSet::load_defaults()
} else {
ThemeSet::new()
};
if let Some(path) = config.syntect_themes_dir.as_ref() {
theme_set.add_from_folder(path).unwrap();
}
let mut builder = SyntectAdapterBuilder::new().theme_set(theme_set);
if let Some(theme) = config.syntect_theme.as_ref() {
builder = builder.theme(theme);
}
Arc::new(builder.build())
}
pub fn render_with_config(markdown: &str, config: &RenderConfig, front_matter: bool) -> String {
let mut options = ComrakOptions::default();
options.extension.table = true;
options.extension.autolink = true;
options.extension.tasklist = true;
options.extension.superscript = true;
options.extension.multiline_block_quotes = true;
options.extension.header_ids = Some(String::new());
if front_matter {
options.extension.front_matter_delimiter = Some(String::from("---"));
};
let mut render_plugins = RenderPlugins::default();
let syntect = syntect_adapter(config);
render_plugins.codefence_syntax_highlighter = Some(syntect.as_ref());
let plugins = comrak::PluginsBuilder::default()
.render(render_plugins)
.build()
.unwrap();
render(markdown, &options, &plugins)
}
pub fn render(markdown: &str, options: &ComrakOptions, plugins: &Plugins) -> String {
// TODO: post-processing
markdown_to_html_with_plugins(markdown, options, plugins)
}
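A minimal sketch of rendering with the defaults (assumes a themes/ directory exists, since RenderConfig's default points syntect there and build_syntect unwraps the load):

    let config = RenderConfig::default();
    let html = render_with_config("# meow\n\nhello *world*", &config, false);
    assert!(html.contains("<h1"));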

160
src/post/cache.rs Normal file

@@ -0,0 +1,160 @@
use std::hash::{DefaultHasher, Hash, Hasher};
use scc::HashMap;
use serde::de::{SeqAccess, Visitor};
use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer};
use crate::config::RenderConfig;
use crate::post::PostMetadata;
#[derive(Serialize, Deserialize, Clone)]
pub struct CacheValue {
pub metadata: PostMetadata,
pub rendered: String,
pub mtime: u64,
config_hash: u64,
}
#[derive(Default, Clone)]
pub struct Cache(HashMap<String, CacheValue>);
impl Cache {
pub fn from_map(cache: HashMap<String, CacheValue>) -> Self {
Self(cache)
}
pub async fn lookup(
&self,
name: &str,
mtime: u64,
config: &RenderConfig,
) -> Option<CacheValue> {
match self.0.get_async(name).await {
Some(entry) => {
let cached = entry.get();
if mtime <= cached.mtime && {
let mut hasher = DefaultHasher::new();
config.hash(&mut hasher);
hasher.finish()
} == cached.config_hash
{
Some(cached.clone())
} else {
let _ = entry.remove();
None
}
}
None => None,
}
}
pub async fn lookup_metadata(&self, name: &str, mtime: u64) -> Option<PostMetadata> {
match self.0.get_async(name).await {
Some(entry) => {
let cached = entry.get();
if mtime <= cached.mtime {
Some(cached.metadata.clone())
} else {
let _ = entry.remove();
None
}
}
None => None,
}
}
pub async fn insert(
&self,
name: String,
metadata: PostMetadata,
mtime: u64,
rendered: String,
config: &RenderConfig,
) -> Result<(), (String, (PostMetadata, String))> {
let mut hasher = DefaultHasher::new();
config.hash(&mut hasher);
let hash = hasher.finish();
let value = CacheValue {
metadata,
rendered,
mtime,
config_hash: hash,
};
if self
.0
.update_async(&name, |_, _| value.clone())
.await
.is_none()
{
self.0
.insert_async(name, value)
.await
.map_err(|x| (x.0, (x.1.metadata, x.1.rendered)))
} else {
Ok(())
}
}
pub async fn remove(&self, name: &str) -> Option<(String, CacheValue)> {
self.0.remove_async(name).await
}
#[inline(always)]
pub fn into_inner(self) -> HashMap<String, CacheValue> {
self.0
}
}
impl Serialize for Cache {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let cache = self.clone().into_inner();
let mut seq = serializer.serialize_seq(Some(cache.len()))?;
let mut entry = cache.first_entry();
while let Some(occupied) = entry {
let key = occupied.key().clone();
let value = occupied.get().clone();
seq.serialize_element(&(key, value))?;
entry = occupied.next();
}
seq.end()
}
}
impl<'de> Deserialize<'de> for Cache {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct CoolVisitor;
impl<'de> Visitor<'de> for CoolVisitor {
type Value = Cache;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "meow")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let cache = match seq.size_hint() {
Some(size) => HashMap::with_capacity(size),
None => HashMap::new(),
};
while let Some((key, value)) = seq.next_element::<(String, CacheValue)>()? {
cache.insert(key, value).ok();
}
Ok(Cache::from_map(cache))
}
}
deserializer.deserialize_seq(CoolVisitor)
}
}
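The custom impls flatten the map into a sequence of (name, CacheValue) pairs, which is exactly what main.rs round-trips through bitcode. Sketch (illustrative):

    let cache = Cache::default();
    let bytes = bitcode::serialize(&cache)?;
    let restored: Cache = bitcode::deserialize(&bytes)?;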

229
src/post/mod.rs Normal file

@@ -0,0 +1,229 @@
mod cache;
use std::io;
use std::path::{Path, PathBuf};
use std::time::{Duration, Instant, SystemTime};
use askama::Template;
use chrono::{DateTime, Utc};
use fronma::engines::Toml;
use fronma::parser::{parse_with_engine, ParsedData};
use serde::{Deserialize, Serialize};
use tokio::fs;
use tokio::io::AsyncReadExt;
use tracing::warn;
use crate::config::RenderConfig;
use crate::markdown_render;
use crate::post::cache::Cache;
use crate::PostError;
#[derive(Deserialize)]
struct FrontMatter {
pub title: String,
pub description: String,
pub author: String,
pub icon: Option<String>,
pub created_at: Option<DateTime<Utc>>,
pub modified_at: Option<DateTime<Utc>>,
}
impl FrontMatter {
pub fn into_full(
self,
name: String,
created: Option<SystemTime>,
modified: Option<SystemTime>,
) -> PostMetadata {
PostMetadata {
name,
title: self.title,
description: self.description,
author: self.author,
icon: self.icon,
created_at: self.created_at.or_else(|| created.map(|t| t.into())),
modified_at: self.modified_at.or_else(|| modified.map(|t| t.into())),
}
}
}
#[derive(Serialize, Deserialize, Clone)]
pub struct PostMetadata {
pub name: String,
pub title: String,
pub description: String,
pub author: String,
pub icon: Option<String>,
pub created_at: Option<DateTime<Utc>>,
pub modified_at: Option<DateTime<Utc>>,
}
use crate::filters;
#[derive(Template)]
#[template(path = "post.html")]
struct Post<'a> {
pub meta: &'a PostMetadata,
pub rendered_markdown: String,
}
// ParsedAndRendered durations are (total, parsing, rendering)
#[allow(unused)]
pub enum RenderStats {
Cached(Duration),
ParsedAndRendered(Duration, Duration, Duration),
}
#[derive(Clone)]
pub struct PostManager {
dir: PathBuf,
cache: Cache,
config: RenderConfig,
}
impl PostManager {
pub fn new(dir: PathBuf, config: RenderConfig) -> PostManager {
PostManager {
dir,
cache: Default::default(),
config,
}
}
pub fn new_with_cache(dir: PathBuf, config: RenderConfig, cache: Cache) -> PostManager {
PostManager { dir, cache, config }
}
async fn parse_and_render(
&self,
name: String,
path: impl AsRef<Path>,
) -> Result<(PostMetadata, String, (Duration, Duration)), PostError> {
let parsing_start = Instant::now();
let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(val) => val,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => return Err(PostError::NotFound(name)),
_ => return Err(PostError::IoError(err)),
},
};
let stat = file.metadata().await?;
let modified = stat.modified()?;
let created = stat.created().ok();
let mut content = String::with_capacity(stat.len() as usize);
file.read_to_string(&mut content).await?;
let ParsedData { headers, body } = parse_with_engine::<FrontMatter, Toml>(&content)?;
let metadata = headers.into_full(name.to_owned(), created, Some(modified));
let parsing = parsing_start.elapsed();
let before_render = Instant::now();
let rendered_markdown = markdown_render::render_with_config(body, &self.config, false);
let post = Post {
meta: &metadata,
rendered_markdown,
}
.render()?;
let rendering = before_render.elapsed();
self.cache
.insert(
name.to_string(),
metadata.clone(),
modified
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs(),
post.clone(),
&self.config,
)
.await
.unwrap_or_else(|err| warn!("failed to insert {:?} into cache", err.0));
Ok((metadata, post, (parsing, rendering)))
}
async fn list_posts_recursive(
&self,
dir: impl AsRef<Path>,
) -> Result<Vec<PostMetadata>, PostError> {
let mut posts = Vec::new();
let mut read_dir = fs::read_dir(dir).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
if stat.is_file() && path.extension().is_some_and(|ext| ext == "md") {
let mtime = stat
.modified()?
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let name = path
.clone()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
if let Some(hit) = self.cache.lookup_metadata(&name, mtime).await {
posts.push(hit)
} else if let Ok((metadata, ..)) = self.parse_and_render(name, path).await {
posts.push(metadata);
}
}
}
Ok(posts)
}
#[allow(unused)]
pub async fn list_posts(&self) -> Result<Vec<PostMetadata>, PostError> {
self.list_posts_recursive(&self.dir).await
}
// third entry in the tuple is whether it got rendered and if so, how long did it take
pub async fn get_post(
&self,
name: &str,
) -> Result<(PostMetadata, String, RenderStats), PostError> {
let start = Instant::now();
let path = self.dir.join(name.to_owned() + ".md");
let stat = match tokio::fs::metadata(&path).await {
Ok(value) => value,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => {
self.cache.remove(name).await;
return Err(PostError::NotFound(name.to_string()));
}
_ => return Err(PostError::IoError(err)),
},
};
let mtime = stat
.modified()?
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
if let Some(hit) = self.cache.lookup(name, mtime, &self.config).await {
Ok((
hit.metadata,
hit.rendered,
RenderStats::Cached(start.elapsed()),
))
} else {
let (metadata, rendered, stats) = self.parse_and_render(name.to_string(), path).await?;
Ok((
metadata,
rendered,
RenderStats::ParsedAndRendered(start.elapsed(), stats.0, stats.1),
))
}
}
pub fn into_cache(self) -> Cache {
self.cache
}
}
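Putting it together, fetching a post looks roughly like this (sketch, inside an async context; "README" resolves through the posts/README.md symlink):

    let posts = PostManager::new("posts".into(), RenderConfig::default());
    let (_meta, _html, stats) = posts.get_post("README").await?;
    match stats {
        RenderStats::Cached(total) => println!("cache hit in {total:?}"),
        RenderStats::ParsedAndRendered(total, parsing, rendering) => {
            println!("parsed in {parsing:?}, rendered in {rendering:?}, {total:?} total")
        }
    }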

76
src/watcher.rs Normal file

@@ -0,0 +1,76 @@
use notify::{event::RemoveKind, Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use tokio_util::sync::CancellationToken;
use tracing::{info, Span};
use crate::append_path::Append;
use crate::compress::compress_epicly;
pub async fn watch(
span: Span,
token: CancellationToken,
config: Config,
) -> Result<(), notify::Error> {
let (tx, mut rx) = tokio::sync::mpsc::channel(12);
let mut watcher = RecommendedWatcher::new(
move |res| {
tx.blocking_send(res)
.expect("failed to send message over channel")
},
config,
)?;
watcher.watch(std::path::Path::new("static"), RecursiveMode::Recursive)?;
while let Some(received) = tokio::select! {
received = rx.recv() => received,
_ = token.cancelled() => return Ok(())
} {
match received {
Ok(event) => {
if event.kind.is_create() || event.kind.is_modify() {
let cloned_span = span.clone();
let compressed =
tokio::task::spawn_blocking(move || -> std::io::Result<u64> {
let _handle = cloned_span.enter();
let mut i = 0;
for path in event.paths {
if path.extension().is_some_and(|ext| ext == "gz") {
continue;
}
info!("{} changed, compressing", path.display());
i += compress_epicly(&path)?;
}
Ok(i)
})
.await
.unwrap()?;
if compressed > 0 {
let _handle = span.enter();
info!(compressed_files=%compressed, "compressed {compressed} files");
}
} else if let EventKind::Remove(remove_event) = event.kind // UNSTABLE
&& matches!(remove_event, RemoveKind::File)
{
for path in event.paths {
if path.extension().is_some_and(|ext| ext == "gz") {
continue;
}
let gz_path = path.clone().append(".gz");
if tokio::fs::try_exists(&gz_path).await? {
info!(
"{} removed, also removing {}",
path.display(),
gz_path.display()
);
tokio::fs::remove_file(&gz_path).await?
}
}
}
}
Err(err) => return Err(err),
}
}
Ok(())
}

29
static/post.css Normal file

@@ -0,0 +1,29 @@
.anchor {
text-decoration: none;
}
.anchor::before {
content: "§";
}
.anchor::after {
content: " ";
}
code {
font-size: larger;
padding: 0.15em 0.4em;
background-color: var(--surface0);
color: var(--subtext1);
}
/* code blocks */
pre > code {
border: 2px solid var(--surface0);
padding: 1.25em 1.5em;
display: block;
overflow-wrap: break-word;
white-space: pre-wrap;
background-color: var(--base);
color: var(--text);
}

134
static/style.css Normal file

@@ -0,0 +1,134 @@
/* colors */
:root {
--base: #1e1e2e;
--text: #cdd6f4;
--crust: #11111b;
--surface0: #313244;
--subtext0: #a6adc8;
--subtext1: #bac2de;
--pink: #f5c2e7;
--rosewater: #f5e0dc;
--blue: #89b4fa;
--mauve: #cba6f7;
}
@media (prefers-color-scheme: light) {
:root {
--base: #eff1f5;
--text: #4c4f69;
--crust: #dce0e8;
--surface0: #ccd0da;
--subtext0: #6c6f85;
--subtext1: #5c5f77;
--pink: #ea76cb;
--rosewater: #dc8a78;
--blue: #1e66f5;
--mauve: #8839ef;
}
}
:root {
/* please have one at least one good monospace font */
font-family: "Hack Nerd Font", "Hack", "JetBrains Mono",
"JetBrainsMono Nerd Font", "Ubuntu Mono", monospace, sans-serif;
background-color: var(--base);
color: var(--text);
}
a {
color: var(--pink);
}
a:hover {
color: var(--rosewater);
}
a:active {
color: var(--blue);
}
a:visited {
color: var(--mauve);
}
code {
padding: 0.15em 0.4em;
background-color: var(--surface0);
color: var(--subtext0);
}
.tooltipped {
border-bottom: 1px dotted var(--text);
}
hr {
color: var(--subtext1);
}
footer {
text-align: end;
font-size: small;
opacity: 50%;
}
.post-author {
font-size: smaller;
opacity: 0.65;
}
/* BEGIN cool effect everyone liked */
body {
margin: 0;
padding: 0;
background-color: var(--base);
}
main {
padding: 1em;
background-color: var(--base);
}
body > main > h1:first-child {
margin-top: 0;
}
@media (min-width: 956px) {
:root {
--target-ratio: 0.7; /* 669px - 1344x */
--width: min(100% * var(--target-ratio), 1920px * var(--target-ratio));
--padding: 4em;
--padded-width: calc(var(--width) - var(--padding) * 2);
}
body {
padding: 4em 0;
min-height: calc(100vh - 8em);
background: var(--crust);
background: linear-gradient(
90deg,
var(--base) 0%,
var(--crust) calc((100% - var(--width)) / 2),
var(--crust) calc(50% + var(--width) / 2),
var(--base) 100%
);
}
body > * {
margin: auto;
padding: var(--padding);
width: var(--padded-width);
}
body > footer {
padding: initial;
width: var(--width);
}
}
/* END cool effect everyone liked */

16
templates/error.html Normal file

@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>error</title>
<link rel="stylesheet" href="/static/style.css" />
</head>
<body>
<main>
<h1>error</h1>
<p>{{ error }}</p>
<a href="/">go back to home</a>
</main>
</body>
</html>

36
templates/index.html Normal file

@@ -0,0 +1,36 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="description" content="{{ title }}" />
<meta property="og:title" content="{{ title }}" />
<meta property="og:description" content="{{ description }}" />
<title>{{ title }}</title>
<link rel="stylesheet" href="/static/style.css" />
</head>
<body>
<main>
<h1>{{ title }}</h1>
<p>{{ description }}</p>
<h2>posts</h2>
<!-- prettier-ignore -->
<div>
{% for post in posts %}
<p>
<a href="/posts/{{ post.name }}"><b>{{ post.title }}</b></a>
<span class="post-author">- by {{ post.author }}</span>
<br />
{{ post.description }}<br />
{% match post.created_at %} {% when Some(created_at) %}
written:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; {{ created_at|date }}<br />
{% when None %} {% endmatch %}
{% match post.modified_at %} {% when Some(modified_at) %}
last modified: {{ modified_at|date }}
{% when None %} {% endmatch %}
</p>
{% endfor %}
</div>
</main>
</body>
</html>

20
templates/post.html Normal file

@@ -0,0 +1,20 @@
<h1 class="post-title">
{{ meta.title }}
<span class="post-author">- by {{ meta.author }}</span>
</h1>
<p class="post-desc">{{ meta.description }}</p>
<div>
<!-- prettier-ignore -->
<div>
{% match meta.created_at %} {% when Some(created_at) %}
written:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; {{ created_at|date }}<br />
{% when None %} {% endmatch %}
{% match meta.modified_at %} {% when Some(modified_at) %}
last modified: {{ modified_at|date }}
{% when None %} {% endmatch %}
</div>
<a href="/posts/{{ meta.name }}">link</a><br />
<a href="/">back to home</a>
</div>
<hr />
{{ rendered_markdown|escape("none") }}

0
templates/post_list.html Normal file

35
templates/view_post.html Normal file

@@ -0,0 +1,35 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta
name="viewport"
content="width=device-width, initial-scale=1.0"
/>
<meta name="description" content="{{ meta.title }}" />
<meta property="og:title" content="{{ meta.title }}" />
<meta property="og:description" content="{{ meta.description }}" />
{% match meta.icon %} {% when Some with (url) %}
<meta property="og:image" content="{{ url }}" />
<link rel="shortcut icon" href="{{ url }}" />
{% when None %} {% endmatch %}
<title>{{ meta.title }}</title>
<link rel="stylesheet" href="/static/style.css" />
<link rel="stylesheet" href="/static/post.css" />
</head>
<body>
<main>{{ rendered|escape("none") }}</main>
<!-- prettier-ignore -->
<footer>
{% match rendered_in %}
{% when RenderStats::ParsedAndRendered(total, parsing, rendering) %}
<span class="tooltipped" title="parsing took {{ parsing|duration }}">parsed</span> and
<span class="tooltipped" title="rendering took {{ rendering|duration }}">rendered</span> in {{ total|duration }}
{% when RenderStats::Cached(total) %}
retrieved from cache in {{ total|duration }}
{% endmatch %}
</footer>
</body>
</html>

File diff suppressed because it is too large