Compare commits

..

No commits in common. "897e1cbf880e2e41b4cea1bd41828190313a383d" and "a19c5762756b3cd8be3814aacc6bb8bb3658714c" have entirely different histories.

6 changed files with 316 additions and 348 deletions

View file

@ -17,12 +17,12 @@ use tracing::{info, info_span, Span};
use crate::config::Config; use crate::config::Config;
use crate::error::{AppError, AppResult}; use crate::error::{AppError, AppResult};
use crate::filters; use crate::filters;
use crate::post::{MarkdownPosts, PostManager, PostMetadata, RenderStats}; use crate::post::{PostManager, PostMetadata, RenderStats};
#[derive(Clone)] #[derive(Clone)]
pub struct AppState { pub struct AppState {
pub config: Arc<Config>, pub config: Arc<Config>,
pub posts: Arc<MarkdownPosts<Arc<Config>>>, pub posts: Arc<PostManager<Arc<Config>>>,
} }
#[derive(Template)] #[derive(Template)]
@ -84,7 +84,7 @@ async fn rss(
} }
let posts = posts let posts = posts
.get_all_posts(|metadata, _| { .get_all_posts_filtered(|metadata, _| {
!query !query
.tag .tag
.as_ref() .as_ref()
@ -161,7 +161,7 @@ async fn post(
} }
} }
pub fn new(config: &Config) -> Router<AppState> { pub fn new() -> Router<AppState> {
Router::new() Router::new()
.route("/", get(index)) .route("/", get(index))
.route( .route(
@ -173,11 +173,8 @@ pub fn new(config: &Config) -> Router<AppState> {
.route("/posts/:name", get(post)) .route("/posts/:name", get(post))
.route("/posts", get(all_posts)) .route("/posts", get(all_posts))
.route("/feed.xml", get(rss)) .route("/feed.xml", get(rss))
.nest_service( .nest_service("/static", ServeDir::new("static").precompressed_gzip())
"/static", .nest_service("/media", ServeDir::new("media"))
ServeDir::new(&config.dirs._static).precompressed_gzip(),
)
.nest_service("/media", ServeDir::new(&config.dirs.media))
.layer( .layer(
TraceLayer::new_for_http() TraceLayer::new_for_http()
.make_span_with(|request: &Request<_>| { .make_span_with(|request: &Request<_>| {

View file

@ -5,7 +5,7 @@ use std::path::PathBuf;
use color_eyre::eyre::{bail, Context, Result}; use color_eyre::eyre::{bail, Context, Result};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing::{error, info, instrument}; use tracing::{error, info};
use url::Url; use url::Url;
use crate::ranged_i128_visitor::RangedI128Visitor; use crate::ranged_i128_visitor::RangedI128Visitor;
@ -49,8 +49,6 @@ pub struct HttpConfig {
pub struct DirsConfig { pub struct DirsConfig {
pub posts: PathBuf, pub posts: PathBuf,
pub media: PathBuf, pub media: PathBuf,
#[serde(rename = "static")]
pub _static: PathBuf,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
@ -103,7 +101,6 @@ impl Default for DirsConfig {
Self { Self {
posts: "posts".into(), posts: "posts".into(),
media: "media".into(), media: "media".into(),
_static: "static".into(),
} }
} }
} }
@ -141,7 +138,6 @@ impl Default for CacheConfig {
} }
} }
#[instrument(name = "config")]
pub async fn load() -> Result<Config> { pub async fn load() -> Result<Config> {
let config_file = env::var(format!( let config_file = env::var(format!(
"{}_CONFIG", "{}_CONFIG",

View file

@ -27,7 +27,7 @@ use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::{util::SubscriberInitExt, EnvFilter}; use tracing_subscriber::{util::SubscriberInitExt, EnvFilter};
use crate::app::AppState; use crate::app::AppState;
use crate::post::{MarkdownPosts, PostManager}; use crate::post::PostManager;
#[tokio::main] #[tokio::main]
async fn main() -> eyre::Result<()> { async fn main() -> eyre::Result<()> {
@ -55,15 +55,15 @@ async fn main() -> eyre::Result<()> {
let mut tasks = JoinSet::new(); let mut tasks = JoinSet::new();
let cancellation_token = CancellationToken::new(); let cancellation_token = CancellationToken::new();
let posts = Arc::new(MarkdownPosts::new(Arc::clone(&config)).await?); let posts = Arc::new(PostManager::new(Arc::clone(&config)).await?);
let state = AppState { let state = AppState {
config: Arc::clone(&config), config: Arc::clone(&config),
posts: Arc::clone(&posts), posts,
}; };
if config.cache.enable && config.cache.cleanup { if config.cache.enable && config.cache.cleanup {
if let Some(t) = config.cache.cleanup_interval { if let Some(t) = config.cache.cleanup_interval {
let posts = Arc::clone(&posts); let state = state.clone();
let token = cancellation_token.child_token(); let token = cancellation_token.child_token();
debug!("setting up cleanup task"); debug!("setting up cleanup task");
tasks.spawn(async move { tasks.spawn(async move {
@ -72,17 +72,17 @@ async fn main() -> eyre::Result<()> {
select! { select! {
_ = token.cancelled() => break, _ = token.cancelled() => break,
_ = interval.tick() => { _ = interval.tick() => {
posts.cleanup().await state.posts.cleanup().await
} }
} }
} }
}); });
} else { } else {
posts.cleanup().await; state.posts.cleanup().await;
} }
} }
let app = app::new(&config).with_state(state.clone()); let app = app::new().with_state(state.clone());
let listener = TcpListener::bind(socket_addr) let listener = TcpListener::bind(socket_addr)
.await .await

View file

@ -1,13 +1,10 @@
use std::hash::{DefaultHasher, Hash, Hasher}; use std::hash::{DefaultHasher, Hash, Hasher};
use std::io::Read;
use color_eyre::eyre::{self, Context};
use scc::HashMap; use scc::HashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::AsyncReadExt;
use tracing::{debug, instrument}; use tracing::{debug, instrument};
use crate::config::{Config, RenderConfig}; use crate::config::RenderConfig;
use crate::post::PostMetadata; use crate::post::PostMetadata;
/// do not persist cache if this version number changed /// do not persist cache if this version number changed
@ -136,29 +133,3 @@ impl Cache {
self.1 self.1
} }
} }
pub(crate) async fn load_cache(config: &Config) -> Result<Cache, eyre::Report> {
let path = &config.cache.file;
let mut cache_file = tokio::fs::File::open(&path)
.await
.context("failed to open cache file")?;
let serialized = if config.cache.compress {
let cache_file = cache_file.into_std().await;
tokio::task::spawn_blocking(move || {
let mut buf = Vec::with_capacity(4096);
zstd::stream::read::Decoder::new(cache_file)?.read_to_end(&mut buf)?;
Ok::<_, std::io::Error>(buf)
})
.await?
.context("failed to read cache file")?
} else {
let mut buf = Vec::with_capacity(4096);
cache_file
.read_to_end(&mut buf)
.await
.context("failed to read cache file")?;
buf
};
bitcode::deserialize(serialized.as_slice()).context("failed to parse cache")
}

View file

@ -1,279 +0,0 @@
use std::io::{self, Write};
use std::ops::Deref;
use std::path::Path;
use std::time::Duration;
use std::time::Instant;
use color_eyre::eyre::{self, Context};
use fronma::parser::{parse, ParsedData};
use tokio::fs;
use tokio::io::AsyncReadExt;
use tracing::{error, info, warn};
use crate::config::Config;
use crate::markdown_render::render;
use crate::post::cache::{load_cache, Cache, CACHE_VERSION};
use crate::post::{FrontMatter, PostError, PostManager, PostMetadata, RenderStats};
use crate::systemtime_as_secs::as_secs;
/// Filesystem-backed post store: reads `*.md` files from `config.dirs.posts`,
/// renders them, and optionally memoizes results in an in-memory `Cache`.
pub struct MarkdownPosts<C>
where
C: Deref<Target = Config>,
{
// `None` when `config.cache.enable` is false (see `new`).
cache: Option<Cache>,
// Any owner/borrow of a `Config` works (e.g. `Arc<Config>`).
config: C,
}
impl<C> MarkdownPosts<C>
where
C: Deref<Target = Config>,
{
/// Builds a post store from `config`.
///
/// Cache setup, by configuration:
/// - caching disabled            -> `cache: None`;
/// - enabled, no persisted file  -> fresh empty cache;
/// - enabled + persisted file    -> load via `load_cache`, falling back to an
///   empty cache on load errors and clearing it when `CACHE_VERSION` grew.
pub async fn new(config: C) -> eyre::Result<MarkdownPosts<C>> {
if config.cache.enable {
if config.cache.persistence && tokio::fs::try_exists(&config.cache.file).await? {
info!("loading cache from file");
let mut cache = load_cache(&config).await.unwrap_or_else(|err| {
// a corrupt/unreadable cache is not fatal: log it and start fresh
error!("failed to load cache: {}", err);
info!("using empty cache");
Default::default()
});
if cache.version() < CACHE_VERSION {
warn!("cache version changed, clearing cache");
cache = Default::default();
};
Ok(Self {
cache: Some(cache),
config,
})
} else {
Ok(Self {
cache: Some(Default::default()),
config,
})
}
} else {
Ok(Self {
cache: None,
config,
})
}
}
/// Reads the markdown file at `path`, splits the fronma front matter from the
/// body, renders the body, and (when caching is on) stores the result.
///
/// Returns the post metadata, the rendered output, and the
/// (parsing, rendering) wall-clock durations.
///
/// Errors: `PostError::NotFound` when the file does not exist,
/// `PostError::IoError` for other I/O failures, plus front-matter parse
/// errors propagated via `?`.
async fn parse_and_render(
&self,
name: String,
path: impl AsRef<Path>,
) -> Result<(PostMetadata, String, (Duration, Duration)), PostError> {
let parsing_start = Instant::now();
let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(val) => val,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => return Err(PostError::NotFound(name)),
_ => return Err(PostError::IoError(err)),
},
};
let stat = file.metadata().await?;
let modified = stat.modified()?;
// creation time is not available on every platform/filesystem, so it is optional
let created = stat.created().ok();
let mut content = String::with_capacity(stat.len() as usize);
file.read_to_string(&mut content).await?;
let ParsedData { headers, body } = parse::<FrontMatter>(&content)?;
let metadata = headers.into_full(name.to_owned(), created, Some(modified));
let parsing = parsing_start.elapsed();
let before_render = Instant::now();
let post = render(body, &self.config.render);
let rendering = before_render.elapsed();
if let Some(cache) = self.cache.as_ref() {
// cache insert failures only cost performance, so they are logged, not returned
cache
.insert(
name.to_string(),
metadata.clone(),
as_secs(&modified),
post.clone(),
&self.config.render,
)
.await
.unwrap_or_else(|err| warn!("failed to insert {:?} into cache", err.0))
};
Ok((metadata, post, (parsing, rendering)))
}
/// Borrow the optional cache.
fn cache(&self) -> Option<&Cache> {
self.cache.as_ref()
}
/// Persists the cache to `config.cache.file` (zstd-compressed when
/// `cache.compress` is set) if caching and persistence are both enabled.
/// Called from `Drop`, hence the synchronous `std::fs` I/O — drop handlers
/// cannot await.
fn try_drop(&mut self) -> Result<(), eyre::Report> {
// write cache to file
let config = &self.config.cache;
if config.enable
&& config.persistence
&& let Some(cache) = self.cache()
{
let path = &config.file;
let serialized = bitcode::serialize(cache).context("failed to serialize cache")?;
let mut cache_file = std::fs::File::create(path)
.with_context(|| format!("failed to open cache at {}", path.display()))?;
let compression_level = config.compression_level;
if config.compress {
std::io::Write::write_all(
&mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
.auto_finish(),
&serialized,
)
} else {
cache_file.write_all(&serialized)
}
.context("failed to write cache to file")?;
info!("wrote cache to {}", path.display());
}
Ok(())
}
}
/// Flushes the cache to disk when the store is dropped (see `try_drop`).
impl<C> Drop for MarkdownPosts<C>
where
C: Deref<Target = Config>,
{
fn drop(&mut self) {
// NOTE(review): this unwrap panics if the cache cannot be written; a panic
// inside drop while already unwinding aborts the process — consider logging
// the error instead. Confirm whether abort-on-failure is intended here.
self.try_drop().unwrap()
}
}
impl<C> PostManager for MarkdownPosts<C>
where
C: Deref<Target = Config>,
{
/// Scans `config.dirs.posts` for `*.md` files and returns the metadata of
/// every post accepted by `filter`. Serves metadata from the cache when a
/// fresh entry (matching mtime) exists; otherwise parses and renders the file.
///
/// Files that disappear between the directory scan and the read (TOCTOU) are
/// logged and skipped rather than failing the whole listing.
async fn get_all_post_metadata(
&self,
filter: impl Fn(&PostMetadata) -> bool,
) -> Result<Vec<PostMetadata>, PostError> {
let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
if stat.is_file() && path.extension().is_some_and(|ext| ext == "md") {
let mtime = as_secs(&stat.modified()?);
// TODO. this?
let name = path
.clone()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
// NOTE(review): a cache hit that fails `filter` falls through to the
// else branch and is re-parsed/re-rendered even though a fresh cached
// entry exists; the result is then discarded by the second `filter`.
if let Some(cache) = self.cache.as_ref()
&& let Some(hit) = cache.lookup_metadata(&name, mtime).await
&& filter(&hit)
{
posts.push(hit);
} else {
match self.parse_and_render(name, path).await {
Ok((metadata, ..)) => {
if filter(&metadata) {
posts.push(metadata);
}
}
Err(err) => match err {
PostError::IoError(ref io_err)
if matches!(io_err.kind(), io::ErrorKind::NotFound) =>
{
// file vanished after the scan; skip it
warn!("TOCTOU: {}", err)
}
_ => return Err(err),
},
}
}
}
}
Ok(posts)
}
/// Returns every post (metadata + rendered body + render stats) whose
/// metadata and body are accepted by `filter`. Each post goes through
/// `get_post`, so the cache is consulted per post.
async fn get_all_posts(
&self,
filter: impl Fn(&PostMetadata, &str) -> bool,
) -> Result<Vec<(PostMetadata, String, RenderStats)>, PostError> {
let mut posts = Vec::new();
let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
if stat.is_file() && path.extension().is_some_and(|ext| ext == "md") {
let name = path
.clone()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let post = self.get_post(&name).await?;
if filter(&post.0, &post.1) {
posts.push(post);
}
}
}
Ok(posts)
}
/// Fetches a single post by name (`<posts dir>/<name>.md`).
///
/// On a fresh cache hit returns `RenderStats::Cached`; otherwise parses and
/// renders, returning `RenderStats::ParsedAndRendered`. If the file is gone,
/// the stale cache entry (if any) is evicted before `PostError::NotFound`.
async fn get_post(&self, name: &str) -> Result<(PostMetadata, String, RenderStats), PostError> {
let start = Instant::now();
let path = self.config.dirs.posts.join(name.to_owned() + ".md");
let stat = match tokio::fs::metadata(&path).await {
Ok(value) => value,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => {
if let Some(cache) = self.cache.as_ref() {
cache.remove(name).await;
}
return Err(PostError::NotFound(name.to_string()));
}
_ => return Err(PostError::IoError(err)),
},
};
let mtime = as_secs(&stat.modified()?);
if let Some(cache) = self.cache.as_ref()
&& let Some(hit) = cache.lookup(name, mtime, &self.config.render).await
{
Ok((
hit.metadata,
hit.rendered,
RenderStats::Cached(start.elapsed()),
))
} else {
let (metadata, rendered, stats) = self.parse_and_render(name.to_string(), path).await?;
Ok((
metadata,
rendered,
RenderStats::ParsedAndRendered(start.elapsed(), stats.0, stats.1),
))
}
}
/// Prunes the cache: gives `Cache::cleanup` a callback mapping each post
/// name to its current on-disk mtime in seconds (`None` when the file is
/// missing/unreadable). No-op when caching is disabled.
async fn cleanup(&self) {
if let Some(cache) = self.cache.as_ref() {
cache
.cleanup(|name| {
std::fs::metadata(self.config.dirs.posts.join(name.to_owned() + ".md"))
.ok()
.and_then(|metadata| metadata.modified().ok())
.map(|mtime| as_secs(&mtime))
})
.await
}
}
}

View file

@ -1,14 +1,24 @@
pub mod cache; pub mod cache;
pub mod markdown_posts;
use std::collections::BTreeSet; use std::collections::BTreeSet;
use std::time::{Duration, SystemTime}; use std::io::{self, Read, Write};
use std::ops::Deref;
use std::path::Path;
use std::time::{Duration, Instant, SystemTime};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use color_eyre::eyre::{self, Context};
use fronma::parser::{parse, ParsedData};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::fs;
use tokio::io::AsyncReadExt;
use tracing::{error, info, warn};
use crate::error::PostError; use crate::config::Config;
pub use crate::post::markdown_posts::MarkdownPosts; use crate::markdown_render::render;
use crate::post::cache::{Cache, CACHE_VERSION};
use crate::systemtime_as_secs::as_secs;
use crate::PostError;
#[derive(Deserialize)] #[derive(Deserialize)]
struct FrontMatter { struct FrontMatter {
@ -61,28 +71,218 @@ pub enum RenderStats {
ParsedAndRendered(Duration, Duration, Duration), ParsedAndRendered(Duration, Duration, Duration),
} }
pub trait PostManager { pub struct PostManager<C>
async fn get_all_post_metadata( where
C: Deref<Target = Config>,
{
cache: Option<Cache>,
config: C,
}
impl<C> PostManager<C>
where
C: Deref<Target = Config>,
{
/// Builds a post manager from `config`.
///
/// Cache setup, by configuration:
/// - caching disabled            -> `cache: None`;
/// - enabled, no persisted file  -> fresh empty cache;
/// - enabled + persisted file    -> read and deserialize it (zstd-decoding on
///   a blocking thread when `cache.compress` is set), fall back to an empty
///   cache on any load error, and clear it when `CACHE_VERSION` grew.
pub async fn new(config: C) -> eyre::Result<PostManager<C>> {
if config.cache.enable {
if config.cache.persistence
&& tokio::fs::try_exists(&config.cache.file)
.await
.with_context(|| {
format!("failed to check if {} exists", config.cache.file.display())
})?
{
info!("loading cache from file");
let path = &config.cache.file;
// inline async block acts as a try-block: every fallible step inside
// funnels into one `Result` handled below, instead of aborting `new`
let load_cache = async {
let mut cache_file = tokio::fs::File::open(&path)
.await
.context("failed to open cache file")?;
let serialized = if config.cache.compress {
let cache_file = cache_file.into_std().await;
// zstd decoding is synchronous, so run it off the async runtime
tokio::task::spawn_blocking(move || {
let mut buf = Vec::with_capacity(4096);
zstd::stream::read::Decoder::new(cache_file)?.read_to_end(&mut buf)?;
Ok::<_, std::io::Error>(buf)
})
.await
.context("failed to join blocking thread")?
.context("failed to read cache file")?
} else {
let mut buf = Vec::with_capacity(4096);
cache_file
.read_to_end(&mut buf)
.await
.context("failed to read cache file")?;
buf
};
let mut cache: Cache = bitcode::deserialize(serialized.as_slice())
.context("failed to parse cache")?;
if cache.version() < CACHE_VERSION {
warn!("cache version changed, clearing cache");
cache = Cache::default();
};
Ok::<Cache, eyre::Report>(cache)
}
.await;
Ok(Self {
// a corrupt/unreadable cache is not fatal: log it and start fresh
cache: Some(match load_cache {
Ok(cache) => cache,
Err(err) => {
error!("failed to load cache: {}", err);
info!("using empty cache");
Default::default()
}
}),
config,
})
} else {
Ok(Self {
cache: Some(Default::default()),
config,
})
}
} else {
Ok(Self {
cache: None,
config,
})
}
}
/// Reads the markdown file at `path`, splits the fronma front matter from the
/// body, renders the body, and (when caching is on) stores the result.
///
/// Returns the post metadata, the rendered output, and the
/// (parsing, rendering) wall-clock durations.
///
/// Errors: `PostError::NotFound` when the file does not exist,
/// `PostError::IoError` for other I/O failures, plus front-matter parse
/// errors propagated via `?`.
async fn parse_and_render(
&self,
name: String,
path: impl AsRef<Path>,
) -> Result<(PostMetadata, String, (Duration, Duration)), PostError> {
let parsing_start = Instant::now();
let mut file = match tokio::fs::OpenOptions::new().read(true).open(&path).await {
Ok(val) => val,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => return Err(PostError::NotFound(name)),
_ => return Err(PostError::IoError(err)),
},
};
let stat = file.metadata().await?;
let modified = stat.modified()?;
// creation time is not available on every platform/filesystem, so it is optional
let created = stat.created().ok();
let mut content = String::with_capacity(stat.len() as usize);
file.read_to_string(&mut content).await?;
let ParsedData { headers, body } = parse::<FrontMatter>(&content)?;
let metadata = headers.into_full(name.to_owned(), created, Some(modified));
let parsing = parsing_start.elapsed();
let before_render = Instant::now();
let post = render(body, &self.config.render);
let rendering = before_render.elapsed();
if let Some(cache) = self.cache.as_ref() {
// cache insert failures only cost performance, so they are logged, not returned
cache
.insert(
name.to_string(),
metadata.clone(),
as_secs(&modified),
post.clone(),
&self.config.render,
)
.await
.unwrap_or_else(|err| warn!("failed to insert {:?} into cache", err.0))
};
Ok((metadata, post, (parsing, rendering)))
}
pub async fn get_all_post_metadata_filtered(
&self, &self,
filter: impl Fn(&PostMetadata) -> bool, filter: impl Fn(&PostMetadata) -> bool,
) -> Result<Vec<PostMetadata>, PostError> { ) -> Result<Vec<PostMetadata>, PostError> {
self.get_all_posts(|m, _| filter(m)) let mut posts = Vec::new();
.await
.map(|vec| vec.into_iter().map(|(meta, ..)| meta).collect()) let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
if stat.is_file() && path.extension().is_some_and(|ext| ext == "md") {
let mtime = as_secs(&stat.modified()?);
// TODO. this?
let name = path
.clone()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
if let Some(cache) = self.cache.as_ref()
&& let Some(hit) = cache.lookup_metadata(&name, mtime).await
&& filter(&hit)
{
posts.push(hit);
} else {
match self.parse_and_render(name, path).await {
Ok((metadata, ..)) => {
if filter(&metadata) {
posts.push(metadata);
}
}
Err(err) => match err {
PostError::IoError(ref io_err)
if matches!(io_err.kind(), io::ErrorKind::NotFound) =>
{
warn!("TOCTOU: {}", err)
}
_ => return Err(err),
},
}
}
}
} }
async fn get_all_posts( Ok(posts)
}
pub async fn get_all_posts_filtered(
&self, &self,
filter: impl Fn(&PostMetadata, &str) -> bool, filter: impl Fn(&PostMetadata, &str) -> bool,
) -> Result<Vec<(PostMetadata, String, RenderStats)>, PostError>; ) -> Result<Vec<(PostMetadata, String, RenderStats)>, PostError> {
let mut posts = Vec::new();
async fn get_max_n_post_metadata_with_optional_tag_sorted( let mut read_dir = fs::read_dir(&self.config.dirs.posts).await?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let stat = fs::metadata(&path).await?;
if stat.is_file() && path.extension().is_some_and(|ext| ext == "md") {
let name = path
.clone()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let post = self.get_post(&name).await?;
if filter(&post.0, &post.1) {
posts.push(post);
}
}
}
Ok(posts)
}
pub async fn get_max_n_post_metadata_with_optional_tag_sorted(
&self, &self,
n: Option<usize>, n: Option<usize>,
tag: Option<&String>, tag: Option<&String>,
) -> Result<Vec<PostMetadata>, PostError> { ) -> Result<Vec<PostMetadata>, PostError> {
let mut posts = self let mut posts = self
.get_all_post_metadata(|metadata| !tag.is_some_and(|tag| !metadata.tags.contains(tag))) .get_all_post_metadata_filtered(|metadata| {
!tag.is_some_and(|tag| !metadata.tags.contains(tag))
})
.await?; .await?;
// we still want some semblance of order if created_at is None so sort by mtime as well // we still want some semblance of order if created_at is None so sort by mtime as well
posts.sort_unstable_by_key(|metadata| metadata.modified_at.unwrap_or_default()); posts.sort_unstable_by_key(|metadata| metadata.modified_at.unwrap_or_default());
@ -95,12 +295,95 @@ pub trait PostManager {
Ok(posts) Ok(posts)
} }
#[allow(unused)] pub async fn get_post(
async fn get_post_metadata(&self, name: &str) -> Result<PostMetadata, PostError> { &self,
self.get_post(name).await.map(|(meta, ..)| meta) name: &str,
) -> Result<(PostMetadata, String, RenderStats), PostError> {
let start = Instant::now();
let path = self.config.dirs.posts.join(name.to_owned() + ".md");
let stat = match tokio::fs::metadata(&path).await {
Ok(value) => value,
Err(err) => match err.kind() {
io::ErrorKind::NotFound => {
if let Some(cache) = self.cache.as_ref() {
cache.remove(name).await;
}
return Err(PostError::NotFound(name.to_string()));
}
_ => return Err(PostError::IoError(err)),
},
};
let mtime = as_secs(&stat.modified()?);
if let Some(cache) = self.cache.as_ref()
&& let Some(hit) = cache.lookup(name, mtime, &self.config.render).await
{
Ok((
hit.metadata,
hit.rendered,
RenderStats::Cached(start.elapsed()),
))
} else {
let (metadata, rendered, stats) = self.parse_and_render(name.to_string(), path).await?;
Ok((
metadata,
rendered,
RenderStats::ParsedAndRendered(start.elapsed(), stats.0, stats.1),
))
}
} }
async fn get_post(&self, name: &str) -> Result<(PostMetadata, String, RenderStats), PostError>; pub fn cache(&self) -> Option<&Cache> {
self.cache.as_ref()
async fn cleanup(&self); }
/// Prunes the cache: gives `Cache::cleanup` a callback mapping each post
/// name to its current on-disk mtime in seconds, or `None` when the backing
/// markdown file is missing or unreadable. No-op when caching is disabled.
pub async fn cleanup(&self) {
    let Some(cache) = self.cache.as_ref() else {
        return;
    };
    cache
        .cleanup(|name| {
            let file = self.config.dirs.posts.join(name.to_owned() + ".md");
            let stat = std::fs::metadata(file).ok()?;
            let mtime = stat.modified().ok()?;
            Some(as_secs(&mtime))
        })
        .await
}
/// Persists the cache to `config.cache.file` (zstd-compressed when
/// `cache.compress` is set) if caching and persistence are both enabled.
/// Called from `Drop`, hence the synchronous `std::fs` I/O — drop handlers
/// cannot await.
fn try_drop(&mut self) -> Result<(), eyre::Report> {
// write cache to file
let config = &self.config.cache;
if config.enable
&& config.persistence
&& let Some(cache) = self.cache()
{
let path = &config.file;
let serialized = bitcode::serialize(cache).context("failed to serialize cache")?;
let mut cache_file = std::fs::File::create(path)
.with_context(|| format!("failed to open cache at {}", path.display()))?;
let compression_level = config.compression_level;
if config.compress {
// auto_finish flushes the zstd frame when the encoder is dropped
std::io::Write::write_all(
&mut zstd::stream::write::Encoder::new(cache_file, compression_level)?
.auto_finish(),
&serialized,
)
} else {
cache_file.write_all(&serialized)
}
.context("failed to write cache to file")?;
info!("wrote cache to {}", path.display());
}
Ok(())
}
}
impl<C> Drop for PostManager<C>
where
C: Deref<Target = Config>,
{
fn drop(&mut self) {
self.try_drop().unwrap()
}
} }