From 3705d412b43ab5db9d06e4d324f38401209d83a6 Mon Sep 17 00:00:00 2001 From: slonkazoid Date: Wed, 13 Mar 2024 21:48:13 +0300 Subject: [PATCH] do some slonking --- Cargo.toml | 26 +- README.md | 9 +- migrations/20240313162104_remove_url.sql | 1 + src/config.rs | 58 ++- src/error.rs | 83 +++++ src/fetch_media.rs | 99 ++++++ src/fetch_post.rs | 145 ++++++++ src/main.rs | 432 +++++------------------ src/query.rs | 2 +- templates/error.html | 15 + templates/tweet.html | 6 +- 11 files changed, 509 insertions(+), 367 deletions(-) create mode 100644 migrations/20240313162104_remove_url.sql create mode 100644 src/error.rs create mode 100644 src/fetch_media.rs create mode 100644 src/fetch_post.rs create mode 100644 templates/error.html diff --git a/Cargo.toml b/Cargo.toml index b2ff6b2..d1c1ba8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,12 +4,19 @@ version = "0.1.0" edition = "2021" [profile.release] -lto = "fat" -strip = true +#strip = true [features] -default = ["db"] -db = ["sqlx"] +default = ["db", "metrics", "tracing"] +db = ["dep:sqlx"] +metrics = ["tracing"] +tracing = [ + "dep:tracing", + "dep:tracing-subscriber", + "tower-http/trace", + "axum/tracing", + "axum/matched-path", +] [dependencies] askama = { version = "0.12.1", features = ["with-axum", "markdown"] } @@ -18,9 +25,12 @@ axum = "0.7.4" serde = { version = "1.0.197", features = ["derive"] } tokio = { version = "1.36.0", features = ["rt-multi-thread", "macros"] } toml = "0.8.10" -tower-http = { version = "0.5.2", features = ["trace", "fs"] } -tracing = "0.1.40" -tracing-subscriber = { version = "0.3.18", features = ["json", "env-filter"] } +tower-http = { version = "0.5.2", features = ["fs"] } +tracing = { version = "0.1.40", optional = true } +tracing-subscriber = { version = "0.3.18", optional = true, features = [ + "json", + "env-filter", +] } sqlx = { version = "0.7.3", optional = true, features = [ "runtime-tokio", "postgres", @@ -31,5 +41,5 @@ reqwest = { version = "0.11.25", default-features = false, features = [ "json", ] } html5ever = "0.26.0" -url = { version = "2.5.0", features = ["serde"] } markup5ever_rcdom = "0.2.0" +url = { version = "2.5.0", features = ["serde"] } diff --git a/README.md b/README.md index 9f0d160..2e6f726 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,8 @@ still very under construction ## Todo -- logging -- docs -- split the 500+ line main.rs into multiple files -- styling? (i probably wont ever make this look good, pr if u want to) +- [x] logging +- [ ] docs +- [x] split the 500+ line main.rs into multiple files +- [ ] styling? 
(i probably wont ever make this look good, pr if u want to) +- [ ] `cache_age` diff --git a/migrations/20240313162104_remove_url.sql b/migrations/20240313162104_remove_url.sql new file mode 100644 index 0000000..bb7fe84 --- /dev/null +++ b/migrations/20240313162104_remove_url.sql @@ -0,0 +1 @@ +ALTER TABLE posts DROP COLUMN url; diff --git a/src/config.rs b/src/config.rs index 7ea5766..f370d3f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,11 +1,10 @@ use std::{ - error::Error, + env, net::{IpAddr, Ipv4Addr}, - path::Path, }; use serde::{Deserialize, Serialize}; -use tokio::io::AsyncReadExt; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; use url::Url; #[derive(Serialize, Deserialize, Debug)] @@ -19,6 +18,7 @@ pub struct Config { pub title: String, pub description: String, pub db_url: Option, + pub json: bool, } impl Default for Config { @@ -31,19 +31,51 @@ impl Default for Config { title: "biter".into(), description: "biter twitter proxy".into(), db_url: None, + json: false, } } } -pub async fn load(file: impl AsRef) -> Result> { - let mut buf = String::new(); - - tokio::fs::OpenOptions::new() +pub async fn load() -> Config { + let config_file = env::var(format!("{}_CONFIG", env!("CARGO_BIN_NAME"))) + .unwrap_or(String::from("config.toml")); + match tokio::fs::OpenOptions::new() .read(true) - .open(&file) - .await? - .read_to_string(&mut buf) - .await?; - - Ok(toml::from_str(&buf)?) + .open(&config_file) + .await + { + Ok(mut file) => { + let mut buf = String::new(); + file.read_to_string(&mut buf) + .await + .expect("couldn't read configuration file"); + toml::from_str(&buf) + .unwrap_or_else(|err| panic!("couldn't parse configuration:\n{}", err)) + } + Err(err) => match err.kind() { + std::io::ErrorKind::NotFound => { + let config = Config::default(); + println!("configuration file doesn't exist, creating"); + match tokio::fs::OpenOptions::new() + .write(true) + .open(&config_file) + .await + { + Ok(mut file) => file + .write_all( + toml::to_string_pretty(&config) + .expect("couldn't serialize configuration") + .as_bytes(), + ) + .await + .unwrap_or_else(|err| eprintln!("couldn't write configuration: {}", err)), + Err(err) => { + eprintln!("couldn't open file {:?} for writing: {}", &config_file, err) + } + } + config + } + _ => panic!("couldn't open config file: {}", err), + }, + } } diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..65efbe7 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,83 @@ +use askama_axum::Template; +use axum::{ + http::StatusCode, + response::{IntoResponse, Response}, +}; +#[cfg(feature = "tracing")] +use tracing::error; + +#[derive(thiserror::Error, Debug)] +#[allow(unused)] +pub enum Error { + #[error(transparent)] + ReqwestError(#[from] reqwest::Error), + #[cfg(feature = "db")] + #[error(transparent)] + SqlxError(#[from] sqlx::Error), + #[error("database not configured")] + NoDb, + #[error(transparent)] + AskamaError(#[from] askama::Error), + #[error(transparent)] + UrlParseError(#[from] url::ParseError), + #[error("error while parsing html: {0}")] + HtmlParseError(&'static str), + #[error("couldn't parse API response: {0}")] + APIParseError(&'static str), +} + +#[derive(Template)] +#[template(path = "error.html")] +struct ErrorTemplate<'a> { + status_code: StatusCode, + reason: &'a str, +} + +impl IntoResponse for Error { + fn into_response(self) -> Response { + #[cfg(feature = "tracing")] + error!("error while handling request: {}", &self); + #[cfg(not(feature = "tracing"))] + eprintln!("error while handling request: {}", &self); + 
+ let (status_code, reason) = match self { + Self::ReqwestError(err) => { + if err.is_status() { + let status = err.status().unwrap(); + match status.as_u16() { + 404 => (StatusCode::NOT_FOUND, "Tweet not found"), + _ => ( + StatusCode::INTERNAL_SERVER_ERROR, + "Error response from twitter", + ), + } + } else { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed to contact twitter API", + ) + } + } + #[cfg(feature = "db")] + Self::SqlxError(err) => match err { + _ => ( + StatusCode::INTERNAL_SERVER_ERROR, + "There was an issue with the database", + ), + }, + Self::NoDb => (StatusCode::INTERNAL_SERVER_ERROR, "Database not configured"), + _ => (StatusCode::INTERNAL_SERVER_ERROR, "Unknown error"), + }; + + ( + status_code, + ErrorTemplate { + status_code, + reason, + }, + ) + .into_response() + } +} + +pub type Result = std::result::Result; diff --git a/src/fetch_media.rs b/src/fetch_media.rs new file mode 100644 index 0000000..a578b8b --- /dev/null +++ b/src/fetch_media.rs @@ -0,0 +1,99 @@ +use html5ever::{ + parse_document, + tendril::{SliceExt, TendrilSink}, +}; +use markup5ever_rcdom::{NodeData, RcDom}; +#[cfg(feature = "metrics")] +use tracing::info; + +use crate::{reqwest_client, Config, Error, Result}; + +pub async fn fetch_media(id: i64, config: &Config) -> Result)>> { + #[cfg(feature = "metrics")] + info!(metric = "fetch_media", %id, "fetching media for post"); + + let mut tweet_url = config.TWITTER_BASE_URL.clone(); + tweet_url.set_path(&format!("twitter/status/{}", id)); + + let body = reqwest_client().get(tweet_url).send().await?.text().await?; + + let document = parse_document(RcDom::default(), Default::default()) + .one(body) + .document; + + let children = document.children.borrow(); + + let html = children + .iter() + .nth(1) + .ok_or(Error::HtmlParseError("html not found (what)"))? + .children + .borrow(); + + let head = html + .iter() + .next() + .ok_or(Error::HtmlParseError("head not found (what??)"))? + .children + .borrow(); + + let og_image = match head.iter().find(|x| match &x.data { + NodeData::Element { name, attrs, .. } => { + &name.local == "meta" + && attrs + .borrow() + .iter() + .find(|y| &y.name.local == "property" && y.value == "og:image".to_tendril()) + .is_some() + } + _ => false, + }) { + Some(val) => val, + None => return Ok(None), + }; + + let mut url = match &og_image.data { + NodeData::Element { attrs, .. } => attrs + .borrow() + .iter() + .find(|attr| &attr.name.local == "content") + .ok_or(Error::HtmlParseError("og:image content attr not found"))? + .value + .to_string(), + _ => unreachable!(), + }; + + if url.ends_with(":large") { + url = url.split_at(url.len() - 6).0.to_string(); + } + + url += "?name=4096x4096"; + + let image_alt = match head.iter().find(|x| match &x.data { + NodeData::Element { name, attrs, .. } => { + &name.local == "meta" + && attrs + .borrow() + .iter() + .find(|y| &y.name.local == "property" && y.value == "og:image:alt".to_tendril()) + .is_some() + } + _ => false, + }) { + Some(x) => match &x.data { + NodeData::Element { attrs, .. } => Some( + attrs + .borrow() + .iter() + .find(|y| &y.name.local == "content") + .ok_or(Error::HtmlParseError("og:image:alt content attr not found"))? 
+ .value + .to_string(), + ), + _ => unreachable!(), + }, + None => None, + }; + + Ok(Some((url, image_alt))) +} diff --git a/src/fetch_post.rs b/src/fetch_post.rs new file mode 100644 index 0000000..0a26010 --- /dev/null +++ b/src/fetch_post.rs @@ -0,0 +1,145 @@ +use askama::filters::{escape, urlencode_strict}; +use html5ever::{parse_fragment, tendril::TendrilSink, QualName}; +use markup5ever_rcdom::{Handle, NodeData, RcDom}; +use serde::Deserialize; +#[cfg(feature = "metrics")] +use tracing::info; +#[cfg(feature = "tracing")] +use tracing::warn; +use url::Url; + +use crate::{fetch_media::fetch_media, reqwest_client, Config, Error, Post, Result}; + +#[derive(Deserialize)] +struct OembedTweetResponse { + author_name: String, // Display name + author_url: String, + html: String, +} + +fn walk(handle: &Handle) -> Result<(String, Vec)> { + let mut html = String::new(); + let mut media = Vec::new(); + + for child in handle.children.borrow().iter() { + match &child.data { + NodeData::Text { contents } => { + html += &escape(askama::Html, contents.borrow())?.to_string() + } + NodeData::Element { name, attrs, .. } => { + if "a" == &name.local { + let children = child.children.borrow(); + if let Some(handle) = children.iter().next() { + match &handle.data { + NodeData::Text { contents } => { + let contents = contents.borrow(); + if contents.starts_with("pic.twitter.com") { + media.push( + Url::parse(&format!("https://{}", contents))?.to_string(), + ) + } else { + html += &format!( + "{}", + attrs + .borrow() + .iter() + .find(|x| &x.name.local == "href") + .ok_or(Error::HtmlParseError("Kill yourself"))? + .value + .to_string(), + contents + ); + } + } + _ => { + return Err(Error::HtmlParseError("expected text node")); + } + } + } + } else { + return Err(Error::HtmlParseError("expected anchor tag")); + } + } + _ => { + return Err(Error::HtmlParseError("expected text node or element")); + } + } + } + + Ok((html, media)) +} + +fn parse_html(html: &str) -> Result<(String, Vec)> { + let handle = parse_fragment( + RcDom::default(), + Default::default(), + QualName::new(None, ns!(html), local_name!("body")), + vec![], + ) + .one(html) + .document; + + let root = handle.children.borrow(); + + let elem_html = root.iter().next().unwrap().children.borrow(); + + let elem_blockquote = elem_html + .iter() + .next() + .ok_or(Error::HtmlParseError("couldn't get blockquote"))? + .children + .borrow(); + + let elem_p = elem_blockquote + .iter() + .next() + .ok_or(Error::HtmlParseError("couldn't get paragraph"))?; + + walk(&elem_p) +} + +pub async fn fetch_post(id: i64, config: &Config) -> Result { + #[cfg(feature = "metrics")] + info!(metric = "fetch", %id); + + let mut url = config.FUCKING_ENDPOINT.clone(); + let mut tweet_url = config.TWITTER_BASE_URL.clone(); + tweet_url.set_path(&format!("twitter/status/{}", id)); + + url.set_query(Some(&format!( + "url={}&omit_script=1&lang=en", + urlencode_strict(tweet_url)? + ))); + let res: OembedTweetResponse = reqwest_client().get(url).send().await?.json().await?; + + let author_url = Url::parse(&res.author_url)?; + let handle = author_url + .path_segments() + .and_then(|x| x.last()) + .ok_or(Error::APIParseError("couldn't parse author_url"))?; + + let (body, media) = parse_html(&res.html)?; + + let mut image = None; + let mut alt = None; + + if media.len() > 0 { + if let Some((link, alt_text)) = fetch_media(id, config).await? 
{ + image = Some(link); + alt = alt_text; + } else { + #[cfg(feature = "tracing")] + warn!("couldn't fetch media"); + } + } + + Ok(Post { + id, + handle: handle.to_owned(), + name: res.author_name, + body, + media, + image, + alt, + }) +} diff --git a/src/main.rs b/src/main.rs index 0a24987..9e58c9c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,96 +1,44 @@ mod config; +mod error; +mod fetch_media; +mod fetch_post; #[cfg(feature = "db")] mod query; -use crate::config::Config; -#[cfg(feature = "db")] -use crate::query::{INSERT_POST, SELECT_POST}; -use html5ever::tendril::{SliceExt, TendrilSink}; -use html5ever::{parse_document, QualName}; -use reqwest::Client; - -use std::sync::OnceLock; -use std::{net::SocketAddr, time::Duration}; - -#[macro_use] -extern crate html5ever; - -use askama::filters::{escape, urlencode_strict}; -use askama_axum::{IntoResponse, Template}; +use askama_axum::Template; +#[cfg(feature = "tracing")] +use axum::{extract::MatchedPath, http::Request, response::Response}; use axum::{ - extract::{MatchedPath, Path, State}, - http::{Request, StatusCode}, - response::Response, + extract::{Path, State}, + response::IntoResponse, routing::get, Router, }; -use html5ever::parse_fragment; -use markup5ever_rcdom::{Handle, NodeData, RcDom}; -use serde::{Deserialize, Serialize}; +use reqwest::Client; +use serde::Serialize; #[cfg(feature = "db")] use sqlx::{FromRow, PgPool}; +use std::net::SocketAddr; +use std::sync::OnceLock; +#[cfg(feature = "tracing")] +use std::time::Duration; use thiserror::Error; use tokio::net::TcpListener; +#[cfg(feature = "tracing")] use tower_http::trace::TraceLayer; -use tracing::{error, info, info_span, warn, Span}; -use url::Url; +#[cfg(feature = "tracing")] +use tracing::{info, info_span, level_filters::LevelFilter, Span}; +#[cfg(feature = "tracing")] +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}; -#[derive(Error, Debug)] -#[allow(unused)] -enum AppError { - #[error(transparent)] - ReqwestError(#[from] reqwest::Error), - #[cfg(feature = "db")] - #[error(transparent)] - SqlxError(#[from] sqlx::Error), - #[error("database not configured")] - NoDb, - #[error(transparent)] - AskamaError(#[from] askama::Error), - #[error(transparent)] - UrlParseError(#[from] url::ParseError), - #[error("error while parsing html: {0}")] - HtmlParseError(&'static str), - #[error("couldn't parse API response: {0}")] - APIParseError(&'static str), -} +pub use crate::config::Config; +pub use crate::error::{Error, Result}; +use crate::fetch_post::fetch_post; +#[cfg(feature = "db")] +use crate::query::{INSERT_POST, SELECT_POST}; -impl IntoResponse for AppError { - fn into_response(self) -> Response { - error!("{}", &self); - match self { - Self::ReqwestError(err) => { - if err.is_status() { - let status = StatusCode::from_u16(err.status().unwrap().as_u16()).unwrap(); - (status, format!("Error response from twitter: {}", status)).into_response() - } else { - ( - StatusCode::INTERNAL_SERVER_ERROR, - "Failed to contact twitter API", - ) - .into_response() - } - } - #[cfg(feature = "db")] - Self::SqlxError(err) => match err { - sqlx::Error::RowNotFound => { - (StatusCode::NOT_FOUND, "just fucking not found").into_response() - } - _ => ( - StatusCode::INTERNAL_SERVER_ERROR, - "There was an issue with the database", - ) - .into_response(), - }, - Self::NoDb => { - (StatusCode::INTERNAL_SERVER_ERROR, "Database not configured").into_response() - } - _ => StatusCode::INTERNAL_SERVER_ERROR.into_response(), - } - } -} - -type AppResult = Result; 
+#[macro_use] +extern crate html5ever; struct AppState { pub config: Config, @@ -112,6 +60,7 @@ struct IndexTemplate<'a> { struct TweetTemplate<'a> { title: &'a str, author: String, + author_url: String, url: String, handle: String, content: String, @@ -120,19 +69,10 @@ struct TweetTemplate<'a> { alt: Option, } -#[derive(Deserialize)] -struct OembedTweetResponse { - url: String, - author_name: String, // Display name - author_url: String, - html: String, -} - #[cfg_attr(feature = "db", derive(FromRow, Serialize, Clone, Debug))] #[cfg_attr(not(feature = "db"), derive(Serialize, Clone, Debug))] -struct Post { +pub struct Post { pub id: i64, - pub url: String, pub handle: String, pub name: String, pub body: String, @@ -141,7 +81,7 @@ struct Post { pub alt: Option, } -fn reqwest_client() -> Client { +pub fn reqwest_client() -> Client { static CLIENT: OnceLock = OnceLock::new(); CLIENT .get_or_init(|| { @@ -153,229 +93,11 @@ fn reqwest_client() -> Client { .clone() } -async fn get_image_url_from_tweet_id_lol( - id: i64, - config: &Config, -) -> Result)>, AppError> { - info!(metric = "fetch_media", %id, "fetching media for post"); - - let mut tweet_url = config.TWITTER_BASE_URL.clone(); - tweet_url.set_path(&format!("twitter/status/{}", id)); - - let body = reqwest_client().get(tweet_url).send().await?.text().await?; - - let document = parse_document(RcDom::default(), Default::default()) - .one(body) - .document; - - let children = document.children.borrow(); - - let html = children - .iter() - .nth(1) - .ok_or(AppError::HtmlParseError("html not found (what)"))? - .children - .borrow(); - - let head = html - .iter() - .next() - .ok_or(AppError::HtmlParseError("head not found (what??)"))? - .children - .borrow(); - - let og_image = match head.iter().find(|x| match &x.data { - NodeData::Element { name, attrs, .. } => { - &name.local == "meta" - && attrs - .borrow() - .iter() - .find(|y| &y.name.local == "property" && y.value == "og:image".to_tendril()) - .is_some() - } - _ => false, - }) { - Some(val) => val, - None => return Ok(None), - }; - - let mut url = match &og_image.data { - NodeData::Element { attrs, .. } => attrs - .borrow() - .iter() - .find(|attr| &attr.name.local == "content") - .ok_or(AppError::HtmlParseError("twitter is actually trolling now"))? - .value - .to_string(), - _ => unreachable!(), - }; - - if url.ends_with(":large") { - url = url.split_at(url.len() - 6).0.to_string(); - } - - url += "?name=4096x4096"; - - let image_alt = match head.iter().find(|x| match &x.data { - NodeData::Element { name, attrs, .. } => { - &name.local == "meta" - && attrs - .borrow() - .iter() - .find(|y| &y.name.local == "property" && y.value == "og:image:alt".to_tendril()) - .is_some() - } - _ => false, - }) { - Some(x) => match &x.data { - NodeData::Element { attrs, .. } => Some( - attrs - .borrow() - .iter() - .find(|y| &y.name.local == "content") - .ok_or(AppError::HtmlParseError("fuck"))? - .value - .to_string(), - ), - _ => unreachable!(), - }, - None => None, - }; - - Ok(Some((url, image_alt))) -} - -fn walk(handle: &Handle) -> Result<(String, Vec), AppError> { - let mut html = String::new(); - let mut media = Vec::new(); - - for child in handle.children.borrow().iter() { - match &child.data { - NodeData::Text { contents } => { - html += &escape(askama::Html, contents.borrow())?.to_string() - } - NodeData::Element { name, attrs, .. 
} => { - if "a" == &name.local { - let children = child.children.borrow(); - if let Some(handle) = children.iter().next() { - match &handle.data { - NodeData::Text { contents } => { - let contents = contents.borrow(); - if contents.starts_with("pic.twitter.com") { - media.push( - Url::parse(&format!("https://{}", contents))?.to_string(), - ) - } else { - html += &format!( - "{}", - attrs - .borrow() - .iter() - .find(|x| &x.name.local == "href") - .ok_or(AppError::HtmlParseError("Kill yourself"))? - .value - .to_string(), - contents - ); - } - } - _ => { - return Err(AppError::HtmlParseError("no")); - } - } - } - } else { - return Err(AppError::HtmlParseError("AAAA")); - } - } - _ => { - return Err(AppError::HtmlParseError("the fuck")); - } - } - } - - Ok((html, media)) -} - -fn parse_html(html: &str) -> Result<(String, Vec), AppError> { - let handle = parse_fragment( - RcDom::default(), - Default::default(), - QualName::new(None, ns!(html), local_name!("body")), - vec![], - ) - .one(html) - .document; - - let root = handle.children.borrow(); - - let elem_html = root.iter().next().unwrap().children.borrow(); - - let elem_blockquote = elem_html - .iter() - .next() - .ok_or(AppError::HtmlParseError("couldn't get blockquote"))? - .children - .borrow(); - - let elem_p = elem_blockquote - .iter() - .next() - .ok_or(AppError::HtmlParseError("couldn't get paragraph"))?; - - walk(&elem_p) -} - -async fn fetch_post(id: i64, config: &Config) -> Result { - info!(metric = "fetch", %id, "fetching post"); - - let mut url = config.FUCKING_ENDPOINT.clone(); - let mut tweet_url = config.TWITTER_BASE_URL.clone(); - tweet_url.set_path(&format!("twitter/status/{}", id)); - - url.set_query(Some(&format!( - "url={}&omit_script=1&lang=en", - urlencode_strict(tweet_url)? - ))); - let res: OembedTweetResponse = reqwest_client().get(url).send().await?.json().await?; - - let author_url = Url::parse(&res.author_url)?; - let handle = author_url - .path_segments() - .and_then(|x| x.last()) - .ok_or(AppError::APIParseError("couldn't parse author_url"))?; - - let (body, media) = parse_html(&res.html)?; - - let mut image = None; - let mut alt = None; - - if media.len() > 0 { - if let Some((shit, fuck)) = get_image_url_from_tweet_id_lol(id, config).await? 
{ - image = Some(shit); - alt = fuck; - } else { - warn!("couldn't fetch media"); - } - } - - Ok(Post { - id, - url: res.url, - handle: handle.to_owned(), - name: res.author_name, - body, - media, - image, - alt, - }) -} - async fn tweet<'a>( #[cfg(feature = "db")] State(AppState { config, db }): State, #[cfg(not(feature = "db"))] State(AppState { config }): State, - Path((handle, id)): Path<(String, i64)>, -) -> AppResult> { + #[allow(unused_variables)] Path((handle, id)): Path<(String, i64)>, +) -> Result> { #[cfg(feature = "db")] let post = match match db.as_ref() { Some(conn) => match sqlx::query_as::<_, Post>(SELECT_POST) @@ -384,6 +106,7 @@ async fn tweet<'a>( .await { Ok(post) => { + #[cfg(feature = "metrics")] info!(metric = "post_retrieve", from = "db", %id, "retrieved post from db"); Some(post) } @@ -397,6 +120,7 @@ async fn tweet<'a>( Some(post) => post, None => { let post = fetch_post(id, config).await?; + #[cfg(feature = "metrics")] info!(metric = "post_retrieve", from = "twitter", %id, "retrieved post from twitter"); if let Some(conn) = db.as_ref() { @@ -404,7 +128,6 @@ async fn tweet<'a>( sqlx::query(INSERT_POST) .bind(post.id) - .bind(post.url) .bind(post.handle) .bind(post.name) .bind(post.body) @@ -413,6 +136,7 @@ async fn tweet<'a>( .bind(post.alt) .execute(conn) .await?; + #[cfg(feature = "metrics")] info!(metric = "post_add", %id, "added post into db"); } @@ -422,8 +146,10 @@ async fn tweet<'a>( #[cfg(not(feature = "db"))] let post = fetch_post(id, config).await?; #[cfg(not(feature = "db"))] + #[cfg(feature = "metrics")] info!(metric = "post_retrieve", from = "twitter", %id, "retrieved post from twitter"); + #[cfg(feature = "metrics")] if handle != "twitter" && handle != post.handle { info!(metric = "dickhead", %handle, "dickhead found"); } @@ -431,12 +157,21 @@ async fn tweet<'a>( Ok(TweetTemplate { title: &config.title, author: post.name, + author_url: { + let mut url = config.TWITTER_BASE_URL.clone(); + url.set_path(&post.handle); + url.to_string() + }, + url: { + let mut url = config.TWITTER_BASE_URL.clone(); + url.set_path(&format!("{}/status/{}", post.handle, post.id)); + url.to_string() + }, + handle: post.handle, content: post.body, media: post.media, image: post.image, alt: post.alt, - url: post.url, - handle: post.handle, }) } @@ -449,9 +184,26 @@ async fn index(State(AppState { config, .. 
}): State) -> impl IntoR #[tokio::main] async fn main() { - tracing_subscriber::fmt().init(); + let config = config::load().await; - let config = config::load("config.toml").await.unwrap_or_default(); + #[cfg(feature = "tracing")] + tracing_subscriber::registry() + .with( + EnvFilter::builder() + .with_default_directive(LevelFilter::INFO.into()) + .from_env_lossy(), + ) + .with(if config.json { + None + } else { + Some(tracing_subscriber::fmt::layer()) + }) + .with(if config.json { + Some(tracing_subscriber::fmt::layer().json()) + } else { + None + }) + .init(); reqwest_client(); @@ -479,36 +231,42 @@ async fn main() { let app = Router::new() .nest_service("/static", tower_http::services::ServeDir::new("static")) .route("/", get(index)) - .route("/:user/status/:id", get(tweet)) - .layer( - TraceLayer::new_for_http() - .make_span_with(|request: &Request<_>| { - let matched_path = request - .extensions() - .get::() - .map(MatchedPath::as_str); + .route("/:user/status/:id", get(tweet)); - info_span!( - "request", - method = ?request.method(), - path = ?request.uri().path(), - matched_path, - some_other_field = tracing::field::Empty, - ) - }) - .on_response(|response: &Response<_>, duration: Duration, span: &Span| { - let _ = span.enter(); - let status = response.status(); - info!(?status, ?duration); - }), - ) - .with_state(state); + #[cfg(feature = "tracing")] + let app = app.layer( + TraceLayer::new_for_http() + .make_span_with(|request: &Request<_>| { + let matched_path = request + .extensions() + .get::() + .map(MatchedPath::as_str); + + info_span!( + "request", + method = ?request.method(), + path = ?request.uri().path(), + matched_path, + some_other_field = tracing::field::Empty, + ) + }) + .on_response(|response: &Response<_>, duration: Duration, span: &Span| { + let _ = span.enter(); + let status = response.status(); + info!(?status, ?duration); + }), + ); + + let app = app.with_state(state); let listener = TcpListener::bind((state.config.host, state.config.port)) .await .expect("couldn't listen"); let local_addr = listener.local_addr().expect("couldn't get socket address"); + #[cfg(feature = "tracing")] info!("listening on http://{}", local_addr); + #[cfg(not(feature = "tracing"))] + eprintln!("listening on http://{}", local_addr); axum::serve( listener, diff --git a/src/query.rs b/src/query.rs index 356033c..1986cc2 100644 --- a/src/query.rs +++ b/src/query.rs @@ -1,3 +1,3 @@ pub const SELECT_POST: &str = "SELECT * FROM posts WHERE id = $1"; pub const INSERT_POST: &str = - "INSERT INTO posts (id, url, handle, name, body, media, image, alt) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)"; + "INSERT INTO posts (id, handle, name, body, media, image, alt) VALUES ($1, $2, $3, $4, $5, $6, $7)"; diff --git a/templates/error.html b/templates/error.html new file mode 100644 index 0000000..413a369 --- /dev/null +++ b/templates/error.html @@ -0,0 +1,15 @@ + + + + + + Error + + + +

+    {{ status_code }}
+    {{ reason }}
+    main page
diff --git a/templates/tweet.html b/templates/tweet.html
index 4264ec7..23f514d 100644
--- a/templates/tweet.html
+++ b/templates/tweet.html
@@ -1,16 +1,14 @@
 {{ author }}
-@{{ handle }}
+@{{ handle }}

{{ content|safe }}

{% match image %} {% when Some with (link) %} {% match alt %} {% when Some with (text) %} {{ text }} {% when None %}{% endmatch%}