From 6e3810476bb7fc828b88c5b96d1d25a44ba56057 Mon Sep 17 00:00:00 2001
From: Teajey <21069848+Teajey@users.noreply.github.com>
Date: Wed, 7 Feb 2024 11:34:00 +1300
Subject: [PATCH] feat: next-file & prev-file headers

---
 src/frontmatter_file.rs        |  10 +-
 src/frontmatter_file/keeper.rs |   2 +-
 src/frontmatter_query.rs       |   2 +-
 src/main.rs                    |  18 +-
 src/route/frontmatter_file.rs  | 383 +++++++++++++++++++++++++++++----
 src/route/frontmatter_list.rs  | 163 ++++++--------
 src/route/mod.rs               |  30 +++
 7 files changed, 441 insertions(+), 167 deletions(-)

diff --git a/src/frontmatter_file.rs b/src/frontmatter_file.rs
index a2f8907..4a5cc49 100644
--- a/src/frontmatter_file.rs
+++ b/src/frontmatter_file.rs
@@ -9,11 +9,11 @@ pub use keeper::Keeper;
 
 #[derive(Debug, Clone, Serialize, PartialEq, Eq)]
 pub struct FrontmatterFile {
-    name: String,
-    frontmatter: Option<serde_yaml::Mapping>,
-    body: String,
-    modified: DateTime<Utc>,
-    created: DateTime<Utc>,
+    pub name: String,
+    pub frontmatter: Option<serde_yaml::Mapping>,
+    pub body: String,
+    pub modified: DateTime<Utc>,
+    pub created: DateTime<Utc>,
 }
 
 impl PartialOrd for FrontmatterFile {
diff --git a/src/frontmatter_file/keeper.rs b/src/frontmatter_file/keeper.rs
index 36b9e61..7113f6a 100644
--- a/src/frontmatter_file/keeper.rs
+++ b/src/frontmatter_file/keeper.rs
@@ -40,7 +40,7 @@ impl From<notify::Event> for FsEvent {
 }
 
 pub struct Keeper {
-    inner: HashMap<Utf8PathBuf, FrontmatterFile>,
+    pub inner: HashMap<Utf8PathBuf, FrontmatterFile>,
 }
 
 #[derive(Debug, thiserror::Error)]
diff --git a/src/frontmatter_query.rs b/src/frontmatter_query.rs
index 8763dae..8474cd2 100644
--- a/src/frontmatter_query.rs
+++ b/src/frontmatter_query.rs
@@ -44,7 +44,7 @@ impl QueryValue {
 }
 
 #[derive(Deserialize, Debug)]
-pub struct FrontmatterQuery(HashMap<String, QueryValue>);
+pub struct FrontmatterQuery(pub HashMap<String, QueryValue>);
 
 impl FrontmatterQuery {
     pub fn is_subset(&self, json_frontmatter: &serde_json::Map<String, serde_json::Value>) -> bool {
diff --git a/src/main.rs b/src/main.rs
index 2619ddc..7d66531 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -11,23 +11,7 @@ use axum::{
     routing, Json, Router,
 };
 use camino::Utf8PathBuf;
-use chrono::{DateTime, TimeZone};
 use notify::{RecursiveMode, Watcher};
-use serde_yaml::Mapping;
-
-fn get_sort_value<Tz: TimeZone>(
-    mapping: Option<&Mapping>,
-    sort_key: &str,
-    created: &DateTime<Tz>,
-) -> String {
-    mapping
-        .and_then(|m| m.get(sort_key))
-        .map(serde_yaml::to_string)
-        .transpose()
-        .ok()
-        .flatten()
-        .unwrap_or_else(|| serde_yaml::to_string(created).expect("DateTime must serialize"))
-}
 
 async fn frontmatter_collate_strings_get(
     State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
@@ -86,7 +70,7 @@ async fn run() -> Result<()> {
         )
         .route(
             "/frontmatter/file/:name",
-            routing::get(route::frontmatter_file::get).post(route::frontmatter_file::post),
+            routing::post(route::frontmatter_file::post).get(route::frontmatter_file::get),
        )
         .route(
             "/frontmatter/collate_strings/:key",
diff --git a/src/route/frontmatter_file.rs b/src/route/frontmatter_file.rs
index ade97cb..fe98a0e 100644
--- a/src/route/frontmatter_file.rs
+++ b/src/route/frontmatter_file.rs
@@ -2,66 +2,357 @@ use std::collections::HashMap;
 
 use anyhow::Result;
 use axum::{
-    extract::{Query, State},
-    http::StatusCode,
+    extract::{Path, Query, State},
+    http::{HeaderMap, StatusCode},
     Json,
 };
 
-use crate::{
-    frontmatter_file::{self, Short},
-    frontmatter_query::FrontmatterQuery,
-    get_sort_value, markup,
-};
+use crate::frontmatter_file;
+use crate::{frontmatter_file::FrontmatterFile, frontmatter_query::FrontmatterQuery};
 
-pub async fn get(
-    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
-) -> Result<Json<Vec<Short>>, StatusCode> {
-    let keeper = markdown_files.0.as_ref().lock().map_err(|err| {
-        eprintln!("Failed to lock data on a get_many request: {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    let mut files = keeper
-        .files()
-        .map(|file| file.clone().into())
-        .collect::<Vec<_>>();
-    files.sort();
-    files.reverse();
-    Ok(Json(files))
-}
+use super::{lock_keeper, query_files};
 
-pub async fn post(
-    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
-    params: Query<HashMap<String, String>>,
-    Json(query): Json<FrontmatterQuery>,
-) -> Result<Json<Vec<Short>>, StatusCode> {
-    let keeper = markdown_files.lock().map_err(|err| {
-        eprintln!("Failed to lock data on a get_many request: {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-
-    let mut files = keeper
-        .files()
-        .filter(|file| {
-            let Some(frontmatter) = file.frontmatter() else {
-                // if query is '{}', include this
-                return query.is_empty();
-            };
-            query.is_subset(&markup::yaml_to_json(frontmatter))
+fn get_sort_value(file: &FrontmatterFile, sort_key: &str) -> String {
+    file.frontmatter()
+        .and_then(|m| m.get(sort_key))
+        .map(serde_yaml::to_string)
+        .transpose()
+        .ok()
+        .flatten()
+        .unwrap_or_else(|| {
+            serde_yaml::to_string(&file.created).expect("DateTime must serialize")
         })
-        .map(|file| file.clone().into())
-        .collect::<Vec<_>>();
+}
 
+fn sort_with_params(params: &HashMap<String, String>, files: &mut [&FrontmatterFile]) {
     let sort_key = params.get("sort");
     if let Some(sort_key) = sort_key {
-        files.sort_by(|f: &Short, g| {
-            let f_value = get_sort_value(f.frontmatter.as_ref(), sort_key, &f.created);
-            let g_value = get_sort_value(g.frontmatter.as_ref(), sort_key, &g.created);
+        files.sort_by(|f, g| {
+            let f_value = get_sort_value(f, sort_key);
+            let g_value = get_sort_value(g, sort_key);
             f_value.cmp(&g_value)
         });
     } else {
         files.sort();
     }
     files.reverse();
-    Ok(Json(files))
+}
+
+fn assign_headers(
+    file: &FrontmatterFile,
+    prev_file_name: Option<&str>,
+    next_file_name: Option<&str>,
+) -> Result<HeaderMap, StatusCode> {
+    let mut headers = HeaderMap::new();
+    let frontmatter = file.frontmatter();
+    let frontmatter_string = serde_json::to_string(&frontmatter).map_err(|err| {
+        eprintln!(
+            "Failed to serialize frontmatter ({frontmatter:?}) as JSON during get request: {err}"
+        );
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+    let frontmatter_header_value = frontmatter_string.parse().map_err(|err| {
+        eprintln!("Failed to parse header value ({frontmatter_string:?}): {err}");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+    headers.insert("x-frontmatter", frontmatter_header_value);
+
+    let created_string = file.created().to_rfc3339();
+    let created_header_value = created_string.parse().map_err(|err| {
+        eprintln!("Failed to parse 'created' header value ({created_string:?}): {err}");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+    headers.insert("x-created", created_header_value);
+
+    let modified_string = file.modified().to_rfc3339();
+    let modified_header_value = modified_string.parse().map_err(|err| {
+        eprintln!("Failed to parse 'modified' header value ({modified_string:?}): {err}");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+    headers.insert("x-modified", modified_header_value);
+
+    if let Some(prev_file_name) = prev_file_name {
+        let prev_file_name_header_value = prev_file_name.parse().map_err(|err| {
+            eprintln!("Failed to parse 'prev-file-name' header value ({prev_file_name:?}): {err}");
+            StatusCode::INTERNAL_SERVER_ERROR
+        })?;
+        headers.insert("x-prev-file", prev_file_name_header_value);
+    }
+
+    if let Some(next_file_name) = next_file_name {
+        let next_file_name_header_value = next_file_name.parse().map_err(|err| {
+            eprintln!("Failed to parse 'next-file-name' header value ({next_file_name:?}): {err}");
+            StatusCode::INTERNAL_SERVER_ERROR
+        })?;
+        headers.insert("x-next-file", next_file_name_header_value);
+    }
+
+    Ok(headers)
+}
+
+fn find_file_and_index<'a>(
+    files: &'a [&'a FrontmatterFile],
+    name: &str,
+) -> Result<(usize, &'a FrontmatterFile), StatusCode> {
+    files
+        .iter()
+        .enumerate()
+        .find(|(_, file)| file.name() == name)
+        .map(|(i, file)| (i, *file))
+        .ok_or(StatusCode::NOT_FOUND)
+}
+
+fn get_prev_and_next_file_names<'a>(
+    files: &[&'a FrontmatterFile],
+    i: usize,
+) -> (Option<&'a str>, Option<&'a str>) {
+    let prev_file_name = if i > 0 {
+        Some(files[i - 1].name())
+    } else {
+        None
+    };
+    let next_file_name = files.get(i + 1).map(|f| f.name());
+    (prev_file_name, next_file_name)
+}
+
+fn post_inner(
+    files: &frontmatter_file::keeper::ArcMutex,
+    params: &HashMap<String, String>,
+    name: &str,
+    query: &FrontmatterQuery,
+) -> Result<(HeaderMap, String), StatusCode> {
+    let keeper = lock_keeper(files)?;
+
+    let mut files = keeper.files().collect::<Vec<_>>();
+
+    sort_with_params(params, &mut files);
+
+    let filtered_files = query_files(files.clone().into_iter(), query).collect::<Vec<_>>();
+
+    let (i, file) = find_file_and_index(&filtered_files, name)?;
+
+    let (prev_file_name, next_file_name) = get_prev_and_next_file_names(&filtered_files, i);
+
+    let headers = assign_headers(file, prev_file_name, next_file_name)?;
+
+    Ok((headers, file.body().to_owned()))
+}
+
+pub async fn post(
+    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
+    params: Query<HashMap<String, String>>,
+    Path(name): Path<String>,
+    Json(query): Json<FrontmatterQuery>,
+) -> Result<(HeaderMap, String), StatusCode> {
+    let result = post_inner(&markdown_files, &params, &name, &query)?;
+
+    Ok(result)
+}
+
+fn get_inner(
+    files: &frontmatter_file::keeper::ArcMutex,
+    params: &HashMap<String, String>,
+    name: &str,
+) -> Result<(HeaderMap, String), StatusCode> {
+    let keeper = lock_keeper(files)?;
+
+    let mut files = keeper.files().collect::<Vec<_>>();
+
+    sort_with_params(params, &mut files);
+
+    let (i, file) = find_file_and_index(&files, name)?;
+
+    let (prev_file_name, next_file_name) = get_prev_and_next_file_names(&files, i);
+
+    let headers = assign_headers(file, prev_file_name, next_file_name)?;
+
+    Ok((headers, file.body().to_owned()))
+}
+
+pub async fn get(
+    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
+    params: Query<HashMap<String, String>>,
+    Path(name): Path<String>,
+) -> Result<(HeaderMap, String), StatusCode> {
+    let result = get_inner(&markdown_files, &params, &name)?;
+
+    Ok(result)
+}
+
+#[cfg(test)]
+mod test {
+    use std::{collections::HashMap, path::PathBuf};
+
+    use camino::Utf8PathBuf;
+    use chrono::TimeZone;
+    use serde_yaml::Mapping;
+
+    use crate::{
+        frontmatter_file::{keeper::ArcMutex, FrontmatterFile, Keeper},
+        frontmatter_query::{FrontmatterQuery, QueryValue, Scalar},
+    };
+
+    use super::{get_inner, post_inner};
+
+    macro_rules! s {
+        ($v:literal) => {
+            $v.to_string()
+        };
+    }
+
+    macro_rules! path {
+        ($v:literal) => {
+            Utf8PathBuf::from_path_buf(PathBuf::from($v)).unwrap()
+        };
+    }
+
+    macro_rules! dt {
dt { + ($y:literal) => { + chrono::Utc.with_ymd_and_hms($y, 0, 0, 0, 0, 0).unwrap() + }; + ($y:literal, $m:literal) => { + chrono::Utc.with_ymd_and_hms($y, $m, $d, 0, 0, 0).unwrap() + }; + ($y:literal, $m:literal, $d:literal) => { + chrono::Utc.with_ymd_and_hms($y, $m, $d, 0, 0, 0).unwrap() + }; + ($y:literal, $m:literal, $d:literal, $h:literal) => { + chrono::Utc.with_ymd_and_hms($y, $m, $d, $h, 0, 0).unwrap() + }; + ($y:literal, $m:literal, $d:literal, $h:literal, $mm:literal) => { + chrono::Utc + .with_ymd_and_hms($y, $m, $d, $h, $mm, 0) + .unwrap() + }; + } + + fn make_test_keeper() -> ArcMutex { + let mut hm = HashMap::new(); + let mut fm = Mapping::new(); + fm.insert( + serde_yaml::Value::String(s!("tag")), + serde_yaml::Value::String(s!("blue")), + ); + hm.insert( + path!("/about.md"), + FrontmatterFile { + name: s!("about.md"), + frontmatter: Some(fm.clone()), + body: s!(""), + modified: dt!(2024, 1, 1, 11), + created: dt!(2024, 1, 1, 9), + }, + ); + hm.insert( + path!("/something.md"), + FrontmatterFile { + name: s!("something.md"), + frontmatter: None, + body: s!(""), + modified: dt!(2024, 1, 1, 6), + created: dt!(2024, 1, 1, 5), + }, + ); + hm.insert( + path!("/blah.md"), + FrontmatterFile { + name: s!("blah.md"), + frontmatter: Some(fm), + body: s!(""), + modified: dt!(2024, 1, 1, 16), + created: dt!(2024, 1, 1, 15), + }, + ); + ArcMutex::new(Keeper { inner: hm }) + } + + #[test] + fn get() { + let keeper = make_test_keeper(); + let mut params = HashMap::new(); + params.insert(s!("sort"), s!("created")); + + let (headers, _) = get_inner(&keeper, ¶ms, "something.md").unwrap(); + let next_file_name = headers + .get("x-next-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + let prev_file_name = headers + .get("x-prev-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + assert_eq!(Some("about.md"), prev_file_name); + assert_eq!(None, next_file_name); + + let (headers, _) = get_inner(&keeper, ¶ms, "about.md").unwrap(); + let next_file_name = headers + .get("x-next-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + let prev_file_name = headers + .get("x-prev-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + assert_eq!(Some("blah.md"), prev_file_name); + assert_eq!(Some("something.md"), next_file_name); + + let (headers, _) = get_inner(&keeper, ¶ms, "blah.md").unwrap(); + let next_file_name = headers + .get("x-next-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + let prev_file_name = headers + .get("x-prev-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + assert_eq!(None, prev_file_name); + assert_eq!(Some("about.md"), next_file_name); + } + + #[test] + fn post() { + let keeper = make_test_keeper(); + + let mut params = HashMap::new(); + params.insert(s!("sort"), s!("created")); + + let mut query_inner = HashMap::new(); + query_inner.insert(s!("tag"), QueryValue::Scalar(Scalar::String(s!("blue")))); + let query = FrontmatterQuery(query_inner); + + let (headers, _) = post_inner(&keeper, ¶ms, "about.md", &query).unwrap(); + let next_file_name = headers + .get("x-next-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + let prev_file_name = headers + .get("x-prev-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + assert_eq!(Some("blah.md"), prev_file_name); + assert_eq!(None, next_file_name); + + let (headers, _) = post_inner(&keeper, ¶ms, "blah.md", &query).unwrap(); + let next_file_name = headers + .get("x-next-file") + .map(|h| h.to_str()) + .transpose() + .unwrap(); + let prev_file_name = headers + 
+            .get("x-prev-file")
+            .map(|h| h.to_str())
+            .transpose()
+            .unwrap();
+        assert_eq!(None, prev_file_name);
+        assert_eq!(Some("about.md"), next_file_name);
+    }
 }
diff --git a/src/route/frontmatter_list.rs b/src/route/frontmatter_list.rs
index 761e576..183d339 100644
--- a/src/route/frontmatter_list.rs
+++ b/src/route/frontmatter_list.rs
@@ -2,123 +2,92 @@ use std::collections::HashMap;
 
 use anyhow::Result;
 use axum::{
-    extract::{Path, Query, State},
-    http::{HeaderMap, StatusCode},
+    extract::{Query, State},
+    http::StatusCode,
     Json,
 };
 
-use crate::get_sort_value;
-use crate::{frontmatter_file, markup};
-use crate::{frontmatter_file::FrontmatterFile, frontmatter_query::FrontmatterQuery};
-
-fn assign_headers(file: &FrontmatterFile) -> Result<HeaderMap, StatusCode> {
-    let mut headers = HeaderMap::new();
-    let frontmatter = file.frontmatter();
-    let frontmatter_string = serde_json::to_string(&frontmatter).map_err(|err| {
-        eprintln!(
-            "Failed to serialize frontmatter ({frontmatter:?}) as JSON during get request: {err}"
-        );
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    let frontmatter_header_value = frontmatter_string.parse().map_err(|err| {
-        eprintln!("Failed to parse header value ({frontmatter_string:?}): {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    headers.insert("x-frontmatter", frontmatter_header_value);
-
-    let created_string = file.created().to_rfc3339();
-    let created_header_value = created_string.parse().map_err(|err| {
-        eprintln!("Failed to parse 'created' header value ({created_string:?}): {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    headers.insert("x-created", created_header_value);
-
-    let modified_string = file.modified().to_rfc3339();
-    let modified_header_value = modified_string.parse().map_err(|err| {
-        eprintln!("Failed to parse 'modified' header value ({modified_string:?}): {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    headers.insert("x-modified", modified_header_value);
-
-    Ok(headers)
-}
-
-pub async fn post(
-    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
-    params: Query<HashMap<String, String>>,
-    Path(name): Path<String>,
-    Json(query): Json<FrontmatterQuery>,
-) -> Result<(HeaderMap, String), StatusCode> {
-    let keeper = markdown_files.lock().map_err(|err| {
-        eprintln!("Failed to lock data on a get_file request: {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
+use crate::{
+    frontmatter_file::{self, Short},
+    frontmatter_query::FrontmatterQuery,
+};
 
-    let mut files = keeper.files().collect::<Vec<_>>();
+use super::{lock_keeper, query_files};
+
+fn get_sort_value(file: &Short, sort_key: &str) -> String {
+    file.frontmatter
+        .as_ref()
+        .and_then(|m| m.get(sort_key))
+        .map(serde_yaml::to_string)
+        .transpose()
+        .ok()
+        .flatten()
+        .unwrap_or_else(|| {
+            serde_yaml::to_string(&file.created).expect("DateTime must serialize")
+        })
+}
 
+fn sort_with_params(params: &HashMap<String, String>, files: &mut [Short]) {
     let sort_key = params.get("sort");
     if let Some(sort_key) = sort_key {
         files.sort_by(|f, g| {
-            let f_value = get_sort_value(f.frontmatter(), sort_key, f.created());
-            let g_value = get_sort_value(g.frontmatter(), sort_key, g.created());
+            let f_value = get_sort_value(f, sort_key);
+            let g_value = get_sort_value(g, sort_key);
             f_value.cmp(&g_value)
         });
     } else {
         files.sort();
     }
     files.reverse();
+}
 
-    let (i, file) = files
-        .iter()
-        .enumerate()
-        .filter(|(_, file)| {
-            let Some(frontmatter) = file.frontmatter() else {
-                // if query is '{}', include this
-                return query.is_empty();
-            };
-            query.is_subset(&markup::yaml_to_json(frontmatter))
-        })
-        .find(|(_, file)| file.name() == name)
-        .ok_or(StatusCode::NOT_FOUND)?;
-    let prev_file_name = files.get(i - 1).map(|f| f.name());
-    let next_file_name = files.get(i + 1).map(|f| f.name());
-
-    let mut headers = assign_headers(file)?;
-
-    if let Some(prev_file_name) = prev_file_name {
-        let prev_file_name_header_value = prev_file_name.parse().map_err(|err| {
-            eprintln!("Failed to parse 'prev-file-name' header value ({prev_file_name:?}): {err}");
-            StatusCode::INTERNAL_SERVER_ERROR
-        })?;
-        headers.insert("x-prev-file", prev_file_name_header_value);
-    }
+fn get_inner(
+    params: &HashMap<String, String>,
+    files: &frontmatter_file::keeper::ArcMutex,
+) -> Result<Vec<Short>, StatusCode> {
+    let keeper = lock_keeper(files)?;
 
-    if let Some(next_file_name) = next_file_name {
-        let next_file_name_header_value = next_file_name.parse().map_err(|err| {
-            eprintln!("Failed to parse 'next-file-name' header value ({next_file_name:?}): {err}");
-            StatusCode::INTERNAL_SERVER_ERROR
-        })?;
-        headers.insert("x-next-file", next_file_name_header_value);
-    }
+    let mut files = keeper.files().cloned().map(Short::from).collect::<Vec<_>>();
+
+    sort_with_params(params, &mut files);
 
-    Ok((headers, file.body().to_owned()))
+    Ok(files)
 }
 
 pub async fn get(
     State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
-    Path(name): Path<String>,
-) -> Result<(HeaderMap, String), StatusCode> {
-    let keeper = markdown_files.lock().map_err(|err| {
-        eprintln!("Failed to lock data on a get_file request: {err}");
-        StatusCode::INTERNAL_SERVER_ERROR
-    })?;
-    let file = keeper
-        .files()
-        .find(|file| file.name() == name)
-        .ok_or(StatusCode::NOT_FOUND)?;
-
-    let headers = assign_headers(file)?;
-
-    Ok((headers, file.body().to_owned()))
+    params: Query<HashMap<String, String>>,
+) -> Result<Json<Vec<Short>>, StatusCode> {
+    let files = get_inner(&params, &markdown_files)?;
+
+    Ok(Json(files))
+}
+
+fn post_inner(
+    params: &HashMap<String, String>,
+    files: &frontmatter_file::keeper::ArcMutex,
+    query: &FrontmatterQuery,
+) -> Result<Vec<Short>, StatusCode> {
+    let keeper = lock_keeper(files)?;
+
+    let files = keeper.files();
+
+    let mut filtered_files = query_files(files, query)
+        .map(|file| file.clone().into())
+        .collect::<Vec<_>>();
+
+    sort_with_params(params, &mut filtered_files);
+
+    Ok(filtered_files)
+}
+
+pub async fn post(
+    State(markdown_files): State<frontmatter_file::keeper::ArcMutex>,
+    params: Query<HashMap<String, String>>,
+    Json(query): Json<FrontmatterQuery>,
+) -> Result<Json<Vec<Short>>, StatusCode> {
+    let files = post_inner(&params, &markdown_files, &query)?;
+
+    Ok(Json(files))
 }
diff --git a/src/route/mod.rs b/src/route/mod.rs
index d563dc7..84662a3 100644
--- a/src/route/mod.rs
+++ b/src/route/mod.rs
@@ -1,2 +1,32 @@
 pub mod frontmatter_file;
 pub mod frontmatter_list;
+
+use std::sync::MutexGuard;
+
+use axum::http::StatusCode;
+
+use crate::{
+    frontmatter_file::{keeper, FrontmatterFile, Keeper},
+    frontmatter_query::FrontmatterQuery,
+    markup,
+};
+
+fn query_files<'a>(
+    files: impl Iterator<Item = &'a FrontmatterFile>,
+    query: &'a FrontmatterQuery,
+) -> impl Iterator<Item = &'a FrontmatterFile> {
+    files.filter(|file| {
+        let Some(frontmatter) = file.frontmatter() else {
+            // if query is '{}', include this
+            return query.is_empty();
+        };
+        query.is_subset(&markup::yaml_to_json(frontmatter))
+    })
+}
+
+fn lock_keeper(keeper: &keeper::ArcMutex) -> Result<MutexGuard<Keeper>, StatusCode> {
+    keeper.lock().map_err(|err| {
+        eprintln!("Failed to lock files data: {err}");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })
+}
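
Usage sketch (not part of the patch itself): one way a client could consume the new x-prev-file and x-next-file headers added by this change. This assumes a reqwest dependency with the blocking feature and a server listening on http://localhost:3000; neither the HTTP client nor the bind address is specified by this diff, and the file name is illustrative only.

    // Hypothetical client: fetch one file via the /frontmatter/file/:name route
    // and read the neighbouring-file headers introduced by this patch.
    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let resp = reqwest::blocking::get("http://localhost:3000/frontmatter/file/about.md")?;
        // Either header is omitted when there is no previous/next file in the sorted list.
        let prev = resp.headers().get("x-prev-file").and_then(|v| v.to_str().ok());
        let next = resp.headers().get("x-next-file").and_then(|v| v.to_str().ok());
        println!("prev: {prev:?}, next: {next:?}");
        Ok(())
    }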