fix: clippy clean up
TroyKomodo committed Oct 25, 2024
1 parent 35fbf2e commit b28b563
Showing 30 changed files with 555 additions and 289 deletions.
516 changes: 379 additions & 137 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions ffmpeg/Cargo.toml
@@ -11,6 +11,7 @@ bytes = { optional = true, version = "1" }
tokio = { optional = true, version = "1" }
crossbeam-channel = { optional = true, version = "0.5" }
tracing = { optional = true, version = "0.1" }
arc-swap = { version = "1.7.1" }

[features]
default = []
4 changes: 2 additions & 2 deletions ffmpeg/src/codec.rs
@@ -8,7 +8,7 @@ impl std::fmt::Debug for DecoderCodec {
if self.0.is_null() {
return f
.debug_struct("DecoderCodec")
.field("name", &std::ffi::CStr::from_bytes_with_nul(b"null\0").unwrap())
.field("name", &c"null")
.field("id", &AVCodecID::AV_CODEC_ID_NONE)
.finish();
}
@@ -67,7 +67,7 @@ impl std::fmt::Debug for EncoderCodec {
if self.0.is_null() {
return f
.debug_struct("EncoderCodec")
.field("name", &std::ffi::CStr::from_bytes_with_nul(b"null\0").unwrap())
.field("name", &c"null")
.field("id", &AVCodecID::AV_CODEC_ID_NONE)
.finish();
}
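A note on the codec.rs hunks above: Rust 1.77 stabilized C-string literals, so c"null" is a &'static CStr whose trailing NUL is checked at compile time, replacing the runtime CStr::from_bytes_with_nul(...).unwrap() construction (the exact clippy lint involved, likely manual_c_str_literals, is an assumption). A minimal standalone sketch of the equivalence:

use std::ffi::CStr;

fn main() {
    // Runtime construction: validates the trailing NUL, then unwraps.
    let old: &CStr = CStr::from_bytes_with_nul(b"null\0").unwrap();
    // C-string literal (Rust 1.77+): same value, validated at compile time.
    let new: &'static CStr = c"null";
    assert_eq!(old, new);
}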
4 changes: 2 additions & 2 deletions ffmpeg/src/consts.rs
@@ -9,7 +9,7 @@ impl<T: std::fmt::Debug> std::fmt::Debug for Const<'_, T> {
}
}

impl<'a, T> Const<'a, T> {
impl<T> Const<'_, T> {
pub(crate) fn new(value: T) -> Self {
Self(value, std::marker::PhantomData)
}
@@ -32,7 +32,7 @@ impl<T: std::fmt::Debug> std::fmt::Debug for MutConst<'_, T> {
}
}

impl<'a, T> MutConst<'a, T> {
impl<T> MutConst<'_, T> {
pub(crate) fn new(value: T) -> Self {
Self(value, std::marker::PhantomData)
}
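A note on the consts.rs hunks above (the same rewrite recurs in stream.rs, context.rs and tcp.rs below): the impl-level lifetime parameter is never named in the method bodies, so newer clippy prefers the anonymous '_, presumably via needless_lifetimes, though the exact lint name is an assumption. A standalone sketch with a hypothetical Const type:

// The lifetime is part of the type, but no method body ever names it,
// so `impl<T> Const<'_, T>` replaces `impl<'a, T> Const<'a, T>`.
struct Const<'a, T>(T, std::marker::PhantomData<&'a ()>);

impl<T> Const<'_, T> {
    fn value(&self) -> &T {
        &self.0
    }
}

fn main() {
    let c = Const(42, std::marker::PhantomData);
    println!("{}", c.value());
}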
37 changes: 22 additions & 15 deletions ffmpeg/src/log.rs
@@ -1,6 +1,7 @@
use std::ffi::CStr;
use std::sync::RwLock;
use std::sync::Arc;

use arc_swap::ArcSwap;
use ffmpeg_sys_next::*;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -20,15 +21,15 @@ pub enum LogLevel {
impl LogLevel {
pub const fn from_i32(value: i32) -> Self {
match value {
AV_LOG_QUIET => Self::Quiet,
AV_LOG_PANIC => Self::Panic,
AV_LOG_FATAL => Self::Fatal,
AV_LOG_ERROR => Self::Error,
AV_LOG_WARNING => Self::Warning,
AV_LOG_INFO => Self::Info,
AV_LOG_VERBOSE => Self::Verbose,
AV_LOG_DEBUG => Self::Debug,
AV_LOG_TRACE => Self::Trace,
-8 => Self::Quiet,
0 => Self::Panic,
8 => Self::Fatal,
16 => Self::Error,
24 => Self::Warning,
32 => Self::Info,
40 => Self::Verbose,
48 => Self::Debug,
56 => Self::Trace,
_ => Self::Info,
}
}
@@ -54,9 +55,9 @@ pub fn set_log_level(level: LogLevel) {
}
}

pub fn log_callback_set<F: Fn(LogLevel, Option<String>, String) + 'static>(callback: F) {
type Function = Box<dyn Fn(LogLevel, Option<String>, String)>;
static mut LOG_CALLBACK: RwLock<Option<Function>> = RwLock::new(None);
pub fn log_callback_set<F: Fn(LogLevel, Option<String>, String) + Send + Sync + 'static>(callback: F) {
type Function = Box<dyn Fn(LogLevel, Option<String>, String) + Send + Sync>;
static LOG_CALLBACK: std::sync::OnceLock<ArcSwap<Option<Function>>> = std::sync::OnceLock::new();

unsafe extern "C" fn log_cb(
ptr: *mut libc::c_void,
@@ -80,11 +81,17 @@ pub fn log_callback_set<F: Fn(LogLevel, Option<String>, String) + 'static>(callb

let msg = CStr::from_ptr(buf.as_ptr() as *const i8).to_string_lossy().trim().to_owned();

(LOG_CALLBACK.read().unwrap().as_deref().unwrap())(level, class, msg)
if let Some(cb) = LOG_CALLBACK.get() {
if let Some(cb) = cb.load().as_ref() {
cb(level, class, msg);
}
}
}

unsafe {
*LOG_CALLBACK.write().unwrap() = Some(Box::new(callback));
LOG_CALLBACK
.get_or_init(|| ArcSwap::new(Arc::new(None)))
.store(Arc::new(Some(Box::new(callback))));
av_log_set_callback(Some(log_cb));
}
}
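The substantive change in log.rs: the registered callback moves out of a static mut RwLock (taking references to a static mut is what newer compilers warn about) into an immutable static OnceLock<ArcSwap<...>>, so readers load an Arc snapshot without locking and log_callback_set atomically swaps in a new callback; the callback type also gains Send + Sync since it is now shared across threads. A minimal standalone sketch of the same pattern, with hypothetical set_callback/fire names rather than the crate's API:

use std::sync::{Arc, OnceLock};

use arc_swap::ArcSwap;

type Callback = Box<dyn Fn(&str) + Send + Sync>;

// Lazily initialised, lock-free slot for the current callback (None = unset).
static CALLBACK: OnceLock<ArcSwap<Option<Callback>>> = OnceLock::new();

fn set_callback<F: Fn(&str) + Send + Sync + 'static>(f: F) {
    CALLBACK
        .get_or_init(|| ArcSwap::new(Arc::new(None)))
        .store(Arc::new(Some(Box::new(f))));
}

fn fire(msg: &str) {
    // load() returns a cheap guard over the current Arc snapshot;
    // if nothing was ever registered we simply drop the message.
    if let Some(slot) = CALLBACK.get() {
        if let Some(cb) = slot.load().as_ref() {
            cb(msg);
        }
    }
}

fn main() {
    fire("dropped: no callback registered yet");
    set_callback(|msg| println!("log: {msg}"));
    fire("delivered via the swapped-in callback");
}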
2 changes: 1 addition & 1 deletion ffmpeg/src/stream.rs
@@ -100,7 +100,7 @@ impl<'a> Iterator for StreamIter<'a> {
}
}

impl<'a> std::iter::ExactSizeIterator for StreamIter<'a> {}
impl std::iter::ExactSizeIterator for StreamIter<'_> {}

pub struct Stream<'a>(&'a mut AVStream, &'a AVFormatContext);

23 changes: 14 additions & 9 deletions foundations/examples/src/http-server.rs
@@ -91,12 +91,14 @@ async fn main(settings: Matches<HttpServerSettings>) {
struct NoTlsService;

impl ServiceHandler for NormalService {
async fn on_request(&self, _: Request) -> Response {
Response::builder()
.status(StatusCode::OK)
.header("Alt-Svc", "h3=\":18080\"; ma=2592000")
.body("Hello, World!".into())
.unwrap()
fn on_request(&self, _: Request) -> impl std::future::Future<Output = impl IntoResponse> + Send {
std::future::ready(
Response::builder()
.status(StatusCode::OK)
.header("Alt-Svc", "h3=\":18080\"; ma=2592000")
.body(axum::body::Body::from("Hello, World!"))
.unwrap(),
)
}
}

@@ -144,11 +146,14 @@ async fn main(settings: Matches<HttpServerSettings>) {
}

impl MakeService for ServiceFactory {
async fn make_service(&self, incoming: &impl IncomingConnection) -> Option<AnyService> {
fn make_service(
&self,
incoming: &impl IncomingConnection,
) -> impl std::future::Future<Output = Option<impl ServiceHandler>> + Send {
if incoming.socket_kind() == SocketKind::Tcp {
Some(AnyService::NoTls(NoTlsService))
std::future::ready(Some(AnyService::NoTls(NoTlsService)))
} else {
Some(AnyService::Normal(NormalService))
std::future::ready(Some(AnyService::Normal(NormalService)))
}
}
}
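A note on the http-server.rs example above: the ServiceHandler and MakeService impls now return std::future::ready(...) behind an explicit impl Future<Output = ...> + Send signature instead of using async fn. Spelling the future out lets the signature carry the Send bound, which async fn sugar cannot express; that this is what the foundations traits require is an inference from the diff, not stated in the commit. A standalone sketch with a hypothetical Handler trait, not the foundations API:

use std::future::Future;

trait Handler {
    // Declaring the future explicitly lets the trait demand `Send`.
    fn handle(&self, req: String) -> impl Future<Output = String> + Send;
}

struct Echo;

impl Handler for Echo {
    fn handle(&self, req: String) -> impl Future<Output = String> + Send {
        // No await points are needed, so an already-completed future suffices.
        std::future::ready(format!("echo: {req}"))
    }
}

fn assert_send<F: Future + Send>(_: &F) {}

fn main() {
    // The Send bound is part of the signature, so this compiles by construction.
    assert_send(&Echo.handle("hi".to_owned()));
}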
4 changes: 1 addition & 3 deletions foundations/macros/src/settings/types/enum_ty.rs
@@ -203,8 +203,6 @@ pub struct EnumArgs {

impl Args for EnumArgs {
fn apply_meta(&mut self, meta: &Meta) -> syn::Result<bool> {
match meta {
meta => self.global.apply_meta(meta),
}
self.global.apply_meta(meta)
}
}
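The enum_ty.rs change above (and the identical one in struct_ty.rs further down) removes a match with a single catch-all arm, the pattern clippy's match_single_binding lint flags. A trivial standalone illustration:

fn describe(n: i32) -> String {
    // Before (flagged): a one-arm match adds nothing over the expression itself.
    // match n {
    //     n => format!("got {n}"),
    // }
    // After:
    format!("got {n}")
}

fn main() {
    println!("{}", describe(7));
}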
48 changes: 24 additions & 24 deletions foundations/macros/src/settings/types/serde.rs
@@ -6,29 +6,29 @@ use syn::Meta;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RenameAll {
LowerCase,
UpperCase,
PascalCase,
CamelCase,
SnakeCase,
ScreamingSnakeCase,
KebabCase,
ScreamingKebabCase,
Lower,
Upper,
Pascal,
Camel,
Snake,
ScreamingSnake,
Kebab,
ScreamingKebab,
}

impl FromStr for RenameAll {
type Err = ();

fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"lowercase" => Ok(Self::LowerCase),
"UPPERCASE" => Ok(Self::UpperCase),
"PascalCase" => Ok(Self::PascalCase),
"camelCase" => Ok(Self::CamelCase),
"snake_case" => Ok(Self::SnakeCase),
"SCREAMING_SNAKE_CASE" => Ok(Self::ScreamingSnakeCase),
"kebab-case" => Ok(Self::KebabCase),
"SCREAMING-KEBAB-CASE" => Ok(Self::ScreamingKebabCase),
"lowercase" => Ok(Self::Lower),
"UPPERCASE" => Ok(Self::Upper),
"PascalCase" => Ok(Self::Pascal),
"camelCase" => Ok(Self::Camel),
"snake_case" => Ok(Self::Snake),
"SCREAMING_SNAKE_CASE" => Ok(Self::ScreamingSnake),
"kebab-case" => Ok(Self::Kebab),
"SCREAMING-KEBAB-CASE" => Ok(Self::ScreamingKebab),
_ => Err(()),
}
}
@@ -57,14 +57,14 @@ impl RenameAll {

pub fn apply(&self, name: &str) -> String {
let case = match self {
Self::LowerCase => Case::Lower,
Self::UpperCase => Case::Upper,
Self::PascalCase => Case::Pascal,
Self::CamelCase => Case::Camel,
Self::SnakeCase => Case::Snake,
Self::ScreamingSnakeCase => Case::ScreamingSnake,
Self::KebabCase => Case::Kebab,
Self::ScreamingKebabCase => Case::UpperKebab,
Self::Lower => Case::Lower,
Self::Upper => Case::Upper,
Self::Pascal => Case::Pascal,
Self::Camel => Case::Camel,
Self::Snake => Case::Snake,
Self::ScreamingSnake => Case::ScreamingSnake,
Self::Kebab => Case::Kebab,
Self::ScreamingKebab => Case::UpperKebab,
};

name.to_case(case)
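The serde.rs rename above drops the shared Case suffix from every RenameAll variant, the pattern clippy's enum_variant_names lint reports, without touching the serde-style strings they parse from or the Case values they map to. A small standalone illustration:

// Every variant sharing the same `Case` suffix is what the lint objects to;
// the shorter names read the same at the call site.
#[allow(dead_code)]
enum RenameAll {
    Lower,
    Upper,
    Snake,
    ScreamingSnake,
}

fn label(r: RenameAll) -> &'static str {
    match r {
        RenameAll::Lower => "lowercase",
        RenameAll::Upper => "UPPERCASE",
        RenameAll::Snake => "snake_case",
        RenameAll::ScreamingSnake => "SCREAMING_SNAKE_CASE",
    }
}

fn main() {
    println!("{}", label(RenameAll::Snake));
}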
4 changes: 1 addition & 3 deletions foundations/macros/src/settings/types/struct_ty.rs
@@ -173,8 +173,6 @@ pub struct StructArgs {

impl Args for StructArgs {
fn apply_meta(&mut self, meta: &Meta) -> syn::Result<bool> {
match meta {
meta => self.global.apply_meta(meta),
}
self.global.apply_meta(meta)
}
}
16 changes: 11 additions & 5 deletions foundations/src/batcher/dataloader.rs
@@ -51,14 +51,20 @@ impl<L: Loader<S> + 'static + Send + Sync, S: BuildHasher + Default + Send + Syn
}
}

#[tracing::instrument(skip_all, fields(name = %self.batcher.inner.name))]
pub async fn load(&self, key: L::Key) -> Result<Option<L::Value>, ()> {
self.load_many(std::iter::once(key.clone()))
self.internal_load_many(std::iter::once(key.clone()))
.await
.map(|mut map| map.remove(&key))
}

#[tracing::instrument(skip_all, fields(name = %self.batcher.inner.name))]
pub async fn load_many(&self, keys: impl IntoIterator<Item = L::Key>) -> Result<HashMap<L::Key, L::Value, S>, ()> {
self.batcher.execute_many(keys).await.map_err(|err| match err {
self.internal_load_many(keys).await
}

async fn internal_load_many(&self, keys: impl IntoIterator<Item = L::Key>) -> LoaderOutput<L, S> {
self.batcher.internal_execute_many(keys).await.map_err(|err| match err {
BatcherError::Batch(Unit) => {}
err => tracing::error!("failed to load data: {err}"),
})
@@ -77,14 +83,14 @@ impl<L: Loader<S>, S: BuildHasher + Default + Send + Sync> BatchOperation for Wr
self.0.config()
}

fn process(
async fn process(
&self,
documents: <Self::Mode as super::BatchMode<Self>>::Input,
) -> impl std::future::Future<Output = Result<<Self::Mode as super::BatchMode<Self>>::OperationOutput, Self::Error>> + Send + '_
) -> Result<<Self::Mode as super::BatchMode<Self>>::OperationOutput, Self::Error>
where
Self: Send + Sync,
{
async move { self.0.load(documents.into_iter().collect()).await.map_err(|()| Unit) }
self.0.load(documents.into_iter().collect()).await.map_err(|()| Unit)
}
}

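Two things happen in dataloader.rs above: load and load_many gain #[tracing::instrument] spans and both forward to a private internal_load_many, apparently so that load reusing the many-key path does not record a second nested span; and the BatchOperation impl swaps a hand-written fn returning impl Future around a single async block for a plain async fn, the shape clippy's manual_async_fn lint targets (assuming that is the lint involved). A standalone sketch of that second rewrite, with hypothetical fetch functions:

use std::future::Future;

// Before (what the lint flags): a manual signature wrapping one async block.
fn fetch_manual(id: u32) -> impl Future<Output = Result<String, ()>> + Send {
    async move { Ok(format!("record {id}")) }
}

// After: identical behaviour, stated directly.
async fn fetch(id: u32) -> Result<String, ()> {
    Ok(format!("record {id}"))
}

fn assert_send<F: Future + Send>(_: &F) {}

fn main() {
    // Both return Send futures; only the spelling differs.
    assert_send(&fetch_manual(1));
    assert_send(&fetch(1));
}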
14 changes: 12 additions & 2 deletions foundations/src/batcher/mod.rs
@@ -240,6 +240,7 @@ struct Batch<T: BatchOperation> {
expires_at: tokio::time::Instant,
done: DropGuardCancellationToken,
ops: <T::Mode as BatchMode<T>>::Input,
#[allow(clippy::type_complexity)]
results: Arc<OnceCell<Result<<T::Mode as BatchMode<T>>::OperationOutput, BatcherError<T::Error>>>>,
}

@@ -265,6 +266,7 @@ struct BatchInsertWaiter<T: BatchOperation> {
id: u64,
done: tokio_util::sync::CancellationToken,
tracker: <T::Mode as BatchMode<T>>::Tracker,
#[allow(clippy::type_complexity)]
results: Arc<OnceCell<Result<<T::Mode as BatchMode<T>>::OperationOutput, BatcherError<T::Error>>>>,
}

@@ -297,7 +299,7 @@ impl<T: BatchOperation + 'static + Send + Sync> Batch<T> {
.instrument(tracing::debug_span!("Semaphore"))
.await
.map_err(|_| BatcherError::AcquireSemaphore)?;
Ok(inner.operation.process(self.ops).await.map_err(BatcherError::Batch)?)
inner.operation.process(self.ops).await.map_err(BatcherError::Batch)
})
.await;
}
@@ -411,8 +413,9 @@ impl<T: BatchOperation + 'static + Send + Sync> Batcher<T> {
}
}

#[tracing::instrument(skip_all, fields(name = %self.inner.name))]
pub async fn execute(&self, document: T::Item) -> Result<T::Response, BatcherError<T::Error>> {
let output = self.execute_many(std::iter::once(document)).await;
let output = self.internal_execute_many(std::iter::once(document)).await;
let iter = T::Mode::final_output_into_iter(output)?;
T::Mode::output_item_to_result(iter.into_iter().next().ok_or(BatcherError::MissingResult)?)
}
@@ -421,6 +424,13 @@ pub async fn execute_many(
pub async fn execute_many(
&self,
documents: impl IntoIterator<Item = T::Item>,
) -> <T::Mode as BatchMode<T>>::FinalOutput {
self.internal_execute_many(documents).await
}

pub async fn internal_execute_many(
&self,
documents: impl IntoIterator<Item = T::Item>,
) -> <T::Mode as BatchMode<T>>::FinalOutput {
let waiters = self.inner.batch_inserts(documents).await;

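Three small cleanups in batcher/mod.rs above: the wide Arc<OnceCell<Result<...>>> fields get #[allow(clippy::type_complexity)], execute gains its own #[tracing::instrument] span while execute and execute_many delegate to an uninstrumented internal_execute_many, and an Ok(expr?) is collapsed to expr, the pattern clippy's needless_question_mark lint reports. A tiny standalone illustration of that last rewrite:

#[derive(Debug)]
struct BatchError;

fn run_inner() -> Result<u32, BatchError> {
    Ok(42)
}

// Before (flagged): `?` unwraps the Result just so `Ok` can rebuild it.
fn run_before() -> Result<u32, BatchError> {
    Ok(run_inner()?)
}

// After: hand the Result straight through.
fn run_after() -> Result<u32, BatchError> {
    run_inner()
}

fn main() {
    println!("{:?} {:?}", run_before(), run_after());
}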
4 changes: 2 additions & 2 deletions foundations/src/context.rs
@@ -268,7 +268,7 @@ pub struct FutureWithContext<'a, F> {
ctx: ContextRef<'a>,
}

impl<'a, F: Future> Future for FutureWithContext<'a, F> {
impl<F: Future> Future for FutureWithContext<'_, F> {
type Output = Option<F::Output>;

fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
@@ -290,7 +290,7 @@ pub struct StreamWithContext<'a, F> {
ctx: ContextRef<'a>,
}

impl<'a, F: Stream> Stream for StreamWithContext<'a, F> {
impl<F: Stream> Stream for StreamWithContext<'_, F> {
type Item = F::Item;

fn poll_next(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Option<Self::Item>> {
2 changes: 1 addition & 1 deletion foundations/src/http/server/stream/tcp.rs
@@ -50,7 +50,7 @@ struct IncomingTcpConnection<'a> {
connection: &'a TcpStream,
}

impl<'a> IncomingConnection for IncomingTcpConnection<'a> {
impl IncomingConnection for IncomingTcpConnection<'_> {
fn socket_kind(&self) -> SocketKind {
SocketKind::Tcp
}
6 changes: 3 additions & 3 deletions foundations/src/settings/mod.rs
@@ -23,15 +23,15 @@ impl<S> SettingsParser<S> {

fn merge(&mut self, incoming: toml::Value) {
let root = self.root.take().unwrap();
self.root = Some(self.merge_loop(root, incoming));
self.root = Some(Self::merge_loop(root, incoming));
}

fn merge_loop(&self, root: toml::Value, incoming: toml::Value) -> toml::Value {
fn merge_loop(root: toml::Value, incoming: toml::Value) -> toml::Value {
match (root, incoming) {
(toml::Value::Table(mut first_map), toml::Value::Table(second_map)) => {
for (key, value) in second_map {
let combined_value = if let Some(existing_value) = first_map.remove(&key) {
self.merge_loop(existing_value, value)
Self::merge_loop(existing_value, value)
} else {
value
};
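In settings/mod.rs above, merge_loop stops taking &self because self was only used to recurse, so it becomes an associated function called as Self::merge_loop (presumably at clippy's prompting, a lint in the only_used_in_recursion/unused_self family, though the exact lint is an assumption). A standalone sketch of the merge shape with a toy Value enum standing in for toml::Value:

use std::collections::BTreeMap;

#[derive(Debug)]
enum Value {
    Int(i64),
    Table(BTreeMap<String, Value>),
}

struct SettingsParser;

impl SettingsParser {
    // An associated function: nothing here needs instance state.
    fn merge_loop(root: Value, incoming: Value) -> Value {
        match (root, incoming) {
            (Value::Table(mut first), Value::Table(second)) => {
                for (key, value) in second {
                    let combined = match first.remove(&key) {
                        // Both sides are tables: merge the subtrees recursively.
                        Some(existing) => Self::merge_loop(existing, value),
                        None => value,
                    };
                    first.insert(key, combined);
                }
                Value::Table(first)
            }
            // Any non-table collision: the incoming value wins.
            (_, incoming) => incoming,
        }
    }
}

fn main() {
    let base = Value::Table(BTreeMap::from([("port".to_owned(), Value::Int(80))]));
    let over = Value::Table(BTreeMap::from([("port".to_owned(), Value::Int(8080))]));
    println!("{:?}", SettingsParser::merge_loop(base, over));
}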