diff --git a/Cargo.lock b/Cargo.lock index 074822f2e..e448b43b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,27 +2,6 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "Inflector" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" -dependencies = [ - "lazy_static", - "regex", -] - -[[package]] -name = "aac" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "bytesio", - "num-derive", - "num-traits", -] - [[package]] name = "addr2line" version = "0.21.0" @@ -38,17 +17,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.11" @@ -79,20 +47,9 @@ checksum = "4aa90d7ce82d4be67b64039a3d588d38dbcc6736577de4a847025ce5b0c468d1" [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" - -[[package]] -name = "amf0" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "bytesio", - "num-derive", - "num-traits", -] +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -111,47 +68,48 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -159,12 +117,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.82" +version = "1.0.83" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" -dependencies = [ - "backtrace", -] +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" [[package]] name = "arbitrary" @@ -186,19 +141,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "argon2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" -dependencies = [ - "base64ct", - "blake2", - "cpufeatures", - "password-hash", + "syn 2.0.60", ] [[package]] @@ -207,112 +150,26 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "ascii_utils" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71938f30533e4d95a6d17aa530939da3842c2ab6f4f84b9dae68447e4129f74a" - -[[package]] -name = "async-graphql" -version = "7.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261fa27d5bff5afdf7beff291b3bc73f99d1529804c70e51b0fbc51e70b1c6a9" -dependencies = [ - "async-graphql-derive", - "async-graphql-parser", - "async-graphql-value", - "async-stream", - "async-trait", - "base64 0.21.7", - "bytes", - "chrono", - "fast_chemail", - "fnv", - "futures-util", - "handlebars", - "http 1.1.0", - "indexmap 2.2.6", - "lru 0.7.8", - "mime", - "multer", - "num-traits", - "once_cell", - "pin-project-lite", - "regex", - "serde", - "serde_json", - "serde_urlencoded", - "sha2", - "static_assertions_next", - "tempfile", - "thiserror", - "tracing", - "tracing-futures", -] - -[[package]] -name = "async-graphql-derive" -version = "7.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3188809947798ea6db736715a60cf645ba3b87ea031c710130e1476b48e45967" -dependencies = [ - "Inflector", - "async-graphql-parser", - "darling", - "proc-macro-crate", - "proc-macro2", - "quote", - "strum", - "syn 2.0.58", - "thiserror", -] - -[[package]] -name = "async-graphql-parser" -version = "7.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4e65a0b83027f35b2a5d9728a098bc66ac394caa8191d2c65ed9eb2985cf3d8" -dependencies = [ - "async-graphql-value", - "pest", - "serde", - "serde_json", -] - -[[package]] -name = "async-graphql-value" -version = "7.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68e40849c29a39012d38bff87bfed431f1ed6c53fbec493294c1045d61a7ae75" -dependencies = [ - "bytes", - "indexmap 2.2.6", - "serde", - "serde_json", -] - [[package]] name = "async-nats" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc1f1a75fd07f0f517322d103211f12d757658e91676def9a2e688774656c60" +checksum = "d5e47d2f7305524258908449aff6c86db36697a9b4219bfb1777e0ca1945358d" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "bytes", "futures", - "http 0.2.12", "memchr", "nkeys", "nuid", "once_cell", + "portable-atomic", "rand", "regex", - "ring 0.17.8", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", - "rustls-pemfile 1.0.4", - "rustls-webpki 0.101.7", + "ring", + "rustls-native-certs 0.7.0", + "rustls-pemfile 2.1.2", + "rustls-webpki 0.102.3", "serde", "serde_json", "serde_nanos", @@ -320,9 
+177,9 @@ dependencies = [ "thiserror", "time", "tokio", - "tokio-retry", - "tokio-rustls 0.24.1", + "tokio-rustls 0.26.0", "tracing", + "tryhard", "url", ] @@ -345,7 +202,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -356,23 +213,14 @@ checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "autocfg" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" - -[[package]] -name = "av1" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "bytesio", -] +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "av1-grain" @@ -390,9 +238,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.1.10" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48730d0b4c3d91c43d0d37168831d9fd0e065ad4a889a2ee9faf8d34c3d2804d" +checksum = "baaa0be6ee7d90b775ae6ccb6d2ba182b91219ec2001f92338773a094246af1d" dependencies = [ "aws-credential-types", "aws-runtime", @@ -411,7 +259,7 @@ dependencies = [ "hex", "http 0.2.12", "hyper 0.14.28", - "ring 0.17.8", + "ring", "time", "tokio", "tracing", @@ -421,9 +269,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.8" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8587ae17c8e967e4b05a62d495be2fb7701bec52a97f7acfe8a29f938384c8" +checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -431,11 +279,38 @@ dependencies = [ "zeroize", ] +[[package]] +name = "aws-lc-rs" +version = "1.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf7d844e282b4b56750b2d4e893b2205581ded8709fddd2b6aa5418c150ca877" +dependencies = [ + "aws-lc-sys", + "mirai-annotations", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3a2c29203f6bf296d01141cc8bb9dbd5ecd4c27843f2ee0767bcd5985a927da" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", +] + [[package]] name = "aws-runtime" -version = "1.1.9" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4ee6903f9d0197510eb6b44c4d86b493011d08b4992938f7b9be0333b6685aa" +checksum = "f4963ac9ff2d33a4231b3806c1c69f578f221a9cabb89ad2bde62ce2b442c8a7" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -457,11 +332,11 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.22.0" +version = "1.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "644c5939c1b78097d37f3341708978d68490070d4b0f8fa91f0878678c06a7ef" +checksum = "cedc97499da49c3e36cde578340f9925284685073cb3e512aaf9ab16cd9a2541" dependencies = [ - "ahash 0.8.11", + "ahash", "aws-credential-types", "aws-runtime", "aws-sigv4", @@ -481,7 +356,7 @@ dependencies = [ "hmac", "http 0.2.12", "http-body 0.4.6", - "lru 0.12.3", + "lru", "once_cell", "percent-encoding", "regex-lite", @@ -492,9 +367,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.19.0" +version = "1.22.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2be5ba83b077b67a6f7a1927eb6b212bf556e33bd74b5eaa5aa6e421910803a" +checksum = "ca3d6c4cba4e009391b72b0fcf12aff04ea3c9c3aa2ecaafa330326a8bd7e601" dependencies = [ "aws-credential-types", "aws-runtime", @@ -514,9 +389,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.19.0" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022ca669825f841aef17b12d4354ef2b8651e4664be49f2d9ea13e4062a80c9f" +checksum = "73400dc239d14f63d932f4ca7b55af5e9ef1f857f7d70655249ccc287adb2570" dependencies = [ "aws-credential-types", "aws-runtime", @@ -536,9 +411,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.19.0" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e4a5f5cb007347c1ab34a6d56456301dfada921fc9e57d687ecb08baddd11ff" +checksum = "10f8858308af76fba3e5ffcf1bb56af5471574d2bdfaf0159470c25bc2f760e5" dependencies = [ "aws-credential-types", "aws-runtime", @@ -559,9 +434,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d6f29688a4be9895c0ba8bef861ad0c0dac5c15e9618b9b7a6c233990fc263" +checksum = "58b56f1cbe6fd4d0c2573df72868f20ab1c125ca9c9dbce17927a463433a2e57" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -576,9 +451,9 @@ dependencies = [ "http 0.2.12", "http 1.1.0", "once_cell", - "p256 0.11.1", + "p256", "percent-encoding", - "ring 0.17.8", + "ring", "sha2", "subtle", "time", @@ -631,9 +506,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f10fa66956f01540051b0aa7ad54574640f748f9839e843442d99b970d3aff9" +checksum = "4a7de001a1b9a25601016d8057ea16e31a45fdca3751304c8edf4ad72e706c08" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -671,9 +546,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de34bcfa1fb3c82a80e252a753db34a6658e07f23d3a5b3fc96919518fa7a3f5" +checksum = "1cf64e73ef8d4dac6c933230d56d136b75b252edcf82ed36e37d603090cd7348" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -686,7 +561,7 @@ dependencies = [ "http-body 0.4.6", "http-body 1.0.0", "hyper 0.14.28", - "hyper-rustls", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", @@ -697,9 +572,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cc56a5c96ec741de6c5e6bf1ce6948be969d6506dfa9c39cffc284e31e4979b" +checksum = "8c19fdae6e3d5ac9cd01f2d6e6c359c5f5a3e028c2d148a8f5b90bf3399a18a7" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -740,25 +615,25 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "872c68cf019c0e4afc5de7753c4f7288ce4b71663212771bf5e4542eb9346ca9" +checksum = "d123fbc2a4adc3c301652ba8e149bf4bc1d1725affb9784eb20c953ace06bf55" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.1.9" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"afb278e322f16f59630a83b6b2dc992a0b48aa74ed47b4130f193fae0053d713" +checksum = "5a43b56df2c529fe44cb4d92bd64d0479883fb9608ff62daede4df5405381814" dependencies = [ "aws-credential-types", "aws-smithy-async", "aws-smithy-runtime-api", "aws-smithy-types", "http 0.2.12", - "rustc_version", + "rustc_version 0.4.0", "tracing", ] @@ -803,7 +678,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", "itoa", "matchit", @@ -862,12 +737,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "az" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b7e4c2464d97fe331d41de9d5db0def0a96f4d823b8b32a2efd503578988973" - [[package]] name = "backtrace" version = "0.3.71" @@ -890,16 +759,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" [[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base32" -version = "0.4.0" +name = "base64" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" @@ -909,9 +772,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" -version = "0.22.0" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64-simd" @@ -929,82 +792,29 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bcder" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627747a6774aab38beb35990d88309481378558875a41da1a4b2e373c906ef0" -dependencies = [ - "bytes", - "smallvec", -] - -[[package]] -name = "binary-helper" -version = "0.0.1" -dependencies = [ - "anyhow", - "async-nats", - "async-stream", - "async-trait", - "aws-config", - "aws-credential-types", - "aws-sdk-s3", - "aws-smithy-types", - "bytes", - "deadpool-postgres", - "fred", - "futures-util", - "http-body 1.0.0", - "hyper 1.2.0", - "once_cell", - "pb", - "pin-project", - "postgres-from-row", - "postgres-types", - "prost 0.12.4", - "rustls 0.22.3", - "rustls-pemfile 2.1.2", - "scuffle-config", - "scuffle-utils", - "serde", - "thiserror", - "tokio", - "tokio-postgres", - "tokio-postgres-rustls", - "tonic", - "tower-layer", - "tracing", - "tracing-subscriber", - "ulid", -] - [[package]] name = "bindgen" -version = "0.64.0" +version = "0.69.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", "cexpr", "clang-sys", + "itertools 0.12.1", "lazy_static", "lazycell", - "peeking_take_while", + "log", + "prettyplease", "proc-macro2", "quote", "regex", "rustc-hash", "shlex", - "syn 1.0.109", + "syn 2.0.60", + "which", 
] -[[package]] -name = "bit_field" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" - [[package]] name = "bitflags" version = "1.3.2" @@ -1017,16 +827,6 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" -[[package]] -name = "bitmask-enum" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" -dependencies = [ - "quote", - "syn 2.0.58", -] - [[package]] name = "bitstream-io" version = "2.2.0" @@ -1034,12 +834,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06c9989a51171e2e81038ab168b6ae22886fe9ded214430dbb4f41c28cf176da" [[package]] -name = "blake2" -version = "0.10.6" +name = "bitvec" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ - "digest", + "funty", + "radium", + "tap", + "wyz", ] [[package]] @@ -1051,6 +854,28 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bson" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d43b38e074cc0de2957f10947e376a1d88b9c4dbab340b590800cc1b2e066b2" +dependencies = [ + "ahash", + "base64 0.13.1", + "bitvec", + "chrono", + "hex", + "indexmap 2.2.6", + "js-sys", + "once_cell", + "rand", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + [[package]] name = "built" version = "0.7.2" @@ -1069,12 +894,6 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "bytes" version = "1.6.0" @@ -1094,29 +913,23 @@ dependencies = [ "either", ] -[[package]] -name = "bytesio" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "futures", - "scuffle-utils", - "tokio", - "tokio-stream", - "tokio-util", -] - [[package]] name = "cc" -version = "1.0.92" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41" +checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" dependencies = [ "jobserver", "libc", + "once_cell", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -1144,9 +957,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1154,7 +967,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", 
- "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -1175,7 +988,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ "clap_builder", - "clap_derive", ] [[package]] @@ -1190,18 +1002,6 @@ dependencies = [ "strsim 0.11.1", ] -[[package]] -name = "clap_derive" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" -dependencies = [ - "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.58", -] - [[package]] name = "clap_lex" version = "0.7.0" @@ -1217,26 +1017,20 @@ dependencies = [ "cc", ] -[[package]] -name = "color_quant" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" - [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] -name = "console_error_panic_hook" -version = "0.1.7" +name = "combine" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "cfg-if", - "wasm-bindgen", + "bytes", + "memchr", ] [[package]] @@ -1252,30 +1046,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3618cccc083bb987a415d85c02ca6c9994ea5b44731ec28b9ecf09658655fba9" [[package]] -name = "const_format" -version = "0.2.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "constant_time_eq" -version = "0.2.6" +name = "convert_case" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "convert_case" @@ -1288,12 +1062,9 @@ dependencies = [ [[package]] name = "cookie-factory" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2" -dependencies = [ - "futures", -] +checksum = "396de984970346b0d9e93d1415082923c679e5ae5c3ee3dcbd104f5610af126b" [[package]] name = "core-foundation" @@ -1341,7 +1112,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" dependencies = [ - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -1396,12 +1167,6 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" 
-[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - [[package]] name = "crypto-bigint" version = "0.4.9" @@ -1420,10 +1185,8 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ - "generic-array", "rand_core", "subtle", - "zeroize", ] [[package]] @@ -1448,7 +1211,7 @@ dependencies = [ "digest", "fiat-crypto", "platforms", - "rustc_version", + "rustc_version 0.4.0", "subtle", ] @@ -1460,7 +1223,17 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core 0.13.4", + "darling_macro 0.13.4", ] [[package]] @@ -1469,73 +1242,65 @@ version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.8", + "darling_macro 0.20.8", ] [[package]] name = "darling_core" -version = "0.20.8" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.58", + "syn 1.0.109", ] [[package]] -name = "darling_macro" +name = "darling_core" version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ - "darling_core", + "fnv", + "ident_case", + "proc-macro2", "quote", - "syn 2.0.58", + "strsim 0.10.0", + "syn 2.0.60", ] [[package]] -name = "data-encoding" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" - -[[package]] -name = "deadpool" -version = "0.10.0" +name = "darling_macro" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ - "async-trait", - "deadpool-runtime", - "num_cpus", - "tokio", + "darling_core 0.13.4", + "quote", + "syn 1.0.109", ] [[package]] -name = "deadpool-postgres" -version = "0.12.1" +name = "darling_macro" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda39fa1cfff190d8924d447ad04fd22772c250438ca5ce1dfb3c80621c05aaa" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "deadpool", - "tokio", - "tokio-postgres", - "tracing", + "darling_core 0.20.8", + "quote", + "syn 2.0.60", ] [[package]] -name = "deadpool-runtime" -version = "0.1.3" +name = "data-encoding" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" -dependencies = [ - "tokio", -] +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" [[package]] name = "debugid" @@ -1546,23 +1311,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "default-net" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c5a6569a908354d49b10db3c516d69aca1eccd97562fd31c98b13f00b73ca66" -dependencies = [ - "dlopen2", - "libc", - "memalloc", - "netlink-packet-core", - "netlink-packet-route", - "netlink-sys", - "once_cell", - "system-configuration", - "windows", -] - [[package]] name = "der" version = "0.6.1" @@ -1594,6 +1342,30 @@ dependencies = [ "serde", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + [[package]] name = "digest" version = "0.10.7" @@ -1601,28 +1373,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid", "crypto-common", "subtle", ] [[package]] -name = "dlopen2" -version = "0.5.0" +name = "document-features" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b4f5f101177ff01b8ec4ecc81eead416a8aa42819a2869311b3420fa114ffa" +checksum = "ef5282ad69563b5fc40319526ba27e0e7363d552a896f0297d54f767717f9b95" dependencies = [ - "libc", - "once_cell", - "winapi", + "litrs", ] -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - [[package]] name = "dtoa" version = "1.0.9" @@ -1642,25 +1405,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" dependencies = [ "der 0.6.1", - "elliptic-curve 0.12.3", - "rfc6979 0.3.1", + "elliptic-curve", + "rfc6979", "signature 1.6.4", ] -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der 0.7.9", - "digest", - "elliptic-curve 0.13.8", - "rfc6979 0.4.0", - "signature 2.2.0", - "spki 0.7.3", -] - [[package]] name = "ed25519" version = "2.2.3" @@ -1685,9 +1434,9 @@ dependencies = [ [[package]] name = "either" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" [[package]] name = "elliptic-curve" @@ -1695,60 +1444,30 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" dependencies = [ - "base16ct 0.1.1", + "base16ct", "crypto-bigint 0.4.9", "der 0.6.1", 
"digest", - "ff 0.12.1", + "ff", "generic-array", - "group 0.12.1", + "group", "pkcs8 0.9.0", "rand_core", - "sec1 0.3.0", - "subtle", - "zeroize", -] - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct 0.2.0", - "crypto-bigint 0.5.5", - "digest", - "ff 0.13.0", - "generic-array", - "group 0.13.0", - "hkdf", - "pem-rfc7468", - "pkcs8 0.10.2", - "rand_core", - "sec1 0.7.3", + "sec1", "subtle", "zeroize", ] -[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - [[package]] name = "enum-as-inner" -version = "0.6.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 1.0.109", ] [[package]] @@ -1773,53 +1492,23 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "futures", - "h3 0.0.4", + "h3 0.0.5", "h3-quinn", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", "opentelemetry", "quinn", "rand", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", + "rustls 0.23.5", + "rustls-pemfile 2.1.2", "scuffle-foundations", - "socket2", + "socket2 0.5.7", "tokio", "tower", "tracing", ] -[[package]] -name = "exp_golomb" -version = "0.0.1" -dependencies = [ - "bytes", - "bytesio", -] - -[[package]] -name = "exr" -version = "1.72.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "887d93f60543e9a9362ef8a21beedd0a833c5d9610e18c67abe15a5963dcb1a4" -dependencies = [ - "bit_field", - "flume", - "half", - "lebe", - "miniz_oxide", - "rayon-core", - "smallvec", - "zune-inflate", -] - -[[package]] -name = "fallible-iterator" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" - [[package]] name = "fallible_collections" version = "0.4.9" @@ -1829,31 +1518,23 @@ dependencies = [ "hashbrown 0.13.2", ] -[[package]] -name = "fast_chemail" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "495a39d30d624c2caabe6312bfead73e7717692b44e0b32df168c275a2e8e9e4" -dependencies = [ - "ascii_utils", -] - [[package]] name = "fast_image_resize" -version = "3.0.4" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9d450fac8a334ad72825596173f0f7767ff04dd6e3d59c49c894c4bc2957e8b" +checksum = "02abb58c39fa9b20678cedabab49e6c4f6ecb7480d7cb5711496b9289184a875" dependencies = [ "cfg-if", + "document-features", "num-traits", "thiserror", ] [[package]] name = "fastrand" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fdeflate" @@ -1874,34 +1555,11 @@ dependencies = [ "subtle", ] -[[package]] -name = "ff" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "ffmpeg" -version = "0.1.0" -dependencies = [ - "bytes", - "crossbeam-channel", - "ffmpeg-sys-next", - "libc", - "scuffle-utils", - "tokio", - "tracing", -] - [[package]] name = "ffmpeg-sys-next" -version = "6.1.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2529ad916d08c3562c754c21bc9b17a26c7882c0f5706cc2cd69472175f1620" +checksum = "972a460dd8e901b737ce0482bf71a837e1751e3dd7c8f8b0a4ead808e7f174a5" dependencies = [ "bindgen", "cc", @@ -1913,15 +1571,15 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f" +checksum = "38793c55593b33412e3ae40c2c9781ffaa6f438f6f8c10f24e71846fbd7ae01e" [[package]] name = "file-format" -version = "0.24.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ba1b81b3c213cf1c071f8bf3b83531f310df99642e58c48247272eef006cae5" +checksum = "9ffe3a660c3a1b10e96f304a9413d673b2118d62e4520f7ddf4a4faccfe8b9b9" [[package]] name = "findshlibs" @@ -1941,18 +1599,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" -[[package]] -name = "fixed" -version = "1.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fc715d38bea7b5bf487fcd79bcf8c209f0b58014f3018a7a19c2b855f472048" -dependencies = [ - "az", - "bytemuck", - "half", - "typenum", -] - [[package]] name = "fixedbitset" version = "0.4.2" @@ -1961,9 +1607,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "miniz_oxide", @@ -1978,31 +1624,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "flume" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" -dependencies = [ - "spin 0.9.8", -] - -[[package]] -name = "flv" -version = "0.0.1" -dependencies = [ - "aac", - "amf0", - "av1", - "byteorder", - "bytes", - "bytesio", - "h264", - "h265", - "num-derive", - "num-traits", -] - [[package]] name = "fnv" version = "1.0.7" @@ -2020,9 +1641,9 @@ dependencies = [ [[package]] name = "fred" -version = "8.0.6" +version = "9.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8e3a1339ed45ad8fde94530c4bdcbd5f371a3c6bd3bf57682923792830aa37" +checksum = "915e065b377f6e16d5c01eae96bf31eeaf81e1e300b76f938761b3c21307cad8" dependencies = [ "arc-swap", "async-trait", @@ -2035,20 +1656,27 @@ dependencies = [ "parking_lot", "rand", "redis-protocol", - "rustls 0.22.3", - "rustls-native-certs 0.7.0", - "rustls-webpki 0.102.2", - "semver", - "socket2", + "semver 1.0.22", + "socket2 0.5.7", "tokio", - "tokio-rustls 0.25.0", "tokio-stream", "tokio-util", - "trust-dns-resolver", "url", "urlencoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + [[package]] name = "futures" version = "0.3.30" @@ -2105,7 +1733,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2146,20 +1774,17 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", - "zeroize", ] [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", - "js-sys", "libc", "wasi", - "wasm-bindgen", ] [[package]] @@ -2168,7 +1793,6 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fb2d69b19215e18bb912fa30f7ce15846e301408695e44e0ef719f1da9e19f2" dependencies = [ - "color_quant", "weezl", ] @@ -2185,9 +1809,9 @@ dependencies = [ [[package]] name = "gifski" -version = "1.14.4" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6a6c5eab296009821c25867a4eaa9fca77df08bade4eed27bc5c211b3e6466f" +checksum = "fa3aeeed337aa658d1c2d90cb21b6db6172d1b8a84dfb462ade81f48eb0fd5eb" dependencies = [ "clap", "crossbeam-channel", @@ -2207,6 +1831,8 @@ dependencies = [ "resize", "rgb", "wild", + "y4m", + "yuv", ] [[package]] @@ -2222,50 +1848,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] -name = "gloo-timers" -version = "0.3.0" +name = "group" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "gloo-utils" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff 0.12.1", - "rand_core", - "subtle", -] - -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff 0.13.0", - "rand_core", - "subtle", + "ff", + "rand_core", + "subtle", ] [[package]] @@ -2306,26 +1896,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "h264" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "bytesio", - "exp_golomb", -] - -[[package]] -name = "h265" -version = "0.0.1" -dependencies = [ - "byteorder", - "bytes", - "bytesio", - "exp_golomb", -] - [[package]] name 
= "h3" version = "0.0.3" @@ -2343,9 +1913,9 @@ dependencies = [ [[package]] name = "h3" -version = "0.0.4" +version = "0.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c8886b9e6e93e7ed93d9433f3779e8d07e3ff96bc67b977d14c7b20c849411" +checksum = "d5069de1c2ac82d9e361b07f2b8a2c582ec071750e063530fc7f3b5197e24805" dependencies = [ "bytes", "fastrand", @@ -2358,15 +1928,14 @@ dependencies = [ [[package]] name = "h3-quinn" -version = "0.0.5" +version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73786bcc0e4c2692ba62c650f7b950ac236e5300c5de3b1d26330555e2322046" +checksum = "b8c01d99d7cf812fd34ddf135e6c940df9e24f2e759dbc7179fb0e54d4bd6551" dependencies = [ "bytes", "futures", - "h3 0.0.4", + "h3 0.0.5", "quinn", - "quinn-proto", "tokio", "tokio-util", ] @@ -2386,38 +1955,11 @@ dependencies = [ "tracing", ] -[[package]] -name = "half" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" -dependencies = [ - "cfg-if", - "crunchy", -] - -[[package]] -name = "handlebars" -version = "4.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" -dependencies = [ - "log", - "pest", - "pest_derive", - "serde", - "serde_json", - "thiserror", -] - [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.8", -] [[package]] name = "hashbrown" @@ -2425,16 +1967,16 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.11", + "ahash", ] [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.11", + "ahash", "allocator-api2", ] @@ -2463,21 +2005,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] -name = "hkdf" -version = "0.12.4" +name = "hmac" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "hmac", + "digest", ] [[package]] -name = "hmac" -version = "0.12.1" +name = "home" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "digest", + "windows-sys 0.52.0", ] [[package]] @@ -2592,7 +2134,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower-service", "tracing", @@ -2601,9 +2143,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ "bytes", "futures-channel", @@ -2637,30 +2179,32 @@ dependencies = [ ] [[package]] -name = "hyper-timeout" -version = "0.4.1" +name = "hyper-rustls" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" dependencies = [ - "hyper 0.14.28", - "pin-project-lite", + "futures-util", + "http 1.1.0", + "hyper 1.3.1", + "hyper-util", + "rustls 0.22.4", + "rustls-pki-types", "tokio", - "tokio-io-timeout", + "tokio-rustls 0.25.0", + "tower-service", ] [[package]] -name = "hyper-tungstenite" -version = "0.13.0" +name = "hyper-timeout" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a343d17fe7885302ed7252767dc7bb83609a874b6ff581142241ec4b73957ad" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "http-body-util", - "hyper 1.2.0", - "hyper-util", + "hyper 0.14.28", "pin-project-lite", "tokio", - "tokio-tungstenite", - "tungstenite", + "tokio-io-timeout", ] [[package]] @@ -2670,13 +2214,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ "bytes", + "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.0", - "hyper 1.2.0", + "hyper 1.3.1", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", + "tower", + "tower-service", + "tracing", ] [[package]] @@ -2710,10 +2258,11 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" dependencies = [ + "matches", "unicode-bidi", "unicode-normalization", ] @@ -2728,29 +2277,11 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "image" -version = "0.24.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" -dependencies = [ - "bytemuck", - "byteorder", - "color_quant", - "exr", - "gif", - "jpeg-decoder", - "num-traits", - "png", - "qoi", - "tiff", -] - [[package]] name = "imagequant" -version = "4.3.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a7f142d232ccbdc00cbef49d17f45639aeb07d9bfe28e17c21dea3efac64e5" +checksum = "09db32417831053bf246bc74fc7c139a05458552d2d98a9f58ff5744d8dea8d3" dependencies = [ "arrayvec", "once_cell", @@ -2782,8 +2313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.3", - "serde", + "hashbrown 0.14.5", ] [[package]] @@ -2794,7 +2324,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2803,10 +2333,10 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", + "socket2 0.5.7", "widestring", "windows-sys 0.48.0", - "winreg", + "winreg 0.50.0", ] [[package]] @@ -2815,11 +2345,17 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + [[package]] name = "itertools" -version = "0.10.5" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ "either", ] @@ -2833,6 +2369,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -2840,40 +2385,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] -name = "jemalloc_pprof" -version = "0.1.0" +name = "jni" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45b38a2cc3eb7b0e332c6368a6fd6a1a603a5be9526f0810f8e0682513538541" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ - "anyhow", - "flate2", - "libc", - "num", - "once_cell", - "paste", - "prost 0.11.9", - "tempfile", - "tikv-jemalloc-ctl", - "tokio", - "tracing", + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", ] [[package]] -name = "jobserver" -version = "0.1.29" +name = "jni-sys" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f08474e32172238f2827bd160c67871cdb2801430f65c3979184dc362e3ca118" -dependencies = [ - "libc", -] +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] -name = "jpeg-decoder" -version = "0.3.1" +name = "jobserver" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" dependencies = [ - "rayon", + "libc", ] [[package]] @@ -2885,35 +2422,11 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jwt-next" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89327f6d992ab1b3f6c908ee32cc0bb66068e2696da2cfe21a8764e400fe9c3b" -dependencies = [ - "base64 0.21.7", - "crypto-common", - "digest", - "ecdsa 0.16.9", - "hmac", - "p256 0.13.2", - "p384", - "pem", - "rsa", - "serde", - "serde_json", - "sha2", - "signature 2.2.0", -] - [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -dependencies = [ - "spin 0.5.2", -] [[package]] name = "lazycell" @@ -2921,12 +2434,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" -[[package]] -name = "lebe" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" - [[package]] name = "libavif-sys" version = "0.16.0+libavif.1.0.4" @@ -2941,9 +2448,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.153" +version = "0.2.154" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" [[package]] name = "libdav1d-sys" @@ -2972,15 +2479,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ "cfg-if", - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] -[[package]] -name = "libm" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - [[package]] name = "libwebp-sys2" version = "0.1.9" @@ -3006,11 +2507,17 @@ version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +[[package]] +name = "litrs" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5" + [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -3044,22 +2551,13 @@ dependencies = [ "imgref", ] -[[package]] -name = "lru" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a" -dependencies = [ - "hashbrown 0.12.3", -] - [[package]] name = "lru" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" dependencies = [ - "hashbrown 0.14.3", + "hashbrown 0.14.5", ] [[package]] @@ -3086,6 +2584,12 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "matchit" version = "0.7.3" @@ -3111,12 +2615,6 @@ dependencies = [ "digest", ] -[[package]] -name = "memalloc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df39d232f5c40b0891c10216992c2f250c054105cb1e56f0fc9032db6203ecc1" - [[package]] name = "memchr" version = "2.7.2" @@ -3166,38 +2664,56 @@ dependencies = [ ] [[package]] -name = "mp4" -version = "0.0.1" -dependencies = [ - "aac", - "av1", - "byteorder", - "bytes", - "bytesio", - "fixed", - "h264", - "h265", - "paste", - "serde", - "serde_json", -] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" [[package]] -name = "multer" -version = "3.0.0" +name = 
"mongodb" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15d522be0a9c3e46fd2632e272d178f56387bdb5c9fbb3a36c649062e9b5219" +checksum = "ef206acb1b72389b49bc9985efe7eb1f8a9bb18e5680d262fac26c07f44025f1" dependencies = [ - "bytes", - "encoding_rs", + "async-trait", + "base64 0.13.1", + "bitflags 1.3.2", + "bson", + "chrono", + "derivative", + "derive_more", + "futures-core", + "futures-executor", + "futures-io", "futures-util", - "http 1.1.0", - "httparse", - "log", - "memchr", - "mime", - "spin 0.9.8", - "version_check", + "hex", + "hmac", + "lazy_static", + "md-5", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", + "serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2 0.4.10", + "stringprep", + "strsim 0.10.0", + "take_mut", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", + "trust-dns-proto", + "trust-dns-resolver", + "typed-builder", + "uuid", + "webpki-roots 0.25.4", ] [[package]] @@ -3222,91 +2738,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "308d96db8debc727c3fd9744aac51751243420e46edf401010908da7f8d5e57c" [[package]] -name = "netlink-packet-core" -version = "0.7.0" +name = "new_debug_unreachable" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72724faf704479d67b388da142b186f916188505e7e0b26719019c525882eda4" -dependencies = [ - "anyhow", - "byteorder", - "netlink-packet-utils", -] +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] -name = "netlink-packet-route" -version = "0.17.1" +name = "nix" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053998cea5a306971f88580d0829e90f270f940befd7cf928da179d4187a5a66" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ - "anyhow", "bitflags 1.3.2", - "byteorder", + "cfg-if", "libc", - "netlink-packet-core", - "netlink-packet-utils", ] [[package]] -name = "netlink-packet-utils" -version = "0.5.2" +name = "nkeys" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" +checksum = "bc522a19199a0795776406619aa6aa78e1e55690fbeb3181b8db5265fd0e89ce" dependencies = [ - "anyhow", - "byteorder", - "paste", - "thiserror", + "data-encoding", + "ed25519", + "ed25519-dalek", + "getrandom", + "log", + "rand", + "signatory", ] [[package]] -name = "netlink-sys" -version = "0.8.6" +name = "nom" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416060d346fbaf1f23f9512963e3e878f1a78e707cb699ba9215761754244307" -dependencies = [ - "bytes", - "libc", - "log", -] - -[[package]] -name = "new_debug_unreachable" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" - -[[package]] -name = "nix" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "libc", -] - -[[package]] -name = "nkeys" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" -dependencies = [ - "byteorder", - 
"data-encoding", - "ed25519", - "ed25519-dalek", - "getrandom", - "log", - "rand", - "signatory", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", @@ -3337,20 +2804,6 @@ dependencies = [ "rand", ] -[[package]] -name = "num" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135b08af27d103b0a51f2ae0f8632117b7b185ccf931445affa8df530576a41" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - [[package]] name = "num-bigint" version = "0.4.4" @@ -3362,32 +2815,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-bigint-dig" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" -dependencies = [ - "byteorder", - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand", - "smallvec", - "zeroize", -] - -[[package]] -name = "num-complex" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6" -dependencies = [ - "num-traits", -] - [[package]] name = "num-conv" version = "0.1.0" @@ -3402,7 +2829,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -3414,32 +2841,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-modular" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17bb261bf36fa7d83f4c294f834e91256769097b3cb505d44831e0a179ac647f" - -[[package]] -name = "num-order" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537b596b97c40fcf8056d153049eb22f481c17ebce72a513ec9286e4986d1bb6" -dependencies = [ - "num-modular", -] - [[package]] name = "num-rational" version = "0.4.1" @@ -3454,12 +2855,11 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", - "libm", ] [[package]] @@ -3495,9 +2895,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "opentelemetry" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900d57987be3f2aeb70d385fff9b27fb74c5723cc9a52d904d4f9c807a0667bf" +checksum = "1b69a91d4893e713e06f724597ad630f1fa76057a5e1026c0ca67054a9032a76" dependencies = [ "futures-core", "futures-sink", @@ -3505,14 +2905,13 @@ dependencies = [ "once_cell", "pin-project-lite", "thiserror", - "urlencoding", ] [[package]] name = "opentelemetry-http" -version = "0.11.1" +version = "0.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7690dc77bf776713848c4faa6501157469017eaf332baccd4eb1cea928743d94" +checksum = "b0ba633e55c5ea6f431875ba55e71664f2fa5d3a90bd34ec9302eecc41c865dd" dependencies = [ "async-trait", "bytes", @@ -3522,9 +2921,9 @@ dependencies = [ [[package]] name = "opentelemetry-otlp" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a016b8d9495c639af2145ac22387dcb88e44118e45320d9238fbf4e7889abcb" +checksum = "a94c69209c05319cdf7460c6d4c055ed102be242a0a6245835d7bc42c6ec7f54" dependencies = [ "async-trait", "futures-core", @@ -3532,9 +2931,8 @@ dependencies = [ "opentelemetry", "opentelemetry-http", "opentelemetry-proto", - "opentelemetry-semantic-conventions", "opentelemetry_sdk", - "prost 0.12.4", + "prost", "thiserror", "tokio", "tonic", @@ -3542,37 +2940,31 @@ dependencies = [ [[package]] name = "opentelemetry-proto" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8fddc9b68f5b80dae9d6f510b88e02396f006ad48cac349411fbecc80caae4" +checksum = "984806e6cf27f2b49282e2a05e288f30594f3dbc74eb7a6e99422bc48ed78162" dependencies = [ "opentelemetry", "opentelemetry_sdk", - "prost 0.12.4", + "prost", "tonic", ] -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9ab5bd6c42fb9349dcf28af2ba9a0667f697f9bdcca045d39f2cec5543e2910" - [[package]] name = "opentelemetry_sdk" -version = "0.22.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e90c7113be649e31e9a0f8b5ee24ed7a16923b322c3c5ab6367469c049d6b7e" +checksum = "ae312d58eaa90a82d2e627fd86e075cf5230b3f11794e2ed74199ebbe572d4fd" dependencies = [ "async-trait", - "crossbeam-channel", "futures-channel", "futures-executor", "futures-util", "glob", + "lazy_static", "once_cell", "opentelemetry", - "ordered-float 4.2.0", + "ordered-float", "percent-encoding", "rand", "thiserror", @@ -3587,15 +2979,6 @@ dependencies = [ "crossbeam-channel", ] -[[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits", -] - [[package]] name = "ordered-float" version = "4.2.0" @@ -3623,40 +3006,16 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" dependencies = [ - "ecdsa 0.14.8", - "elliptic-curve 0.12.3", - "sha2", -] - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa 0.16.9", - "elliptic-curve 0.13.8", - "primeorder", - "sha2", -] - -[[package]] -name = "p384" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209" -dependencies = [ - "ecdsa 0.16.9", - "elliptic-curve 0.13.8", - "primeorder", + "ecdsa", + "elliptic-curve", "sha2", ] [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = 
"7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" dependencies = [ "lock_api", "parking_lot_core", @@ -3664,26 +3023,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.48.5", -] - -[[package]] -name = "password-hash" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" -dependencies = [ - "base64ct", - "rand_core", - "subtle", + "windows-targets 0.52.5", ] [[package]] @@ -3693,29 +3041,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] -name = "path-tree" -version = "0.7.6" +name = "pbjson" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a3a8689ce29b9b1e4d2363bd5b2ee0f6765d6fb6973df48f24145c9325d4f6e" +checksum = "1030c719b0ec2a2d25a5df729d6cff1acf3cc230bf766f4f97833591f7577b90" dependencies = [ - "smallvec", + "base64 0.21.7", + "serde", ] [[package]] -name = "pb" -version = "0.0.1" +name = "pbjson-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2580e33f2292d34be285c5bc3dba5259542b083cfad6037b6d70345f24dcb735" dependencies = [ - "prettyplease", - "proc-macro2", - "prost 0.12.4", - "prost-build", - "quote", - "syn 2.0.58", - "tonic", - "tonic-build", - "ulid", - "uuid", - "walkdir", + "heck 0.4.1", + "itertools 0.11.0", + "prost", + "prost-types", +] + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", ] [[package]] @@ -3729,22 +3082,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - -[[package]] -name = "pem" -version = "3.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" -dependencies = [ - "base64 0.22.0", - "serde", -] - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -3760,51 +3097,6 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" -[[package]] -name = "pest" -version = "2.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "311fb059dee1a7b802f036316d790138c613a4e8b180c822e3925a662e9f0c95" -dependencies = [ - "memchr", - "thiserror", - "ucd-trie", -] - -[[package]] -name = "pest_derive" -version = "2.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73541b156d32197eecda1a4014d7f868fd2bcb3c550d5386087cfba442bf69c" -dependencies = [ - "pest", - "pest_generator", -] - -[[package]] -name = "pest_generator" -version = "2.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c35eeed0a3fab112f75165fdc026b3913f4183133f19b49be773ac9ea966e8bd" -dependencies = [ - "pest", - "pest_meta", - "proc-macro2", - "quote", - "syn 2.0.58", -] - -[[package]] -name = "pest_meta" -version = "2.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2adbf29bb9776f28caece835398781ab24435585fe0d4dc1374a61db5accedca" -dependencies = [ - "once_cell", - "pest", - "sha2", -] - [[package]] name = "petgraph" version = "0.6.4" @@ -3815,24 +3107,6 @@ dependencies = [ "indexmap 2.2.6", ] -[[package]] -name = "phf" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_shared" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" -dependencies = [ - "siphasher", -] - [[package]] name = "pin-project" version = "1.1.5" @@ -3850,7 +3124,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -3865,17 +3139,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "pkcs1" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der 0.7.9", - "pkcs8 0.10.2", - "spki 0.7.3", -] - [[package]] name = "pkcs8" version = "0.9.0" @@ -3902,103 +3165,6 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" -[[package]] -name = "platform-api" -version = "0.0.1" -dependencies = [ - "anyhow", - "arc-swap", - "argon2", - "async-graphql", - "async-nats", - "async-stream", - "async-trait", - "aws-config", - "aws-sdk-s3", - "base64 0.21.7", - "binary-helper", - "bitmask-enum", - "bytes", - "chrono", - "fred", - "futures", - "futures-util", - "hmac", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "hyper 1.2.0", - "hyper-tungstenite", - "hyper-util", - "jwt-next", - "multer", - "path-tree", - "pb", - "pin-project", - "postgres-from-row", - "postgres-types", - "prost 0.12.4", - "rand", - "reqwest", - "rustls 0.22.3", - "rustls-pemfile 2.1.2", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "sha2", - "tempfile", - "thiserror", - "tokio", - "tokio-rustls 0.25.0", - "tokio-stream", - "tonic", - "totp-rs", - "tracing", - "ulid", - "uuid", -] - -[[package]] -name = "platform-image-processor" -version = "0.0.1" -dependencies = [ - "anyhow", - "async-nats", - "async-trait", - "aws-config", - "aws-sdk-s3", - "binary-helper", - "byteorder", - "bytes", - "fast_image_resize", - "ffmpeg", - "file-format", - "futures", - "gifski", - "imgref", - "libavif-sys", - "libwebp-sys2", - "num_cpus", - "pb", - "png", - "postgres-from-row", - "prost 0.12.4", - "reqwest", - "rgb", - "scopeguard", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "sha2", - "thiserror", - "tokio", - "tonic", - "tracing", - "ulid", -] - [[package]] name = "platforms" version = "3.4.0" @@ -4019,78 +3185,10 @@ dependencies = [ ] [[package]] -name = "portpicker" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "be97d76faf1bfab666e1375477b23fde79eccf0276e9b63b92a39d676a889ba9" -dependencies = [ - "rand", -] - -[[package]] -name = "postgres-derive" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83145eba741b050ef981a9a1838c843fa7665e154383325aa8b440ae703180a2" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "syn 2.0.58", -] - -[[package]] -name = "postgres-from-row" -version = "0.5.2" -source = "git+https://github.com/ScuffleTV/postgres-from-row.git?branch=troy/from_fn#3a775f225aae7c0f54e404f3f07aa13fcec2cc9b" -dependencies = [ - "postgres-from-row-derive", - "tokio-postgres", -] - -[[package]] -name = "postgres-from-row-derive" -version = "0.5.2" -source = "git+https://github.com/ScuffleTV/postgres-from-row.git?branch=troy/from_fn#3a775f225aae7c0f54e404f3f07aa13fcec2cc9b" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn 2.0.58", -] - -[[package]] -name = "postgres-protocol" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" -dependencies = [ - "base64 0.21.7", - "byteorder", - "bytes", - "fallible-iterator", - "hmac", - "md-5", - "memchr", - "rand", - "sha2", - "stringprep", -] - -[[package]] -name = "postgres-types" -version = "0.2.6" +name = "portable-atomic" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" -dependencies = [ - "bytes", - "chrono", - "fallible-iterator", - "postgres-derive", - "postgres-protocol", - "serde", - "serde_json", -] +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "powerfmt" @@ -4112,9 +3210,9 @@ dependencies = [ "nix", "once_cell", "parking_lot", - "prost 0.12.4", + "prost", "prost-build", - "prost-derive 0.12.4", + "prost-derive", "sha2", "smallvec", "symbolic-demangle", @@ -4130,38 +3228,19 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "prettyplease" -version = "0.2.17" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7" +checksum = "5ac2cf0f2e4f42b49f5ffd07dae8d746508ef7526c13940e5f524012ae6c6550" dependencies = [ "proc-macro2", - "syn 2.0.58", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve 0.13.8", -] - -[[package]] -name = "proc-macro-crate" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", + "syn 2.0.60", ] [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" dependencies = [ "unicode-ident", ] @@ -4182,7 +3261,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd" dependencies = [ "quote", - 
"syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4205,17 +3284,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "prost" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" -dependencies = [ - "bytes", - "prost-derive 0.11.9", + "syn 2.0.60", ] [[package]] @@ -4225,7 +3294,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0f5d036824e4761737860779c906171497f6d55681139d8312388f8fe398922" dependencies = [ "bytes", - "prost-derive 0.12.4", + "prost-derive", ] [[package]] @@ -4242,26 +3311,13 @@ dependencies = [ "once_cell", "petgraph", "prettyplease", - "prost 0.12.4", + "prost", "prost-types", "regex", - "syn 2.0.58", + "syn 2.0.60", "tempfile", ] -[[package]] -name = "prost-derive" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "prost-derive" version = "0.12.4" @@ -4272,7 +3328,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4281,33 +3337,7 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3235c33eb02c1f1e212abdbe34c78b264b038fb58ca612664343271e36e55ffe" dependencies = [ - "prost 0.12.4", -] - -[[package]] -name = "qoi" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "qrcodegen" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4339fc7a1021c9c1621d87f5e3505f2805c8c105420ba2f2a4df86814590c142" - -[[package]] -name = "qrcodegen-image" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f59a7c7ddb94c99fa3942dd761dbb305bca462b71d7bd9bcb3f9ff4d454d5736" -dependencies = [ - "base64 0.22.0", - "image", - "qrcodegen", + "prost", ] [[package]] @@ -4324,9 +3354,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quinn" -version = "0.10.2" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75" +checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" dependencies = [ "bytes", "futures-io", @@ -4334,7 +3364,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.21.12", + "rustls 0.23.5", "thiserror", "tokio", "tracing", @@ -4342,16 +3372,16 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.10.6" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a" +checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" dependencies = [ "bytes", "rand", - "ring 0.16.20", + "ring", "rustc-hash", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", + "rustls 0.23.5", + "rustls-platform-verifier", "slab", "thiserror", "tinyvec", @@ -4360,15 +3390,15 @@ dependencies = [ [[package]] name = "quinn-udp" -version = 
"0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" +checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" dependencies = [ - "bytes", "libc", - "socket2", + "once_cell", + "socket2 0.5.7", "tracing", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4380,6 +3410,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "rand" version = "0.8.5" @@ -4469,9 +3505,9 @@ dependencies = [ [[package]] name = "redis-protocol" -version = "4.1.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c31deddf734dc0a39d3112e73490e88b61a05e83e074d211f348404cee4d2c6" +checksum = "65deb7c9501fbb2b6f812a30d59c0253779480853545153a51d8e9e444ddc99f" dependencies = [ "bytes", "bytes-utils", @@ -4483,11 +3519,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", ] [[package]] @@ -4542,20 +3578,20 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "reqwest" -version = "0.11.27" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "bytes", - "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-rustls", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.3.1", + "hyper-rustls 0.26.0", + "hyper-util", "ipnet", "js-sys", "log", @@ -4563,22 +3599,22 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", + "rustls 0.22.4", + "rustls-pemfile 2.1.2", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 0.1.2", - "system-configuration", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls 0.25.0", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots", - "winreg", + "webpki-roots 0.26.1", + "winreg 0.52.0", ] [[package]] @@ -4612,16 +3648,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - [[package]] name = "rgb" version = "0.8.37" @@ -4631,21 +3657,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - [[package]] name = "ring" version = "0.17.8" @@ -4656,55 +3667,11 @@ dependencies = [ "cfg-if", 
"getrandom", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] -[[package]] -name = "rsa" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" -dependencies = [ - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-traits", - "pkcs1", - "pkcs8 0.10.2", - "rand_core", - "sha2", - "signature 2.2.0", - "spki 0.7.3", - "subtle", - "zeroize", -] - -[[package]] -name = "rtmp" -version = "0.0.1" -dependencies = [ - "amf0", - "async-trait", - "byteorder", - "bytes", - "bytesio", - "chrono", - "futures", - "hmac", - "num-derive", - "num-traits", - "rand", - "scuffle-utils", - "serde_json", - "sha2", - "tokio", - "tracing", - "uuid", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -4717,20 +3684,39 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.22", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", ] [[package]] name = "rustix" -version = "0.38.32" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.5.0", "errno", @@ -4746,21 +3732,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring 0.17.8", + "ring", "rustls-webpki 0.101.7", "sct", ] [[package]] name = "rustls" -version = "0.22.3" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.3", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +checksum = "afabcee0551bd1aa3e18e5adbf2c0544722014b899adb31bd186ec638d3da97e" dependencies = [ + "aws-lc-rs", "log", - "ring 0.17.8", + "once_cell", + "ring", "rustls-pki-types", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.3", "subtle", "zeroize", ] @@ -4805,15 +3807,42 @@ version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.4.1" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "beb461507cee2c2ff151784c52762cf4d9ff6a61f3e80968600ed24fa837fa54" + +[[package]] +name = "rustls-platform-verifier" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5f0d26fa1ce3c790f9590868f0109289a044acb954525f933e2aa3b871c157d" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls 0.23.5", + "rustls-native-certs 0.7.0", + "rustls-platform-verifier-android", + "rustls-webpki 0.102.3", + "security-framework", + "security-framework-sys", + "webpki-roots 0.26.1", + "winapi", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" +checksum = "84e217e7fdc8466b5b35d30f8c0a30febd29173df4a3a0c2115d306b9c4117ad" [[package]] name = "rustls-webpki" @@ -4821,19 +3850,20 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] name = "rustls-webpki" -version = "0.102.2" +version = "0.102.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +checksum = "f3bce581c0dd41bce533ce695a1437fa16a7ab5ac3ccfa99fe1a620a7885eabf" dependencies = [ - "ring 0.17.8", + "aws-lc-rs", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] @@ -4865,9 +3895,9 @@ checksum = "0b53b0a5db882a8e2fdaae0a43f7b39e7e9082389e978398bdf223a55b581248" [[package]] name = "scc" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec96560eea317a9cc4e0bb1f6a2c93c09a19b8c4fc5cb3fcc0ec1c094cd783e2" +checksum = "76ad2bbb0ae5100a07b7a6f2ed7ab5fd0045551a4c507989b7a620046ea3efdc" dependencies = [ "sdd", ] @@ -4893,30 +3923,20 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] -name = "scuffle-config" -version = "0.0.1" +name = "scuffle-ffmpeg" +version = "0.1.0" dependencies = [ - "clap", - "convert_case", - "humantime", - "num-order", - "scuffle_config_derive", - "serde", - "serde-value", - "serde_ignored", - "serde_json", - "serde_path_to_error", - "serde_yaml", - "thiserror", - "toml", + "bytes", + "crossbeam-channel", + "ffmpeg-sys-next", + "libc", + "tokio", "tracing", - "ulid", - "uuid", ] [[package]] @@ -4930,17 +3950,16 @@ dependencies = [ "const-str", "flate2", "futures", - "h3 0.0.4", + "h3 0.0.5", "h3-quinn", "h3-webtransport", "http 1.1.0", "http-body 1.0.0", "humantime", "humantime-serde", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", - "itertools 0.12.1", - "jemalloc_pprof", + "itertools 0.13.0", "matchers", "num_cpus", "once_cell", @@ -4951,24 +3970,22 @@ dependencies = [ "pin-project", "pprof", "prometheus-client", - "prost 0.12.4", + "prost", "quinn", "rand", "regex", - "rustls 0.21.12", + "rustls 0.23.5", "scc", "scuffle-foundations-macros", "serde", - "serde_yaml", - "socket2", - "spin 0.9.8", + "socket2 0.5.7", + "spin", "thiserror", "thread_local", - "tikv-jemalloc-ctl", - "tikv-jemallocator", "tokio", - 
"tokio-rustls 0.24.1", + "tokio-rustls 0.26.0", "tokio-util", + "toml", "tower", "tracing", "tracing-subscriber", @@ -4978,57 +3995,71 @@ dependencies = [ name = "scuffle-foundations-macros" version = "0.0.0" dependencies = [ - "convert_case", - "darling", + "convert_case 0.6.0", + "darling 0.20.8", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] -name = "scuffle-utils" -version = "0.1.0" +name = "scuffle-image-processor" +version = "0.0.1" dependencies = [ + "anyhow", + "async-nats", "async-trait", + "aws-config", + "aws-sdk-s3", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bson", "bytes", - "const_format", - "deadpool-postgres", - "dotenvy", - "fnv", + "chrono", + "fast_image_resize", + "file-format", "fred", "futures", - "futures-channel", - "futures-util", + "gifski", "http 1.1.0", - "http-body-util", - "hyper 1.2.0", - "path-tree", - "pin-project", - "portpicker", - "postgres-from-row", - "postgres-types", - "prost 0.12.4", + "humantime-serde", + "imgref", + "libavif-sys", + "libwebp-sys2", + "mongodb", + "num_cpus", + "once_cell", + "png", + "prost", + "reqwest", + "rgb", + "scopeguard", + "scuffle-ffmpeg", + "scuffle-foundations", + "scuffle-image-processor-proto", + "serde", "serde_json", - "tempfile", + "sha2", + "strfmt", "thiserror", "tokio", - "tokio-postgres", - "tokio-util", "tonic", - "tonic-build", - "tower", "tracing", - "trust-dns-resolver", - "ulid", + "url", + "urlencoding", ] [[package]] -name = "scuffle_config_derive" -version = "0.0.1" +name = "scuffle-image-processor-proto" +version = "0.0.0" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "pbjson", + "pbjson-build", + "prost", + "prost-build", + "serde", + "tonic", + "tonic-build", ] [[package]] @@ -5043,7 +4074,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" dependencies = [ - "base16ct 0.1.1", + "base16ct", "der 0.6.1", "generic-array", "pkcs8 0.9.0", @@ -5051,38 +4082,25 @@ dependencies = [ "zeroize", ] -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct 0.2.0", - "der 0.7.9", - "generic-array", - "pkcs8 0.10.2", - "subtle", - "zeroize", -] - [[package]] name = "security-framework" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", "core-foundation", "core-foundation-sys", "libc", + "num-bigint", "security-framework-sys", ] [[package]] name = "security-framework-sys" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", @@ -5090,77 +4108,61 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + 
"semver-parser", +] [[package]] -name = "serde" -version = "1.0.197" +name = "semver" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" -dependencies = [ - "serde_derive", -] +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] -name = "serde-value" +name = "semver-parser" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float 2.10.1", - "serde", -] +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] -name = "serde-wasm-bindgen" -version = "0.6.5" +name = "serde" +version = "1.0.200" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" +checksum = "ddc6f9cc94d67c0e21aaf7eda3a010fd3af78ebf6e096aa6e2e13c79749cce4f" dependencies = [ - "js-sys", - "serde", - "wasm-bindgen", + "serde_derive", ] [[package]] -name = "serde_derive" -version = "1.0.197" +name = "serde_bytes" +version = "0.11.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "serde", ] [[package]] -name = "serde_derive_internals" -version = "0.28.0" +name = "serde_derive" +version = "1.0.200" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" +checksum = "856f046b9400cee3c8c94ed572ecdb752444c24528c035cd35882aad6f492bcb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "serde_ignored" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8e319a36d1b52126a0d608f24e93b2d81297091818cd70625fcf50a15d84ddf" -dependencies = [ - "serde", + "syn 2.0.60", ] [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -5193,7 +4195,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5218,16 +4220,36 @@ dependencies = [ ] [[package]] -name = "serde_yaml" -version = "0.9.34+deprecated" +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling 0.13.4", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sha-1" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +checksum = 
"f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" dependencies = [ - "indexmap 2.2.6", - "itoa", - "ryu", - "serde", - "unsafe-libyaml", + "cfg-if", + "cpufeatures", + "digest", ] [[package]] @@ -5269,9 +4291,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] @@ -5323,12 +4345,6 @@ dependencies = [ "quote", ] -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - [[package]] name = "slab" version = "0.4.9" @@ -5346,19 +4362,23 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.5.6" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", - "windows-sys 0.52.0", + "winapi", ] [[package]] -name = "spin" -version = "0.5.2" +name = "socket2" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] [[package]] name = "spin" @@ -5396,10 +4416,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] -name = "static_assertions_next" -version = "1.1.2" +name = "strfmt" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" +checksum = "7a8348af2d9fc3258c8733b8d9d8db2e56f54b2363a4b5b81585c7875ed65e65" [[package]] name = "stringprep" @@ -5424,28 +4444,6 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "strum" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.58", -] - [[package]] name = "subtle" version = "2.5.0" @@ -5488,9 +4486,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.58" +version = "2.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" dependencies = [ "proc-macro2", "quote", @@ -5509,27 +4507,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "system-deps" version = "6.2.2" @@ -5543,6 +4520,18 @@ dependencies = [ "version-compare", ] +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "target-lexicon" version = "0.12.14" @@ -5563,22 +4552,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5591,48 +4580,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "tiff" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" -dependencies = [ - "flate2", - "jpeg-decoder", - "weezl", -] - -[[package]] -name = "tikv-jemalloc-ctl" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "619bfed27d807b54f7f776b9430d4f8060e66ee138a28632ca898584d462c31c" -dependencies = [ - "libc", - "paste", - "tikv-jemalloc-sys", -] - -[[package]] -name = "tikv-jemalloc-sys" -version = "0.5.4+5.3.0-patched" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9402443cb8fd499b6f327e40565234ff34dbda27460c5b47db0db77443dd85d1" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "tikv-jemallocator" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "965fe0c26be5c56c94e38ba547249074803efd52adfb66de62107d95aab3eaca" -dependencies = [ - "libc", - "tikv-jemalloc-sys", -] - [[package]] name = "time" version = "0.3.36" @@ -5693,7 +4640,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.48.0", ] @@ -5716,59 +4663,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "tokio-postgres" -version = "0.7.10" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" -dependencies = [ - "async-trait", - "byteorder", - "bytes", - "fallible-iterator", - "futures-channel", - "futures-util", - "log", - "parking_lot", - "percent-encoding", - "phf", - "pin-project-lite", - "postgres-protocol", - "postgres-types", - "rand", - "socket2", - "tokio", - "tokio-util", - "whoami", -] - -[[package]] -name = "tokio-postgres-rustls" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea13f22eda7127c827983bdaf0d7fff9df21c8817bab02815ac277a21143677" -dependencies = [ - "futures", - "ring 0.17.8", - "rustls 0.22.3", - "tokio", - "tokio-postgres", - "tokio-rustls 0.25.0", - "x509-certificate", -] - -[[package]] -name = "tokio-retry" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" -dependencies = [ - "pin-project", - "rand", - "tokio", + "syn 2.0.60", ] [[package]] @@ -5787,40 +4682,38 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ - "rustls 0.22.3", + "rustls 0.22.4", "rustls-pki-types", "tokio", ] [[package]] -name = "tokio-stream" -version = "0.1.15" +name = "tokio-rustls" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "futures-core", - "pin-project-lite", + "rustls 0.23.5", + "rustls-pki-types", "tokio", - "tokio-util", ] [[package]] -name = "tokio-tungstenite" -version = "0.21.0" +name = "tokio-stream" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ - "futures-util", - "log", + "futures-core", + "pin-project-lite", "tokio", - "tungstenite", ] [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", @@ -5828,7 +4721,6 @@ dependencies = [ "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -5840,7 +4732,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.9", + "toml_edit", ] [[package]] @@ -5854,26 +4746,15 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap 2.2.6", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.9" +version = "0.22.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e40bb779c5187258fd7aad0eb68cb8706a0a81fa712fbea808ab43c4b8374c4" +checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" dependencies = [ "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.6", + "winnow", ] 
[[package]] @@ -5894,11 +4775,8 @@ dependencies = [ "hyper-timeout", "percent-encoding", "pin-project", - "prost 0.12.4", - "rustls-pemfile 2.1.2", - "rustls-pki-types", + "prost", "tokio", - "tokio-rustls 0.25.0", "tokio-stream", "tower", "tower-layer", @@ -5916,23 +4794,7 @@ dependencies = [ "proc-macro2", "prost-build", "quote", - "syn 2.0.58", -] - -[[package]] -name = "totp-rs" -version = "5.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c4ae9724c5888c0417d2396037ed3b60665925624766416e3e342b6ba5dbd3f" -dependencies = [ - "base32", - "constant_time_eq", - "hmac", - "qrcodegen-image", - "sha1", - "sha2", - "url", - "urlencoding", + "syn 2.0.60", ] [[package]] @@ -5987,7 +4849,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6000,18 +4862,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "futures", - "futures-task", - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -6040,44 +4890,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "chrono", - "matchers", "nu-ansi-term", - "once_cell", - "regex", "serde", "serde_json", "sharded-slab", "smallvec", "thread_local", - "tracing", "tracing-core", "tracing-log", "tracing-serde", ] -[[package]] -name = "transmuxer" -version = "0.0.1" -dependencies = [ - "aac", - "amf0", - "av1", - "byteorder", - "bytes", - "bytesio", - "flv", - "h264", - "h265", - "mp4", - "serde", - "serde_json", -] - [[package]] name = "trust-dns-proto" -version = "0.23.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" dependencies = [ "async-trait", "cfg-if", @@ -6086,36 +4914,35 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna 0.4.0", + "idna 0.2.3", "ipnet", - "once_cell", + "lazy_static", + "log", "rand", "smallvec", "thiserror", "tinyvec", "tokio", - "tracing", "url", ] [[package]] name = "trust-dns-resolver" -version = "0.23.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" dependencies = [ "cfg-if", "futures-util", "ipconfig", + "lazy_static", + "log", "lru-cache", - "once_cell", "parking_lot", - "rand", "resolv-conf", "smallvec", "thiserror", "tokio", - "tracing", "trust-dns-proto", ] @@ -6126,45 +4953,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] -name = "tsify" -version = "0.4.5" -source = "git+https://github.com/ScuffleTV/tsify.git?branch=sisou/comments#e36e55bcf3c9ac7c1d8185e5ad994885f4a2eb46" +name = "tryhard" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9f0a709784e86923586cff0d872dba54cd2d2e116b3bc57587d15737cfce9d" dependencies = [ - "gloo-utils", - "serde", - 
"serde_json", - "tsify-macros", - "wasm-bindgen", + "futures", + "pin-project-lite", + "tokio", ] [[package]] -name = "tsify-macros" -version = "0.4.5" -source = "git+https://github.com/ScuffleTV/tsify.git?branch=sisou/comments#e36e55bcf3c9ac7c1d8185e5ad994885f4a2eb46" +name = "typed-builder" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" dependencies = [ "proc-macro2", "quote", - "serde_derive_internals", - "syn 2.0.58", -] - -[[package]] -name = "tungstenite" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" -dependencies = [ - "byteorder", - "bytes", - "data-encoding", - "http 1.1.0", - "httparse", - "log", - "rand", - "sha1", - "thiserror", - "url", - "utf-8", + "syn 1.0.109", ] [[package]] @@ -6173,27 +4980,6 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" -[[package]] -name = "ucd-trie" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" - -[[package]] -name = "ulid" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259" -dependencies = [ - "bytes", - "getrandom", - "postgres-types", - "rand", - "serde", - "uuid", - "web-time", -] - [[package]] name = "unicode-bidi" version = "0.3.15" @@ -6207,37 +4993,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] -name = "unicode-normalization" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "unsafe-libyaml" -version = "0.2.11" +name = "unicode-normalization" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] [[package]] -name = "untrusted" -version = "0.7.1" +name = "unicode-segmentation" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "untrusted" @@ -6263,12 +5031,6 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - [[package]] name = "utf8parse" version = "0.2.1" @@ -6320,266 +5082,6 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" -[[package]] -name = "video-api" -version = "0.0.1" -dependencies = [ - "anyhow", - "async-nats", - "async-stream", - "async-trait", - "base64 0.21.7", - "binary-helper", - "bytes", - "chrono", - "dotenvy", - "fred", - "futures", - "futures-util", - "hex", - "hmac", - "http 0.2.12", - "hyper 0.14.28", - "itertools 0.12.1", - "jwt-next", - "pb", - "postgres-from-row", - "prost 0.12.4", - "rand", - "rand_chacha", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "sha2", - "tokio", - "tokio-stream", - "tonic", - "tower", - "tracing", - "ulid", - "url", - "uuid", - "video-common", -] - -[[package]] -name = "video-cli" -version = "0.0.1" -dependencies = [ - "anyhow", - "async-nats", - "async-trait", - "base64 0.21.7", - "binary-helper", - "chrono", - "clap", - "fred", - "futures", - "futures-util", - "pb", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "serde_yaml", - "tokio", - "tonic", - "ulid", - "video-api", - "video-common", -] - -[[package]] -name = "video-common" -version = "0.0.1" -dependencies = [ - "async-nats", - "async-trait", - "bytes", - "chrono", - "futures", - "futures-util", - "pb", - "postgres-from-row", - "postgres-types", - "prost 0.12.4", - "scuffle-utils", - "serde", - "tokio", - "tokio-postgres", - "tracing", - "ulid", - "uuid", -] - -[[package]] -name = "video-edge" -version = "0.0.1" -dependencies = [ - "anyhow", - "async-nats", - "async-stream", - "async-trait", - "binary-helper", - "bytes", - "chrono", - "futures", - "futures-util", - "hmac", - "http-body-util", - "hyper 1.2.0", - "hyper-util", - "itertools 0.12.1", - "jwt-next", - "pb", - "postgres-from-row", - "prost 0.12.4", - "rustls 0.22.3", - "rustls-pemfile 2.1.2", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "sha2", - "thiserror", - "tokio", - "tokio-rustls 0.25.0", - "tokio-stream", - "tokio-util", - "tonic", - "tracing", - "ulid", - "url", - "uuid", - "video-common", - "video-player-types", -] - -[[package]] -name = "video-ingest" -version = "0.0.1" -dependencies = [ - "aac", - "anyhow", - "async-nats", - "async-stream", - "async-trait", - "base64 0.21.7", - "binary-helper", - "bytes", - "bytesio", - "chrono", - "default-net", - "dotenvy", - "flv", - "futures", - "futures-util", - "hyper 1.2.0", - "mp4", - "pb", - "portpicker", - "postgres-from-row", - "prost 0.12.4", - "rtmp", - "rustls 0.22.3", - "rustls-pemfile 2.1.2", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "tokio", - "tokio-rustls 0.25.0", - "tokio-stream", - "tonic", - "tracing", - "transmuxer", - "ulid", - "uuid", - "video-common", -] - -[[package]] -name = "video-player" -version = "0.0.1" -dependencies = [ - "bytes", - "bytesio", - "console_error_panic_hook", - "gloo-timers", - "h264", - "js-sys", - "mp4", - "serde", - "serde-wasm-bindgen", - "serde_json", - "serde_path_to_error", - "tokio", - "tracing", - "tracing-core", - "tracing-subscriber", - "tsify", - "ulid", - "url", - "video-player-types", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "video-player-types" -version = "0.0.1" -dependencies = [ - "serde", - "ulid", - "url", -] - -[[package]] -name = "video-transcoder" -version = "0.0.1" -dependencies = [ - "aac", - 
"anyhow", - "async-nats", - "async-stream", - "async-trait", - "aws-config", - "aws-sdk-s3", - "binary-helper", - "bytes", - "bytesio", - "chrono", - "dotenvy", - "ffmpeg", - "flv", - "futures", - "futures-util", - "hyper 1.2.0", - "image", - "mp4", - "pb", - "portpicker", - "prost 0.12.4", - "scuffle-config", - "scuffle-utils", - "serde", - "serde_json", - "sha2", - "tempfile", - "thiserror", - "tokio", - "tokio-stream", - "tokio-util", - "tonic", - "tracing", - "transmuxer", - "ulid", - "uuid", - "video-common", -] - [[package]] name = "vsimd" version = "0.8.0" @@ -6611,12 +5113,6 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -[[package]] -name = "wasite" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" - [[package]] name = "wasm-bindgen" version = "0.2.92" @@ -6638,7 +5134,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-shared", ] @@ -6672,7 +5168,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6694,20 +5190,19 @@ dependencies = [ ] [[package]] -name = "web-time" -version = "1.1.0" +name = "webpki-roots" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "webpki-roots" -version = "0.25.4" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +checksum = "b3de34ae270483955a94f4b21bdaaeb83d508bb84a01435f393818edb0012009" +dependencies = [ + "rustls-pki-types", +] [[package]] name = "weezl" @@ -6716,14 +5211,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" [[package]] -name = "whoami" -version = "1.5.1" +name = "which" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ - "redox_syscall", - "wasite", - "web-sys", + "either", + "home", + "once_cell", + "rustix", ] [[package]] @@ -6759,11 +5255,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -6772,22 +5268,13 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" -dependencies = [ - "windows-targets 0.48.5", -] - [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -6805,7 +5292,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -6825,17 +5312,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] @@ -6846,9 +5334,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] name = "windows_aarch64_msvc" @@ -6858,9 +5346,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" @@ -6870,9 +5358,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" @@ -6882,9 +5376,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" @@ -6894,9 +5388,9 @@ checksum = 
"53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = "windows_x86_64_gnullvm" @@ -6906,9 +5400,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" @@ -6918,55 +5412,46 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" -version = "0.5.40" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +checksum = "c3c52e9c97a68071b23e836c9380edae937f17b9c4667bd021973efc689f618d" dependencies = [ "memchr", ] [[package]] -name = "winnow" -version = "0.6.6" +name = "winreg" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "memchr", + "cfg-if", + "windows-sys 0.48.0", ] [[package]] name = "winreg" -version = "0.50.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] [[package]] -name = "x509-certificate" -version = "0.23.1" +name = "wyz" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66534846dec7a11d7c50a74b7cdb208b9a581cad890b7866430d438455847c85" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" dependencies = [ - "bcder", - "bytes", - "chrono", - "der 0.7.9", - "hex", - "pem", - "ring 0.17.8", - "signature 2.2.0", - "spki 0.7.3", - "thiserror", - "zeroize", + "tap", ] [[package]] @@ -6975,24 +5460,40 @@ version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" +[[package]] +name = "y4m" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448" + +[[package]] +name = "yuv" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7933ddf59021f0147c02986654b971ce2fbf04c5a7f1c92cd9ff738578b182" +dependencies = [ + "num-traits", + "rgb", +] + [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.7.33" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "087eca3c1eaf8c47b94d02790dd086cd594b912d2043d4de4bfdd466b3befb7c" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.7.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "6f4b6c273f496d8fd4eaf18853e6b448760225dc030ff2c485a786859aea6393" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -7012,14 +5513,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "zune-inflate" -version = "0.2.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" -dependencies = [ - "simd-adler32", + "syn 2.0.60", ] diff --git a/Cargo.toml b/Cargo.toml index 145c041a9..8b40b0762 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,22 +1,23 @@ [workspace] members = [ - "platform/api", - "platform/image_processor", - "video/edge", - "video/ingest", - "video/transcoder", - "video/lib/*", - "video/api", - "video/player", - "video/player_types", - "video/common", - "video/cli", - "binary-helper", - "utils", - "proto", - "config", - "config/derive", + # "platform/api", + "image-processor", + "image-processor/proto", + # "video/edge", + # "video/ingest", + # "video/transcoder", + # "video/lib/*", + # "video/api", + # "video/player", + # "video/player_types", + # "video/common", + # "video/cli", + # "binary-helper", + # "utils", + # "proto", + # "config", + # "config/derive", "ffmpeg", "foundations", "foundations/macros", @@ -52,7 +53,7 @@ h265 = { path = "video/lib/h265" } mp4 = { path = "video/lib/mp4" } rtmp = { path = "video/lib/rtmp" } transmuxer = { path = "video/lib/transmuxer" } -utils = { path = "utils", default-features = false, package = "scuffle-utils" } +scuffle-utils = { path = "utils", default-features = false } config = { path = "config", package = "scuffle-config" } pb = { path = "proto" } video-common = { path = "video/common" } @@ -62,12 +63,12 @@ video-edge = { path = "video/edge" } video-ingest = { path = "video/ingest" } video-transcoder = { path = "video/transcoder" } binary-helper = { path = "binary-helper" } -ffmpeg = { path = "ffmpeg" } +scuffle-ffmpeg = { path = "ffmpeg" } # These patches are pending PRs to the upstream crates # TODO: Remove these once the PRs are merged [patch.crates-io] # https://github.com/remkop22/postgres-from-row/pull/9 -postgres-from-row = { git = "https://github.com/ScuffleTV/postgres-from-row.git", branch = "troy/from_fn" } -# https://github.com/madonoharu/tsify/pull/32 -tsify = { git = "https://github.com/ScuffleTV/tsify.git", branch = "sisou/comments" } +# postgres-from-row = { git = "https://github.com/ScuffleTV/postgres-from-row.git", branch = "troy/from_fn" } +# # https://github.com/madonoharu/tsify/pull/32 +# tsify = { git = "https://github.com/ScuffleTV/tsify.git", branch = "sisou/comments" } diff --git a/binary-helper/Cargo.toml b/binary-helper/Cargo.toml index e1aa0d7fd..f4d264b1e 100644 --- a/binary-helper/Cargo.toml +++ b/binary-helper/Cargo.toml @@ -9,7 +9,7 @@ tracing = "0.1" thiserror = "1.0" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0.1", features = ["derive"] } -async-nats = 
"0.33" +async-nats = "0.34" ulid = "1.1" async-trait = "0.1" tonic = { version = "0.11", features = ["tls"] } @@ -17,10 +17,10 @@ anyhow = "1.0" tower-layer = "0.3" async-stream = "0.3" futures-util = "0.3" -rustls = "0.22" +rustls = "0.23" rustls-pemfile = "2.0" fred = { version = "8.0.0", features = ["enable-rustls", "sentinel-client", "dns"] } -tokio-postgres-rustls = "0.11" +tokio-postgres-rustls = "0.12" tracing-subscriber = { features = ["env-filter", "fmt", "json"], version = "0.3" } once_cell = "1.19" aws-config = { version = "1.1" } @@ -31,13 +31,14 @@ http-body = { version = "1.0.0"} hyper = "1" bytes = "1.0" pin-project = "1" +tokio-rustls = "0.25" tokio-postgres = { version = "0.7" } postgres-types = { version = "0.2", features = ["with-serde_json-1", "with-chrono-0_4", "derive"] } -deadpool-postgres = { version = "0.12" } +deadpool-postgres = { version = "0.13" } postgres-from-row = { version = "0.5" } prost = { version = "0.12" } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } pb = { workspace = true } diff --git a/binary-helper/src/global.rs b/binary-helper/src/global.rs index ed6a44b87..2a964708a 100644 --- a/binary-helper/src/global.rs +++ b/binary-helper/src/global.rs @@ -9,10 +9,10 @@ use fred::interfaces::ClientLike; use fred::types::ServerConfig; use hyper::StatusCode; use rustls::RootCertStore; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::http::RouteError; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::http::RouteError; use crate::config::{DatabaseConfig, NatsConfig, RedisConfig}; @@ -40,7 +40,7 @@ macro_rules! impl_global_traits { impl binary_helper::global::GlobalDb for $struct { #[inline(always)] - fn db(&self) -> &Arc { + fn db(&self) -> &Arc { &self.db } } @@ -50,7 +50,7 @@ macro_rules! 
impl_global_traits { } pub trait GlobalCtx { - fn ctx(&self) -> &utils::context::Context; + fn ctx(&self) -> &scuffle_utils::context::Context; } pub trait GlobalConfig { @@ -124,16 +124,16 @@ pub async fn setup_nats( Ok((nats, jetstream)) } -pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { +pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { let mut pg_config = config .uri - .parse::() + .parse::() .context("invalid database uri")?; pg_config.ssl_mode(if config.tls.is_some() { - utils::database::tokio_postgres::config::SslMode::Require + scuffle_utils::database::tokio_postgres::config::SslMode::Require } else { - utils::database::tokio_postgres::config::SslMode::Disable + scuffle_utils::database::tokio_postgres::config::SslMode::Disable }); let manager = if let Some(tls) = &config.tls { @@ -164,7 +164,7 @@ pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result anyhow::Result anyhow::Result, _>>()?; - let mut cert_store = RootCertStore::empty(); + let mut cert_store = tokio_rustls::rustls::RootCertStore::empty(); if let Some(ca_cert) = &tls.ca_cert { let ca_cert = tokio::fs::read(ca_cert).await.context("failed to read redis ca cert")?; let ca_certs = @@ -240,11 +240,13 @@ pub async fn setup_redis(config: &RedisConfig) -> anyhow::Result anyhow::Result diff --git a/dev/docker-compose.yml b/dev/docker-compose.yml index 252f62bca..a5d72dfda 100644 --- a/dev/docker-compose.yml +++ b/dev/docker-compose.yml @@ -3,15 +3,13 @@ version: "3.1" name: "db-scuffle-dev" services: - cockroach: - image: ghcr.io/scuffletv/ci/cockroach:latest + mongo: + image: mongo:latest pull_policy: "always" - command: start-single-node --insecure --advertise-addr=0.0.0.0 - volumes: - - cockroach:/cockroach/cockroach-data ports: - - "127.0.0.1:5432:26257" - - "127.0.0.1:8080:8080" + - "27111:27017" + volumes: + - mongo:/data/db nats: image: ghcr.io/scuffletv/ci/nats:latest @@ -33,8 +31,8 @@ services: - "127.0.0.1:9000:9000" - "127.0.0.1:9001:9001" environment: - - "MINIO_ACCESS_KEY=root" - - "MINIO_SECRET_KEY=scuffle123" + - "MINIO_ACCESS_KEY=minioadmin" + - "MINIO_SECRET_KEY=minioadmin" volumes: - minio:/data command: server /data --console-address ":9001" @@ -47,26 +45,13 @@ services: entrypoint: > /bin/sh -c " set -eux; - /usr/bin/mc config host add myminio http://minio:9000 root scuffle123; - /usr/bin/mc rb --force myminio/scuffle-video || true; - /usr/bin/mc rb --force myminio/scuffle-image-processor || true; - /usr/bin/mc rb --force myminio/scuffle-image-processor-public || true; - /usr/bin/mc mb myminio/scuffle-video; - /usr/bin/mc mb myminio/scuffle-image-processor; - /usr/bin/mc mb myminio/scuffle-image-processor-public; - /usr/bin/mc anonymous set download myminio/scuffle-video; - /usr/bin/mc anonymous set download myminio/scuffle-image-processor-public; + /usr/bin/mc config host add myminio http://minio:9000 minioadmin minioadmin; + /usr/bin/mc mb myminio/image-processor; + /usr/bin/mc anonymous set download myminio/image-processor; exit 0; " - redis: - image: ghcr.io/scuffletv/ci/redis:latest - pull_policy: "always" - ports: - - "127.0.0.1:6379:6379" - volumes: - cockroach: nats: minio: - redis: + mongo: diff --git a/ffmpeg/Cargo.toml b/ffmpeg/Cargo.toml index e0f1baf5e..7fd53411d 100644 --- a/ffmpeg/Cargo.toml +++ b/ffmpeg/Cargo.toml @@ -1,21 +1,19 @@ [package] -name = "ffmpeg" +name = "scuffle-ffmpeg" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" [dependencies] -ffmpeg-sys-next = "6.1" +ffmpeg-sys-next = "7" libc = "0.2" bytes = 
{ optional = true, version = "1" } tokio = { optional = true, version = "1" } crossbeam-channel = { optional = true, version = "0.5" } tracing = { optional = true, version = "0.1" } -utils = { workspace = true, optional = true } [features] default = [] -task-abort = ["dep:utils"] channel = ["dep:bytes"] tokio-channel = ["channel", "dep:tokio"] crossbeam-channel = ["channel", "dep:crossbeam-channel"] diff --git a/ffmpeg/src/decoder.rs b/ffmpeg/src/decoder.rs index 57add9ad8..f33e1b7c1 100644 --- a/ffmpeg/src/decoder.rs +++ b/ffmpeg/src/decoder.rs @@ -152,9 +152,6 @@ impl GenericDecoder { } pub fn send_packet(&mut self, packet: &Packet) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); - // Safety: `packet` is a valid pointer, and `self.decoder` is a valid pointer. let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), packet.as_ptr()) }; @@ -165,9 +162,6 @@ impl GenericDecoder { } pub fn send_eof(&mut self) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); - // Safety: `self.decoder` is a valid pointer. let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), std::ptr::null()) }; @@ -178,9 +172,6 @@ impl GenericDecoder { } pub fn receive_frame(&mut self) -> Result, FfmpegError> { - #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); - let mut frame = Frame::new()?; // Safety: `frame` is a valid pointer, and `self.decoder` is a valid pointer. diff --git a/ffmpeg/src/encoder.rs b/ffmpeg/src/encoder.rs index 5c0550661..7b699c019 100644 --- a/ffmpeg/src/encoder.rs +++ b/ffmpeg/src/encoder.rs @@ -426,9 +426,6 @@ impl Encoder { outgoing_time_base: AVRational, settings: impl Into, ) -> Result { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if codec.as_ptr().is_null() { return Err(FfmpegError::NoEncoder); } @@ -489,9 +486,6 @@ impl Encoder { } pub fn send_eof(&mut self) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `self.encoder` is a valid pointer. let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), std::ptr::null()) }; if ret == 0 { @@ -502,9 +496,6 @@ impl Encoder { } pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `self.encoder` and `frame` are valid pointers. 
let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), frame.as_ptr()) }; if ret == 0 { @@ -515,9 +506,6 @@ impl Encoder { } pub fn receive_packet(&mut self) -> Result, FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let mut packet = Packet::new()?; const AVERROR_EAGAIN: i32 = AVERROR(EAGAIN); @@ -631,9 +619,6 @@ impl MuxerEncoder { } pub fn send_eof(&mut self) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - self.encoder.send_eof()?; self.handle_packets()?; diff --git a/ffmpeg/src/filter_graph.rs b/ffmpeg/src/filter_graph.rs index 0db498737..0bf180e75 100644 --- a/ffmpeg/src/filter_graph.rs +++ b/ffmpeg/src/filter_graph.rs @@ -14,18 +14,12 @@ unsafe impl Send for FilterGraph {} impl FilterGraph { pub fn new() -> Result { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: the pointer returned from avfilter_graph_alloc is valid unsafe { Self::wrap(avfilter_graph_alloc()) } } /// Safety: `ptr` must be a valid pointer to an `AVFilterGraph`. unsafe fn wrap(ptr: *mut AVFilterGraph) -> Result { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - Ok(Self( SmartPtr::wrap_non_null(ptr, |ptr| unsafe { avfilter_graph_free(ptr) }).ok_or(FfmpegError::Alloc)?, )) @@ -40,9 +34,6 @@ impl FilterGraph { } pub fn add(&mut self, filter: Filter, name: &str, args: &str) -> Result, FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let name = CString::new(name).expect("failed to convert name to CString"); let args = CString::new(args).expect("failed to convert args to CString"); @@ -238,9 +229,6 @@ unsafe impl Send for FilterContextSource<'_> {} impl FilterContextSource<'_> { pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `frame` is a valid pointer, and `self.0` is a valid pointer. unsafe { match av_buffersrc_write_frame(self.0, frame.as_ptr()) { @@ -251,9 +239,6 @@ impl FilterContextSource<'_> { } pub fn send_eof(&mut self, pts: Option) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `self.0` is a valid pointer. unsafe { match if let Some(pts) = pts { @@ -275,9 +260,6 @@ unsafe impl Send for FilterContextSink<'_> {} impl FilterContextSink<'_> { pub fn receive_frame(&mut self) -> Result, FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let mut frame = Frame::new()?; // Safety: `frame` is a valid pointer, and `self.0` is a valid pointer. 
diff --git a/ffmpeg/src/io/internal.rs b/ffmpeg/src/io/internal.rs index 5d6cb4181..b9cedc2c2 100644 --- a/ffmpeg/src/io/internal.rs +++ b/ffmpeg/src/io/internal.rs @@ -112,9 +112,6 @@ impl Default for InnerOptions { impl Inner { pub fn new(data: T, options: InnerOptions) -> Result { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: av_malloc is safe to call let buffer = unsafe { SmartPtr::wrap_non_null(av_malloc(options.buffer_size), |ptr| { @@ -227,9 +224,6 @@ impl Inner<()> { } pub fn open_output(path: &str) -> Result { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let path = std::ffi::CString::new(path).expect("Failed to convert path to CString"); // Safety: avformat_alloc_output_context2 is safe to call diff --git a/ffmpeg/src/io/output.rs b/ffmpeg/src/io/output.rs index ddbfa2125..4eaad9891 100644 --- a/ffmpeg/src/io/output.rs +++ b/ffmpeg/src/io/output.rs @@ -149,9 +149,6 @@ impl Output { } pub fn add_stream(&mut self, codec: Option<*const AVCodec>) -> Option> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `avformat_new_stream` is safe to call. let stream = unsafe { avformat_new_stream(self.as_mut_ptr(), codec.unwrap_or_else(std::ptr::null)) }; if stream.is_null() { @@ -167,9 +164,6 @@ impl Output { } pub fn copy_stream<'a>(&'a mut self, stream: &Stream<'_>) -> Option> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let codec_param = stream.codec_parameters()?; // Safety: `avformat_new_stream` is safe to call. @@ -195,9 +189,6 @@ impl Output { } pub fn write_header(&mut self) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if self.witten_header { return Err(FfmpegError::Arguments("header already written")); } @@ -216,9 +207,6 @@ impl Output { } pub fn write_header_with_options(&mut self, options: &mut Dictionary) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if self.witten_header { return Err(FfmpegError::Arguments("header already written")); } @@ -237,9 +225,6 @@ impl Output { } pub fn write_trailer(&mut self) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); } @@ -254,9 +239,6 @@ impl Output { } pub fn write_interleaved_packet(&mut self, mut packet: Packet) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); } @@ -272,9 +254,6 @@ impl Output { } pub fn write_packet(&mut self, packet: &Packet) -> Result<(), FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); } diff --git a/ffmpeg/src/packet.rs b/ffmpeg/src/packet.rs index b6060c368..b2ad2137b 100644 --- a/ffmpeg/src/packet.rs +++ b/ffmpeg/src/packet.rs @@ -17,9 +17,6 @@ impl<'a> Packets<'a> { } pub fn receive(&mut self) -> Result, FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - let mut packet = Packet::new()?; // Safety: av_read_frame is safe to call, 'packet' is a valid pointer diff --git a/ffmpeg/src/scalar.rs 
b/ffmpeg/src/scalar.rs index 8ab06906d..34bae5f7c 100644 --- a/ffmpeg/src/scalar.rs +++ b/ffmpeg/src/scalar.rs @@ -87,9 +87,6 @@ impl Scalar { } pub fn process<'a>(&'a mut self, frame: &Frame) -> Result<&'a VideoFrame, FfmpegError> { - #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); - // Safety: `frame` is a valid pointer, and `self.ptr` is a valid pointer. let ret = unsafe { sws_scale( diff --git a/foundations/Cargo.toml b/foundations/Cargo.toml index a1744c3f3..e57109012 100644 --- a/foundations/Cargo.toml +++ b/foundations/Cargo.toml @@ -15,18 +15,15 @@ rand = { version = "0.8", optional = true } tracing = { version = "0.1", optional = true } tracing-subscriber = { version = "0.3", optional = true } -opentelemetry = { version = "0.22", optional = true } -opentelemetry_sdk = { version = "0.22", optional = true } -opentelemetry-otlp = { version = "0.15", optional = true, features = ["http-proto"]} +opentelemetry = { version = "0.23", optional = true } +opentelemetry_sdk = { version = "0.23", optional = true } +opentelemetry-otlp = { version = "0.16", optional = true, features = ["http-proto"]} -tikv-jemallocator = { version = "0.5", optional = true, features = ["unprefixed_malloc_on_supported_platforms"] } -tikv-jemalloc-ctl = { version = "0.5", optional = true } -jemalloc_pprof = { version = "0.1", optional = true } anyhow = { version = "1" } thread_local = { version = "1", optional = true } spin = { version = "0.9", optional = true } -itertools = { version = "0.12", optional = true } +itertools = { version = "0.13", optional = true } scuffle-foundations-macros = { path = "./macros", optional = true, version = "0.0.0" } @@ -40,7 +37,7 @@ once_cell = { version = "1", optional = true } scc = { version = "2", optional = true } serde = { version = "1", optional = true, features = ["derive", "rc"] } -serde_yaml = { version = "0.9", optional = true } +toml = { version = "0.8", optional = true } clap = { version = "4", optional = true } const-str = { version = "0.5", optional = true } @@ -58,12 +55,12 @@ tower = { version = "0.4", optional = true } hyper = { version = "1", optional = true } hyper-util = { version = "0.1", optional = true } http = { version = "1", optional = true } -h3 = { version = "0.0.4", optional = true } -h3-quinn = { version = "0.0.5", optional = true } +h3 = { version = "0.0.5", optional = true } +h3-quinn = { version = "0.0.6", optional = true } h3-webtransport = { version = "0.1.0", optional = true } -quinn = { version = "0.10", default-features = false, features = ["runtime-tokio", "tls-rustls", "ring" ], optional = true} -rustls = { version = "0.21.12", optional = true } -tokio-rustls = { version = "0.24.1", optional = true } +quinn = { version = "0.11", default-features = false, features = ["runtime-tokio", "rustls", "ring" ], optional = true } +rustls = { version = "0.23", optional = true } +tokio-rustls = { version = "0.26", optional = true } bytes = { version = "1", optional = true } thiserror = { version = "1", optional = true } http-body = { version = "1", optional = true } @@ -71,8 +68,6 @@ socket2 = { version = "0.5", optional = true } [features] -disable-jemalloc-config = [] - context = [ "pin-project", "tokio-util", @@ -109,16 +104,6 @@ runtime = [ "tokio/rt-multi-thread", ] -heap = [ - "_telemetry", - "tikv-jemallocator", -] - -pprof = [ - "pprof-cpu", - "pprof-heap", -] - pprof-cpu = [ "_telemetry", "dep:pprof", @@ -126,14 +111,6 @@ pprof-cpu = [ "flate2", ] -pprof-heap = [ - "_telemetry", - "heap", - "jemalloc_pprof", - 
"tikv-jemallocator/profiling", - "tikv-jemalloc-ctl", -] - opentelemetry = [ "_telemetry", "itertools", @@ -183,7 +160,7 @@ health-check = [ settings = [ "serde", - "serde_yaml", + "toml", "macros", "humantime-serde", ] @@ -258,10 +235,7 @@ http3-webtransport = [ default = [ "opentelemetry", "runtime", - "heap", "pprof-cpu", - "pprof-heap", - "pprof", "macros", "logging", "env-filter", diff --git a/foundations/examples/Cargo.toml b/foundations/examples/Cargo.toml index 29e8e29e0..954eac60d 100644 --- a/foundations/examples/Cargo.toml +++ b/foundations/examples/Cargo.toml @@ -29,14 +29,14 @@ hyper = { version = "1", features = ["full"] } hyper-util = { version = "0.1", features = ["tokio", "http2"] } socket2 = "0.5" http-body-util = "0.1" -opentelemetry = { version = "0.22" } +opentelemetry = { version = "0.23" } rand = "0.8" -rustls-pemfile = { version = "1.0.4" } -rustls = "0.21" +rustls-pemfile = { version = "2" } +rustls = "0.23" futures = "0.3.21" tower = "0.4" -quinn = "0.10" +quinn = "0.11" axum = "0.7" -h3 = "0.0.4" +h3 = "0.0.5" # h3-webtransport = "0.1" -h3-quinn = "0.0.5" +h3-quinn = "0.0.6" diff --git a/foundations/examples/src/generics.rs b/foundations/examples/src/generics.rs index fec473ac8..ab8d3c7af 100644 --- a/foundations/examples/src/generics.rs +++ b/foundations/examples/src/generics.rs @@ -16,5 +16,5 @@ pub struct ExtraSettings { } fn main() { - println!("{}", BaseSettings::::default().to_yaml_string().unwrap()); + println!("{}", BaseSettings::::default().to_docs_string().unwrap()); } diff --git a/foundations/examples/src/http-server.rs b/foundations/examples/src/http-server.rs index ac650465d..5a36cd82d 100644 --- a/foundations/examples/src/http-server.rs +++ b/foundations/examples/src/http-server.rs @@ -4,12 +4,12 @@ use axum::body::Body; use axum::extract::Request; use axum::response::{IntoResponse, Response}; use hyper::{StatusCode, Uri}; -use rustls::{Certificate, PrivateKey}; +use quinn::crypto::rustls::QuicServerConfig; use scuffle_foundations::bootstrap::{bootstrap, Bootstrap, RuntimeSettings}; use scuffle_foundations::http::server::stream::{IncomingConnection, MakeService, ServiceHandler, SocketKind}; use scuffle_foundations::http::server::Server; use scuffle_foundations::settings::auto_settings; -use scuffle_foundations::settings::cli::{Matches, clap}; +use scuffle_foundations::settings::cli::{clap, Matches}; use scuffle_foundations::telemetry::settings::TelemetrySettings; use tokio::signal::unix::SignalKind; @@ -37,20 +37,22 @@ impl Bootstrap for HttpServerSettings { fn additional_args() -> Vec { vec![ - clap::Arg::new("tls-cert") - .long("tls-cert") - .value_name("FILE"), - clap::Arg::new("tls-key") - .long("tls-key") - .value_name("FILE"), + clap::Arg::new("tls-cert").long("tls-cert").value_name("FILE"), + clap::Arg::new("tls-key").long("tls-key").value_name("FILE"), ] } } #[bootstrap] async fn main(settings: Matches) { - let tls_cert = settings.args.get_one::("tls-cert").or(settings.settings.tls_cert.as_ref()); - let tls_key = settings.args.get_one::("tls-key").or(settings.settings.tls_key.as_ref()); + let tls_cert = settings + .args + .get_one::("tls-cert") + .or(settings.settings.tls_cert.as_ref()); + let tls_key = settings + .args + .get_one::("tls-key") + .or(settings.settings.tls_key.as_ref()); let Some((tls_cert, tls_key)) = tls_cert.zip(tls_key) else { panic!("TLS certificate and key are required"); @@ -61,28 +63,23 @@ async fn main(settings: Matches) { // Test TLS let certs = rustls_pemfile::certs(&mut std::io::BufReader::new(cert)) - .unwrap() - 
.into_iter() - .map(Certificate) - .collect::>(); + .collect::, _>>() + .unwrap(); let key = rustls_pemfile::pkcs8_private_keys(&mut std::io::BufReader::new(key)) + .next() .unwrap() - .remove(0); + .unwrap(); - let mut tls_config = rustls::ServerConfig::builder() - .with_safe_default_cipher_suites() - .with_safe_default_kx_groups() - .with_protocol_versions(&[&rustls::version::TLS13]) - .unwrap() + let mut tls_config = rustls::ServerConfig::builder_with_protocol_versions(&[&rustls::version::TLS13]) .with_no_client_auth() - .with_single_cert(certs, PrivateKey(key)) + .with_single_cert(certs, key.into()) .unwrap(); tls_config.max_early_data_size = u32::MAX; tls_config.alpn_protocols = vec![b"h3".to_vec()]; - let server_config = quinn::ServerConfig::with_crypto(Arc::new(tls_config.clone())); + let server_config = quinn::ServerConfig::with_crypto(Arc::new(QuicServerConfig::try_from(tls_config.clone()).unwrap())); #[derive(Debug, Clone)] struct ServiceFactory;
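The hunk above captures the rustls 0.21 → 0.23 and quinn 0.10 → 0.11 migration in the example server: rustls-pemfile 2.x returns iterators (so `collect()`/`next()` replace the old `Vec` handling and `remove(0)`), the key converts via `key.into()`, and quinn no longer accepts a rustls config directly. A condensed, self-contained sketch of the same pattern (function name and PEM sources are illustrative):

```rust
use std::sync::Arc;

use quinn::crypto::rustls::QuicServerConfig;

// `cert_pem`/`key_pem` are raw PEM bytes; where they come from is up to the caller.
fn tls_configs(cert_pem: &[u8], key_pem: &[u8]) -> (rustls::ServerConfig, quinn::ServerConfig) {
    // rustls-pemfile 2.x yields an iterator of Results, so collect()/next()
    // replace the old Vec<Certificate> handling.
    let certs = rustls_pemfile::certs(&mut std::io::BufReader::new(cert_pem))
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    let key = rustls_pemfile::pkcs8_private_keys(&mut std::io::BufReader::new(key_pem))
        .next()
        .expect("no PKCS#8 key found")
        .unwrap();

    let mut tls = rustls::ServerConfig::builder_with_protocol_versions(&[&rustls::version::TLS13])
        .with_no_client_auth()
        .with_single_cert(certs, key.into()) // PrivatePkcs8KeyDer -> PrivateKeyDer
        .unwrap();
    tls.alpn_protocols = vec![b"h3".to_vec()];

    // quinn 0.11 requires the rustls config to be wrapped in a QuicServerConfig.
    let quic = quinn::ServerConfig::with_crypto(Arc::new(QuicServerConfig::try_from(tls.clone()).unwrap()));
    (tls, quic)
}
```

diff --git a/foundations/examples/src/http.rs b/foundations/examples/src/http.rs index 6de7e2ce5..caf883b5b 100644 --- a/foundations/examples/src/http.rs +++ b/foundations/examples/src/http.rs @@ -147,9 +147,9 @@ fn map_response(result: Result, Infallible>) -> Result, optional: bool, @@ -51,16 +52,6 @@ impl Options { } } -impl Default for Options { - fn default() -> Self { - Options { - crate_path: None, - optional: false, - builder: None, - } - } -} - impl Parse for Options { fn parse(input: syn::parse::ParseStream) -> syn::Result { if input.is_empty() { @@ -361,15 +352,13 @@ fn metric_function( #constructor() } } + } else if has_args { + quote::quote! { + #crate_path::telemetry::metrics::serde::Family::default() + } } else { - if has_args { - quote::quote! { - #crate_path::telemetry::metrics::serde::Family::default() - } - } else { - quote::quote! { - Default::default() - } + quote::quote!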
{ + Default::default() } }; diff --git a/foundations/macros/src/settings/types/mod.rs b/foundations/macros/src/settings/types/mod.rs index 8db03813d..63d56c6d5 100644 --- a/foundations/macros/src/settings/types/mod.rs +++ b/foundations/macros/src/settings/types/mod.rs @@ -53,11 +53,11 @@ trait Args: Default { Self: Sized, { attrs - .into_iter() + .iter() .filter(|a| a.path().is_ident("settings")) .try_fold(Self::default(), |mut state, attr| { let Meta::List(meta) = &attr.meta else { - return Err(syn::Error::new_spanned(&attr, "expected #[settings(...)]")); + return Err(syn::Error::new_spanned(attr, "expected #[settings(...)]")); }; let parsed = meta.parse_args_with(Punctuated::::parse_terminated)?; @@ -111,7 +111,7 @@ impl Args for GlobalArgs { }) = &meta.value { self.crate_path = - syn::parse_str(&lit.value()).map_err(|_| syn::Error::new_spanned(&lit, "expected valid path"))?; + syn::parse_str(&lit.value()).map_err(|_| syn::Error::new_spanned(lit, "expected valid path"))?; Ok(true) } else { Err(syn::Error::new_spanned(&meta.value, "expected string")) diff --git a/foundations/macros/src/settings/types/serde.rs b/foundations/macros/src/settings/types/serde.rs index 211227338..c58d280e0 100644 --- a/foundations/macros/src/settings/types/serde.rs +++ b/foundations/macros/src/settings/types/serde.rs @@ -37,7 +37,7 @@ impl FromStr for RenameAll { impl RenameAll { /// #[serde(rename_all = "name")] or #[serde(rename_all(serialize = "name", deserialize = "name"))] pub fn parse(attr: &[syn::Attribute]) -> syn::Result> { - Ok(parse_serde_attrs(attr, None, |state, meta| match &meta { + parse_serde_attrs(attr, None, |state, meta| match &meta { Meta::NameValue(meta) if meta.path.is_ident("rename_all") => { if let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(lit), .. @@ -52,7 +52,7 @@ impl RenameAll { } _ => {} })? - .transpose()?) 
+ .transpose() } pub fn apply(&self, name: &str) -> String { diff --git a/foundations/src/heap.rs b/foundations/src/heap.rs deleted file mode 100644 index 931627e78..000000000 --- a/foundations/src/heap.rs +++ /dev/null @@ -1,2 +0,0 @@ -#[global_allocator] -static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; diff --git a/foundations/src/http/server/mod.rs b/foundations/src/http/server/mod.rs index 6eadd26ee..d8f2f290f 100644 --- a/foundations/src/http/server/mod.rs +++ b/foundations/src/http/server/mod.rs @@ -5,7 +5,6 @@ mod builder; pub mod stream; pub use axum; - use hyper_util::rt::TokioExecutor; #[cfg(not(feature = "runtime"))] use tokio::spawn; @@ -109,7 +108,7 @@ fn make_tcp_listener(addr: SocketAddr) -> std::io::Result Server { let make_service = self.make_service.clone(); let backend = TlsBackend::new(tcp_listener, acceptor.clone(), self.http1_2.clone(), &ctx); let span = tracing::info_span!("tls", addr = %self.bind, worker = i); - self.backends.push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); + self.backends + .push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); } } else if self.insecure_bind.is_none() { self.insecure_bind = Some(self.bind); @@ -162,7 +162,8 @@ impl Server { let make_service = self.make_service.clone(); let backend = TcpBackend::new(tcp_listener, self.http1_2.clone(), &ctx); let span = tracing::info_span!("tcp", addr = %addr, worker = i); - self.backends.push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); + self.backends + .push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); } } @@ -179,7 +180,8 @@ impl Server { let make_service = self.make_service.clone(); let backend = QuicBackend::new(endpoint, quic.h3.clone(), &ctx); let span = tracing::info_span!("quic", addr = %self.bind, worker = i); - self.backends.push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); + self.backends + .push(AbortOnDrop::new(spawn(backend.serve(make_service).instrument(span)))); } } @@ -202,7 +204,11 @@ impl Server { binds.push(format!("https+quic://{}", self.bind)); } - tracing::info!(worker_count = self.worker_count, "listening on {binds}", binds = binds.join(", ")); + tracing::info!( + worker_count = self.worker_count, + "listening on {binds}", + binds = binds.join(", ") + ); Ok(()) } diff --git a/foundations/src/http/server/stream/mod.rs b/foundations/src/http/server/stream/mod.rs index c7f24729e..4e3aa743a 100644 --- a/foundations/src/http/server/stream/mod.rs +++ b/foundations/src/http/server/stream/mod.rs @@ -6,9 +6,9 @@ pub mod tls; use std::convert::Infallible; +pub use axum::body::Body; pub use axum::extract::Request; pub use axum::response::{IntoResponse, Response}; -pub use axum::body::Body; use super::Error; diff --git a/foundations/src/http/server/stream/quic.rs b/foundations/src/http/server/stream/quic.rs index 5c74247ee..0b8da32b5 100644 --- a/foundations/src/http/server/stream/quic.rs +++ b/foundations/src/http/server/stream/quic.rs @@ -82,6 +82,8 @@ impl Backend for QuicBackend { break; }; + let connection = connection.accept()?; + let span = tracing::trace_span!("connection", remote_addr = %connection.remote_address()); let _guard = span.enter(); tracing::trace!("connection accepted"); @@ -219,7 +221,7 @@ impl Connection { tokio::spawn( async move { if let Err(err) = serve_request(&service, request, stream).await { - service.on_error(err.into()).await; + service.on_error(err).await; } drop(ctx); @@ -254,7 +256,6 @@ async fn 
serve_request(service: &impl ServiceHandler, request: Request, mut stre tracing::trace!(?parts, "sending response"); send.send_response(Response::from_parts(parts, ())).await?; - let mut body = std::pin::pin!(body); tracing::trace!("sending response body"); @@ -274,10 +275,10 @@ async fn serve_request(service: &impl ServiceHandler, request: Request, mut stre } None => { send.finish().await?; - }, + } } } - + tracing::trace!("response body finished"); Ok(()) diff --git a/foundations/src/http/server/stream/tcp.rs b/foundations/src/http/server/stream/tcp.rs index cd2c5c147..37698707d 100644 --- a/foundations/src/http/server/stream/tcp.rs +++ b/foundations/src/http/server/stream/tcp.rs @@ -151,7 +151,8 @@ impl Connection { let resp = service.on_request(req.map(Body::new)).await.into_response(); drop(ctx); Ok::<_, Infallible>(resp) - }.instrument(span.clone()) + } + .instrument(span.clone()) }) }; @@ -165,11 +166,10 @@ impl Connection { } }; - if let Err(err) = r { self.service.on_error(err.into()).await; } - + self.service.on_close().await; tracing::trace!("connection closed"); } diff --git a/foundations/src/http/server/stream/tls.rs b/foundations/src/http/server/stream/tls.rs index ad7188df2..a300a345d 100644 --- a/foundations/src/http/server/stream/tls.rs +++ b/foundations/src/http/server/stream/tls.rs @@ -174,7 +174,8 @@ impl Connection { let resp = service.on_request(req.map(Body::new)).await.into_response(); drop(ctx); Ok::<_, Infallible>(resp) - }.instrument(span.clone()) + } + .instrument(span.clone()) }) }; @@ -191,7 +192,7 @@ impl Connection { if let Err(err) = r { self.service.on_error(err.into()).await; } - + self.service.on_close().await; tracing::trace!("connection closed"); } diff --git a/foundations/src/lib.rs b/foundations/src/lib.rs index 6b7a52fef..f06b39044 100644 --- a/foundations/src/lib.rs +++ b/foundations/src/lib.rs @@ -1,9 +1,6 @@ #[cfg(feature = "runtime")] pub mod runtime; -#[cfg(feature = "heap")] -pub mod heap; - #[cfg(feature = "macros")] pub use scuffle_foundations_macros::wrapped; diff --git a/foundations/src/settings/cli.rs b/foundations/src/settings/cli.rs index 92bfafce3..5596ef8db 100644 --- a/foundations/src/settings/cli.rs +++ b/foundations/src/settings/cli.rs @@ -71,7 +71,7 @@ impl Cli { self } - fn load_file(file: &str, optional: bool) -> anyhow::Result> { + fn load_file(file: &str, optional: bool) -> anyhow::Result> { let contents = match std::fs::read_to_string(file) { Ok(contents) => contents, Err(err) => { @@ -84,7 +84,7 @@ impl Cli { }; let incoming = - serde_yaml::from_str(&contents).with_context(|| format!("Error parsing configuration file: {file}"))?; + toml::from_str(&contents).with_context(|| format!("Error parsing configuration file: {file}"))?; Ok(Some(incoming)) } @@ -97,7 +97,7 @@ impl Cli { .settings .parse() .context("failed to construct settings")? - .to_yaml_string() + .to_docs_string() .context("failed to serialize settings")?; std::fs::write(file, settings).with_context(|| format!("Error writing configuration file: {file}"))?; println!("Generated configuration file: {file}"); @@ -116,7 +116,7 @@ impl Cli { for (file, optional) in files { if let Some(value) = Self::load_file(&file, optional)? 
{ - self.settings.merge(value).context("failed to merge configuration file")?; + self.settings.merge(value); } } diff --git a/foundations/src/settings/mod.rs b/foundations/src/settings/mod.rs index 12b20dd54..1d24e4105 100644 --- a/foundations/src/settings/mod.rs +++ b/foundations/src/settings/mod.rs @@ -1,9 +1,3 @@ -use std::borrow::Cow; -use std::collections::HashMap; - -use serde_yaml::value::Tag; -use serde_yaml::Value; - #[cfg(feature = "cli")] pub mod cli; @@ -12,133 +6,55 @@ pub use scuffle_foundations_macros::{auto_settings, Settings}; #[derive(Debug, Clone)] pub struct SettingsParser { - root: serde_yaml::Value, + root: Option, _marker: std::marker::PhantomData, } -enum MergeDirective { - Unset, - Replace, - Merge, -} - -impl MergeDirective { - fn from_tag(tag: &Tag) -> Self { - if tag == "!replace" { - Self::Replace - } else if tag == "!merge" { - Self::Merge - } else { - Self::Unset - } - } -} - impl SettingsParser { - pub fn new(default: &S) -> serde_yaml::Result + pub fn new(default: &S) -> Result where S: serde::Serialize, { Ok(Self { - root: serde_yaml::to_value(default)?, + root: Some(toml::Value::try_from(default)?), _marker: std::marker::PhantomData, }) } - fn merge(&mut self, mut incoming: serde_yaml::Value) -> serde_yaml::Result<()> { - self.root.apply_merge()?; - incoming.apply_merge()?; - - let root = std::mem::take(&mut self.root); - self.root = self.merge_loop(root, incoming, MergeDirective::Unset); - Ok(()) + fn merge(&mut self, incoming: toml::Value) { + let root = self.root.take().unwrap(); + self.root = Some(self.merge_loop(root, incoming)); } - fn merge_loop(&self, root: serde_yaml::Value, incoming: serde_yaml::Value, merge: MergeDirective) -> serde_yaml::Value { + fn merge_loop(&self, root: toml::Value, incoming: toml::Value) -> toml::Value { match (root, incoming) { - (serde_yaml::Value::Mapping(mut first_map), serde_yaml::Value::Mapping(second_map)) => { + (toml::Value::Table(mut first_map), toml::Value::Table(second_map)) => { for (key, value) in second_map { - // If the key is tagged we should process it - let (key, merge) = match key { - serde_yaml::Value::Tagged(tagged) => (tagged.value, MergeDirective::from_tag(&tagged.tag)), - _ => (key, MergeDirective::Unset), - }; - let combined_value = if let Some(existing_value) = first_map.remove(&key) { - if matches!(merge, MergeDirective::Replace) { - value - } else { - self.merge_loop(existing_value, value, merge) - } + self.merge_loop(existing_value, value) } else { value }; first_map.insert(key, combined_value); } - serde_yaml::Value::Mapping(first_map) - } - (serde_yaml::Value::Sequence(mut first_seq), serde_yaml::Value::Sequence(second_seq)) => { - if matches!(merge, MergeDirective::Merge) { - first_seq.extend(second_seq); - } else { - first_seq = second_seq; - } - serde_yaml::Value::Sequence(first_seq) - } - (first, serde_yaml::Value::Tagged(tagged)) => self.handle_tagged(first, *tagged, merge), - (_, second) => second, - } - } - fn handle_tagged( - &self, - first: serde_yaml::Value, - tagged: serde_yaml::value::TaggedValue, - merge: MergeDirective, - ) -> serde_yaml::Value { - // If the tag is replace it doesn't matter what the first value is - // we just return the tagged value - let merge = match (merge, MergeDirective::from_tag(&tagged.tag)) { - (MergeDirective::Unset, merge) => merge, - (merge, _) => merge, - }; - if matches!(merge, MergeDirective::Replace) { - return tagged.value; - } - // If the first value is tagged then we should compare the tags - // and act accordingly - if let 
serde_yaml::Value::Tagged(first_tagged) = first { if first_tagged.tag == tagged.tag { let value = self.merge_loop(first_tagged.value, tagged.value, merge); // Retag the value return serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { tag: first_tagged.tag, value, })); } else { return serde_yaml::Value::Tagged(Box::new(tagged)); + toml::Value::Table(first_map) } - } - - // Otherwise we do not merge and retag the value - let value = self.merge_loop(first, tagged.value, merge); - if matches!(MergeDirective::from_tag(&tagged.tag), MergeDirective::Unset) { - serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { tag: tagged.tag, value })) - } else { - value + (_, second) => second, } } - pub fn merge_str(&mut self, s: &str) -> serde_yaml::Result<()> { - let incoming = serde_yaml::from_str(s)?; - self.merge(incoming) + pub fn merge_str(&mut self, s: &str) -> Result<(), toml::de::Error> { + let incoming = toml::from_str(s)?; + self.merge(incoming); + Ok(()) } - pub fn parse(self) -> serde_yaml::Result + pub fn parse(self) -> Result where for<'de> S: serde::Deserialize<'de>, { - serde_yaml::from_value(self.root) + self.root.unwrap().try_into() } } @@ -148,150 +64,9 @@ pub use traits::{Settings, Wrapped}; -/// Converts a settings struct to a YAML string including doc comments. -/// If you want to provide doc comments for keys use to_yaml_string_with_docs. -pub fn to_yaml_string(settings: &T) -> Result { - to_yaml_string_with_docs(settings, &settings.docs()) -} - -type CowStr = Cow<'static, str>; -type DocMap = HashMap, Cow<'static, [CowStr]>>; - -/// Serializes a struct to YAML with documentation comments. -/// Documentation comments are provided in a DocMap. -pub fn to_yaml_string_with_docs(settings: &T, docs: &DocMap) -> Result { - let data = serde_yaml::to_value(settings)?; - let mut result = String::new(); - convert_recursive(docs, &mut Vec::new(), &data, &mut result, 0); - - if result.ends_with("\n\n") { - result.pop(); - } else if !result.ends_with('\n') { - result.push('\n'); - } - - Ok(result) -} - -macro_rules! push_indent { - ($result:expr, $indent:expr) => {{ - for _ in 0..$indent { - $result.push(' '); - } - }}; -} - -macro_rules! push_docs { - ($result:expr, $docs:expr, $stack:expr, $indent:expr) => {{ - $docs.get($stack).into_iter().flat_map(|s| s.iter()).for_each(|doc| { - push_indent!($result, $indent); - $result.push_str("# "); - $result.push_str(doc); - push_new_line!($result); - }); - }}; -} - -macro_rules! push_key { - ($result:expr, $key:expr, $indent:expr) => {{ - push_indent!($result, $indent); - $result.push_str($key); - $result.push_str(":"); - }}; -} - -macro_rules!
push_new_line { - ($result:expr) => {{ - if !$result.ends_with('\n') { - $result.push('\n'); - } - }}; +pub fn to_docs_string(settings: &T) -> Result { + toml::to_string_pretty(settings) } -fn convert_recursive(docs: &DocMap, stack: &mut Vec, value: &Value, result: &mut String, indent: usize) { - // Append doc comments at the current level - if matches!(value, Value::Mapping(_) | Value::Sequence(_)) { - stack.push(">".into()); - push_docs!(result, docs, stack, indent); - stack.pop(); - } - - match value { - Value::Mapping(map) => { - for (key, val) in map { - let key_str = key.as_str().unwrap_or_default(); - stack.push(Cow::from(key_str.to_owned())); - - push_docs!(result, docs, stack, indent); - push_key!(result, key_str, indent); - - // We dont want to push a new line if the item is a Tagged value - if matches!(val, Value::Mapping(_) | Value::Sequence(_)) { - push_new_line!(result); - } - - convert_recursive(docs, stack, val, result, indent + 2); - - push_new_line!(result); - - if (val.is_mapping() || val.is_sequence()) && !result.ends_with("\n\n") { - result.push('\n'); - } - - stack.pop(); - } - - if map.is_empty() { - if result.ends_with('\n') { - result.pop(); - } - result.push_str(" {}"); - } - } - Value::Sequence(seq) => { - for (idx, val) in seq.iter().enumerate() { - stack.push(Cow::from(idx.to_string())); - - push_docs!(result, docs, stack, indent); - - push_indent!(result, indent); - result.push('-'); - - if val.is_sequence() { - push_new_line!(result); - } - - convert_recursive(docs, stack, val, result, indent + 2); - - stack.pop(); - - push_new_line!(result); - } - - if seq.is_empty() { - if result.ends_with('\n') { - result.pop(); - } - result.push_str(" []"); - } - } - Value::Tagged(tagged) => { - result.push(' '); - result.push_str(&tagged.tag.to_string()); - - if tagged.value.is_mapping() || tagged.value.is_sequence() { - push_new_line!(result); - } - - convert_recursive(docs, stack, &tagged.value, result, indent); - } - _ => { - result.push(' '); - result.push_str(serde_yaml::to_string(value).unwrap_or_default().trim_end()); - // TODO(troy): figure out a way to do sub-docs for scalars so that - // the format isnt so janky - - // stack.push(">".into()); - // push_docs!(result, docs, stack, indent); - // stack.pop(); - } - } -} +// type CowStr = Cow<'static, str>; +// type DocMap = HashMap, Cow<'static, [CowStr]>>; diff --git a/foundations/src/settings/traits.rs b/foundations/src/settings/traits.rs index 1c1faacc6..889dd9ad8 100644 --- a/foundations/src/settings/traits.rs +++ b/foundations/src/settings/traits.rs @@ -6,7 +6,7 @@ use std::borrow::Cow; use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, LinkedList, VecDeque}; use std::hash::Hash; -use super::to_yaml_string; +use super::to_docs_string; pub trait Settings { #[doc(hidden)] @@ -24,11 +24,11 @@ pub trait Settings { docs } - fn to_yaml_string(&self) -> Result + fn to_docs_string(&self) -> Result where Self: serde::Serialize + Sized, { - to_yaml_string(self) + to_docs_string(self) } } diff --git a/foundations/src/telemetry/env_filter/env/builder.rs b/foundations/src/telemetry/env_filter/env/builder.rs index 46b433c87..d7db58fd8 100644 --- a/foundations/src/telemetry/env_filter/env/builder.rs +++ b/foundations/src/telemetry/env_filter/env/builder.rs @@ -207,45 +207,21 @@ impl Builder { } if !disabled.is_empty() { - #[cfg(feature = "nu_ansi_term")] - use nu_ansi_term::{Color, Style}; // NOTE: We can't use a configured `MakeWriter` because the EnvFilter // has no knowledge of any underlying subscriber or 
collector, which // may or may not use a `MakeWriter`. let warn = |msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] let msg = format!("warning: {}", msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - let bold = Style::new().bold(); - let mut warning = Color::Yellow.paint("warning"); - warning.style_ref_mut().is_bold = true; - format!("{}{} {}", warning, bold.paint(":"), bold.paint(msg)) - }; eprintln!("{}", msg); }; let ctx_prefixed = |prefix: &str, msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] let msg = format!("{} {}", prefix, msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - let mut equal = Color::Fixed(21).paint("="); // dark blue - equal.style_ref_mut().is_bold = true; - format!(" {} {} {}", equal, Style::new().bold().paint(prefix), msg) - }; eprintln!("{}", msg); }; let ctx_help = |msg| ctx_prefixed("help:", msg); let ctx_note = |msg| ctx_prefixed("note:", msg); let ctx = |msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] let msg = format!("note: {}", msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - let mut pipe = Color::Fixed(21).paint("|"); - pipe.style_ref_mut().is_bold = true; - format!(" {} {}", pipe, msg) - }; eprintln!("{}", msg); }; warn("some trace filter directives would enable traces that are disabled statically"); diff --git a/foundations/src/telemetry/opentelemetry/node.rs b/foundations/src/telemetry/opentelemetry/node.rs index f26471e4d..2495057ad 100644 --- a/foundations/src/telemetry/opentelemetry/node.rs +++ b/foundations/src/telemetry/opentelemetry/node.rs @@ -281,7 +281,7 @@ impl SpanNode { span.parent_span_id = self.mapped_parent_id.unwrap_or(SpanId::INVALID); span.span_context = SpanContext::new(self.trace_id, self.mapped_id, TraceFlags::SAMPLED, false, TraceState::NONE); span.events.events = self.events.into_iter().map(|e| e.into_data()).collect(); - span.links.links = self.links.into_iter().map(|link| Link::new(link, Vec::new())).collect(); + span.links.links = self.links.into_iter().map(|link| Link::new(link, Vec::new(), 0)).collect(); span } diff --git a/foundations/src/telemetry/pprof/heap.rs b/foundations/src/telemetry/pprof/heap.rs deleted file mode 100644 index 37878ef5d..000000000 --- a/foundations/src/telemetry/pprof/heap.rs +++ /dev/null @@ -1,38 +0,0 @@ -use anyhow::Context; - -#[allow(non_upper_case_globals)] -#[export_name = "malloc_conf"] -#[cfg(not(feature = "disable-jemalloc-config"))] -pub static malloc_conf: &[u8] = b"prof:true,prof_active:true,lg_prof_sample:19,abort_conf:true\0"; - -pub struct Heap; - -impl Default for Heap { - fn default() -> Self { - Self::new() - } -} - -impl Heap { - pub fn new() -> Self { - Self - } - - /// Capture a heap profile for the given duration. - /// The profile can be analyzed using the `pprof` tool. - /// Warning: This method is blocking and may take a long time to complete. - /// It is recommended to run it in a separate thread. - pub fn capture(&mut self) -> anyhow::Result> { - let mut profiler = jemalloc_pprof::PROF_CTL - .as_ref() - .ok_or_else(|| anyhow::anyhow!("jemalloc profiling is not available"))?
- .blocking_lock(); - - if !profiler.activated() { - // profiler.deactivate().context("failed to deactivate jemalloc profiling")?; - profiler.activate().context("failed to activate jemalloc profiling")?; - } - - profiler.dump_pprof().context("failed to dump jemalloc pprof profile") - } -} diff --git a/foundations/src/telemetry/pprof/mod.rs b/foundations/src/telemetry/pprof/mod.rs index 5db456c02..49995d20c 100644 --- a/foundations/src/telemetry/pprof/mod.rs +++ b/foundations/src/telemetry/pprof/mod.rs @@ -1,10 +1,5 @@ -#[cfg(feature = "pprof-heap")] -mod heap; - #[cfg(feature = "pprof-cpu")] mod cpu; #[cfg(feature = "pprof-cpu")] pub use cpu::Cpu; -#[cfg(feature = "pprof-heap")] -pub use heap::Heap; diff --git a/foundations/src/telemetry/server.rs b/foundations/src/telemetry/server.rs index 03ac023e2..d32990f40 100644 --- a/foundations/src/telemetry/server.rs +++ b/foundations/src/telemetry/server.rs @@ -165,18 +165,19 @@ async fn metrics( #[cfg(feature = "health-check")] pub use health_check::{ - register as register_health_check, unregister as unregister_health_check, HealthCheck, HealthCheckFn, + register as register_health_check, require as require_health_check, unregister as unregister_health_check, HealthCheck, + HealthCheckFn, }; #[cfg(feature = "health-check")] mod health_check { use std::pin::Pin; - use std::sync::atomic::AtomicUsize; + use std::sync::atomic::{AtomicBool, AtomicUsize}; use futures::Future; use scc::HashMap; - pub struct HealthCheckFn(F); + pub struct HealthCheckFn<F>(pub F); impl<F> HealthCheck for HealthCheckFn<F> where @@ -207,12 +208,14 @@ mod health_check { #[derive(Default)] struct HealthChecker { id: AtomicUsize, + require_check: AtomicBool, health_checks: HashMap>, } static HEALTH_CHECK: once_cell::sync::Lazy<HealthChecker> = - once_cell::sync::Lazy::::new(|| HealthChecker::default()); + once_cell::sync::Lazy::<HealthChecker>::new(HealthChecker::default); + /// Register a health check and return an id pub fn register(check: impl HealthCheck) -> usize { let id = HEALTH_CHECK.id.fetch_add(1, std::sync::atomic::Ordering::Relaxed); HEALTH_CHECK @@ -223,11 +226,23 @@ mod health_check { id } + /// Unregister a health check by id pub fn unregister(id: usize) { HEALTH_CHECK.health_checks.remove(&id); } + /// Require a health check to be registered; if no health checks are + /// registered the server will always return 503 Service Unavailable. This is + /// useful for ensuring that the server is healthy before accepting traffic. + pub fn require() { + HEALTH_CHECK.require_check.store(true, std::sync::atomic::Ordering::Relaxed); + } + pub async fn is_healthy() -> bool { + if HEALTH_CHECK.require_check.load(std::sync::atomic::Ordering::Relaxed) && HEALTH_CHECK.health_checks.is_empty() { + return false; + } + let mut o_entry = HEALTH_CHECK.health_checks.first_entry_async().await; while let Some(entry) = o_entry { @@ -302,10 +317,7 @@ pub async fn init(settings: ServerSettings) -> anyhow::Result<()> { router = router.fallback(axum::routing::any(not_found)); - let mut server = settings - .builder - .build(router) - .context("failed to build server")?; + let mut server = settings.builder.build(router).context("failed to build server")?; server.start_and_wait().await.context("failed to start server")?; diff --git a/foundations/src/telemetry/settings.rs b/foundations/src/telemetry/settings.rs index 1f10a0a76..9ccabe084 100644 --- a/foundations/src/telemetry/settings.rs +++ b/foundations/src/telemetry/settings.rs @@ -7,7 +7,6 @@ use std::net::SocketAddr; use opentelemetry_otlp::WithExportConfig; #[cfg(feature =
"opentelemetry")] use opentelemetry_sdk::Resource; - #[cfg(feature = "logging")] use tracing_subscriber::fmt::time::{ChronoLocal, ChronoUtc}; diff --git a/image-processor/APACHE2_LICENSE b/image-processor/APACHE2_LICENSE new file mode 120000 index 000000000..f7ae1b662 --- /dev/null +++ b/image-processor/APACHE2_LICENSE @@ -0,0 +1 @@ +../licenses/APACHE2_LICENSE \ No newline at end of file diff --git a/image-processor/Cargo.toml b/image-processor/Cargo.toml new file mode 100644 index 000000000..442a0acbb --- /dev/null +++ b/image-processor/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "scuffle-image-processor" +version = "0.0.1" +edition = "2021" +authors = ["Scuffle "] +description = "Scuffle Image Processor" +license = "MIT OR Apache-2.0" + +[dependencies] +tracing = "0.1" +tokio = { version = "1.34", features = ["full"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +prost = "0.12" +aws-config = "1.1" +aws-sdk-s3 = { version = "1.12", features = ["behavior-version-latest"] } +async-trait = "0.1" +anyhow = "1.0" +async-nats = "0.35" +tonic = "0.11" +futures = "0.3" +thiserror = "1.0" +file-format = "0.25" +scopeguard = "1.2" +rgb = "0.8" +imgref = "1.10" +libavif-sys = { version = "0.16" } +libwebp-sys2 = { version = "0.1", features = ["1_2", "demux", "mux", "static"] } +sha2 = "0.10" +gifski = "1.13" +png = "0.17" +num_cpus = "1.16" +bytes = "1.0" +reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "json"] } +fast_image_resize = "4" +chrono = { version = "0.4", features = ["serde"] } +url = { version = "2", features = ["serde"] } +http = "1" +urlencoding = "2" +humantime-serde = "1" + +scuffle-foundations = { version = "*", path = "../foundations", features = [] } +scuffle-ffmpeg = { version = "*", path = "../ffmpeg", features = ["tracing"] } + +scuffle-image-processor-proto = { version = "*", path = "./proto", features = ["server", "serde"]} + +mongodb = { version = "2", features = ["tokio-runtime", "bson-chrono-0_4"] } +bson = { version = "2", features = ["chrono-0_4"] } + +aws-smithy-types = "1" +aws-smithy-runtime-api = "1" +fred = "9.0.3" +strfmt = "0.2" +once_cell = "1.8" diff --git a/image-processor/MIT_LICENSE b/image-processor/MIT_LICENSE new file mode 120000 index 000000000..b0845139e --- /dev/null +++ b/image-processor/MIT_LICENSE @@ -0,0 +1 @@ +../licenses/MIT_LICENSE \ No newline at end of file diff --git a/image-processor/assets/AINTNOWAY.webp b/image-processor/assets/AINTNOWAY.webp new file mode 100644 index 000000000..6fe429d1b Binary files /dev/null and b/image-processor/assets/AINTNOWAY.webp differ diff --git a/image-processor/assets/FRqIinBJ_400x400.jpg b/image-processor/assets/FRqIinBJ_400x400.jpg new file mode 100644 index 000000000..4f8a248ee Binary files /dev/null and b/image-processor/assets/FRqIinBJ_400x400.jpg differ diff --git a/image-processor/assets/xd.jpg b/image-processor/assets/xd.jpg new file mode 100644 index 000000000..b60792c24 Binary files /dev/null and b/image-processor/assets/xd.jpg differ diff --git a/image-processor/proto/Cargo.toml b/image-processor/proto/Cargo.toml new file mode 100644 index 000000000..72c77320b --- /dev/null +++ b/image-processor/proto/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "scuffle-image-processor-proto" +version = "0.0.0" +edition = "2021" +authors = ["Scuffle "] +description = "Scuffle Image Processor Protocol Buffers" +license = "MIT OR Apache-2.0" + +[dependencies] +prost = "0.12" +tonic = "0.11.0" +pbjson = { version = "0.6.0", optional = true } +serde = { 
version = "1.0", optional = true } + +[build-dependencies] +prost-build = "0.12.4" +tonic-build = "0.11.0" +pbjson-build = { version = "0.6.0", optional = true } + +[features] +server = [] +client = [] +serde = [ + "dep:serde", + "pbjson-build", + "pbjson", +] + diff --git a/image-processor/proto/build.rs b/image-processor/proto/build.rs new file mode 100644 index 000000000..c5ba555b7 --- /dev/null +++ b/image-processor/proto/build.rs @@ -0,0 +1,34 @@ +#[cfg(feature = "serde")] +use std::{env, path::PathBuf}; + +fn main() -> Result<(), Box<dyn std::error::Error>> { + #[cfg(feature = "serde")] + let descriptor_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("proto_descriptor.bin"); + + let config = tonic_build::configure() + .compile_well_known_types(true) + .build_server(cfg!(feature = "server")) + .build_client(cfg!(feature = "client")); + + #[cfg(feature = "serde")] + let config = config.file_descriptor_set_path(&descriptor_path); + + config.compile( + &[ + "scuffle/image_processor/service.proto", + "scuffle/image_processor/types.proto", + "scuffle/image_processor/events.proto", + ], + &["./"], + )?; + + #[cfg(feature = "serde")] + let descriptor_set = std::fs::read(&descriptor_path)?; + + #[cfg(feature = "serde")] + pbjson_build::Builder::new() + .register_descriptors(&descriptor_set)? + .build(&[".scuffle.image_processor"])?; + + Ok(()) +} diff --git a/image-processor/proto/scuffle/image_processor/events.proto b/image-processor/proto/scuffle/image_processor/events.proto new file mode 100644 index 000000000..fba35b32d --- /dev/null +++ b/image-processor/proto/scuffle/image_processor/events.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +import "scuffle/image_processor/types.proto"; + +message EventCallback { + message Success { + string drive = 1; + repeated OutputFile files = 2; + InputFileMetadata input_metadata = 3; + } + + message Fail { + Error error = 1; + } + + message Cancel {} + + message Start {} + + string id = 1; + uint64 timestamp = 2; + map<string, string> metadata = 3; + + oneof event { + Success success = 4; + Fail fail = 5; + Cancel cancel = 6; + Start start = 7; + } +} diff --git a/image-processor/proto/scuffle/image_processor/service.proto b/image-processor/proto/scuffle/image_processor/service.proto new file mode 100644 index 000000000..3acd37da2 --- /dev/null +++ b/image-processor/proto/scuffle/image_processor/service.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +import "scuffle/image_processor/types.proto"; + +// The ImageProcessor service provides methods to process images +service ImageProcessor { + // Submit a task to process an image + rpc ProcessImage(ProcessImageRequest) returns (ProcessImageResponse) {} + + // Cancel a task + rpc CancelTask(CancelTaskRequest) returns (CancelTaskResponse) {} +} + +// The Payload for an ImageProcessor.ProcessImage request +message ProcessImageRequest { + // The task to process + Task task = 1; + + // The priority of the task + // The higher the priority, the sooner the task will be processed + uint32 priority = 2; + + // The time-to-live of the task in seconds + // If the task has not started processing within the TTL, it will be removed.
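+ // For example (illustrative values, sketched with the generated Rust prost types): + // ProcessImageRequest { task: Some(task), priority: 10, ttl: Some(300), input_upload: None } + // asks the processor to drop this task if no worker has started it within five minutes.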
+ optional uint32 ttl = 3; + + // Optionally provide an image to process + // Providing an image will override the input image path in the task + optional InputUpload input_upload = 4; +} + +// The Payload for an ImageProcessor.ProcessImage response +message ProcessImageResponse { + // A unique identifier for the task + string id = 1; + + // If the task had an input upload, this will be the info of the uploaded image. + optional ProcessImageResponseUploadInfo upload_info = 2; + + // Errors that occurred when creating the task. + optional Error error = 3; +} + +message ProcessImageResponseUploadInfo { + // The path of the uploaded image + DrivePath path = 1; + + // The content type of the uploaded image + string content_type = 2; + + // The size of the uploaded image in bytes + uint64 size = 3; +} + +// The Payload for an ImageProcessor.CancelTask request +message CancelTaskRequest { + // The unique identifier of the task to cancel + string id = 1; +} + +// The Payload for an ImageProcessor.CancelTask response +message CancelTaskResponse { + // The status of the response + optional Error error = 1; +} diff --git a/image-processor/proto/scuffle/image_processor/types.proto b/image-processor/proto/scuffle/image_processor/types.proto new file mode 100644 index 000000000..99a705979 --- /dev/null +++ b/image-processor/proto/scuffle/image_processor/types.proto @@ -0,0 +1,414 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +// The output format type +enum OutputFormat { + // Animated WebP format. + WebpAnim = 0; + // Animated AVIF format. + AvifAnim = 1; + // Animated GIF format. + GifAnim = 2; + // Static WebP format. + WebpStatic = 3; + // Static AVIF format. + AvifStatic = 4; + // Static PNG format. + PngStatic = 5; +} + +// DrivePath is used to determine where the image should be stored. +message DrivePath { + // The drive to locate the image. + string drive = 1; + // The path in the drive. + // Possible template argument values are: + // - {id} - The id of the task. + string path = 2; +} + +// The resize method determines how the image processor should resize the image. +enum ResizeMethod { + // Fit will resize the image to fit within the desired dimensions without changing the aspect ratio. + ResizeMethodFit = 0; + // Stretch will stretch the image to fit the desired dimensions. (This will change the aspect ratio of the image.) + ResizeMethodStretch = 1; + // Pad will resize the image to fit the desired dimensions and pad the bottom left of the image with the background color if necessary. + ResizeMethodPadBottomLeft = 2; + // Pad will resize the image to fit the desired dimensions and pad the bottom right of the image with the background color if necessary. + ResizeMethodPadBottomRight = 3; + // Pad will resize the image to fit the desired dimensions and pad the top left of the image with the background color if necessary. + ResizeMethodPadTopLeft = 4; + // Pad will resize the image to fit the desired dimensions and pad the top right of the image with the background color if necessary. + ResizeMethodPadTopRight = 5; + // Pad will resize the image to fit the desired dimensions and pad the center of the image with the background color if necessary. + ResizeMethodPadCenter = 6; + // Pad will resize the image to fit the desired dimensions and pad the center right of the image with the background color if necessary. + ResizeMethodPadCenterRight = 7; + // Pad will resize the image to fit the desired dimensions and pad the center left of the image with the background color if necessary.
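+ // For example (illustrative): fitting a 200x100 input into a 100x100 box keeps the aspect ratio, producing a 100x50 picture, and the chosen pad variant decides where the remaining background-colored area goes.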
+ ResizeMethodPadCenterLeft = 8; + // Pad will resize the image to fit the desired dimensions and pad the top center of the image with the background color if necessary. + ResizeMethodPadTopCenter = 9; + // Pad will resize the image to fit the desired dimensions and pad the bottom center of the image with the background color if necessary. + ResizeMethodPadBottomCenter = 10; + // Pad will resize the image to fit the desired dimensions and pad the top of the image with the background color if necessary, the left and right will be unchanged. + ResizeMethodPadTop = 11; + // Pad will resize the image to fit the desired dimensions and pad the bottom of the image with the background color if necessary, the left and right will be unchanged. + ResizeMethodPadBottom = 12; + // Pad will resize the image to fit the desired dimensions and pad the left of the image with the background color if necessary, the top and bottom will be unchanged. + ResizeMethodPadLeft = 13; + // Pad will resize the image to fit the desired dimensions and pad the right of the image with the background color if necessary, the top and bottom will be unchanged. + ResizeMethodPadRight = 14; +} + +// The resize algorithm determines the algorithm used to resize the image. +enum ResizeAlgorithm { + ResizeAlgorithmNearest = 0; + ResizeAlgorithmBox = 1; + ResizeAlgorithmBilinear = 2; + ResizeAlgorithmHamming = 3; + ResizeAlgorithmCatmullRom = 4; + ResizeAlgorithmMitchell = 5; + ResizeAlgorithmLanczos3 = 6; +} + +// Limits are used to determine how much processing time and resources the image processor should use. +message Limits { + // The maximum amount of time the image processor should spend processing the image. + optional uint32 max_processing_time_ms = 1; + // The maximum input frame count the image processor should accept. + optional uint32 max_input_frame_count = 2; + // The maximum input width the image processor should accept. + optional uint32 max_input_width = 3; + // The maximum input height the image processor should accept. + optional uint32 max_input_height = 4; + // The maximum input file duration the image processor should accept. (if the input is a video or animated image) + optional uint32 max_input_duration_ms = 5; +} + +// Crop is used to determine what part of the image the image processor should crop. +// The processor will crop the image before resizing it. +message Crop { + // The x coordinate of the top left corner of the crop. + uint32 x = 1; + // The y coordinate of the top left corner of the crop. + uint32 y = 2; + // The width of the crop. + uint32 width = 3; + // The height of the crop. + uint32 height = 4; +} + +// Provide extra information about the input to the image processor. +message InputMetadata { + // If the input is not animated, this will generate a fatal error. If there are not enough frames this will generate a fatal error. + // Otherwise this will be the frame used for static variants. + optional uint32 static_frame_index = 1; + // If this is different from the actual frame count the image processor will generate a fatal error. + optional uint32 frame_count = 2; + // If this is different from the actual width the image processor will generate a fatal error. + uint32 width = 3; + // If this is different from the actual height the image processor will generate a fatal error. + uint32 height = 4; +} + +// InputUpload is used to upload an image to a drive configured in the image processor config. +message InputUpload { + // The input image as a binary blob.
+ bytes binary = 1; + // A prefix to use for the folder the image will be stored in. + DrivePath drive_path = 2; + // Content Type of the image. + optional string content_type = 3; + // Cache control header for the image. + optional string cache_control = 4; + // ACL for the image. + optional string acl = 5; + // Content disposition for the image. + optional string content_disposition = 6; +} + +// Input is used to determine the input image to process. +message Input { + // The path to the input image. + oneof path { + // Drive path to the image. + // The image processor will download the image from the drive. + DrivePath drive_path = 1; + // Public URL to the image. + // If public downloads are disabled this will generate a fatal error. + string public_url = 2; + } + + // Extra information about the input image. + optional InputMetadata metadata = 3; +} + +// Scaling is used to specify a linear scaling factor for the various dimensions of the image. +// For example, if you have an image that is 100x100 (and use this as the base) and you want to generate 1x, 2x, and 3x images you would set the scales to [1, 2, 3]. +// The sizes of the output images would be [100x100, 200x200, 300x300]. +message Scaling { + oneof base { + // This is the scale for the input image (after cropping or aspect ratio adjustments are made). + uint32 fixed_base = 1; + // This is used to automatically determine the scale of the input image based on the width. + // We know what aspect ratio to use based on the aspect ratio adjustments made to the input image. + // We can then use that to determine the (input_width / base_width) scale. + // The scale would be the largest integer that is less than or equal to (input_width / base_width), + // or 1 if the input width is less than base_width. + uint32 base_width = 2; + // Functionally the same as base_width but allows you to specify in terms of height instead. + uint32 base_height = 3; + } + + // The various scales. + // For example to generate a 1x, 2x, and 3x image you would set scales to [1, 2, 3]. + repeated uint32 scales = 4; +} + +// A list of integers. +message IntegerList { + repeated uint32 values = 1; +} + +message AnimationConfig { + // Specify an animation loop count for animated images. + // If this is set to -1 the image will loop indefinitely. + // If this is set to 0 the image will not loop. + // If this is set to a positive number the image will loop that many times. + // If this is unset the image will be encoded with the loop value the input image has. + optional int32 loop_count = 1; + + oneof frame_rate { + // Specify the frame duration for every frame in the output image. + // This can be used to specify a constant frame rate for the output image. + // frame_rate = 1000 / frame_duration_ms + uint32 frame_duration_ms = 2; + // Frame durations for each frame in the output image. + // Specify the frame duration for each frame in the output image. + // If this number does not match the number of frames in the output image the processor will generate a fatal error. + IntegerList frame_durations_ms = 3; + // Factor to multiply the frame duration by. + // This can be used to speed up or slow down the animation. + // The frame duration will be multiplied by this value. + // Each frame has a minimum duration of 1ms; if the factor creates some frames that are less than 1ms the processor will + // drop frames and adjust timings of others to ensure that the total duration of the animation is correctly multiplied.
+ // This rule only applies when the factor is greater than 1. + double frame_rate_factor = 4; + } + + // Remove the frames at these indexes from the input image. + // This can be used to reduce the size of the output image. + // If you specify an index that is out of bounds the processor will generate a fatal error. + // If you specify the same index multiple times the processor will ignore the duplicates. + repeated uint32 remove_frame_idxs = 5; +} + +enum OutputQuality { + // Auto quality output. (default) + OutputQualityAuto = 0; + // High quality output. (large file size) + OutputQualityHigh = 1; + // Medium quality output. (medium file size) + OutputQualityMedium = 2; + // Low quality output. (smaller file size) + OutputQualityLow = 3; + // Lossless output. (very large file size) + OutputQualityLossless = 4; +} + +message OutputFormatOptions { + // The format of the output image. + OutputFormat format = 1; + // The quality of the output image. + OutputQuality quality = 2; + // An optional format name, this will allow you to specify an override for the path variable `{format}` + optional string name = 3; +} + +message OutputFile { + // The path to the output file. + string path = 1; + // The content type of the output file. + string content_type = 2; + // The acl of the output file. + optional string acl = 3; + // Width of the output image. + uint32 width = 4; + // Height of the output image. + uint32 height = 5; + // The frame count of the output image. + uint32 frame_count = 6; + // The duration of the output image. + uint32 duration_ms = 7; + // The size of the output image in bytes. + uint32 size = 8; + // The format of the output image. + OutputFormat format = 9; +} + +// Returned after the image is processed. +message InputFileMetadata { + // The width of the input image. + uint32 width = 1; + // The height of the input image. + uint32 height = 2; + // The frame count of the input image. + uint32 frame_count = 3; +} + +message Output { + // The drive path to store the output image. + // This is a prefix and the processor will append the suffix to this path to determine the final path. + // Possible template argument values are: + // - {id} - The id of the task. + // - {format} - The format of the output image. (e.g. 'webp_anim', 'avif_static', 'png_static', etc.) + // - {scale} - The scale of the output image. (if scaling is used, otherwise empty) + // - {width} - The resulting width of the output image. + // - {height} - The resulting height of the output image. + // - {format_idx} - The index of the output format in the list. + // - {resize_idx} - The index of the resize operation; if the operation is width or height it's the index of the value in the list. + // If it's scaling, it's the index of the scale in the list. + // - {static} - '_static' if the input image is static, otherwise empty. + // - {ext} - The extension of the output image. (e.g. 'webp', 'avif', etc.) + DrivePath drive_path = 1; + + // Override the acl of the output images. + // By default this will use the ACL specified by the output drive config. + optional string acl_override = 2; + + // The desired format to encode the output image. + repeated OutputFormatOptions formats = 3; + + // Allow upscaling if the determined dimensions are larger than the input image. + bool upscale = 4; + + // Sometimes we might specify that we want 'WebpAnim' but the input image is a static image. + // In this case we would typically fatally error because we can't generate an animated image from a static image.
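+ // For example (illustrative): a formats list of [WebpAnim, WebpStatic] run against a single-frame JPEG can only ever produce the static variant.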
+ // However, if this is set to true the processor will ignore these errors and skip the format. + bool skip_impossible_formats = 5; + + // Skips resizing if the resize operation is impossible. + // For example if the resize results in a width or height of less than 1. + // If this is set to true the processor will ignore these errors and skip the resize operation. + bool skip_impossible_resizes = 6; + + // Disables resize chaining. + // Resize chaining is when the processor will resize from the source down to the largest size requested. + // Then it will form every other size by reducing the previous resized image. + // Disabling this will resize each image from the source image, which can be slower but more accurate. + bool disable_resize_chaining = 7; + + // Disables 2 pass decoding. + // 2 pass decoding allows the processor to further optimize the image by deduplicating frames. + bool disable_two_pass_decoding = 8; + + // The resize method used to resize the image. + ResizeMethod resize_method = 9; + + // The resize algorithm used to resize the image. + ResizeAlgorithm resize_algorithm = 10; + + // The animation configuration for the output image. + optional AnimationConfig animation_config = 11; + + // A crop is applied to the image before resizing and before an aspect ratio change. + optional Crop crop = 12; + + // Confine the aspect ratio of the image to a specific range. + // For example if you want to allow all images that are 3:1 to 1:3 you would set min_ratio to 1/3 and max_ratio to 3. + // Setting the min and max to the same value will restrict the aspect ratio to that value. + // Setting both values to 0 will use the input image's aspect ratio. + // Setting one of the values to 0 will allow any aspect ratio that is less than or greater than the other value. + + + // The minimum ratio of the image. + // An aspect ratio is the ratio of the width to the height of the image. + optional double min_aspect_ratio = 13; + + // The maximum ratio of the image. + // An aspect ratio is the ratio of the width to the height of the image. + optional double max_aspect_ratio = 14; + + // There must be at least one element in the list. + oneof resize { + // Resize to a specific width, the height will be determined by the aspect ratio. + IntegerList widths = 15; + // Resize to a specific height, the width will be determined by the aspect ratio. + IntegerList heights = 16; + // A scaling config to determine how each dimension should be scaled. + Scaling scaling = 17; + } +} + +// Events must be in the format +// <event_queue>://<topic> where event_queue is a queue defined in the image processor config. +// The topic argument is used in the template for the event queue settings defined in the image processor config. +message Events { + // The event to trigger when the task is completed successfully + optional EventQueue on_success = 1; + // The event to trigger when the task fails + optional EventQueue on_failure = 2; + // The event to trigger when the task is cancelled + optional EventQueue on_cancel = 3; + // The event to trigger when the task is started + optional EventQueue on_start = 4; + + // Metadata to send with the event. + map<string, string> metadata = 5; +} + +// EventQueue is used to determine where the image processor should send events. +message EventQueue { + // The name of the event queue. + string name = 1; + // The topic of the event queue. + // Possible template argument values are: + // - {id} - The id of the task. + string topic = 2; +} + +// A task to process an image.
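+ // A minimal example, JSON-encoded via the optional pbjson/serde feature (all values are illustrative, and the 'local' drive name is an assumption): + // { + //   "input": { "publicUrl": "https://example.com/in.png" }, + //   "output": { + //     "drivePath": { "drive": "local", "path": "{id}/{format_idx}.{ext}" }, + //     "formats": [{ "format": "WebpStatic" }], + //     "widths": { "values": [100, 200] } + //   } + // }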
+message Task { + // The input image to process. + Input input = 1; + // The output image to generate. + Output output = 2; + // The events to trigger for task results + Events events = 3; + // The limits for the image processor. + optional Limits limits = 4; +} + +// An error message. +message Error { + // The error message. + string message = 1; + // The error code. + ErrorCode code = 2; +} + +// ErrorCode is used to determine the type of error that occurred. +enum ErrorCode { + // Internal error occurred, please file a bug report. + ErrorCodeInternal = 0; + // Invalid input error. Please refer to the error message for more information, and resubmit the task with valid input. + ErrorCodeInvalidInput = 1; + // The feature is not implemented. + ErrorCodeNotImplemented = 2; + // Resize error. + ErrorCodeResize = 3; + // Encode error. + ErrorCodeEncode = 4; + // Decode error. + ErrorCodeDecode = 5; + // Input download error. + ErrorCodeInputDownload = 6; + // Output upload error. + ErrorCodeOutputUpload = 7; +} + +message EventPayload { + string id = 1; +} diff --git a/image-processor/proto/src/lib.rs b/image-processor/proto/src/lib.rs new file mode 100644 index 000000000..13be0805f --- /dev/null +++ b/image-processor/proto/src/lib.rs @@ -0,0 +1,4 @@ +tonic::include_proto!("scuffle.image_processor"); + +#[cfg(feature = "serde")] +include!(concat!(env!("OUT_DIR"), "/scuffle.image_processor.serde.rs")); diff --git a/image-processor/src/config.rs b/image-processor/src/config.rs new file mode 100644 index 000000000..d177d1c80 --- /dev/null +++ b/image-processor/src/config.rs @@ -0,0 +1,322 @@ +use std::collections::HashMap; +use std::net::SocketAddr; + +use scuffle_foundations::bootstrap::RuntimeSettings; +use scuffle_foundations::settings::auto_settings; +use scuffle_foundations::telemetry::settings::TelemetrySettings; +use url::Url; + +#[auto_settings] +#[serde(default)] +pub struct ImageProcessorConfig { + /// MongoDB database configuration + pub database: DatabaseConfig, + /// The drive configurations for the image processor + pub drives: Vec<DriveConfig>, + /// The event queues for the image processor + pub event_queues: Vec<EventQueueConfig>, + /// The worker configuration + pub worker: WorkerConfig, + /// The management configuration + pub management: ManagementConfig, + /// Telemetry configuration + pub telemetry: TelemetrySettings, + /// Runtime configuration + pub runtime: RuntimeSettings, +} + +#[auto_settings] +#[serde(default)] +pub struct ManagementConfig { + /// The gRPC configuration + pub grpc: GrpcConfig, + /// The HTTP configuration + pub http: HttpConfig, +} + +#[auto_settings] +#[serde(default)] +pub struct GrpcConfig { + /// Enable the gRPC server + #[settings(default = true)] + pub enabled: bool, + /// The gRPC server address + #[settings(default = SocketAddr::from(([0, 0, 0, 0], 50051)))] + pub bind: SocketAddr, +} + +#[auto_settings] +#[serde(default)] +pub struct HttpConfig { + /// Enable the HTTP server + #[settings(default = true)] + pub enabled: bool, + /// The HTTP server address + #[settings(default = SocketAddr::from(([0, 0, 0, 0], 8080)))] + pub bind: SocketAddr, +} + +#[auto_settings] +#[serde(default)] +pub struct WorkerConfig { + /// Enable the worker server + #[settings(default = true)] + pub enabled: bool, + /// The number of workers to start + /// Default is 0, which means the number of workers is equal to the number + /// of CPU cores + #[settings(default = 0)] + pub concurrency: usize, + /// The maximum number of errors before shutting down + #[settings(default = 10)] + pub error_threshold: usize, + /// The
delay before retrying after an error + #[settings(default = std::time::Duration::from_secs(5))] + #[serde(with = "humantime_serde")] + pub error_delay: std::time::Duration, + /// Polling interval for fetching jobs + #[settings(default = std::time::Duration::from_secs(1))] + #[serde(with = "humantime_serde")] + pub polling_interval: std::time::Duration, + /// Worker hold time, the time a worker holds a job. The job will be + /// refreshed if the worker does not finish the job within this time. If the + /// worker crashes or is killed, the job will be released after this time, + /// at which point another worker can pick it up. + #[settings(default = std::time::Duration::from_secs(60))] + #[serde(with = "humantime_serde")] + pub hold_time: std::time::Duration, + /// Refresh interval for refreshing the job hold time + /// Default is 1/3 of the hold time + /// The refresh interval should be less than the hold time + #[settings(default = std::time::Duration::from_secs(20))] + #[serde(with = "humantime_serde")] + pub refresh_interval: std::time::Duration, +} + +#[auto_settings] +#[serde(default)] +pub struct DatabaseConfig { + #[settings(default = "mongodb://localhost:27017/scuffle-image-processor".into())] + pub uri: String, +} + +#[auto_settings(impl_default = false)] +#[serde(tag = "kind", rename_all = "kebab-case")] +pub enum DriveConfig { + /// Local drive + Local(LocalDriveConfig), + /// S3 bucket + S3(S3DriveConfig), + /// Memory drive + Memory(MemoryDriveConfig), + /// HTTP drive + Http(HttpDriveConfig), + /// Public web http drive + PublicHttp(PublicHttpDriveConfig), +} + +#[auto_settings] +pub struct LocalDriveConfig { + /// The name of the drive + pub name: String, + /// The path to the local drive + pub path: std::path::PathBuf, + /// The drive mode + #[serde(default)] + pub mode: DriveMode, +} + +#[auto_settings] +pub struct S3DriveConfig { + /// The name of the drive + pub name: String, + /// The S3 bucket name + pub bucket: String, + /// The S3 access key + pub access_key: String, + /// The S3 secret key + pub secret_key: String, + /// The S3 region + #[serde(default = "default_region")] + pub region: String, + /// The S3 endpoint + #[serde(default)] + pub endpoint: Option<String>, + /// The S3 bucket prefix path + #[serde(default)] + pub prefix_path: Option<String>, + /// Use path style + #[serde(default)] + pub force_path_style: Option<bool>, + /// The drive mode + #[serde(default)] + pub mode: DriveMode, + /// The maximum number of concurrent connections + #[serde(default)] + pub max_connections: Option<usize>, + /// Default ACL for files + #[serde(default)] + pub acl: Option<String>, +} + +fn default_region() -> String { + "us-east-1".into() +} + +#[auto_settings] +pub struct MemoryDriveConfig { + /// The name of the drive + pub name: String, + /// The maximum capacity of the memory drive + #[serde(default)] + pub capacity: Option<usize>, + /// The drive mode + #[serde(default)] + pub mode: DriveMode, + /// Default ACL for files + #[serde(default)] + pub acl: Option<String>, +} + +#[auto_settings(impl_default = false)] +pub struct HttpDriveConfig { + /// The name of the drive + pub name: String, + /// The base URL for the HTTP drive + pub url: Url, + /// The timeout for the HTTP drive + #[serde(default = "default_timeout")] + #[serde(with = "humantime_serde")] + pub timeout: Option<std::time::Duration>, + /// Allow insecure TLS + #[serde(default)] + pub allow_insecure: bool, + /// The drive mode + #[serde(default)] + pub mode: DriveMode, + /// The maximum number of concurrent connections + #[serde(default)] + pub max_connections: Option<usize>, + ///
Additional headers for the HTTP drive + #[serde(default)] + pub headers: HashMap<String, String>, + /// Default ACL for files + #[serde(default)] + pub acl: Option<String>, +} + +fn default_timeout() -> Option<std::time::Duration> { + Some(std::time::Duration::from_secs(30)) +} + +#[auto_settings] +#[serde(rename_all = "kebab-case")] +#[derive(Copy, PartialEq, Eq, Hash)] +pub enum DriveMode { + /// Read only + Read, + #[settings(default)] + /// Read and write + ReadWrite, + /// Write only + Write, +} + +/// Public http drives do not have a name because they will be invoked if the +/// input path is a URL that starts with 'http' or 'https'. Public http drives +/// can only be read-only. If you do not have a public http drive, the image +/// processor will not be able to download images using HTTP. +#[auto_settings] +pub struct PublicHttpDriveConfig { + /// The timeout for the HTTP drive + #[serde(default = "default_timeout")] + #[serde(with = "humantime_serde")] + pub timeout: Option<std::time::Duration>, + /// Allow insecure TLS + #[serde(default)] + pub allow_insecure: bool, + /// The maximum number of concurrent connections + #[serde(default)] + pub max_connections: Option<usize>, + /// Additional headers for the HTTP drive + #[serde(default)] + pub headers: HashMap<String, String>, + /// Whitelist of allowed domains or IPs; can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + #[serde(default)] + pub whitelist: Vec<String>, + /// Blacklist of disallowed domains or IPs; can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + #[serde(default)] + pub blacklist: Vec<String>, +} + +#[auto_settings(impl_default = false)] +#[serde(tag = "kind", rename_all = "kebab-case")] +pub enum EventQueueConfig { + Nats(NatsEventQueueConfig), + Http(HttpEventQueueConfig), + Redis(RedisEventQueueConfig), +} + +#[auto_settings(impl_default = false)] +pub struct NatsEventQueueConfig { + /// The name of the event queue + pub name: String, + /// The Nats URL + /// For example: nats://localhost:4222 + pub url: String, + /// The message encoding for the event queue + #[serde(default)] + pub message_encoding: MessageEncoding, +} + +#[auto_settings(impl_default = false)] +pub struct HttpEventQueueConfig { + /// The name of the event queue + pub name: String, + /// The base URL for the HTTP event queue + pub url: Url, + /// The timeout for the HTTP event queue + /// Default is 30 seconds + #[serde(default = "default_timeout")] + #[serde(with = "humantime_serde")] + pub timeout: Option<std::time::Duration>, + /// Allow insecure TLS (if the url is https, do not verify the certificate) + #[serde(default)] + pub allow_insecure: bool, + /// Additional headers for the HTTP event queue + /// Can be used to set the authorization header + /// Default is empty + #[serde(default)] + pub headers: HashMap<String, String>, + /// The maximum number of concurrent connections + /// Default is None + #[serde(default)] + pub max_connections: Option<usize>, + /// The message encoding for the event queue + #[serde(default)] + pub message_encoding: MessageEncoding, +} + +#[auto_settings(impl_default = false)] +pub struct RedisEventQueueConfig { + /// The name of the event queue + pub name: String, + /// The Redis URL, for example: redis://localhost:6379 + pub url: String, + /// The message encoding for the event queue + #[serde(default)] + pub message_encoding: MessageEncoding, +} + +#[auto_settings] +#[derive(Copy, PartialEq, Eq, Hash)] +#[serde(rename_all = "lowercase")] +pub enum MessageEncoding { + /// JSON encoding + #[settings(default)] + Json, + /// Protobuf encoding + Protobuf, +} diff --git
a/image-processor/src/database.rs b/image-processor/src/database.rs new file mode 100644 index 000000000..154cb801f --- /dev/null +++ b/image-processor/src/database.rs @@ -0,0 +1,245 @@ +use std::sync::Arc; +use std::time::Duration; + +use bson::Bson; +use mongodb::bson::oid::ObjectId; +use mongodb::options::IndexOptions; +use mongodb::{Database, IndexModel}; +use scuffle_image_processor_proto::Task; + +use crate::global::Global; + +mod protobuf { + use serde::{Deserialize, Serializer}; + + pub fn serialize<T: prost::Message, S: Serializer>(value: &T, serializer: S) -> Result<S::Ok, S::Error> { + serializer.serialize_bytes(&value.encode_to_vec()) + } + + pub fn deserialize<'de, T: prost::Message + Default, D: serde::Deserializer<'de>>( + deserializer: D, + ) -> Result<T, D::Error> { + let binary = bson::Binary::deserialize(deserializer)?; + T::decode(binary.bytes.as_slice()).map_err(serde::de::Error::custom) + } +} + +mod datetime { + use serde::{Deserialize, Serialize, Serializer}; + + pub fn deserialize<'de, D: serde::Deserializer<'de>>( + deserializer: D, + ) -> Result<Option<chrono::DateTime<chrono::Utc>>, D::Error> { + let bson_datetime = Option::<bson::DateTime>::deserialize(deserializer)?; + Ok(bson_datetime.map(|dt| dt.into())) + } + + pub fn serialize<S: Serializer>( + value: &Option<chrono::DateTime<chrono::Utc>>, + serializer: S, + ) -> Result<S::Ok, S::Error> { + match value { + Some(value) => bson::DateTime::from(value.clone()).serialize(serializer), + None => None::<bson::DateTime>.serialize(serializer), + } + } +} + +#[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize)] +pub struct Job { + #[serde(rename = "_id")] + /// The id of the job + pub id: ObjectId, + /// The priority of the job, higher priority jobs are fetched first + pub priority: u32, + /// The lease time of the job on a worker. + #[serde(with = "datetime")] + pub hold_until: Option<chrono::DateTime<chrono::Utc>>, + #[serde(with = "protobuf")] + /// The task to be performed + pub task: Task, + /// The ttl of the job, after which it will be deleted + #[serde(with = "datetime")] + pub expires_at: Option<chrono::DateTime<chrono::Utc>>, + /// The id of the worker that claimed the job + pub claimed_by_id: Option<ObjectId>, +} + +impl Job { + fn collection(database: &Database) -> mongodb::Collection<Job> { + database.collection("jobs") + } + + pub async fn setup_collection(database: &Database) -> Result<(), mongodb::error::Error> { + let collection = Self::collection(database); + + collection + .create_index( + IndexModel::builder() + .keys(bson::doc! { + "hold_until": 1, + "priority": -1, + }) + .build(), + None, + ) + .await?; + + collection + .create_index( + IndexModel::builder() + .keys(bson::doc! { + "expires_at": 1, + }) + .options(Some( + IndexOptions::builder().expire_after(Some(Duration::from_secs(0))).build(), + )) + .build(), + None, + ) + .await?; + + Ok(()) + } + + /// Creates a new job in the database + /// # Arguments + /// * `global` - The global state + /// * `task` - The task to be performed + /// * `priority` - The priority of the job + /// * `ttl` - The time-to-live of the job in seconds + /// # Returns + /// The job that was created + pub async fn new( + global: &Arc<Global>, + id: ObjectId, + task: Task, + priority: u32, + ttl: Option<u32>, + ) -> Result<Job, mongodb::error::Error> { + let job = Job { + id, + priority, + hold_until: None, + task, + claimed_by_id: None, + expires_at: ttl.map(|ttl| chrono::Utc::now() + chrono::Duration::seconds(ttl as i64)), + }; + + Self::collection(global.database()).insert_one(&job, None).await?; + + Ok(job) + } + + /// Fetches a job from the database + /// The job is claimed by the worker and will be held for 60 seconds, after + /// which it will be released. To refresh the hold time, call `refresh`.
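+ /// For example (illustrative, using the defaults from the worker config above): with hold_time = 60s and refresh_interval = 20s, a healthy worker refreshes the hold roughly three times per window, while a crashed worker simply stops refreshing and the job becomes claimable again after at most 60 seconds.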
The + /// job returned is the one with the highest priority and no hold_until or + /// hold_until in the past. + /// # Arguments + /// * `global` - The global state + /// # Returns + /// The job that was fetched or None if no job was found + pub async fn fetch(global: &Arc<Global>) -> Result<Option<Job>, mongodb::error::Error> { + // Find with the highest priority and no hold_until or hold_until in the past + Self::collection(global.database()) + .find_one_and_update( + bson::doc! { + "$or": [ + bson::doc! { + "hold_until": Bson::Null, + }, + bson::doc! { + "hold_until": { + "$lt": chrono::Utc::now(), + }, + }, + ], + }, + bson::doc! { + "$set": { + "claimed_by_id": global.worker_id(), + "hold_until": chrono::Utc::now() + global.config().worker.hold_time, + }, + }, + Some( + mongodb::options::FindOneAndUpdateOptions::builder() + .sort(bson::doc! { + "priority": -1, + }) + .build(), + ), + ) + .await + } + + /// Refreshes the hold time of the job + /// # Arguments + /// * `global` - The global state + /// # Returns + /// Whether the job was successfully refreshed. If the job was reclaimed by + /// a different worker, it will not be refreshed and this will return false. + pub async fn refresh(&self, global: &Arc<Global>) -> Result<bool, mongodb::error::Error> { + let success = Self::collection(global.database()) + .update_one( + bson::doc! { + "_id": self.id, + "claimed_by_id": global.worker_id(), + }, + bson::doc! { + "$set": { + "hold_until": chrono::Utc::now() + global.config().worker.hold_time, + }, + }, + None, + ) + .await?; + + Ok(success.modified_count == 1) + } + + /// Completes the job + /// # Arguments + /// * `global` - The global state + /// # Returns + /// Whether the job was successfully completed. If the job was + /// reclaimed by a different worker, it will not be completed and this will + /// return false. + pub async fn complete(&self, global: &Arc<Global>) -> Result<bool, mongodb::error::Error> { + let success = Self::collection(global.database()) + .delete_one( + bson::doc! { + "_id": self.id, + "claimed_by_id": global.worker_id(), + }, + None, + ) + .await?; + + Ok(success.deleted_count == 1) + } + + /// Cancels a job + /// # Arguments + /// * `global` - The global state + /// * `id` - The id of the job to cancel + /// # Returns + /// The job that was cancelled or None if no job was found + pub async fn cancel(global: &Arc<Global>, id: ObjectId) -> Result<Option<Job>, mongodb::error::Error> { + let Some(job) = Self::collection(global.database()) + .find_one_and_delete( + bson::doc! { + "_id": id, + }, + None, + ) + .await?
+ else { + return Ok(None); + }; + + // If the job had a cancel event, publish it + crate::events::on_cancel(global, &job).await; + + Ok(Some(job)) + } +} diff --git a/image-processor/src/drive/http.rs b/image-processor/src/drive/http.rs new file mode 100644 index 000000000..44761a134 --- /dev/null +++ b/image-processor/src/drive/http.rs @@ -0,0 +1,176 @@ +use bytes::Bytes; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; +use reqwest::Method; +use url::Url; + +use super::{Drive, DriveError, DriveWriteOptions}; +use crate::config::{DriveMode, HttpDriveConfig}; + +#[derive(Debug)] +pub struct HttpDrive { + name: String, + base_url: Url, + mode: DriveMode, + semaphore: Option<tokio::sync::Semaphore>, + client: reqwest::Client, + acl: Option<String>, +} + +#[derive(Debug, thiserror::Error)] +pub enum HttpDriveError { + #[error("invalid path")] + InvalidPath(#[from] url::ParseError), + #[error("reqwest: {0}")] + Reqwest(#[from] reqwest::Error), + #[error("invalid header name")] + InvalidHeaderName(#[from] reqwest::header::InvalidHeaderName), + #[error("invalid header value")] + InvalidHeaderValue(#[from] reqwest::header::InvalidHeaderValue), +} + +impl HttpDrive { + #[tracing::instrument(skip(config), name = "HttpDisk::new", fields(name = %config.name), err)] + pub async fn new(config: &HttpDriveConfig) -> Result<Self, DriveError> { + tracing::debug!("setting up http disk"); + Ok(Self { + name: config.name.clone(), + base_url: config.url.clone(), + mode: config.mode, + semaphore: config.max_connections.map(|max| tokio::sync::Semaphore::new(max)), + client: { + let mut builder = reqwest::Client::builder(); + + if let Some(timeout) = config.timeout { + builder = builder.timeout(timeout); + } + + if config.allow_insecure { + builder = builder.danger_accept_invalid_certs(true); + } + + let mut headers = HeaderMap::new(); + + for (key, value) in &config.headers { + headers.insert( + key.parse::<HeaderName>().map_err(HttpDriveError::InvalidHeaderName)?, + value.parse::<HeaderValue>().map_err(HttpDriveError::InvalidHeaderValue)?, + ); + } + + builder = builder.default_headers(headers); + + builder.build().map_err(HttpDriveError::Reqwest)? + }, + acl: config.acl.clone(), + }) + } +} + +impl Drive for HttpDrive { + fn name(&self) -> &str { + &self.name + } + + #[tracing::instrument(skip(self), name = "HttpDisk::read", fields(name = %self.name), err)] + async fn read(&self, path: &str) -> Result<Bytes, DriveError> { + tracing::debug!("reading file"); + + if self.mode == DriveMode::Write { + return Err(DriveError::ReadOnly); + } + + let _permit = if let Some(semaphore) = &self.semaphore { + Some(semaphore.acquire().await) + } else { + None + }; + + let url = self.base_url.join(path).map_err(HttpDriveError::InvalidPath)?; + + let response = self.client.get(url).send().await.map_err(HttpDriveError::Reqwest)?; + + let response = response.error_for_status().map_err(HttpDriveError::Reqwest)?; + + Ok(response.bytes().await.map_err(HttpDriveError::Reqwest)?)
+ } + + #[tracing::instrument(skip(self, data), name = "HttpDisk::write", fields(name = %self.name, size = data.len()), err)] + async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> { + tracing::debug!("writing file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + let _permit = if let Some(semaphore) = &self.semaphore { + Some(semaphore.acquire().await) + } else { + None + }; + + let url = self.base_url.join(path).map_err(HttpDriveError::InvalidPath)?; + + let mut request = self + .client + .request(Method::POST, url) + .body(data) + .build() + .map_err(HttpDriveError::Reqwest)?; + + if let Some(options) = options { + if let Some(cache_control) = &options.cache_control { + request.headers_mut().insert( + reqwest::header::CACHE_CONTROL, + cache_control.parse().map_err(HttpDriveError::InvalidHeaderValue)?, + ); + } + + if let Some(content_type) = &options.content_type { + request.headers_mut().insert( + reqwest::header::CONTENT_TYPE, + content_type.parse().map_err(HttpDriveError::InvalidHeaderValue)?, + ); + } + + if let Some(acl) = options.acl.as_ref().or(self.acl.as_ref()) { + request.headers_mut().insert( + reqwest::header::HeaderName::from_static("x-amz-acl"), + acl.parse().map_err(HttpDriveError::InvalidHeaderValue)?, + ); + } + } + + let resp = self.client.execute(request).await.map_err(HttpDriveError::Reqwest)?; + + resp.error_for_status().map_err(HttpDriveError::Reqwest)?; + + Ok(()) + } + + #[tracing::instrument(skip(self), name = "HttpDisk::delete", fields(name = %self.name), err)] + async fn delete(&self, path: &str) -> Result<(), DriveError> { + tracing::debug!("deleting file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + let _permit = if let Some(semaphore) = &self.semaphore { + Some(semaphore.acquire().await) + } else { + None + }; + + let url = self.base_url.join(path).map_err(HttpDriveError::InvalidPath)?; + + let response = self.client.delete(url).send().await.map_err(HttpDriveError::Reqwest)?; + + response.error_for_status().map_err(HttpDriveError::Reqwest)?; + + Ok(()) + } + + fn default_acl(&self) -> Option<&str> { + self.acl.as_deref() + } +} diff --git a/image-processor/src/drive/local.rs b/image-processor/src/drive/local.rs new file mode 100644 index 000000000..0b6076cb9 --- /dev/null +++ b/image-processor/src/drive/local.rs @@ -0,0 +1,79 @@ +use std::path::PathBuf; + +use bytes::Bytes; + +use super::{Drive, DriveError, DriveWriteOptions}; +use crate::config::{DriveMode, LocalDriveConfig}; + +#[derive(Debug)] +pub struct LocalDrive { + name: String, + mode: DriveMode, + path: PathBuf, +} + +#[derive(Debug, thiserror::Error)] +pub enum LocalDriveError { + #[error("io: {0}")] + Io(#[from] std::io::Error), +} + +impl LocalDrive { + #[tracing::instrument(skip(config), name = "LocalDisk::new", fields(name = %config.name), err)] + pub async fn new(config: &LocalDriveConfig) -> Result<Self, DriveError> { + tracing::debug!("setting up local disk"); + + if !config.path.exists() { + tokio::fs::create_dir_all(&config.path).await.map_err(LocalDriveError::Io)?; + } + + Ok(Self { + name: config.name.clone(), + mode: config.mode, + path: config.path.clone(), + }) + } +} + +impl Drive for LocalDrive { + fn name(&self) -> &str { + &self.name + } + + #[tracing::instrument(skip(self), name = "LocalDisk::read", err)] + async fn read(&self, path: &str) -> Result<Bytes, DriveError> { + tracing::debug!("reading file"); + + if self.mode == DriveMode::Write { + return Err(DriveError::ReadOnly); + } + + let path =
self.path.join(path); + Ok(tokio::fs::read(path).await.map_err(LocalDriveError::Io)?.into()) + } + + #[tracing::instrument(skip(self, data), name = "LocalDisk::write", err, fields(size = data.len()))] + async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> { + tracing::debug!("writing file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + let path = self.path.join(path); + Ok(tokio::fs::write(path, data).await.map_err(LocalDriveError::Io)?) + } + + #[tracing::instrument(skip(self), name = "LocalDisk::delete", err)] + async fn delete(&self, path: &str) -> Result<(), DriveError> { + tracing::debug!("deleting file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + let path = self.path.join(path); + tokio::fs::remove_file(path).await.map_err(LocalDriveError::Io)?; + Ok(()) + } +} diff --git a/image-processor/src/drive/memory.rs b/image-processor/src/drive/memory.rs new file mode 100644 index 000000000..8484a7033 --- /dev/null +++ b/image-processor/src/drive/memory.rs @@ -0,0 +1,133 @@ +use std::collections::HashMap; + +use bytes::Bytes; +use tokio::sync::RwLock; + +use super::{Drive, DriveError, DriveWriteOptions}; +use crate::config::{DriveMode, MemoryDriveConfig}; + +#[derive(Debug)] +struct FileHolder { + remaining_capacity: usize, + files: HashMap<String, MemoryFile>, +} + +impl FileHolder { + fn get(&self, path: &str) -> Option<&MemoryFile> { + self.files.get(path) + } + + fn insert(&mut self, path: String, file: MemoryFile) -> Result<Option<MemoryFile>, DriveError> { + if file.data.len() > self.remaining_capacity { + return Err(MemoryDriveError::NoSpaceLeft.into()); + } + + self.remaining_capacity -= file.data.len(); + self.files + .insert(path, file) + .map(|file| { + self.remaining_capacity += file.data.len(); + Ok(file) + }) + .transpose() + } + + fn remove(&mut self, path: &str) -> Option<MemoryFile> { + let file = self.files.remove(path)?; + self.remaining_capacity += file.data.len(); + Some(file) + } +} + +#[derive(Debug)] +pub struct MemoryDrive { + name: String, + mode: DriveMode, + files: RwLock<FileHolder>, + acl: Option<String>, +} + +#[derive(Debug, Clone)] +pub struct MemoryFile { + data: Bytes, + _options: DriveWriteOptions, +} + +#[derive(Debug, Clone, thiserror::Error)] +pub enum MemoryDriveError { + #[error("no space left on disk")] + NoSpaceLeft, +} + +impl MemoryDrive { + #[tracing::instrument(skip(config), name = "MemoryDisk::new", fields(name = %config.name), err)] + pub async fn new(config: &MemoryDriveConfig) -> Result<Self, DriveError> { + tracing::debug!("setting up memory disk"); + Ok(Self { + name: config.name.clone(), + mode: config.mode, + acl: config.acl.clone(), + files: RwLock::new(FileHolder { + remaining_capacity: config.capacity.unwrap_or(usize::MAX), + files: HashMap::new(), + }), + }) + } +} + +impl Drive for MemoryDrive { + fn name(&self) -> &str { + &self.name + } + + #[tracing::instrument(skip(self), name = "MemoryDisk::read", err)] + async fn read(&self, path: &str) -> Result<Bytes, DriveError> { + tracing::debug!("reading file"); + + if self.mode == DriveMode::Write { + return Err(DriveError::ReadOnly); + } + + self.files + .read() + .await + .get(path) + .map(|file| file.data.clone()) + .ok_or(DriveError::NotFound) + } + + #[tracing::instrument(skip(self, data), name = "MemoryDisk::write", err, fields(size = data.len()))] + async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> { + tracing::debug!("writing file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + let mut files =
self.files.write().await; + + let mut options = options.unwrap_or_default(); + + options.acl = options.acl.or_else(|| self.acl.clone()); + + files.insert(path.to_owned(), MemoryFile { data, _options: options })?; + + Ok(()) + } + + #[tracing::instrument(skip(self), name = "MemoryDisk::delete", err)] + async fn delete(&self, path: &str) -> Result<(), DriveError> { + tracing::debug!("deleting file"); + + if self.mode == DriveMode::Read { + return Err(DriveError::WriteOnly); + } + + self.files.write().await.remove(path).ok_or(DriveError::NotFound)?; + Ok(()) + } + + fn default_acl(&self) -> Option<&str> { + self.acl.as_deref() + } +} diff --git a/image-processor/src/drive/mod.rs b/image-processor/src/drive/mod.rs new file mode 100644 index 000000000..847b670f6 --- /dev/null +++ b/image-processor/src/drive/mod.rs @@ -0,0 +1,131 @@ +use bytes::Bytes; + +use self::http::{HttpDrive, HttpDriveError}; +use self::local::{LocalDrive, LocalDriveError}; +use self::memory::{MemoryDrive, MemoryDriveError}; +use self::public_http::{PublicHttpDrive, PublicHttpDriveError}; +use self::s3::{S3Drive, S3DriveError}; +use crate::config::DriveConfig; + +pub mod http; +pub mod local; +pub mod memory; +pub mod public_http; +pub mod s3; + +#[derive(Debug, thiserror::Error)] +pub enum DriveError { + #[error("http: {0}")] + Http(#[from] HttpDriveError), + #[error("local: {0}")] + Local(#[from] LocalDriveError), + #[error("s3: {0}")] + S3(#[from] S3DriveError), + #[error("memory: {0}")] + Memory(#[from] MemoryDriveError), + #[error("public http: {0}")] + PublicHttp(#[from] PublicHttpDriveError), + #[error("not found")] + NotFound, + #[error("read only")] + ReadOnly, + #[error("write only")] + WriteOnly, +} + +#[derive(Debug, Clone, Default)] +pub struct DriveWriteOptions { + pub cache_control: Option<String>, + pub content_type: Option<String>, + pub acl: Option<String>, + pub content_disposition: Option<String>, +} + +pub trait Drive { + /// Get the name of the drive + fn name(&self) -> &str; + + /// Read data from a drive + fn read(&self, path: &str) -> impl std::future::Future<Output = Result<Bytes, DriveError>> + Send; + + /// Write data to a drive + fn write( + &self, + path: &str, + data: Bytes, + options: Option<DriveWriteOptions>, + ) -> impl std::future::Future<Output = Result<(), DriveError>> + Send; + + /// Delete data from a drive + fn delete(&self, path: &str) -> impl std::future::Future<Output = Result<(), DriveError>> + Send; + + fn healthy(&self) -> impl std::future::Future<Output = bool> + Send { + async { true } + } + + fn default_acl(&self) -> Option<&str> { + None + } +} + +#[derive(Debug)] +pub enum AnyDrive { + Local(LocalDrive), + S3(S3Drive), + Memory(MemoryDrive), + Http(HttpDrive), + PublicHttp(PublicHttpDrive), +} + +impl Drive for AnyDrive { + fn name(&self) -> &str { + match self { + AnyDrive::Local(drive) => drive.name(), + AnyDrive::S3(drive) => drive.name(), + AnyDrive::Memory(drive) => drive.name(), + AnyDrive::Http(drive) => drive.name(), + AnyDrive::PublicHttp(drive) => drive.name(), + } + } + + async fn read(&self, path: &str) -> Result<Bytes, DriveError> { + match self { + AnyDrive::Local(drive) => drive.read(path).await, + AnyDrive::S3(drive) => drive.read(path).await, + AnyDrive::Memory(drive) => drive.read(path).await, + AnyDrive::Http(drive) => drive.read(path).await, + AnyDrive::PublicHttp(drive) => drive.read(path).await, + } + } + + async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> { + tracing::info!("writing to drive: {}", path); + match self { + AnyDrive::Local(drive) => drive.write(path, data, options).await, + AnyDrive::S3(drive) => drive.write(path, data, options).await, + AnyDrive::Memory(drive) =>
diff --git a/image-processor/src/drive/public_http.rs b/image-processor/src/drive/public_http.rs
new file mode 100644
index 000000000..541429178
--- /dev/null
+++ b/image-processor/src/drive/public_http.rs
@@ -0,0 +1,99 @@
+use bytes::Bytes;
+use http::{HeaderName, HeaderValue};
+
+use super::{Drive, DriveError, DriveWriteOptions};
+use crate::config::PublicHttpDriveConfig;
+
+pub const PUBLIC_HTTP_DRIVE_NAME: &str = "__public_http";
+
+#[derive(Debug)]
+pub struct PublicHttpDrive {
+    client: reqwest::Client,
+    semaphore: Option<tokio::sync::Semaphore>,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum PublicHttpDriveError {
+    #[error("reqwest: {0}")]
+    Reqwest(#[from] reqwest::Error),
+    #[error("invalid header name")]
+    InvalidHeaderName(#[from] reqwest::header::InvalidHeaderName),
+    #[error("invalid header value")]
+    InvalidHeaderValue(#[from] reqwest::header::InvalidHeaderValue),
+    #[error("unsupported: {0}")]
+    Unsupported(&'static str),
+}
+
+impl PublicHttpDrive {
+    #[tracing::instrument(skip(config), name = "PublicHttpDisk::new", err)]
+    pub async fn new(config: &PublicHttpDriveConfig) -> Result<Self, DriveError> {
+        tracing::debug!("setting up public http disk");
+        if !config.blacklist.is_empty() || !config.whitelist.is_empty() {
+            tracing::error!("blacklist and whitelist are not supported for public http disk");
+            return Err(PublicHttpDriveError::Unsupported("blacklist and whitelist").into());
+        }
+
+        Ok(Self {
+            client: {
+                let mut builder = reqwest::Client::builder();
+
+                if let Some(timeout) = config.timeout {
+                    builder = builder.timeout(timeout);
+                }
+
+                if config.allow_insecure {
+                    builder = builder.danger_accept_invalid_certs(true);
+                }
+
+                let mut headers = reqwest::header::HeaderMap::new();
+
+                for (key, value) in &config.headers {
+                    headers.insert(
+                        key.parse::<HeaderName>().map_err(PublicHttpDriveError::from)?,
+                        value.parse::<HeaderValue>().map_err(PublicHttpDriveError::from)?,
+                    );
+                }
+
+                builder = builder.default_headers(headers);
+
+                builder.build().map_err(PublicHttpDriveError::Reqwest)?
+            },
+            semaphore: config.max_connections.map(|max| tokio::sync::Semaphore::new(max)),
+        })
+    }
+}
+
+impl Drive for PublicHttpDrive {
+    fn name(&self) -> &str {
+        PUBLIC_HTTP_DRIVE_NAME
+    }
+
+    #[tracing::instrument(skip(self), name = "PublicHttpDisk::read", err)]
+    async fn read(&self, path: &str) -> Result<Bytes, DriveError> {
+        tracing::debug!("reading file");
+
+        let _permit = if let Some(semaphore) = &self.semaphore {
+            Some(semaphore.acquire().await)
+        } else {
+            None
+        };
+
+        let response = self.client.get(path).send().await.map_err(PublicHttpDriveError::Reqwest)?;
+
+        let response = response.error_for_status().map_err(PublicHttpDriveError::Reqwest)?;
+
+        Ok(response.bytes().await.map_err(PublicHttpDriveError::Reqwest)?)
+    }
+
+    #[tracing::instrument(skip(self, data), name = "PublicHttpDisk::write", fields(size = data.len()), err)]
+    async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> {
+        tracing::error!("writing is not supported for public http disk");
+        Err(DriveError::ReadOnly)
+    }
+
+    #[tracing::instrument(skip(self), name = "PublicHttpDisk::delete", err)]
+    async fn delete(&self, path: &str) -> Result<(), DriveError> {
+        tracing::error!("deleting is not supported for public http disk");
+        Err(DriveError::ReadOnly)
+    }
+}
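The optional-semaphore shape used here gives unlimited concurrency by default while honoring `max_connections` when configured; the permit lives for the whole request and releases on drop. The pattern in isolation (illustrative sketch only):

use tokio::sync::Semaphore;

async fn with_limit<T>(limit: Option<&Semaphore>, fut: impl std::future::Future<Output = T>) -> T {
    // Acquire a permit only when a limit is configured; `_permit` releases on drop.
    let _permit = match limit {
        Some(semaphore) => Some(semaphore.acquire().await.expect("semaphore closed")),
        None => None,
    };
    fut.await
}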
diff --git a/image-processor/src/drive/s3.rs b/image-processor/src/drive/s3.rs
new file mode 100644
index 000000000..254b3ee86
--- /dev/null
+++ b/image-processor/src/drive/s3.rs
@@ -0,0 +1,188 @@
+use aws_config::{AppName, Region};
+use aws_sdk_s3::config::{Credentials, SharedCredentialsProvider};
+use aws_sdk_s3::operation::delete_object::DeleteObjectError;
+use aws_sdk_s3::operation::get_object::GetObjectError;
+use aws_sdk_s3::operation::put_object::PutObjectError;
+use aws_smithy_runtime_api::client::orchestrator::HttpResponse;
+use aws_smithy_runtime_api::client::result::SdkError;
+use bytes::Bytes;
+use scuffle_foundations::service_info;
+
+use super::{Drive, DriveError, DriveWriteOptions};
+use crate::config::{DriveMode, S3DriveConfig};
+
+#[derive(Debug)]
+pub struct S3Drive {
+    name: String,
+    mode: DriveMode,
+    client: aws_sdk_s3::Client,
+    bucket: String,
+    path_prefix: Option<String>,
+    semaphore: Option<tokio::sync::Semaphore>,
+    acl: Option<String>,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum S3DriveError {
+    #[error("s3: {0}")]
+    S3(#[from] aws_sdk_s3::Error),
+    #[error("byte stream: {0}")]
+    ByteStream(#[from] aws_smithy_types::byte_stream::error::Error),
+    #[error("read: {0}")]
+    Read(#[from] SdkError<GetObjectError, HttpResponse>),
+    #[error("write: {0}")]
+    Write(#[from] SdkError<PutObjectError, HttpResponse>),
+    #[error("delete: {0}")]
+    Delete(#[from] SdkError<DeleteObjectError, HttpResponse>),
+}
+
+impl S3Drive {
+    #[tracing::instrument(skip(config), name = "S3Disk::new", fields(name = %config.name), err)]
+    pub async fn new(config: &S3DriveConfig) -> Result<Self, DriveError> {
+        tracing::debug!("setting up s3 disk");
+        Ok(Self {
+            name: config.name.clone(),
+            mode: config.mode,
+            client: aws_sdk_s3::Client::from_conf({
+                let mut builder = aws_sdk_s3::Config::builder();
+
+                builder.set_endpoint_url(config.endpoint.clone());
+
+                builder.set_app_name(Some(AppName::new(service_info!().name).unwrap()));
+
+                builder.set_region(Some(Region::new(config.region.clone())));
+
+                builder.set_force_path_style(config.force_path_style);
+
+                builder.set_credentials_provider(Some(SharedCredentialsProvider::new(Credentials::new(
+                    config.access_key.clone(),
+                    config.secret_key.clone(),
+                    None,
+                    None,
+                    "ConfiguredCredentialsProvider",
+                ))));
+
+                builder.build()
+            }),
+            path_prefix: config.prefix_path.clone(),
+            bucket: config.bucket.clone(),
+            semaphore: config.max_connections.map(tokio::sync::Semaphore::new),
+            acl: config.acl.clone(),
+        })
+    }
+}
+
+impl Drive for S3Drive {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    #[tracing::instrument(skip(self), name = "S3Disk::read", err)]
+    async fn read(&self, path: &str) -> Result<Bytes, DriveError> {
+        if self.mode == DriveMode::Write {
+            return Err(DriveError::ReadOnly);
+        }
+
+        let _permit = if let Some(semaphore) = &self.semaphore {
+            Some(semaphore.acquire().await)
+        } else {
+            None
+        };
+
+        let path = self
+            .path_prefix
+            .as_ref()
+            .map_or_else(|| path.to_string(), |prefix| format!("{}/{}", prefix, path));
+
+        let result = self
+            .client
+            .get_object()
+            .bucket(&self.bucket)
+            .key(path.trim_start_matches('/'))
+            .send()
+            .await
+            .map_err(S3DriveError::from)?;
+
+        let bytes = result.body.collect().await.map_err(S3DriveError::from)?;
+
+        Ok(bytes.into_bytes())
+    }
+
+    #[tracing::instrument(skip(self, data), name = "S3Disk::write", err, fields(size = data.len()))]
+    async fn write(&self, path: &str, data: Bytes, options: Option<DriveWriteOptions>) -> Result<(), DriveError> {
+        if self.mode == DriveMode::Read {
+            return Err(DriveError::WriteOnly);
+        }
+
+        let _permit = if let Some(semaphore) = &self.semaphore {
+            Some(semaphore.acquire().await)
+        } else {
+            None
+        };
+
+        let path = self
+            .path_prefix
+            .as_ref()
+            .map_or_else(|| path.to_string(), |prefix| format!("{}/{}", prefix, path));
+
+        let mut req = self
+            .client
+            .put_object()
+            .bucket(&self.bucket)
+            .key(path.trim_start_matches('/'))
+            .body(data.into());
+
+        let options = options.unwrap_or_default();
+
+        if let Some(cache_control) = &options.cache_control {
+            req = req.cache_control(cache_control);
+        }
+        if let Some(content_type) = &options.content_type {
+            req = req.content_type(content_type);
+        }
+        if let Some(content_disposition) = &options.content_disposition {
+            req = req.content_disposition(content_disposition);
+        }
+        if let Some(acl) = options.acl.as_ref().or(self.acl.as_ref()) {
+            req = req.acl(acl.as_str().into());
+        }
+
+        req.send().await.map_err(S3DriveError::from).inspect_err(|err| {
+            tracing::error!("failed to write to s3: {:?}", err);
+        })?;
+
+        Ok(())
+    }
+
+    #[tracing::instrument(skip(self), name = "S3Disk::delete", err)]
+    async fn delete(&self, path: &str) -> Result<(), DriveError> {
+        if self.mode == DriveMode::Read {
+            return Err(DriveError::WriteOnly);
+        }
+
+        let _permit = if let Some(semaphore) = &self.semaphore {
+            Some(semaphore.acquire().await)
+        } else {
+            None
+        };
+
+        let path = self
+            .path_prefix
+            .as_ref()
+            .map_or_else(|| path.to_string(), |prefix| format!("{}/{}", prefix, path));
+
+        self.client
+            .delete_object()
+            .bucket(&self.bucket)
+            .key(path.trim_start_matches('/'))
+            .send()
+            .await
+            .map_err(S3DriveError::from)?;
+
+        Ok(())
+    }
+
+    fn default_acl(&self) -> Option<&str> {
+        self.acl.as_deref()
+    }
+}
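Object keys are built by joining the optional `prefix_path` and then trimming any leading slash, so absolute-looking paths stay inside the prefix. The resulting key shapes, as a sketch mirroring the code above:

fn object_key(prefix: Option<&str>, path: &str) -> String {
    let joined = match prefix {
        Some(prefix) => format!("{prefix}/{path}"),
        None => path.to_string(),
    };
    // Some("uploads") + "a.png" -> "uploads/a.png"; None + "/a.png" -> "a.png"
    joined.trim_start_matches('/').to_string()
}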
diff --git a/image-processor/src/event_queue/http.rs b/image-processor/src/event_queue/http.rs
new file mode 100644
index 000000000..6f1ced2fc
--- /dev/null
+++ b/image-processor/src/event_queue/http.rs
@@ -0,0 +1,96 @@
+use prost::Message;
+use scuffle_image_processor_proto::EventCallback;
+use url::Url;
+
+use super::{EventQueue, EventQueueError, PROTOBUF_CONTENT_TYPE};
+use crate::config::{HttpEventQueueConfig, MessageEncoding};
+
+#[derive(Debug)]
+pub struct HttpEventQueue {
+    name: String,
+    url: Url,
+    client: reqwest::Client,
+    semaphore: Option<tokio::sync::Semaphore>,
+    message_encoding: MessageEncoding,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum HttpEventQueueError {
+    #[error("reqwest: {0}")]
+    Reqwest(#[from] reqwest::Error),
+    #[error("invalid header name")]
+    InvalidHeaderName(#[from] reqwest::header::InvalidHeaderName),
+    #[error("invalid header value")]
+    InvalidHeaderValue(#[from] reqwest::header::InvalidHeaderValue),
+}
+
+impl HttpEventQueue {
+    #[tracing::instrument(skip(config), name = "HttpEventQueue::new", fields(name = %config.name), err)]
+    pub async fn new(config: &HttpEventQueueConfig) -> Result<Self, EventQueueError> {
+        tracing::debug!("setting up http event queue");
+        Ok(Self {
+            name: config.name.clone(),
+            client: {
+                let mut builder = reqwest::Client::builder();
+
+                if let Some(timeout) = config.timeout {
+                    builder = builder.timeout(timeout);
+                }
+
+                if config.allow_insecure {
+                    builder = builder.danger_accept_invalid_certs(true);
+                }
+
+                let mut headers = reqwest::header::HeaderMap::new();
+
+                for (key, value) in &config.headers {
+                    headers.insert(
+                        key.parse::<reqwest::header::HeaderName>()
+                            .map_err(HttpEventQueueError::from)?,
+                        value
+                            .parse::<reqwest::header::HeaderValue>()
+                            .map_err(HttpEventQueueError::from)?,
+                    );
+                }
+
+                builder = builder.default_headers(headers);
+
+                builder.build().map_err(HttpEventQueueError::Reqwest)?
+            },
+            url: config.url.clone(),
+            message_encoding: config.message_encoding,
+            semaphore: config.max_connections.map(|max| tokio::sync::Semaphore::new(max)),
+        })
+    }
+}
+
+impl EventQueue for HttpEventQueue {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    #[tracing::instrument(skip(self), name = "HttpEventQueue::publish", fields(name = %self.name))]
+    async fn publish(&self, topic: &str, data: EventCallback) -> Result<(), EventQueueError> {
+        let _permit = if let Some(semaphore) = &self.semaphore {
+            Some(semaphore.acquire().await)
+        } else {
+            None
+        };
+
+        let mut req = self.client.post(self.url.clone()).header("X-Topic", topic);
+
+        if self.message_encoding == MessageEncoding::Protobuf {
+            req = req.header("Content-Type", PROTOBUF_CONTENT_TYPE).body(data.encode_to_vec());
+        } else {
+            req = req.json(&data);
+        }
+
+        req.send()
+            .await
+            .map_err(HttpEventQueueError::Reqwest)?
+            .error_for_status()
+            .map_err(HttpEventQueueError::Reqwest)?;
+
+        Ok(())
+    }
+}
diff --git a/image-processor/src/event_queue/mod.rs b/image-processor/src/event_queue/mod.rs
new file mode 100644
index 000000000..d09388a26
--- /dev/null
+++ b/image-processor/src/event_queue/mod.rs
@@ -0,0 +1,69 @@
+use scuffle_image_processor_proto::EventCallback;
+
+use self::http::{HttpEventQueue, HttpEventQueueError};
+use self::nats::{NatsEventQueue, NatsEventQueueError};
+use self::redis::{RedisEventQueue, RedisEventQueueError};
+use crate::config::EventQueueConfig;
+
+pub mod http;
+pub mod nats;
+pub mod redis;
+
+#[derive(Debug, thiserror::Error)]
+pub enum EventQueueError {
+    #[error("nats: {0}")]
+    Nats(#[from] NatsEventQueueError),
+    #[error("http: {0}")]
+    Http(#[from] HttpEventQueueError),
+    #[error("redis: {0}")]
+    Redis(#[from] RedisEventQueueError),
+}
+
+const PROTOBUF_CONTENT_TYPE: &str = "application/protobuf; proto=scuffle.image_processor.EventCallback";
+
+pub trait EventQueue {
+    fn name(&self) -> &str;
+
+    fn publish(
+        &self,
+        topic: &str,
+        data: EventCallback,
+    ) -> impl std::future::Future<Output = Result<(), EventQueueError>> + Send;
+
+    fn healthy(&self) -> impl std::future::Future<Output = bool> + Send {
+        async { true }
+    }
+}
+
+#[derive(Debug)]
+pub enum AnyEventQueue {
+    Nats(NatsEventQueue),
+    Http(HttpEventQueue),
+    Redis(RedisEventQueue),
+}
+
+impl EventQueue for AnyEventQueue {
+    fn name(&self) -> &str {
+        match self {
+            AnyEventQueue::Nats(queue) => queue.name(),
+            AnyEventQueue::Http(queue) => queue.name(),
+            AnyEventQueue::Redis(queue) => queue.name(),
+        }
+    }
+
+    async fn publish(&self, topic: &str, data: EventCallback) -> Result<(), EventQueueError> {
+        match self {
+            AnyEventQueue::Nats(queue) => queue.publish(topic, data).await,
+            AnyEventQueue::Http(queue) => queue.publish(topic, data).await,
+            AnyEventQueue::Redis(queue) => queue.publish(topic, data).await,
+        }
+    }
+}
+
+pub async fn build_event_queue(config: &EventQueueConfig) -> Result<AnyEventQueue, EventQueueError> {
+    match config {
+        EventQueueConfig::Nats(nats) => Ok(AnyEventQueue::Nats(NatsEventQueue::new(nats).await?)),
+        EventQueueConfig::Redis(redis) => Ok(AnyEventQueue::Redis(RedisEventQueue::new(redis).await?)),
+        EventQueueConfig::Http(http) => Ok(AnyEventQueue::Http(HttpEventQueue::new(http).await?)),
+    }
+}
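Every backend makes the same encoding decision: protobuf bytes tagged with `PROTOBUF_CONTENT_TYPE`, or JSON otherwise. (Note that `AnyEventQueue` as written inherits the trait's default `healthy()`, so the NATS and Redis connection checks below only run when the concrete types are used directly.) A standalone sketch of the dispatch, assuming `MessageEncoding` has a `Protobuf` variant as the comparisons above imply:

use prost::Message;
use scuffle_image_processor_proto::EventCallback;

use crate::config::MessageEncoding;

// Returns the content type to advertise plus the encoded payload.
fn encode_event(encoding: MessageEncoding, data: &EventCallback) -> Result<(&'static str, Vec<u8>), serde_json::Error> {
    match encoding {
        MessageEncoding::Protobuf => Ok((PROTOBUF_CONTENT_TYPE, data.encode_to_vec())),
        _ => Ok(("application/json", serde_json::to_vec(data)?)),
    }
}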
diff --git a/image-processor/src/event_queue/nats.rs b/image-processor/src/event_queue/nats.rs
new file mode 100644
index 000000000..1ce6f9c43
--- /dev/null
+++ b/image-processor/src/event_queue/nats.rs
@@ -0,0 +1,69 @@
+use prost::Message;
+use scuffle_image_processor_proto::EventCallback;
+
+use super::{EventQueue, EventQueueError, PROTOBUF_CONTENT_TYPE};
+use crate::config::{MessageEncoding, NatsEventQueueConfig};
+
+#[derive(Debug)]
+pub struct NatsEventQueue {
+    name: String,
+    message_encoding: MessageEncoding,
+    nats: async_nats::Client,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum NatsEventQueueError {
+    #[error("connect: {0}")]
+    Connect(#[from] async_nats::ConnectError),
+    #[error("encode json: {0}")]
+    EncodeJson(#[from] serde_json::Error),
+    #[error("publish: {0}")]
+    Publish(#[from] async_nats::PublishError),
+}
+
+impl NatsEventQueue {
+    #[tracing::instrument(skip(config), name = "NatsEventQueue::new", fields(name = %config.name), err)]
+    pub async fn new(config: &NatsEventQueueConfig) -> Result<Self, EventQueueError> {
+        tracing::debug!("setting up nats event queue");
+        let nats = async_nats::connect(&config.url).await.map_err(NatsEventQueueError::from)?;
+
+        Ok(Self {
+            name: config.name.clone(),
+            message_encoding: config.message_encoding,
+            nats,
+        })
+    }
+}
+
+impl EventQueue for NatsEventQueue {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    #[tracing::instrument(skip(self), name = "NatsEventQueue::publish", err)]
+    async fn publish(&self, topic: &str, data: EventCallback) -> Result<(), EventQueueError> {
+        let mut header_map = async_nats::HeaderMap::new();
+
+        let payload = if self.message_encoding == MessageEncoding::Protobuf {
+            header_map.insert("Content-Type", PROTOBUF_CONTENT_TYPE);
+            data.encode_to_vec()
+        } else {
+            header_map.insert("Content-Type", "application/json");
+            serde_json::to_string(&data)
+                .map_err(NatsEventQueueError::EncodeJson)?
+                .into_bytes()
+        }
+        .into();
+
+        self.nats
+            .publish_with_headers(topic.to_owned(), header_map, payload)
+            .await
+            .map_err(NatsEventQueueError::Publish)?;
+
+        Ok(())
+    }
+
+    async fn healthy(&self) -> bool {
+        matches!(self.nats.connection_state(), async_nats::connection::State::Connected)
+    }
+}
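On the consuming side, the `Content-Type` header distinguishes the two encodings. A hypothetical subscriber sketch, decoding the protobuf form only (the subject name and helper are illustrative; `async-nats` subscribers are streams, so `futures::StreamExt` is needed):

use futures::StreamExt;
use prost::Message;
use scuffle_image_processor_proto::EventCallback;

async fn consume(client: async_nats::Client) -> anyhow::Result<()> {
    // Subject chosen for illustration; the processor publishes to whatever
    // topic the task's event configuration names.
    let mut sub = client.subscribe("image_processor.events".to_string()).await?;

    while let Some(msg) = sub.next().await {
        // Assumes the publisher used MessageEncoding::Protobuf.
        let event = EventCallback::decode(msg.payload.clone())?;
        tracing::info!("received event for job {}", event.id);
    }

    Ok(())
}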
diff --git a/image-processor/src/event_queue/redis.rs b/image-processor/src/event_queue/redis.rs
new file mode 100644
index 000000000..e998a57dd
--- /dev/null
+++ b/image-processor/src/event_queue/redis.rs
@@ -0,0 +1,66 @@
+use fred::interfaces::{ClientLike, PubsubInterface};
+use fred::types::RedisConfig;
+use prost::Message;
+use scuffle_image_processor_proto::EventCallback;
+
+use super::{EventQueue, EventQueueError};
+use crate::config::{MessageEncoding, RedisEventQueueConfig};
+
+#[derive(Debug)]
+pub struct RedisEventQueue {
+    client: fred::clients::RedisClient,
+    name: String,
+    message_encoding: MessageEncoding,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum RedisEventQueueError {
+    #[error("redis: {0}")]
+    Redis(#[from] fred::error::RedisError),
+    #[error("json encode: {0}")]
+    JsonEncode(#[from] serde_json::Error),
+}
+
+impl RedisEventQueue {
+    #[tracing::instrument(skip(config), name = "RedisEventQueue::new", fields(name = %config.name), err)]
+    pub async fn new(config: &RedisEventQueueConfig) -> Result<Self, EventQueueError> {
+        Ok(Self {
+            client: fred::clients::RedisClient::new(
+                RedisConfig::from_url(&config.url).map_err(RedisEventQueueError::from)?,
+                None,
+                None,
+                None,
+            ),
+            name: config.name.clone(),
+            message_encoding: config.message_encoding,
+        })
+    }
+}
+
+impl EventQueue for RedisEventQueue {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    #[tracing::instrument(skip(self), name = "RedisEventQueue::publish", err)]
+    async fn publish(&self, topic: &str, data: EventCallback) -> Result<(), EventQueueError> {
+        let payload = if self.message_encoding == MessageEncoding::Protobuf {
+            data.encode_to_vec()
+        } else {
+            serde_json::to_string(&data)
+                .map_err(RedisEventQueueError::JsonEncode)?
+                .into_bytes()
+        };
+
+        self.client
+            .publish(topic, payload)
+            .await
+            .map_err(RedisEventQueueError::Redis)?;
+
+        Ok(())
+    }
+
+    async fn healthy(&self) -> bool {
+        self.client.ping::<()>().await.is_ok()
+    }
+}
diff --git a/image-processor/src/events.rs b/image-processor/src/events.rs
new file mode 100644
index 000000000..074463b36
--- /dev/null
+++ b/image-processor/src/events.rs
@@ -0,0 +1,67 @@
+use std::sync::Arc;
+
+use scuffle_image_processor_proto::{event_callback, EventCallback, EventQueue as EventTopic};
+
+use crate::database::Job;
+use crate::event_queue::EventQueue;
+use crate::global::Global;
+use crate::worker::JobError;
+
+#[tracing::instrument(skip(global, job, event_topic), fields(topic = %event_topic.topic, name = %event_topic.name, job_id = %job.id))]
+pub async fn on_event(global: &Arc<Global>, job: &Job, event_topic: &EventTopic, event: event_callback::Event) {
+    let Some(queue) = global.event_queue(&event_topic.name) else {
+        tracing::warn!("event queue not found: {}", event_topic.name);
+        return;
+    };
+
+    if let Err(err) = queue
+        .publish(
+            &event_topic.topic,
+            EventCallback {
+                id: job.id.to_string(),
+                timestamp: chrono::Utc::now().timestamp() as u64,
+                metadata: job.task.events.as_ref().map(|e| e.metadata.clone()).unwrap_or_default(),
+                event: Some(event),
+            },
+        )
+        .await
+    {
+        tracing::error!("failed to publish event: {err}");
+    }
+}
+
+pub async fn on_start(global: &Arc<Global>, job: &Job) {
+    if let Some(on_start) = &job.task.events.as_ref().and_then(|events| events.on_start.as_ref()) {
+        on_event(global, job, on_start, event_callback::Event::Start(event_callback::Start {})).await;
+    }
+}
+
+pub async fn on_success(global: &Arc<Global>, job: &Job, success: event_callback::Success) {
+    if let Some(on_success) = &job.task.events.as_ref().and_then(|events| events.on_success.as_ref()) {
+        on_event(global, job, on_success, event_callback::Event::Success(success)).await;
+    }
+}
+
+pub async fn on_failure(global: &Arc<Global>, job: &Job, err: JobError) {
+    if let Some(on_failure) = &job.task.events.as_ref().and_then(|events| events.on_failure.as_ref()) {
+        on_event(
+            global,
+            job,
+            on_failure,
+            event_callback::Event::Fail(event_callback::Fail { error: Some(err.into()) }),
+        )
+        .await;
+    }
+}
+
+pub async fn on_cancel(global: &Arc<Global>, job: &Job) {
+    if let Some(on_cancel) = &job.task.events.as_ref().and_then(|events| events.on_cancel.as_ref()) {
+        on_event(
+            global,
+            job,
+            on_cancel,
+            event_callback::Event::Cancel(event_callback::Cancel {}),
+        )
+        .await;
+    }
+}
diff --git a/image-processor/src/global.rs b/image-processor/src/global.rs
new file mode 100644
index 000000000..d474cc898
--- /dev/null
+++ b/image-processor/src/global.rs
@@ -0,0 +1,147 @@
+use std::collections::HashMap;
+
+use anyhow::Context;
+use bson::oid::ObjectId;
+use scuffle_foundations::telemetry::server::HealthCheck;
+use scuffle_foundations::BootstrapResult;
+
+use crate::config::ImageProcessorConfig;
+use crate::database::Job;
+use crate::drive::public_http::PUBLIC_HTTP_DRIVE_NAME;
+use crate::drive::{build_drive, AnyDrive, Drive};
+use crate::event_queue::{build_event_queue, AnyEventQueue, EventQueue};
+
+pub struct Global {
+    worker_id: ObjectId,
+    config: ImageProcessorConfig,
+    database: mongodb::Database,
+    disks: HashMap<String, AnyDrive>,
+    event_queues: HashMap<String, AnyEventQueue>,
+}
+
+impl Global {
+    pub async fn new(config: ImageProcessorConfig) -> BootstrapResult<Self> {
+        const DEFAULT_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(3);
+        tracing::debug!("setting up mongo client");
+
+        let client = tokio::time::timeout(DEFAULT_TIMEOUT, mongodb::Client::with_uri_str(&config.database.uri))
+            .await
+            .context("mongodb timeout")?
+            .context("mongodb")?;
+        let Some(database) = client.default_database() else {
+            anyhow::bail!("no default database")
+        };
+
+        tracing::debug!("setting up job collection");
+
+        tokio::time::timeout(DEFAULT_TIMEOUT, Job::setup_collection(&database))
+            .await
+            .context("job collection timeout")?
+            .context("job collection")?;
+
+        tracing::debug!("setting up disks and event queues");
+
+        let mut disks = HashMap::new();
+
+        for disk in &config.drives {
+            let disk = tokio::time::timeout(DEFAULT_TIMEOUT, build_drive(disk))
+                .await
+                .context("disk timeout")?
+                .context("disk")?;
+
+            let name = disk.name().to_string();
+            if disks.insert(name.clone(), disk).is_some() {
+                anyhow::bail!("duplicate disk name: {name}");
+            }
+        }
+
+        if config.drives.is_empty() {
+            tracing::warn!("no disks configured");
+        }
+
+        let mut event_queues = HashMap::new();
+
+        for event_queue in &config.event_queues {
+            let event_queue = tokio::time::timeout(DEFAULT_TIMEOUT, build_event_queue(event_queue))
+                .await
+                .context("event queue timeout")?
+                .context("event queue")?;
+
+            let name = event_queue.name().to_string();
+            if event_queues.insert(name.clone(), event_queue).is_some() {
+                anyhow::bail!("duplicate event queue name: {name}");
+            }
+        }
+
+        if config.event_queues.is_empty() {
+            tracing::warn!("no event queues configured");
+        }
+
+        Ok(Self {
+            worker_id: ObjectId::new(),
+            config,
+            database,
+            disks,
+            event_queues,
+        })
+    }
+
+    pub fn worker_id(&self) -> ObjectId {
+        self.worker_id
+    }
+
+    pub fn config(&self) -> &ImageProcessorConfig {
+        &self.config
+    }
+
+    pub fn drive(&self, name: &str) -> Option<&AnyDrive> {
+        self.disks.get(name)
+    }
+
+    pub fn drives(&self) -> &HashMap<String, AnyDrive> {
+        &self.disks
+    }
+
+    pub fn event_queues(&self) -> &HashMap<String, AnyEventQueue> {
+        &self.event_queues
+    }
+
+    pub fn event_queue(&self, name: &str) -> Option<&AnyEventQueue> {
+        self.event_queues.get(name)
+    }
+
+    pub fn public_http_drive(&self) -> Option<&AnyDrive> {
+        self.drive(PUBLIC_HTTP_DRIVE_NAME)
+    }
+
+    pub fn database(&self) -> &mongodb::Database {
+        &self.database
+    }
+}
+
+impl HealthCheck for Global {
+    fn check(&self) -> std::pin::Pin<Box<dyn std::future::Future<Output = bool> + Send + '_>> {
+        Box::pin(async {
+            if let Err(err) = self.database().run_command(bson::doc! { "ping": 1 }, None).await {
+                tracing::error!("database ping failed: {err}");
+                return false;
+            }
+
+            for disk in self.drives().values() {
+                if !disk.healthy().await {
+                    tracing::error!(name = %disk.name(), "disk check failed");
+                    return false;
+                }
+            }
+
+            for event_queue in self.event_queues().values() {
+                if !event_queue.healthy().await {
+                    tracing::error!(name = %event_queue.name(), "event queue check failed");
+                    return false;
+                }
+            }
+
+            true
+        })
+    }
+}
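Every external dependency above is initialized under a hard timeout, with two `context` layers so a hang ("x timeout") and a real failure ("x: ...") stay distinguishable in the error chain. The idiom in isolation (helper name and duration are illustrative):

use anyhow::Context;

async fn bounded_setup<T>(
    what: &'static str,
    fut: impl std::future::Future<Output = anyhow::Result<T>>,
) -> anyhow::Result<T> {
    tokio::time::timeout(std::time::Duration::from_secs(3), fut)
        .await
        .with_context(|| format!("{what} timeout"))? // the future never completed
        .context(what) // the future completed with an error
}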
{ "ping": 1 }, None).await { + tracing::error!("database ping failed: {err}"); + return false; + } + + for disk in self.drives().values() { + if !disk.healthy().await { + tracing::error!(name = %disk.name(), "disk check failed"); + return false; + } + } + + for event_queue in self.event_queues().values() { + if !event_queue.healthy().await { + tracing::error!(name = %event_queue.name(), "event queue check failed"); + return false; + } + } + + true + }) + } +} diff --git a/image-processor/src/main.rs b/image-processor/src/main.rs new file mode 100644 index 000000000..298752a00 --- /dev/null +++ b/image-processor/src/main.rs @@ -0,0 +1,103 @@ +use std::sync::Arc; + +use anyhow::Context; +use scuffle_foundations::bootstrap::{bootstrap, Bootstrap}; +use scuffle_foundations::runtime; +use scuffle_foundations::settings::cli::Matches; +use tokio::signal::unix::SignalKind; + +use self::config::ImageProcessorConfig; + +impl Bootstrap for ImageProcessorConfig { + type Settings = Self; + + fn runtime_mode(&self) -> scuffle_foundations::bootstrap::RuntimeSettings { + self.runtime.clone() + } + + fn telemetry_config(&self) -> Option { + Some(self.telemetry.clone()) + } +} + +mod config; +mod database; +mod drive; +mod event_queue; +pub mod events; +mod global; +mod management; +mod worker; + +#[bootstrap] +async fn main(cfg: Matches) { + tracing::info!("starting image processor"); + + // Require a health check to be registered + scuffle_foundations::telemetry::server::require_health_check(); + + let global = Arc::new({ + match global::Global::new(cfg.settings).await { + Ok(global) => global, + Err(err) => { + tracing::error!("error setting up global: {err}"); + std::process::exit(1); + } + } + }); + + scuffle_foundations::telemetry::server::register_health_check(global.clone()); + + let mut handles = Vec::new(); + + if global.config().management.grpc.enabled || global.config().management.http.enabled { + tracing::info!("starting management"); + handles.push(runtime::spawn(management::start(global.clone()))); + } + + if global.config().worker.enabled { + tracing::info!("starting worker"); + handles.push(runtime::spawn(worker::start(global.clone()))); + } + + let mut signal = scuffle_foundations::signal::SignalHandler::new() + .with_signal(SignalKind::interrupt()) + .with_signal(SignalKind::terminate()); + + let handles = futures::future::try_join_all( + handles + .iter_mut() + .map(|handle| async move { handle.await.context("spawn task failed")? }), + ); + + tokio::select! { + _ = signal.recv() => { + tracing::info!("received signal, shutting down"); + } + result = handles => { + match result { + Ok(_) => { + tracing::warn!("handles completed unexpectedly without error"); + }, + Err(err) => tracing::error!("error in handle: {}", err), + } + } + } + + let handle = scuffle_foundations::context::Handler::global(); + + tokio::select! 
diff --git a/image-processor/src/management/grpc.rs b/image-processor/src/management/grpc.rs
new file mode 100644
index 000000000..94a11d269
--- /dev/null
+++ b/image-processor/src/management/grpc.rs
@@ -0,0 +1,41 @@
+use scuffle_image_processor_proto::{CancelTaskRequest, CancelTaskResponse, ProcessImageRequest, ProcessImageResponse};
+use tonic::{Request, Response};
+
+use super::ManagementServer;
+
+impl ManagementServer {
+    pub async fn run_grpc(&self) -> Result<(), tonic::transport::Error> {
+        let addr = self.global.config().management.grpc.bind;
+        let server = tonic::transport::Server::builder()
+            .add_service(scuffle_image_processor_proto::image_processor_server::ImageProcessorServer::new(self.clone()))
+            .serve_with_shutdown(addr, scuffle_foundations::context::Context::global().into_done());
+
+        tracing::info!(%addr, "gRPC server listening");
+        server.await
+    }
+}
+
+#[async_trait::async_trait]
+impl scuffle_image_processor_proto::image_processor_server::ImageProcessor for ManagementServer {
+    async fn process_image(
+        &self,
+        request: Request<ProcessImageRequest>,
+    ) -> tonic::Result<Response<ProcessImageResponse>> {
+        let resp = match self.process_image(request.into_inner()).await {
+            Ok(resp) => resp,
+            Err(err) => ProcessImageResponse {
+                id: "".to_owned(),
+                upload_info: None,
+                error: Some(err),
+            },
+        };
+
+        Ok(Response::new(resp))
+    }
+
+    async fn cancel_task(&self, request: Request<CancelTaskRequest>) -> tonic::Result<Response<CancelTaskResponse>> {
+        let resp = match self.cancel_task(request.into_inner()).await {
+            Ok(resp) => resp,
+            Err(err) => CancelTaskResponse { error: Some(err) },
+        };
+
+        Ok(Response::new(resp))
+    }
+}
diff --git a/image-processor/src/management/http.rs b/image-processor/src/management/http.rs
new file mode 100644
index 000000000..3c6bf111e
--- /dev/null
+++ b/image-processor/src/management/http.rs
@@ -0,0 +1,78 @@
+use scuffle_foundations::http::server::axum::extract::State;
+use scuffle_foundations::http::server::axum::routing::post;
+use scuffle_foundations::http::server::axum::{Json, Router};
+use scuffle_image_processor_proto::{
+    CancelTaskRequest, CancelTaskResponse, ErrorCode, ProcessImageRequest, ProcessImageResponse,
+};
+
+use super::ManagementServer;
+
+impl ManagementServer {
+    pub async fn run_http(&self) -> Result<(), scuffle_foundations::http::server::Error> {
+        let router = Router::new()
+            .route("/process_image", post(process_image))
+            .route("/cancel_task", post(cancel_task))
+            .fallback(not_found)
+            .with_state(self.clone());
+
+        let addr = self.global.config().management.http.bind;
+        scuffle_foundations::http::server::Server::builder()
+            .bind(addr)
+            .build(router)?
+            .start_and_wait()
+            .await
+    }
+}
+
+async fn not_found() -> (http::StatusCode, &'static str) {
+    (http::StatusCode::NOT_FOUND, "Not Found")
+}
+
+async fn process_image(
+    State(server): State<ManagementServer>,
+    Json(request): Json<ProcessImageRequest>,
+) -> (http::StatusCode, Json<ProcessImageResponse>) {
+    let resp = match server.process_image(request).await {
+        Ok(resp) => resp,
+        Err(err) => ProcessImageResponse {
+            id: "".to_owned(),
+            upload_info: None,
+            error: Some(err),
+        },
+    };
+
+    let status = resp
+        .error
+        .as_ref()
+        .map_or(http::StatusCode::OK, |err| map_error_code(err.code()));
+    (status, Json(resp))
+}
+
+async fn cancel_task(
+    State(server): State<ManagementServer>,
+    Json(request): Json<CancelTaskRequest>,
+) -> (http::StatusCode, Json<CancelTaskResponse>) {
+    let resp = match server.cancel_task(request).await {
+        Ok(resp) => resp,
+        Err(err) => CancelTaskResponse { error: Some(err) },
+    };
+
+    let status = resp
+        .error
+        .as_ref()
+        .map_or(http::StatusCode::OK, |err| map_error_code(err.code()));
+    (status, Json(resp))
+}
+
+fn map_error_code(code: ErrorCode) -> http::StatusCode {
+    match code {
+        ErrorCode::InvalidInput => http::StatusCode::BAD_REQUEST,
+        ErrorCode::Internal => http::StatusCode::INTERNAL_SERVER_ERROR,
+        ErrorCode::NotImplemented => http::StatusCode::NOT_IMPLEMENTED,
+        ErrorCode::Decode => http::StatusCode::INTERNAL_SERVER_ERROR,
+        ErrorCode::Encode => http::StatusCode::INTERNAL_SERVER_ERROR,
+        ErrorCode::InputDownload => http::StatusCode::INTERNAL_SERVER_ERROR,
+        ErrorCode::OutputUpload => http::StatusCode::INTERNAL_SERVER_ERROR,
+        ErrorCode::Resize => http::StatusCode::INTERNAL_SERVER_ERROR,
+    }
+}
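Because these handlers always return a response body (errors travel inside it, with the status derived from `map_error_code`), a client should decode the JSON body on any status rather than bailing early. A hypothetical client call, assuming the proto types derive serde both ways as the `Json` extractors above imply:

use scuffle_image_processor_proto::{ProcessImageRequest, ProcessImageResponse};

async fn submit(base: &str, req: &ProcessImageRequest) -> Result<ProcessImageResponse, reqwest::Error> {
    reqwest::Client::new()
        .post(format!("{base}/process_image"))
        .json(req)
        .send()
        .await?
        // No error_for_status(): non-2xx responses still carry a
        // ProcessImageResponse with the error field populated.
        .json::<ProcessImageResponse>()
        .await
}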
diff --git a/image-processor/src/management/mod.rs b/image-processor/src/management/mod.rs
new file mode 100644
index 000000000..f18581224
--- /dev/null
+++ b/image-processor/src/management/mod.rs
@@ -0,0 +1,177 @@
+use std::sync::Arc;
+
+use anyhow::Context;
+use bson::oid::ObjectId;
+use bytes::Bytes;
+use scuffle_image_processor_proto::{
+    input, CancelTaskRequest, CancelTaskResponse, DrivePath, Error, ErrorCode, Input, ProcessImageRequest,
+    ProcessImageResponse, ProcessImageResponseUploadInfo,
+};
+
+use crate::database::Job;
+use crate::drive::{Drive, DriveWriteOptions};
+use crate::global::Global;
+use crate::management::validation::{validate_input_upload, validate_task, FragmentBuf};
+use crate::worker::process::DecoderFrontend;
+
+pub mod grpc;
+pub mod http;
+
+mod validation;
+
+#[derive(Clone)]
+struct ManagementServer {
+    global: Arc<Global>,
+}
+
+impl ManagementServer {
+    async fn process_image(&self, mut request: ProcessImageRequest) -> Result<ProcessImageResponse, Error> {
+        let mut fragment = FragmentBuf::new();
+
+        validate_task(
+            &self.global,
+            fragment.push("task"),
+            request.task.as_ref(),
+            request.input_upload.as_ref().and_then(|upload| upload.drive_path.as_ref()),
+        )?;
+
+        // The input upload is optional, so it is validated separately from the task.
+        if let Some(input_upload) = request.input_upload.as_ref() {
+            validate_input_upload(&self.global, fragment.push("input_upload"), Some(input_upload))?;
+        }
+
+        let id = ObjectId::new();
+
+        let upload_info = if let Some(input_upload) = request.input_upload {
+            let drive_path = input_upload.drive_path.unwrap();
+            let drive = self.global.drive(&drive_path.drive).unwrap();
+
+            let file_format = file_format::FileFormat::from_bytes(&input_upload.binary);
+
+            DecoderFrontend::from_format(file_format).map_err(|err| Error {
+                code: ErrorCode::Decode as i32,
+                message: format!("input_upload.binary: {err}"),
+            })?;
+
+            let vars = [
+                ("id".to_owned(), id.to_string()),
+                ("ext".to_owned(), file_format.extension().to_owned()),
+            ]
+            .into_iter()
+            .collect();
+
+            let path = strfmt::strfmt(&drive_path.path, &vars).map_err(|err| Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("input_upload.drive_path.path: {err}"),
+            })?;
+
+            let drive_path = DrivePath {
+                drive: drive_path.drive,
+                path: path.clone(),
+            };
+
+            if let Some(input) = request.task.as_mut().unwrap().input.as_mut() {
+                input.path = Some(input::Path::DrivePath(drive_path.clone()));
+            } else {
+                request.task.as_mut().unwrap().input = Some(Input {
+                    path: Some(input::Path::DrivePath(drive_path.clone())),
+                    ..Default::default()
+                });
+            }
+
+            let upload_size = input_upload.binary.len() as u64;
+
+            drive
+                .write(
+                    &path,
+                    Bytes::from(input_upload.binary),
+                    Some(DriveWriteOptions {
+                        acl: input_upload.acl,
+                        cache_control: input_upload.cache_control,
+                        content_disposition: input_upload.content_disposition,
+                        content_type: input_upload.content_type,
+                    }),
+                )
+                .await
+                .map_err(|err| {
+                    tracing::error!("failed to write input upload: {:#}", err);
+                    Error {
+                        code: ErrorCode::Internal as i32,
+                        message: format!("failed to write input upload: {err}"),
+                    }
+                })?;
+
+            Some(ProcessImageResponseUploadInfo {
+                path: Some(drive_path),
+                content_type: file_format.media_type().to_owned(),
+                size: upload_size,
+            })
+        } else {
+            None
+        };
+
+        let job = Job::new(&self.global, id, request.task.unwrap(), request.priority, request.ttl)
+            .await
+            .map_err(|err| {
+                tracing::error!("failed to create job: {:#}", err);
+                Error {
+                    code: ErrorCode::Internal as i32,
+                    message: format!("failed to create job: {err}"),
+                }
+            })?;
+
+        Ok(ProcessImageResponse {
+            id: job.id.to_string(),
+            upload_info,
+            error: None,
+        })
+    }
+
+    async fn cancel_task(&self, request: CancelTaskRequest) -> Result<CancelTaskResponse, Error> {
+        match Job::cancel(
+            &self.global,
+            request.id.parse().map_err(|err| Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("id: {err}"),
+            })?,
+        )
+        .await
+        {
+            Ok(Some(_)) => Ok(CancelTaskResponse { error: None }),
+            Ok(None) => Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: "not found".to_owned(),
+            }),
+            Err(err) => {
+                tracing::error!("failed to cancel job: {:#}", err);
+                Err(Error {
+                    code: ErrorCode::Internal as i32,
+                    message: format!("failed to cancel job: {err}"),
+                })
+            }
+        }
+    }
+}
+
+pub async fn start(global: Arc<Global>) -> anyhow::Result<()> {
+    let server = ManagementServer { global };
+
+    let http = async {
+        if server.global.config().management.http.enabled {
+            server.run_http().await.context("http")
+        } else {
+            Ok(())
+        }
+    };
+    let grpc = async {
+        if server.global.config().management.grpc.enabled {
+            server.run_grpc().await.context("grpc")
+        } else {
+            Ok(())
+        }
+    };
+
+    futures::future::try_join(http, grpc).await.context("management")?;
+
+    Ok(())
+}
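Upload locations are `strfmt` templates; for input uploads only `{id}` and `{ext}` are permitted, as validated earlier. A sketch of the expansion (template and values illustrative):

use std::collections::HashMap;

fn example_upload_path(id: &str, ext: &str) -> Result<String, strfmt::FmtError> {
    let vars: HashMap<String, String> = [
        ("id".to_owned(), id.to_owned()),
        ("ext".to_owned(), ext.to_owned()),
    ]
    .into_iter()
    .collect();

    // e.g. "{id}/input.{ext}" with id = "abc123", ext = "png"
    // expands to "abc123/input.png".
    strfmt::strfmt("{id}/input.{ext}", &vars)
}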
diff --git a/image-processor/src/management/validation.rs b/image-processor/src/management/validation.rs
new file mode 100644
index 000000000..5381d2358
--- /dev/null
+++ b/image-processor/src/management/validation.rs
@@ -0,0 +1,707 @@
+use std::collections::HashSet;
+use std::sync::Arc;
+
+use scuffle_image_processor_proto::{
+    animation_config, input, output, scaling, AnimationConfig, Crop, DrivePath, Error, ErrorCode, EventQueue, Events, Input,
+    InputMetadata, InputUpload, Limits, Output, OutputFormat, OutputFormatOptions, Scaling, Task,
+};
+use url::Url;
+
+use crate::global::Global;
+
+#[derive(Debug, Clone, Copy)]
+pub enum FragmentItem {
+    Map(&'static str),
+    Index(usize),
+}
+
+#[derive(Debug)]
+pub struct FragmentBuf {
+    path: Vec<FragmentItem>,
+}
+
+impl FragmentBuf {
+    pub fn new() -> Self {
+        Self { path: Vec::new() }
+    }
+
+    pub fn push(&mut self, path: impl Into<FragmentItem>) -> Fragment<'_> {
+        self.path.push(path.into());
+        Fragment::new(&mut self.path)
+    }
+}
+
+#[derive(Debug)]
+pub struct Fragment<'a> {
+    path: &'a mut Vec<FragmentItem>,
+}
+
+impl<'a> Fragment<'a> {
+    pub fn new(path: &'a mut Vec<FragmentItem>) -> Self {
+        Self { path }
+    }
+}
+
+impl From<&'static str> for FragmentItem {
+    fn from(value: &'static str) -> Self {
+        Self::Map(value)
+    }
+}
+
+impl From<usize> for FragmentItem {
+    fn from(value: usize) -> Self {
+        Self::Index(value)
+    }
+}
+
+// This is a bit of a hack to allow us to convert from a reference to a copy.
+// &&'static str -> &'static str -> FragmentItem
+// &usize -> usize -> FragmentItem
+impl<T> From<&T> for FragmentItem
+where
+    T: Copy,
+    FragmentItem: From<T>,
+{
+    fn from(value: &T) -> Self {
+        Self::from(*value)
+    }
+}
+
+impl std::fmt::Display for Fragment<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut first = true;
+        for item in self.path.iter() {
+            match item {
+                FragmentItem::Map(value) => {
+                    if !first {
+                        write!(f, ".")?;
+                    }
+                    write!(f, "{value}")?;
+                }
+                FragmentItem::Index(value) => {
+                    write!(f, "[{value}]")?;
+                }
+            }
+
+            first = false;
+        }
+
+        Ok(())
+    }
+}
+
+impl Fragment<'_> {
+    pub fn push(&mut self, path: impl Into<FragmentItem>) -> Fragment<'_> {
+        self.path.push(path.into());
+        Fragment::new(self.path)
+    }
+
+    pub fn replace(self, path: impl Into<FragmentItem>) -> Self {
+        if self.path.is_empty() {
+            return self;
+        }
+
+        *self.path.last_mut().unwrap() = path.into();
+        self
+    }
+}
+
+impl Drop for Fragment<'_> {
+    fn drop(&mut self) {
+        self.path.pop();
+    }
+}
+
+pub fn validate_input_upload(
+    global: &Arc<Global>,
+    mut fragment: Fragment,
+    input_upload: Option<&InputUpload>,
+) -> Result<(), Error> {
+    let input_upload = input_upload.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    validate_drive_path(
+        global,
+        fragment.push("drive_path"),
+        input_upload.drive_path.as_ref(),
+        &["id", "ext"],
+    )?;
+
+    if input_upload.binary.is_empty() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: binary is required"),
+        });
+    }
+
+    Ok(())
+}
+
+pub fn validate_task(
+    global: &Arc<Global>,
+    mut fragment: Fragment,
+    task: Option<&Task>,
+    has_image_upload: Option<&DrivePath>,
+) -> Result<(), Error> {
+    let task = task.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    validate_input(global, fragment.push("input"), task.input.as_ref(), has_image_upload)?;
+
+    validate_output(global, fragment.push("output"), task.output.as_ref())?;
+
+    if let Some(events) = &task.events {
+        validate_events(global, fragment.push("events"), Some(events))?;
+    }
+
+    if let Some(limits) = &task.limits {
+        validate_limits(fragment.push("limits"), Some(limits))?;
+    }
+
+    Ok(())
+}
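`Fragment` renders map segments joined by dots and indices as brackets, and its `Drop` impl pops the segment when a borrow ends, so sibling fields reuse the parent path. Expected rendering in a test-style sketch, assuming the types above are in scope:

#[test]
fn fragment_renders_dotted_paths() {
    let mut fragment = FragmentBuf::new();
    let mut task = fragment.push("task");
    let mut input = task.push("input");
    let first = input.push(0usize);
    // Map segments join with '.'; index segments render as "[n]".
    assert_eq!(first.to_string(), "task.input[0]");
}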
+fn validate_limits(mut fragment: Fragment, limits: Option<&Limits>) -> Result<(), Error> {
+    let limits = limits.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    let fields = [
+        (limits.max_processing_time_ms, "max_processing_time_ms"),
+        (limits.max_input_frame_count, "max_input_frame_count"),
+        (limits.max_input_width, "max_input_width"),
+        (limits.max_input_height, "max_input_height"),
+        (limits.max_input_duration_ms, "max_input_duration_ms"),
+    ];
+
+    for (value, name) in &fields {
+        if let Some(0) = value {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: must be non 0", fragment.push(name)),
+            });
+        }
+    }
+
+    Ok(())
+}
+
+fn validate_events(global: &Arc<Global>, mut fragment: Fragment, events: Option<&Events>) -> Result<(), Error> {
+    let events = events.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    let events = [
+        (events.on_success.as_ref(), "on_success"),
+        (events.on_failure.as_ref(), "on_failure"),
+        (events.on_cancel.as_ref(), "on_cancel"),
+        (events.on_start.as_ref(), "on_start"),
+    ];
+
+    for (event, name) in &events {
+        if let Some(event) = event {
+            validate_event_queue(global, fragment.push(name), Some(event))?;
+        }
+    }
+
+    Ok(())
+}
+
+fn validate_event_queue(global: &Arc<Global>, mut fragment: Fragment, event: Option<&EventQueue>) -> Result<(), Error> {
+    let event_queue = event.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    if event_queue.name.is_empty() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: is required", fragment.push("name")),
+        });
+    }
+
+    if global.event_queue(&event_queue.name).is_none() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: event queue not found"),
+        });
+    }
+
+    // Validate the topic template string
+    validate_template_string(fragment.push("topic"), &["id"], &event_queue.topic)?;
+
+    Ok(())
+}
format!("{}: must be greater than or equal to 0", fragment.push("min_ratio")), + }); + } + (_, Some(max_ratio)) if max_ratio <= 0.0 => { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: must be greater than or equal to 0", fragment.push("max_ratio")), + }); + } + (Some(min_ratio), Some(max_ratio)) if min_ratio > max_ratio => { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: min_ratio must be less than or equal to max_ratio", fragment), + }); + } + _ => {} + } + + Ok(()) +} + +pub fn validate_crop(mut fragment: Fragment, crop: Option<&Crop>) -> Result<(), Error> { + let crop = crop.ok_or_else(|| Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{fragment}: is required"), + })?; + + if crop.width == 0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: width must be non 0", fragment.push("width")), + }); + } + + if crop.height == 0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: height must be non 0", fragment.push("height")), + }); + } + + Ok(()) +} + +pub fn validate_output_animation_config( + mut fragment: Fragment, + animation_config: Option<&AnimationConfig>, +) -> Result<(), Error> { + let animation_config = animation_config.ok_or_else(|| Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{fragment}: is required"), + })?; + + if let Some(loop_count) = animation_config.loop_count { + if loop_count < -1 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!( + "{}: loop_count must be greater than or equal to -1", + fragment.push("loop_count") + ), + }); + } + } + + if let Some(frame_rate) = &animation_config.frame_rate { + match frame_rate { + animation_config::FrameRate::FrameDurationMs(duration_ms) => { + if *duration_ms == 0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: duration_ms must be non 0", fragment.push("frame_duration_ms")), + }); + } + } + animation_config::FrameRate::FrameDurationsMs(durations_ms) => { + let mut fragment = fragment.push("frame_durations_ms.values"); + + if durations_ms.values.is_empty() { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{fragment}: durations_ms must not be empty"), + }); + } + + for (idx, duration_ms) in durations_ms.values.iter().enumerate() { + if *duration_ms == 0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: duration_ms must be non 0", fragment.push(idx)), + }); + } + } + } + animation_config::FrameRate::FrameRateFactor(factor) => { + if *factor <= 0.0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: factor must be greater than 0", fragment.push("frame_rate_factor")), + }); + } + } + } + } + + Ok(()) +} + +pub fn validate_output_variants_resize(fragment: Fragment, resize: Option<&output::Resize>) -> Result<(), Error> { + let resize = resize.ok_or_else(|| Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{fragment}: is required"), + })?; + + let validate_items = |mut fragment: Fragment, items: &[u32]| { + if items.is_empty() { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{fragment}: is required"), + }); + } + + for (idx, item) in items.iter().enumerate() { + if *item == 0 { + return Err(Error { + code: ErrorCode::InvalidInput as i32, + message: format!("{}: must be non 0", fragment.push(idx)), + }); + } + } + + Ok(()) + }; + + 
+pub fn validate_output_variants_resize(fragment: Fragment, resize: Option<&output::Resize>) -> Result<(), Error> {
+    let resize = resize.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    let validate_items = |mut fragment: Fragment, items: &[u32]| {
+        if items.is_empty() {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{fragment}: is required"),
+            });
+        }
+
+        for (idx, item) in items.iter().enumerate() {
+            if *item == 0 {
+                return Err(Error {
+                    code: ErrorCode::InvalidInput as i32,
+                    message: format!("{}: must be non 0", fragment.push(idx)),
+                });
+            }
+        }
+
+        Ok(())
+    };
+
+    match resize {
+        output::Resize::Heights(height) => {
+            validate_items(fragment.replace("height.values"), &height.values)?;
+        }
+        output::Resize::Widths(width) => {
+            validate_items(fragment.replace("width.values"), &width.values)?;
+        }
+        output::Resize::Scaling(scaling) => {
+            validate_scaling(fragment.replace("scaling"), Some(scaling))?;
+        }
+    }
+
+    Ok(())
+}
+
+pub fn validate_scaling(mut fragment: Fragment, scaling: Option<&Scaling>) -> Result<(), Error> {
+    let scaling = scaling.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    if scaling.scales.is_empty() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: is required", fragment.push("scales")),
+        });
+    }
+
+    for (idx, scale) in scaling.scales.iter().enumerate() {
+        if *scale == 0 {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: must be non 0", fragment.push(idx)),
+            });
+        }
+    }
+
+    let Some(base) = scaling.base.as_ref() else {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: is required", fragment.push("base")),
+        });
+    };
+
+    match base {
+        scaling::Base::BaseWidth(width) if *width == 0 => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: must be non 0", fragment.push("base_width")),
+            });
+        }
+        scaling::Base::BaseHeight(height) if *height == 0 => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: must be non 0", fragment.push("base_height")),
+            });
+        }
+        scaling::Base::FixedBase(base) if *base == 0 => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: base must be non 0", fragment.push("fixed")),
+            });
+        }
+        _ => {}
+    }
+
+    Ok(())
+}
+
+pub fn validate_output_format_options(
+    mut fragment: Fragment,
+    format: Option<&OutputFormatOptions>,
+    formats: &mut HashSet<OutputFormat>,
+) -> Result<(), Error> {
+    let format = format.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    if !formats.insert(format.format()) {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: format already exists", fragment.push("format")),
+        });
+    }
+
+    Ok(())
+}
+
+pub fn validate_input(
+    global: &Arc<Global>,
+    mut fragment: Fragment,
+    input: Option<&Input>,
+    has_image_upload: Option<&DrivePath>,
+) -> Result<(), Error> {
+    if input.is_none() && has_image_upload.is_some() {
+        return Ok(());
+    }
+
+    let input = input.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: is required"),
+    })?;
+
+    validate_input_path(global, fragment.push("path"), input.path.as_ref(), has_image_upload)?;
+
+    // Metadata is optional
+    if let Some(metadata) = &input.metadata {
+        validate_input_metadata(fragment.push("metadata"), Some(metadata))?;
+    }
+
+    Ok(())
+}
+pub fn validate_input_metadata(mut fragment: Fragment, metadata: Option<&InputMetadata>) -> Result<(), Error> {
+    let metadata = metadata.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{} is required", fragment),
+    })?;
+
+    match (metadata.static_frame_index, metadata.frame_count) {
+        (None, Some(frame_count)) if frame_count == 0 => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!("{}: frame_count must be non 0", fragment),
+            });
+        }
+        (Some(static_frame_index), Some(frame_count)) if static_frame_index >= frame_count => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!(
+                    "{}: static_frame_index must be less than frame_count, {static_frame_index} >= {frame_count}",
+                    fragment
+                ),
+            });
+        }
+        (Some(_), None) => {
+            return Err(Error {
+                code: ErrorCode::InvalidInput as i32,
+                message: format!(
+                    "{}: is required when static_frame_index is provided",
+                    fragment.push("frame_count")
+                ),
+            });
+        }
+        _ => {}
+    }
+
+    if metadata.width == 0 {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: width must be non 0", fragment.push("width")),
+        });
+    }
+
+    if metadata.height == 0 {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: height must be non 0", fragment.push("height")),
+        });
+    }
+
+    Ok(())
+}
+
+pub fn validate_input_path(
+    global: &Arc<Global>,
+    fragment: Fragment,
+    input_path: Option<&input::Path>,
+    has_image_upload: Option<&DrivePath>,
+) -> Result<(), Error> {
+    if input_path.is_some() && has_image_upload.is_some() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: cannot have both path and image_upload"),
+        });
+    }
+
+    let input_path = input_path.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment} is required"),
+    })?;
+
+    match input_path {
+        input::Path::DrivePath(drive_path) => {
+            validate_drive_path(global, fragment.replace("drive_path"), Some(drive_path), &["id"])?;
+        }
+        input::Path::PublicUrl(url) => {
+            validate_public_url(global, fragment.replace("public_url"), url)?;
+        }
+    }
+
+    Ok(())
+}
+
+pub fn validate_drive_path(
+    global: &Arc<Global>,
+    mut fragment: Fragment,
+    drive_path: Option<&DrivePath>,
+    allowed_vars: &[&str],
+) -> Result<(), Error> {
+    let drive_path = drive_path.ok_or_else(|| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{} is required", fragment),
+    })?;
+
+    if global.drive(&drive_path.drive).is_none() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{}: drive not found", fragment.push("drive")),
+        });
+    }
+
+    validate_template_string(fragment.push("path"), allowed_vars, &drive_path.path)?;
+
+    Ok(())
+}
+
+pub fn validate_public_url(global: &Arc<Global>, fragment: Fragment, url: &str) -> Result<(), Error> {
+    if url.is_empty() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: is required"),
+        });
+    } else if global.public_http_drive().is_none() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: public http drive not found"),
+        });
+    }
+
+    let url = Url::parse(url).map_err(|e| Error {
+        code: ErrorCode::InvalidInput as i32,
+        message: format!("{fragment}: {e}"),
+    })?;
+
+    if url.scheme() != "http" && url.scheme() != "https" {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: scheme must be http or https"),
+        });
+    }
+
+    if url.host().is_none() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: url host is required"),
+        });
+    }
+
+    Ok(())
+}
+
+fn validate_template_string(fragment: Fragment, allowed_vars: &[&str], template: &str) -> Result<String, Error> {
+    if template.is_empty() {
+        return Err(Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: is required"),
+        });
+    }
+
+    let formatter = |fmt: strfmt::Formatter| {
+        let k: &str = fmt.key;
+        if !allowed_vars.contains(&k) {
+            return Err(strfmt::FmtError::KeyError(k.to_owned()));
+        }
+        Ok(())
+    };
+
+    strfmt::strfmt_map(template, formatter).map_err(|err| match err {
+        strfmt::FmtError::KeyError(key) => Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!(
+                "{fragment}: invalid variable '{key}', the allowed variables are {:?}",
+                allowed_vars
+            ),
+        },
+        strfmt::FmtError::TypeError(_) | strfmt::FmtError::Invalid(_) => Error {
+            code: ErrorCode::InvalidInput as i32,
+            message: format!("{fragment}: invalid template syntax"),
+        },
+    })
+}
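`strfmt_map` calls the formatter once per `{key}` occurrence, so an allow-list check falls out naturally: return `KeyError` for unknown keys and discard the rendered output. Expected behavior in a test-style sketch, mirroring the formatter above:

#[test]
fn rejects_unknown_template_vars() {
    // Allow-list contains only "id"; "{ext}" must be rejected.
    let result = strfmt::strfmt_map("{id}.{ext}", |fmt: strfmt::Formatter| {
        if fmt.key == "id" {
            Ok(())
        } else {
            Err(strfmt::FmtError::KeyError(fmt.key.to_owned()))
        }
    });

    assert!(matches!(result, Err(strfmt::FmtError::KeyError(_))));
}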
diff --git a/image-processor/src/worker/error.rs b/image-processor/src/worker/error.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/image-processor/src/worker/error.rs
@@ -0,0 +1 @@
+
diff --git a/image-processor/src/worker/mod.rs b/image-processor/src/worker/mod.rs
new file mode 100644
index 000000000..f1110b2f1
--- /dev/null
+++ b/image-processor/src/worker/mod.rs
@@ -0,0 +1,66 @@
+use std::sync::Arc;
+
+use anyhow::Context;
+use scuffle_foundations::context::{self, ContextFutExt};
+
+use crate::database::Job;
+use crate::global::Global;
+
+pub mod process;
+
+pub use self::process::JobError;
+
+pub async fn start(global: Arc<Global>) -> anyhow::Result<()> {
+    let config = global.config();
+
+    let mut concurrency = config.worker.concurrency;
+
+    if concurrency == 0 {
+        concurrency = num_cpus::get();
+    }
+
+    let semaphore = Arc::new(tokio::sync::Semaphore::new(concurrency));
+
+    let mut error_count = 0;
+    let (_, handle) = context::Context::new();
+
+    loop {
+        let ctx = handle.context();
+        let Some(permit) = semaphore
+            .clone()
+            .acquire_owned()
+            .with_context(&ctx)
+            .await
+            .transpose()
+            .expect("semaphore permit")
+        else {
+            break;
+        };
+
+        let job = match Job::fetch(&global).await {
+            Ok(Some(job)) => job,
+            Ok(None) => {
+                tokio::time::sleep(config.worker.polling_interval).await;
+                continue;
+            }
+            Err(err) => {
+                tracing::error!("failed to fetch job: {err}");
+                error_count += 1;
+                if error_count >= config.worker.error_threshold {
+                    Err(err).context("reached error threshold")?;
+                }
+
+                tokio::time::sleep(config.worker.error_delay).await;
+
+                continue;
+            }
+        };
+
+        error_count = 0;
+        tokio::spawn(self::process::spawn(job, global.clone(), ctx, permit));
+    }
+
+    handle.shutdown().await;
+
+    Ok(())
+}
+
+impl CancelToken {
+	fn new() -> Self {
+		Self {
+			cancelled: Arc::new(AtomicBool::new(false)),
+		}
+	}
+
+	fn is_cancelled(&self) -> bool {
+		self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
+	}
+}
+
+impl Drop for CancelToken {
+	fn drop(&mut self) {
+		self.cancelled.store(true, std::sync::atomic::Ordering::Relaxed);
+	}
+}
+
+pub async fn spawn(task: Task, input: Bytes, permit: Arc<OwnedSemaphorePermit>) -> Result<JobOutput, JobError> {
+	let cancel_token = CancelToken::new();
+	let _cancel_guard = cancel_token.clone();
+
+	let span = tracing::Span::current();
+
+	tokio::task::spawn_blocking(move || {
+		// Hold the permit until the blocking task is finished. A blocking task
+		// cannot be cancelled, so even if the parent future is dropped we are
+		// still technically running; if the permit were released early we
+		// might use more system resources than the semaphore allows.
+		let _span = span.enter();
+		let _permit = permit;
+		let mut task = BlockingTask::new(&task, &input)?;
+
+		while task.drive()? {
+			// Check if the task has been cancelled.
+			if cancel_token.is_cancelled() {
+				return Err(JobError::Internal("cancelled"));
+			}
+		}
+
+		task.finish()
+	})
+	.await?
+}
+
+struct BlockingTask<'a> {
+	decoder: AnyDecoder<'a>,
+	decoder_info: DecoderInfo,
+	frame_configs: Vec<Option<FrameConfig>>,
+	resizer: ImageResizer,
+	static_encoders: Vec<(usize, Vec<(ResizeOutputTarget, AnyEncoder)>)>,
+	anim_encoders: Vec<(usize, Vec<(ResizeOutputTarget, AnyEncoder)>)>,
+	static_frame_idx: usize,
+	frame_idx: usize,
+	duration_carried_ms: f64,
+	frame_rate_factor: Option<f64>,
+}
+
+fn split_formats(output: &Output) -> (Vec<(usize, &OutputFormatOptions)>, Vec<(usize, &OutputFormatOptions)>) {
+	output
+		.formats
+		.iter()
+		.enumerate()
+		.fold((Vec::new(), Vec::new()), |mut acc, (idx, format_options)| {
+			match format_options.format() {
+				OutputFormat::AvifStatic | OutputFormat::WebpStatic | OutputFormat::PngStatic => {
+					acc.0.push((idx, format_options))
+				}
+				OutputFormat::AvifAnim | OutputFormat::GifAnim | OutputFormat::WebpAnim => acc.1.push((idx, format_options)),
+			}
+			acc
+		})
+}
+
+fn build_encoder_set(
+	format_options: &OutputFormatOptions,
+	resize_outputs: &[ResizeOutputTarget],
+	loop_count: LoopCount,
+) -> Result<Vec<(ResizeOutputTarget, AnyEncoder)>, JobError> {
+	let encoder_frontend = match format_options.format() {
+		OutputFormat::AvifStatic | OutputFormat::AvifAnim => EncoderBackend::LibAvif,
+		OutputFormat::PngStatic => EncoderBackend::Png,
+		OutputFormat::WebpStatic | OutputFormat::WebpAnim => EncoderBackend::LibWebp,
+		OutputFormat::GifAnim => EncoderBackend::Gifski,
+	};
+
+	resize_outputs
+		.iter()
+		.map(|target| {
+			Ok((
+				*target,
+				encoder_frontend.build(EncoderSettings {
+					loop_count,
+					format: format_options.format(),
+					name: format_options.name.clone(),
+					quality: format_options.quality(),
+					static_image: matches!(
+						format_options.format(),
+						OutputFormat::AvifStatic | OutputFormat::WebpStatic | OutputFormat::PngStatic
+					),
+					timescale: 1000, // millisecond timescale
+				})?,
+			))
+		})
+		.collect::<Result<Vec<_>, JobError>>()
+}
+
+impl<'a> BlockingTask<'a> {
+	fn new(task: &'a Task, input: &'a [u8]) -> Result<Self, JobError> {
+		let output = task.output.as_ref().ok_or(JobError::InvalidJob)?;
+		let anim_config = output.animation_config.as_ref();
+
+		let (static_formats, anim_formats) = split_formats(output);
+
+		if static_formats.is_empty() && anim_formats.is_empty() {
+			return Err(JobError::InvalidJob);
+		}
+
+		let file_format = DecoderFrontend::from_format(FileFormat::from_bytes(input))?;
+		let decoder = file_format.build(task, Cow::Borrowed(input))?;
+
+		let decoder_info = decoder.info();
+
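+		// The metadata block, when present, is treated as a set of assertions
+		// about the input: any disagreement with what the decoder actually
+		// reports fails the job up front rather than producing surprising
+		// output.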
+		if let Some(metadata) = task.input.as_ref().and_then(|input| input.metadata.as_ref()) {
+			if decoder_info.width != metadata.width as usize || decoder_info.height != metadata.height as usize {
+				return Err(JobError::MismatchedDimensions {
+					width: decoder_info.width,
+					height: decoder_info.height,
+					expected_width: metadata.width as usize,
+					expected_height: metadata.height as usize,
+				});
+			}
+
+			if let Some(frame_count) = metadata.frame_count {
+				if decoder_info.frame_count != frame_count as usize {
+					return Err(JobError::MismatchedFrameCount {
+						frame_count: decoder_info.frame_count,
+						expected_frame_count: frame_count as usize,
+					});
+				}
+			}
+
+			if let Some(static_frame_index) = metadata.static_frame_index {
+				if static_frame_index as usize >= decoder_info.frame_count {
+					return Err(JobError::StaticFrameIndexOutOfBounds {
+						idx: static_frame_index as usize,
+						frame_count: decoder_info.frame_count,
+					});
+				}
+			}
+		}
+
+		let mut frame_configs = vec![None; decoder_info.frame_count];
+
+		if let Some(anim_config) = anim_config {
+			match anim_config.frame_rate.as_ref() {
+				Some(animation_config::FrameRate::FrameDurationsMs(durations)) => {
+					if durations.values.len() != decoder_info.frame_count {
+						return Err(JobError::MismatchedFrameCount {
+							frame_count: decoder_info.frame_count,
+							expected_frame_count: durations.values.len(),
+						});
+					}
+
+					for (idx, duration) in durations.values.iter().enumerate() {
+						frame_configs[idx] = Some(FrameConfig::DurationMs(*duration))
+					}
+				}
+				Some(animation_config::FrameRate::FrameDurationMs(duration)) => {
+					for config in frame_configs.iter_mut() {
+						*config = Some(FrameConfig::DurationMs(*duration))
+					}
+				}
+				_ => {}
+			}
+
+			for idx in anim_config.remove_frame_idxs.iter() {
+				let idx = *idx as usize;
+				if idx >= decoder_info.frame_count {
+					return Err(JobError::MismatchedFrameCount {
+						frame_count: decoder_info.frame_count,
+						expected_frame_count: idx + 1,
+					});
+				}
+
+				frame_configs[idx] = Some(FrameConfig::Skip);
+			}
+		}
+
+		let resizer = ImageResizer::new(&decoder_info, output)?;
+
+		let loop_count = anim_config
+			.and_then(|anim_config| anim_config.loop_count)
+			.map(|loop_count| {
+				if loop_count < 0 {
+					LoopCount::Infinite
+				} else {
+					LoopCount::Finite(loop_count as usize)
+				}
+			})
+			.unwrap_or(decoder_info.loop_count);
+
+		let static_encoders = static_formats
+			.into_iter()
+			.map(|(f_idx, format_options)| {
+				build_encoder_set(format_options, resizer.outputs(), loop_count).map(|encoders| (f_idx, encoders))
+			})
+			.collect::<Result<Vec<_>, JobError>>()?;
+
+		let anim_encoders = if decoder_info.frame_count > 1 {
+			anim_formats
+				.into_iter()
+				.map(|(f_idx, format_options)| {
+					build_encoder_set(format_options, resizer.outputs(), loop_count).map(|encoders| (f_idx, encoders))
+				})
+				.collect::<Result<Vec<_>, JobError>>()?
+		} else if !anim_formats.is_empty() && !output.skip_impossible_formats {
+			return Err(JobError::ImpossibleOutput(anim_formats[0].1.format()));
+		} else {
+			Vec::new()
+		};
+
+		if static_encoders.is_empty() && anim_encoders.is_empty() {
+			return Err(JobError::NoPossibleOutputs);
+		}
+
+		let static_frame_idx = task
+			.input
+			.as_ref()
+			.and_then(|input| input.metadata.as_ref())
+			.and_then(|metadata| metadata.static_frame_index)
+			.unwrap_or_default() as usize;
+
+		Ok(Self {
+			decoder,
+			decoder_info,
+			frame_configs,
+			resizer,
+			static_encoders,
+			anim_encoders,
+			static_frame_idx,
+			frame_idx: 0,
+			duration_carried_ms: 0.0,
+			frame_rate_factor: anim_config.and_then(|config| match config.frame_rate.as_ref()?
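+			// Reading of FrameRateFactor (as consumed by `drive` below): each
+			// frame's duration is divided by this factor, so e.g. 2.0 roughly
+			// halves durations (double speed); the fractional remainder is
+			// carried into the next frame so rounding does not drift the
+			// total duration.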
{ + animation_config::FrameRate::FrameRateFactor(factor) => Some(*factor), + _ => None, + }), + }) + } + + pub fn drive(&mut self) -> Result { + let Some(mut frame) = self.decoder.decode()? else { + return Ok(false); + }; + + let idx = self.frame_idx; + self.frame_idx += 1; + + let variants = if idx == self.static_frame_idx { + let variants = self.resizer.resize(frame)?; + + self.static_encoders.iter_mut().try_for_each(|(_, encoders)| { + encoders + .iter_mut() + .zip(variants.iter()) + .try_for_each(|((_, encoder), frame)| encoder.add_frame(frame.as_ref())) + })?; + + Some(variants) + } else { + None + }; + + // Convert from the decode timescale into ms. + frame.duration_ts = (frame.duration_ts as f64 * 1000.0 / self.decoder_info.timescale as f64).round() as u64; + + if let Some(config) = self.frame_configs.get(idx).ok_or(JobError::Internal(""))? { + match config { + FrameConfig::Skip => { + return Ok(true); + } + FrameConfig::DurationMs(duration) => { + frame.duration_ts = *duration as u64; + } + } + } + + if let Some(factor) = self.frame_rate_factor { + let new_duration = (frame.duration_ts as f64 + self.duration_carried_ms) / factor; + let rounded_duration = new_duration.round(); + self.duration_carried_ms = new_duration - rounded_duration; + + if rounded_duration == 0.0 { + return Ok(true); + } + + frame.duration_ts = rounded_duration as u64; + } + + let variants = match variants { + Some(variants) => variants, + None => self.resizer.resize(frame)?, + }; + + self.anim_encoders.iter_mut().try_for_each(|(_, encoders)| { + encoders + .iter_mut() + .zip(variants.iter()) + .try_for_each(|((_, encoder), frame)| encoder.add_frame(frame.as_ref())) + })?; + + Ok(true) + } + + pub fn finish(self) -> Result { + let output = self + .static_encoders + .into_iter() + .chain(self.anim_encoders) + .flat_map(|(f_idx, encoders)| { + encoders.into_iter().map(move |(output, encoder)| { + let info = encoder.info(); + Ok(OutputImage { + format: info.format, + format_name: info.name.clone(), + format_idx: f_idx, + resize_idx: output.index, + scale: output.scale.map(|s| s as usize), + width: info.width, + height: info.height, + frame_count: info.frame_count, + duration_ms: info.duration, + data: encoder.finish()?, + }) + }) + }) + .collect::>()?; + + Ok(JobOutput { + input: InputFileMetadata { + width: self.decoder_info.width as u32, + height: self.decoder_info.height as u32, + frame_count: self.decoder_info.frame_count as u32, + }, + output, + }) + } +} diff --git a/image-processor/src/worker/process/decoder/ffmpeg.rs b/image-processor/src/worker/process/decoder/ffmpeg.rs new file mode 100644 index 000000000..458aace88 --- /dev/null +++ b/image-processor/src/worker/process/decoder/ffmpeg.rs @@ -0,0 +1,226 @@ +use std::borrow::Cow; + +use imgref::Img; +use rgb::RGBA8; +use scuffle_image_processor_proto::Task; + +use super::{Decoder, DecoderError, DecoderFrontend, DecoderInfo, LoopCount}; +use crate::worker::process::frame::{Frame, FrameRef}; + +pub struct FfmpegDecoder<'data> { + input: scuffle_ffmpeg::io::Input>>, + decoder: scuffle_ffmpeg::decoder::VideoDecoder, + scaler: scuffle_ffmpeg::scalar::Scalar, + info: DecoderInfo, + input_stream_index: i32, + average_frame_duration: u64, + previous_timestamp: Option, + send_packet: bool, + eof: bool, + done: bool, + frame: Frame, +} + +const fn cast_bytes_to_rgba(bytes: &[u8]) -> &[rgb::RGBA8] { + unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const _, bytes.len() / 4) } +} + +static FFMPEG_LOGGING_INITIALIZED: std::sync::Once = std::sync::Once::new(); + 
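+// Sanity sketch (illustration only) of the invariant `cast_bytes_to_rgba`
+// relies on: rgb::RGBA8 is a #[repr(C)] 4-byte struct, so every 4 input bytes
+// map onto exactly one pixel, and call sites only ever pass tightly packed
+// slices whose length is a multiple of 4:
+//
+//     let bytes = [0u8, 1, 2, 3, 4, 5, 6, 7];
+//     let px = cast_bytes_to_rgba(&bytes);
+//     assert_eq!(px.len(), 2);
+//     assert_eq!((px[1].r, px[1].g, px[1].b, px[1].a), (4, 5, 6, 7));
+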
+impl<'data> FfmpegDecoder<'data> { + #[tracing::instrument(skip(task, data), fields(name = "FfmpegDecoder::new"))] + pub fn new(task: &Task, data: Cow<'data, [u8]>) -> Result { + FFMPEG_LOGGING_INITIALIZED.call_once(|| { + scuffle_ffmpeg::log::log_callback_tracing(); + }); + + let input = scuffle_ffmpeg::io::Input::seekable(std::io::Cursor::new(data))?; + + let input_stream = input + .streams() + .best(scuffle_ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) + .ok_or(DecoderError::NoVideoStream)?; + + let input_stream_index = input_stream.index(); + + let input_stream_time_base = input_stream.time_base(); + let input_stream_duration = input_stream.duration().unwrap_or(0); + let input_stream_frames = input_stream.nb_frames().ok_or(DecoderError::NoFrameCount)?.max(1); + + if input_stream_time_base.den == 0 || input_stream_time_base.num == 0 { + return Err(DecoderError::InvalidTimeBase); + } + + let decoder = match scuffle_ffmpeg::decoder::Decoder::new(&input_stream)? { + scuffle_ffmpeg::decoder::Decoder::Video(decoder) => decoder, + _ => { + return Err(DecoderError::InvalidVideoDecoder); + } + }; + + if let Some(max_input_width) = task.limits.as_ref().and_then(|l| l.max_input_width) { + if decoder.width() > max_input_width as i32 { + return Err(DecoderError::TooWide(decoder.width())); + } + } + + if let Some(max_input_height) = task.limits.as_ref().and_then(|l| l.max_input_height) { + if decoder.height() > max_input_height as i32 { + return Err(DecoderError::TooHigh(decoder.height())); + } + } + + if let Some(max_input_frame_count) = task.limits.as_ref().and_then(|l| l.max_input_frame_count) { + if input_stream_frames > max_input_frame_count as i64 { + return Err(DecoderError::TooManyFrames(input_stream_frames)); + } + } + + if let Some(max_input_duration_ms) = task.limits.as_ref().and_then(|l| l.max_input_duration_ms) { + // actual duration + // = duration * (time_base.num / time_base.den) * 1000 + // = (duration * time_base.num * 1000) / time_base.den + let duration = + (input_stream_duration * input_stream_time_base.num as i64 * 1000) / input_stream_time_base.den as i64; + + if duration > max_input_duration_ms as i64 { + return Err(DecoderError::TooLong(duration)); + } + } + + let scaler = scuffle_ffmpeg::scalar::Scalar::new( + decoder.width(), + decoder.height(), + decoder.pixel_format(), + decoder.width(), + decoder.height(), + scuffle_ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, + )?; + + let info = DecoderInfo { + width: decoder.width() as usize, + height: decoder.height() as usize, + frame_count: input_stream_frames as usize, + // TODO: Support loop count from ffmpeg. 
+ loop_count: LoopCount::Infinite, + timescale: input_stream_time_base.den as u64, + }; + + let average_frame_duration = (input_stream_duration / input_stream_frames) as u64; + + let frame = Frame { + image: Img::new(vec![RGBA8::default(); info.width * info.height], info.width, info.height), + duration_ts: average_frame_duration, + }; + + Ok(Self { + info, + input, + scaler, + decoder, + input_stream_index, + done: false, + eof: false, + send_packet: true, + frame, + average_frame_duration, + previous_timestamp: Some(0), + }) + } +} + +impl Decoder for FfmpegDecoder<'_> { + fn backend(&self) -> DecoderFrontend { + DecoderFrontend::Ffmpeg + } + + #[tracing::instrument(skip(self), fields(name = "FfmpegDecoder::decode"))] + fn decode(&mut self) -> Result, DecoderError> { + if self.done { + return Ok(None); + } + + loop { + if self.send_packet && !self.eof { + let packet = self + .input + .packets() + .find_map(|packet| match packet { + Ok(packet) => { + if packet.stream_index() == self.input_stream_index { + Some(Ok(packet)) + } else { + None + } + } + Err(err) => { + self.done = true; + Some(Err(err)) + } + }) + .transpose()?; + + if let Some(packet) = packet { + self.decoder.send_packet(&packet).map_err(|err| { + self.done = true; + err + })?; + } else { + self.decoder.send_eof().map_err(|err| { + self.done = true; + err + })?; + self.eof = true; + } + + self.send_packet = false; + } + + let frame = self.decoder.receive_frame().map_err(|err| { + self.done = true; + err + })?; + + if let Some(frame) = frame { + let frame = self.scaler.process(&frame).map_err(|err| { + self.done = true; + err + })?; + + // The frame has padding, so we need to copy the data. + let frame_data = frame.data(0).unwrap(); + let frame_linesize = frame.linesize(0).unwrap(); + + if frame_linesize == frame.width() as i32 * 4 { + // No padding, so we can just copy the data. + self.frame.image.buf_mut().copy_from_slice(cast_bytes_to_rgba(frame_data)); + } else { + // The frame has padding, so we need to copy the data. 
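+					// `linesize` is the row stride in bytes and can exceed
+					// width * 4 when ffmpeg pads rows for alignment, so copy
+					// row by row, keeping only the first width * 4 bytes of
+					// each source row.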
+ for (i, row) in self.frame.image.buf_mut().chunks_exact_mut(frame.width()).enumerate() { + let row_data = &frame_data[i * frame_linesize as usize..][..frame.width() * 4]; + row.copy_from_slice(cast_bytes_to_rgba(row_data)); + } + } + + let timestamp = frame + .best_effort_timestamp() + .and_then(|ts| if ts > 0 { Some(ts as u64) } else { None }); + self.frame.duration_ts = timestamp + .map(|ts| ts - self.previous_timestamp.unwrap_or_default()) + .unwrap_or(self.average_frame_duration); + self.previous_timestamp = timestamp; + + return Ok(Some(self.frame.as_ref())); + } else if self.eof { + self.done = true; + return Ok(None); + } else { + self.send_packet = true; + } + } + } + + fn info(&self) -> DecoderInfo { + self.info + } +} diff --git a/image-processor/src/worker/process/decoder/libavif.rs b/image-processor/src/worker/process/decoder/libavif.rs new file mode 100644 index 000000000..d7d4614be --- /dev/null +++ b/image-processor/src/worker/process/decoder/libavif.rs @@ -0,0 +1,135 @@ +use std::borrow::Cow; +use std::ptr::NonNull; + +use scuffle_image_processor_proto::Task; + +use super::{Decoder, DecoderError, DecoderFrontend, DecoderInfo, LoopCount}; +use crate::worker::process::frame::FrameRef; +use crate::worker::process::libavif::{AvifError, AvifRgbImage}; +use crate::worker::process::smart_object::SmartPtr; + +#[derive(Debug)] +pub struct AvifDecoder<'data> { + decoder: SmartPtr, + info: DecoderInfo, + _data: Cow<'data, [u8]>, + img: AvifRgbImage, + total_duration: u64, + max_input_duration: Option, +} + +impl<'data> AvifDecoder<'data> { + #[tracing::instrument(skip(task, data), fields(name = "AvifDecoder::new"))] + pub fn new(task: &Task, data: Cow<'data, [u8]>) -> Result { + let mut decoder = SmartPtr::new( + NonNull::new(unsafe { libavif_sys::avifDecoderCreate() }).ok_or(AvifError::OutOfMemory)?, + |ptr| { + // Safety: The decoder is valid. + unsafe { + libavif_sys::avifDecoderDestroy(ptr.as_ptr()); + } + }, + ); + + if let (Some(max_input_width), Some(max_input_height)) = ( + task.limits.as_ref().and_then(|l| l.max_input_width), + task.limits.as_ref().and_then(|l| l.max_input_height), + ) { + decoder.as_mut().imageDimensionLimit = max_input_width * max_input_height; + } + + if let Some(max_input_frame_count) = task.limits.as_ref().and_then(|l| l.max_input_frame_count) { + decoder.as_mut().imageCountLimit = max_input_frame_count; + } + + // Safety: The decoder is valid. + let io = NonNull::new(unsafe { libavif_sys::avifIOCreateMemoryReader(data.as_ptr(), data.len()) }) + .ok_or(AvifError::OutOfMemory)?; + + // Set the io pointer. + decoder.as_mut().io = io.as_ptr(); + + // Parse the data. 
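+		// Note: the memory reader above borrows `data` without copying it,
+		// which is why the struct keeps `_data` alive; dropping the buffer
+		// early would leave the decoder's io reading freed memory.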
+ AvifError::from_code(unsafe { libavif_sys::avifDecoderParse(decoder.as_ptr()) })?; + + let image = AvifRgbImage::new(decoder.as_ref()); + + let info = DecoderInfo { + width: image.width as usize, + height: image.height as usize, + loop_count: if decoder.as_ref().repetitionCount <= 0 { + LoopCount::Infinite + } else { + LoopCount::Finite(decoder.as_ref().repetitionCount as usize) + }, + frame_count: decoder.as_ref().imageCount.max(1) as _, + timescale: decoder.as_ref().timescale, + }; + + if let Some(max_input_width) = task.limits.as_ref().and_then(|l| l.max_input_width) { + if info.width > max_input_width as usize { + return Err(DecoderError::TooWide(info.width as i32)); + } + } + + if let Some(max_input_height) = task.limits.as_ref().and_then(|l| l.max_input_height) { + if info.height > max_input_height as usize { + return Err(DecoderError::TooHigh(info.height as i32)); + } + } + + if let Some(max_input_frame_count) = task.limits.as_ref().and_then(|l| l.max_input_frame_count) { + if info.frame_count > max_input_frame_count as usize { + return Err(DecoderError::TooManyFrames(info.frame_count as i64)); + } + } + + Ok(Self { + _data: data, + img: AvifRgbImage::new(decoder.as_ref()), + decoder, + max_input_duration: task + .limits + .as_ref() + .and_then(|l| l.max_input_duration_ms) + .map(|max_input_duration_ms| max_input_duration_ms as u64 * info.timescale / 1000), + total_duration: 0, + info, + }) + } +} + +impl Decoder for AvifDecoder<'_> { + fn backend(&self) -> DecoderFrontend { + DecoderFrontend::LibAvif + } + + fn info(&self) -> DecoderInfo { + self.info + } + + #[tracing::instrument(skip(self), fields(name = "AvifDecoder::decode"))] + fn decode(&mut self) -> Result, DecoderError> { + if AvifError::from_code(unsafe { libavif_sys::avifDecoderNextImage(self.decoder.as_ptr()) }).is_err() { + return Ok(None); + } + + AvifError::from_code(unsafe { libavif_sys::avifImageYUVToRGB(self.decoder.as_ref().image, &mut *self.img) })?; + + let duration_ts = self.decoder.as_ref().imageTiming.durationInTimescales; + self.total_duration += duration_ts; + + if let Some(max_input_duration) = self.max_input_duration { + if self.total_duration > max_input_duration { + return Err(DecoderError::TooLong(self.total_duration as i64)); + } + } + + Ok(Some(FrameRef::new( + self.img.data(), + self.img.width as usize, + self.img.height as usize, + duration_ts, + ))) + } +} diff --git a/image-processor/src/worker/process/decoder/libwebp.rs b/image-processor/src/worker/process/decoder/libwebp.rs new file mode 100644 index 000000000..252983827 --- /dev/null +++ b/image-processor/src/worker/process/decoder/libwebp.rs @@ -0,0 +1,132 @@ +use std::borrow::Cow; +use std::ptr::NonNull; + +use scuffle_image_processor_proto::Task; + +use super::{Decoder, DecoderError, DecoderFrontend, DecoderInfo, LoopCount}; +use crate::worker::process::frame::FrameRef; +use crate::worker::process::libwebp::{zero_memory_default, WebPError}; +use crate::worker::process::smart_object::SmartPtr; + +pub struct WebpDecoder<'data> { + info: DecoderInfo, + decoder: SmartPtr, + _data: Cow<'data, [u8]>, + timestamp: i32, + total_duration: u64, + max_input_duration: Option, +} + +impl<'data> WebpDecoder<'data> { + #[tracing::instrument(skip(task, data), fields(name = "WebpDecoder::new"))] + pub fn new(task: &Task, data: Cow<'data, [u8]>) -> Result { + let decoder = SmartPtr::new( + NonNull::new(unsafe { + libwebp_sys::WebPAnimDecoderNew( + &libwebp_sys::WebPData { + bytes: data.as_ptr(), + size: data.len(), + }, + std::ptr::null(), + ) + }) + 
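+			// NULL here can mean either allocation failure or a payload that
+			// does not parse as WebP at all; both cases are surfaced through
+			// the OutOfMemory mapping below.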
.ok_or(WebPError::OutOfMemory)?, + |decoder| { + // Safety: The decoder is valid. + unsafe { + libwebp_sys::WebPAnimDecoderDelete(decoder.as_ptr()); + } + }, + ); + + let mut info = zero_memory_default::(); + + // Safety: both pointers are valid and the decoder is valid. + if unsafe { libwebp_sys::WebPAnimDecoderGetInfo(decoder.as_ptr(), &mut info) } == 0 { + return Err(DecoderError::LibWebp(WebPError::InvalidData)); + } + + if let Some(max_input_width) = task.limits.as_ref().and_then(|l| l.max_input_width) { + if info.canvas_width > max_input_width { + return Err(DecoderError::TooWide(info.canvas_width as i32)); + } + } + + if let Some(max_input_height) = task.limits.as_ref().and_then(|l| l.max_input_height) { + if info.canvas_height > max_input_height { + return Err(DecoderError::TooHigh(info.canvas_height as i32)); + } + } + + if let Some(max_input_frame_count) = task.limits.as_ref().and_then(|l| l.max_input_frame_count) { + if info.frame_count > max_input_frame_count { + return Err(DecoderError::TooManyFrames(info.frame_count as i64)); + } + } + + Ok(Self { + info: DecoderInfo { + width: info.canvas_width as _, + height: info.canvas_height as _, + loop_count: match info.loop_count { + 0 => LoopCount::Infinite, + _ => LoopCount::Finite(info.loop_count as _), + }, + frame_count: info.frame_count as _, + timescale: 1000, + }, + max_input_duration: task + .limits + .as_ref() + .and_then(|l| l.max_input_duration_ms) + .map(|dur| dur as u64), + decoder, + _data: data, + total_duration: 0, + timestamp: 0, + }) + } +} + +impl Decoder for WebpDecoder<'_> { + fn backend(&self) -> DecoderFrontend { + DecoderFrontend::LibWebp + } + + fn info(&self) -> DecoderInfo { + self.info + } + + #[tracing::instrument(skip(self), fields(name = "WebpDecoder::decode"))] + fn decode(&mut self) -> Result, DecoderError> { + let mut buf = std::ptr::null_mut(); + let previous_timestamp = self.timestamp; + + // Safety: The buffer is a valid pointer to a null ptr, timestamp is a valid + // pointer to i32, and the decoder is valid. + let result = unsafe { libwebp_sys::WebPAnimDecoderGetNext(self.decoder.as_ptr(), &mut buf, &mut self.timestamp) }; + + // If 0 is returned, the animation is over. 
+		if result == 0 {
+			return Ok(None);
+		}
+
+		let buf = NonNull::new(buf).ok_or(WebPError::OutOfMemory)?;
+
+		let buf =
+			unsafe { std::slice::from_raw_parts(buf.as_ptr() as *const rgb::RGBA8, self.info.width * self.info.height) };
+
+		let duration_ts = (self.timestamp - previous_timestamp).max(0) as u64;
+		self.total_duration += duration_ts;
+
+		if let Some(max_input_duration) = self.max_input_duration {
+			if self.total_duration > max_input_duration {
+				return Err(DecoderError::TooLong(self.total_duration as i64));
+			}
+		}
+
+		Ok(Some(FrameRef::new(buf, self.info.width, self.info.height, duration_ts)))
+	}
+}
diff --git a/platform/image_processor/src/processor/job/decoder/mod.rs b/image-processor/src/worker/process/decoder/mod.rs
similarity index 63%
rename from platform/image_processor/src/processor/job/decoder/mod.rs
rename to image-processor/src/worker/process/decoder/mod.rs
index e08d8edde..ee7f22afd 100644
--- a/platform/image_processor/src/processor/job/decoder/mod.rs
+++ b/image-processor/src/worker/process/decoder/mod.rs
@@ -1,24 +1,54 @@
 use std::borrow::Cow;
 
 use file_format::FileFormat;
+use scuffle_ffmpeg::error::FfmpegError;
+use scuffle_image_processor_proto::Task;
 
-use super::frame::Frame;
-use crate::database::Job;
-use crate::processor::error::{ProcessorError, Result};
+use super::frame::FrameRef;
+use super::libavif::AvifError;
+use super::libwebp::WebPError;
 
 mod ffmpeg;
 mod libavif;
 mod libwebp;
 
+#[derive(Debug, thiserror::Error)]
+pub enum DecoderError {
+	#[error("ffmpeg: {0}")]
+	Ffmpeg(#[from] FfmpegError),
+	#[error("libavif: {0}")]
+	LibAvif(#[from] AvifError),
+	#[error("libwebp: {0}")]
+	LibWebp(#[from] WebPError),
+	#[error("unsupported input format: {0}")]
+	UnsupportedInputFormat(FileFormat),
+	#[error("no video stream")]
+	NoVideoStream,
+	#[error("no frame count")]
+	NoFrameCount,
+	#[error("invalid time base")]
+	InvalidTimeBase,
+	#[error("invalid video decoder")]
+	InvalidVideoDecoder,
+	#[error("exceeded maximum input width: {0}")]
+	TooWide(i32),
+	#[error("exceeded maximum input height: {0}")]
+	TooHigh(i32),
+	#[error("exceeded maximum input frame count: {0}")]
+	TooManyFrames(i64),
+	#[error("exceeded maximum input duration: {0}")]
+	TooLong(i64),
+}
+
 #[derive(Debug, Clone, Copy)]
-pub enum DecoderBackend {
+pub enum DecoderFrontend {
 	Ffmpeg,
 	LibWebp,
 	LibAvif,
 }
 
-impl DecoderBackend {
-	pub const fn from_format(format: FileFormat) -> Result<Self> {
+impl DecoderFrontend {
+	pub const fn from_format(format: FileFormat) -> Result<Self, DecoderError> {
 		match format {
 			FileFormat::Webp => Ok(Self::LibWebp), // .webp
 			FileFormat::Av1ImageFileFormat // .avif
@@ -44,15 +74,15 @@ impl DecoderBackend {
 			| FileFormat::Webm // .webm
 			| FileFormat::BdavMpeg2TransportStream // .m2ts
 			| FileFormat::Mpeg2TransportStream => Ok(Self::Ffmpeg), // .ts
-			_ => Err(ProcessorError::UnsupportedInputFormat(format)),
+			_ => Err(DecoderError::UnsupportedInputFormat(format)),
 		}
 	}
 
-	pub fn build<'a>(&self, job: &Job, data: Cow<'a, [u8]>) -> Result<AnyDecoder<'a>> {
+	pub fn build<'a>(&self, task: &Task, data: Cow<'a, [u8]>) -> Result<AnyDecoder<'a>, DecoderError> {
 		match self {
-			Self::Ffmpeg => Ok(AnyDecoder::Ffmpeg(ffmpeg::FfmpegDecoder::new(job, data)?)),
-			Self::LibAvif => Ok(AnyDecoder::LibAvif(libavif::AvifDecoder::new(job, data)?)),
-			Self::LibWebp => Ok(AnyDecoder::LibWebp(libwebp::WebpDecoder::new(job, data)?)),
+			Self::Ffmpeg => Ok(AnyDecoder::Ffmpeg(ffmpeg::FfmpegDecoder::new(task, data)?)),
+			Self::LibAvif =>
Ok(AnyDecoder::LibAvif(libavif::AvifDecoder::new(task, data)?)), + Self::LibWebp => Ok(AnyDecoder::LibWebp(libwebp::WebpDecoder::new(task, data)?)), } } } @@ -64,9 +94,9 @@ pub enum AnyDecoder<'a> { } pub trait Decoder { - fn backend(&self) -> DecoderBackend; + fn backend(&self) -> DecoderFrontend; fn info(&self) -> DecoderInfo; - fn decode(&mut self) -> Result>; + fn decode(&mut self) -> Result, DecoderError>; } #[derive(Debug, Clone, Copy)] @@ -85,7 +115,7 @@ pub enum LoopCount { } impl Decoder for AnyDecoder<'_> { - fn backend(&self) -> DecoderBackend { + fn backend(&self) -> DecoderFrontend { match self { Self::Ffmpeg(decoder) => decoder.backend(), Self::LibAvif(decoder) => decoder.backend(), @@ -101,7 +131,7 @@ impl Decoder for AnyDecoder<'_> { } } - fn decode(&mut self) -> Result> { + fn decode(&mut self) -> Result, DecoderError> { match self { Self::Ffmpeg(decoder) => decoder.decode(), Self::LibAvif(decoder) => decoder.decode(), diff --git a/image-processor/src/worker/process/encoder/gifski.rs b/image-processor/src/worker/process/encoder/gifski.rs new file mode 100644 index 000000000..48de33f15 --- /dev/null +++ b/image-processor/src/worker/process/encoder/gifski.rs @@ -0,0 +1,87 @@ +use scuffle_image_processor_proto::OutputQuality; + +use super::{Encoder, EncoderBackend, EncoderError, EncoderInfo, EncoderSettings}; +use crate::worker::process::decoder::LoopCount; +use crate::worker::process::frame::FrameRef; + +pub struct GifskiEncoder { + collector: gifski::Collector, + writer: std::thread::JoinHandle, EncoderError>>, + info: EncoderInfo, +} + +impl GifskiEncoder { + #[tracing::instrument(skip(settings), fields(name = "GifskiEncoder::new"))] + pub fn new(settings: EncoderSettings) -> Result { + let (collector, writer) = gifski::new(gifski::Settings { + repeat: match settings.loop_count { + LoopCount::Infinite => gifski::Repeat::Infinite, + LoopCount::Finite(count) => gifski::Repeat::Finite(count as u16), + }, + quality: match settings.quality { + OutputQuality::Auto => 100, + OutputQuality::High => 100, + OutputQuality::Lossless => 100, + OutputQuality::Medium => 75, + OutputQuality::Low => 50, + }, + fast: match settings.quality { + OutputQuality::Auto => true, + OutputQuality::High => false, + OutputQuality::Lossless => false, + OutputQuality::Medium => true, + OutputQuality::Low => true, + }, + ..Default::default() + })?; + + Ok(Self { + collector, + writer: std::thread::spawn(move || { + let mut buffer = Vec::new(); + writer.write(&mut buffer, &mut gifski::progress::NoProgress {})?; + Ok(buffer) + }), + info: EncoderInfo { + name: settings.name, + duration: 0, + frame_count: 0, + format: settings.format, + frontend: EncoderBackend::Gifski, + height: 0, + loop_count: settings.loop_count, + timescale: settings.timescale, + width: 0, + }, + }) + } + + fn duration(&mut self, duration: u64) -> f64 { + self.info.duration += duration; + self.info.duration as f64 / self.info.timescale as f64 + } +} + +impl Encoder for GifskiEncoder { + fn info(&self) -> &EncoderInfo { + &self.info + } + + #[tracing::instrument(skip(self), fields(name = "GifskiEncoder::add_frame"))] + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError> { + let frame = frame.to_owned(); + self.info.height = frame.image.height(); + self.info.width = frame.image.width(); + let duration = self.duration(frame.duration_ts); + self.collector.add_frame_rgba(self.info.frame_count, frame.image, duration)?; + self.info.frame_count += 1; + Ok(()) + } + + #[tracing::instrument(skip(self), fields(name = 
"GifskiEncoder::finish"))] + fn finish(self) -> Result, EncoderError> { + drop(self.collector); + + self.writer.join().map_err(|_| EncoderError::Thread)? + } +} diff --git a/platform/image_processor/src/processor/job/encoder/libavif.rs b/image-processor/src/worker/process/encoder/libavif.rs similarity index 60% rename from platform/image_processor/src/processor/job/encoder/libavif.rs rename to image-processor/src/worker/process/encoder/libavif.rs index 8c7254a16..7086ff34f 100644 --- a/platform/image_processor/src/processor/job/encoder/libavif.rs +++ b/image-processor/src/worker/process/encoder/libavif.rs @@ -1,12 +1,12 @@ use std::ptr::NonNull; -use anyhow::Context; +use libavif_sys::{AVIF_QUALITY_LOSSLESS, AVIF_QUANTIZER_BEST_QUALITY, AVIF_SPEED_FASTEST, AVIF_SPEED_SLOWEST}; +use scuffle_image_processor_proto::OutputQuality; -use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; -use crate::processor::error::{ProcessorError, Result}; -use crate::processor::job::frame::Frame; -use crate::processor::job::libavif::AvifError; -use crate::processor::job::smart_object::{SmartObject, SmartPtr}; +use super::{Encoder, EncoderBackend, EncoderError, EncoderInfo, EncoderSettings}; +use crate::worker::process::frame::FrameRef; +use crate::worker::process::libavif::AvifError; +use crate::worker::process::smart_object::{SmartObject, SmartPtr}; pub struct AvifEncoder { encoder: SmartPtr, @@ -18,25 +18,41 @@ pub struct AvifEncoder { } impl AvifEncoder { - pub fn new(settings: EncoderSettings) -> Result { + #[tracing::instrument(skip(settings), fields(name = "AvifEncoder::new"))] + pub fn new(settings: EncoderSettings) -> Result { let mut encoder = SmartPtr::new( - NonNull::new(unsafe { libavif_sys::avifEncoderCreate() }) - .ok_or(AvifError::OutOfMemory) - .context("failed to create avif encoder") - .map_err(ProcessorError::AvifEncode)?, + NonNull::new(unsafe { libavif_sys::avifEncoderCreate() }).ok_or(AvifError::OutOfMemory)?, |ptr| unsafe { libavif_sys::avifEncoderDestroy(ptr.as_ptr()) }, ); encoder.as_mut().maxThreads = 1; encoder.as_mut().timescale = settings.timescale; encoder.as_mut().autoTiling = 1; - encoder.as_mut().speed = if settings.fast { 8 } else { 2 }; + encoder.as_mut().quality = match settings.quality { + OutputQuality::Auto => encoder.as_mut().quality, + OutputQuality::Lossless => AVIF_QUALITY_LOSSLESS as i32, + OutputQuality::High => 75, + OutputQuality::Medium => 50, + OutputQuality::Low => 25_i32, + }; + encoder.as_mut().qualityAlpha = encoder.as_mut().quality; + encoder.as_mut().minQuantizer = match settings.quality { + OutputQuality::Auto => encoder.as_mut().minQuantizer, + OutputQuality::Lossless => AVIF_QUANTIZER_BEST_QUALITY as i32, + OutputQuality::High => 5, + OutputQuality::Medium => 15, + OutputQuality::Low => 30, + }; + encoder.as_mut().speed = match settings.quality { + OutputQuality::Auto => 8, + OutputQuality::Lossless => AVIF_SPEED_SLOWEST as i32, + OutputQuality::High => 5, + OutputQuality::Medium => 8, + OutputQuality::Low => AVIF_SPEED_FASTEST as i32, + }; let mut image = SmartPtr::new( - NonNull::new(unsafe { libavif_sys::avifImageCreateEmpty() }) - .ok_or(AvifError::OutOfMemory) - .context("failed to create avif image") - .map_err(ProcessorError::AvifEncode)?, + NonNull::new(unsafe { libavif_sys::avifImageCreateEmpty() }).ok_or(AvifError::OutOfMemory)?, |ptr| unsafe { libavif_sys::avifImageDestroy(ptr.as_ptr()) }, ); @@ -56,9 +72,11 @@ impl AvifEncoder { first_duration: None, static_image: settings.static_image, info: EncoderInfo { + name: 
settings.name, duration: 0, frame_count: 0, - frontend: EncoderFrontend::LibAvif, + format: settings.format, + frontend: EncoderBackend::LibAvif, height: 0, loop_count: settings.loop_count, timescale: settings.timescale, @@ -67,26 +85,23 @@ impl AvifEncoder { }) } - fn flush_frame(&mut self, duration: u64, flags: u32) -> Result<()> { + fn flush_frame(&mut self, duration: u64, flags: u32) -> Result<(), EncoderError> { // Safety: The image is valid. AvifError::from_code(unsafe { libavif_sys::avifEncoderAddImage(self.encoder.as_mut(), self.image.as_mut(), duration, flags) - }) - .context("failed to add image to encoder") - .map_err(ProcessorError::AvifEncode)?; + })?; Ok(()) } } impl Encoder for AvifEncoder { - fn info(&self) -> EncoderInfo { - self.info + fn info(&self) -> &EncoderInfo { + &self.info } - fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); - + #[tracing::instrument(skip(self), fields(name = "AvifEncoder::add_frame"))] + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError> { if self.rgb.is_none() { self.image.as_mut().width = frame.image.width() as u32; self.image.as_mut().height = frame.image.height() as u32; @@ -104,7 +119,7 @@ impl Encoder for AvifEncoder { self.first_duration = Some(frame.duration_ts); } else if let Some(first_duration) = self.first_duration.take() { if self.static_image { - return Err(ProcessorError::AvifEncode(anyhow::anyhow!("static image already added"))); + return Err(EncoderError::MultipleFrames); } // Flush the first frame to the encoder. @@ -117,9 +132,7 @@ impl Encoder for AvifEncoder { rgb.pixels = frame.image.buf().as_ptr() as _; // Safety: The image and rgb are valid. - AvifError::from_code(unsafe { libavif_sys::avifImageRGBToYUV(self.image.as_mut(), rgb) }) - .context("failed to convert rgb to yuv") - .map_err(ProcessorError::AvifEncode)?; + AvifError::from_code(unsafe { libavif_sys::avifImageRGBToYUV(self.image.as_mut(), rgb) })?; // On the first frame we dont want to flush the image to the encoder yet, this // is because we don't know if there will be more frames. @@ -135,11 +148,10 @@ impl Encoder for AvifEncoder { Ok(()) } - fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); - + #[tracing::instrument(skip(self), fields(name = "AvifEncoder::finish"))] + fn finish(mut self) -> Result, EncoderError> { if self.rgb.is_none() { - return Err(ProcessorError::AvifEncode(anyhow::anyhow!("no frames added"))); + return Err(EncoderError::NoFrames); } if let Some(first_duration) = self.first_duration.take() { @@ -150,16 +162,11 @@ impl Encoder for AvifEncoder { libavif_sys::avifRWDataFree(ptr) }); - AvifError::from_code(unsafe { libavif_sys::avifEncoderFinish(self.encoder.as_mut(), &mut *output) }) - .context("failed to finish encoding") - .map_err(ProcessorError::AvifEncode)?; + AvifError::from_code(unsafe { libavif_sys::avifEncoderFinish(self.encoder.as_mut(), &mut *output) })?; let output = output.free(); - let mut data = NonNull::new(output.data) - .ok_or(AvifError::OutOfMemory) - .context("failed to get output data") - .map_err(ProcessorError::AvifEncode)?; + let mut data = NonNull::new(output.data).ok_or(AvifError::OutOfMemory)?; // Safety: The output is valid, and we own the data. 
let vec = unsafe { std::vec::Vec::from_raw_parts(data.as_mut(), output.size, output.size) }; diff --git a/platform/image_processor/src/processor/job/encoder/libwebp.rs b/image-processor/src/worker/process/encoder/libwebp.rs similarity index 73% rename from platform/image_processor/src/processor/job/encoder/libwebp.rs rename to image-processor/src/worker/process/encoder/libwebp.rs index 8ffce5a8d..d09c68b6a 100644 --- a/platform/image_processor/src/processor/job/encoder/libwebp.rs +++ b/image-processor/src/worker/process/encoder/libwebp.rs @@ -1,14 +1,13 @@ use std::ptr::NonNull; -use anyhow::Context; use libwebp_sys::WebPMuxAnimParams; +use scuffle_image_processor_proto::OutputQuality; -use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; -use crate::processor::error::{ProcessorError, Result}; -use crate::processor::job::decoder::LoopCount; -use crate::processor::job::frame::Frame; -use crate::processor::job::libwebp::{zero_memory_default, WebPError}; -use crate::processor::job::smart_object::{SmartObject, SmartPtr}; +use super::{Encoder, EncoderBackend, EncoderError, EncoderInfo, EncoderSettings}; +use crate::worker::process::decoder::LoopCount; +use crate::worker::process::frame::FrameRef; +use crate::worker::process::libwebp::{zero_memory_default, WebPError}; +use crate::worker::process::smart_object::{SmartObject, SmartPtr}; pub struct WebpEncoder { config: libwebp_sys::WebPConfig, @@ -20,26 +19,39 @@ pub struct WebpEncoder { static_image: bool, } -fn wrap_error(status: i32, err: &'static str, message: &'static str) -> Result<()> { +fn wrap_error(status: i32, err: &'static str) -> Result<(), WebPError> { if status == 0 { Err(WebPError::UnknownError(err)) - .context(message) - .map_err(ProcessorError::WebPEncode) } else { Ok(()) } } impl WebpEncoder { - pub fn new(settings: EncoderSettings) -> Result { + #[tracing::instrument(skip(settings), fields(name = "WebpEncoder::new"))] + pub fn new(settings: EncoderSettings) -> Result { let mut config = zero_memory_default::(); + config.lossless = if settings.quality == OutputQuality::Lossless { 1 } else { 0 }; + config.quality = match settings.quality { + OutputQuality::Auto => 90.0, + OutputQuality::High => 100.0, + OutputQuality::Lossless => 60.0, // 0-6 + OutputQuality::Medium => 75.0, + OutputQuality::Low => 50.0, + }; + config.method = match settings.quality { + OutputQuality::Auto => 4, + OutputQuality::High => 4, + OutputQuality::Lossless => 6, + OutputQuality::Medium => 3, + OutputQuality::Low => 2, + }; config.thread_level = 1; wrap_error( unsafe { libwebp_sys::WebPConfigInit(&mut config) }, "failed to initialize webp config", - "libwebp_sys::WebPConfigInit", )?; let mut picture = SmartObject::new(zero_memory_default::(), |ptr| unsafe { @@ -49,27 +61,28 @@ impl WebpEncoder { wrap_error( unsafe { libwebp_sys::WebPPictureInit(&mut *picture) }, "failed to initialize webp picture", - "libwebp_sys::WebPPictureInit", )?; picture.use_argb = 1; Ok(Self { config, - settings, - picture, - encoder: None, - first_duration: None, - static_image: settings.static_image, info: EncoderInfo { + name: settings.name.clone(), duration: 0, frame_count: 0, - frontend: EncoderFrontend::LibWebp, + format: settings.format, + frontend: EncoderBackend::LibWebp, height: 0, loop_count: settings.loop_count, timescale: settings.timescale, width: 0, }, + static_image: settings.static_image, + settings, + picture, + encoder: None, + first_duration: None, }) } @@ -77,9 +90,7 @@ impl WebpEncoder { self.info.duration * 1000 / self.settings.timescale } - fn 
flush_frame(&mut self, duration: u64) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); - + fn flush_frame(&mut self, duration: u64) -> Result<(), EncoderError> { // Safety: The picture is valid. wrap_error( unsafe { @@ -91,7 +102,6 @@ impl WebpEncoder { ) }, "failed to add webp frame", - "libwebp_sys::WebPAnimEncoderAdd", )?; self.info.duration += duration; @@ -101,20 +111,19 @@ impl WebpEncoder { } impl Encoder for WebpEncoder { - fn info(&self) -> EncoderInfo { - self.info + fn info(&self) -> &EncoderInfo { + &self.info } - fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); - + #[tracing::instrument(skip(self), fields(name = "WebpEncoder::add_frame"))] + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError> { if self.first_duration.is_none() && self.encoder.is_none() { self.picture.width = frame.image.width() as _; self.picture.height = frame.image.height() as _; self.first_duration = Some(frame.duration_ts); } else if let Some(first_duration) = self.first_duration.take() { if self.static_image { - return Err(ProcessorError::WebPEncode(anyhow::anyhow!("static image already added"))); + return Err(EncoderError::MultipleFrames); } let encoder = SmartPtr::new( @@ -127,8 +136,8 @@ impl Encoder for WebpEncoder { anim_params: WebPMuxAnimParams { bgcolor: 0, loop_count: match self.settings.loop_count { - LoopCount::Infinite => 0, LoopCount::Finite(count) => count as _, + LoopCount::Infinite => 0, }, }, kmax: 0, @@ -139,9 +148,7 @@ impl Encoder for WebpEncoder { }, ) }) - .ok_or(WebPError::OutOfMemory) - .context("failed to create webp encoder") - .map_err(ProcessorError::WebPEncode)?, + .ok_or(WebPError::OutOfMemory)?, |encoder| { // Safety: The encoder is valid. unsafe { @@ -163,7 +170,6 @@ impl Encoder for WebpEncoder { ) }, "failed to import webp frame", - "libwebp_sys::WebPPictureImportRGBA", )?; if self.encoder.is_some() { @@ -177,20 +183,18 @@ impl Encoder for WebpEncoder { Ok(()) } - fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); - + #[tracing::instrument(skip(self), fields(name = "WebpEncoder::finish"))] + fn finish(mut self) -> Result, EncoderError> { let timestamp = self.timestamp(); if self.encoder.is_none() && self.first_duration.is_none() { - Err(ProcessorError::WebPEncode(anyhow::anyhow!("no frames added"))) + Err(EncoderError::NoFrames) } else if let Some(mut encoder) = self.encoder { wrap_error( unsafe { libwebp_sys::WebPAnimEncoderAdd(encoder.as_mut(), std::ptr::null_mut(), timestamp as _, std::ptr::null()) }, "failed to add null webp frame", - "libwebp_sys::WebPAnimEncoderAdd", )?; let mut webp_data = SmartObject::new(zero_memory_default::(), |ptr| unsafe { @@ -203,15 +207,11 @@ impl Encoder for WebpEncoder { wrap_error( unsafe { libwebp_sys::WebPAnimEncoderAssemble(encoder.as_mut(), &mut *webp_data) }, "failed to assemble webp", - "libwebp_sys::WebPAnimEncoderAssemble", )?; let webp_data = webp_data.free(); - let mut data = NonNull::new(webp_data.bytes as _) - .ok_or(WebPError::OutOfMemory) - .context("failed to get output data") - .map_err(ProcessorError::WebPEncode)?; + let mut data = NonNull::new(webp_data.bytes as _).ok_or(WebPError::OutOfMemory)?; // Safety: The data is valid and we are taking ownership of it. 
let vec = unsafe { std::vec::Vec::from_raw_parts(data.as_mut(), webp_data.size, webp_data.size) }; @@ -236,15 +236,11 @@ impl Encoder for WebpEncoder { wrap_error( unsafe { libwebp_sys::WebPEncode(&self.config, &mut *self.picture) }, "failed to encode webp", - "libwebp_sys::WebPEncode", )?; let memory_writer = memory_writer.free(); - let mut data = NonNull::new(memory_writer.mem) - .ok_or(WebPError::OutOfMemory) - .context("failed to get output data") - .map_err(ProcessorError::WebPEncode)?; + let mut data = NonNull::new(memory_writer.mem).ok_or(WebPError::OutOfMemory)?; // Safety: The data is valid and we are taking ownership of it. let vec = unsafe { std::vec::Vec::from_raw_parts(data.as_mut(), memory_writer.size, memory_writer.max_size) }; diff --git a/platform/image_processor/src/processor/job/encoder/mod.rs b/image-processor/src/worker/process/encoder/mod.rs similarity index 58% rename from platform/image_processor/src/processor/job/encoder/mod.rs rename to image-processor/src/worker/process/encoder/mod.rs index 23a43987d..e4fff30e3 100644 --- a/platform/image_processor/src/processor/job/encoder/mod.rs +++ b/image-processor/src/worker/process/encoder/mod.rs @@ -1,6 +1,9 @@ +use scuffle_image_processor_proto::{OutputFormat, OutputQuality}; + use super::decoder::LoopCount; -use super::frame::Frame; -use crate::processor::error::Result; +use super::frame::FrameRef; +use super::libavif::AvifError; +use super::libwebp::WebPError; mod gifski; mod libavif; @@ -8,24 +11,28 @@ mod libwebp; mod png; #[derive(Debug, Clone, Copy)] -pub enum EncoderFrontend { +pub enum EncoderBackend { Gifski, Png, LibWebp, LibAvif, } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub struct EncoderSettings { - pub fast: bool, + pub name: Option, + pub format: OutputFormat, + pub quality: OutputQuality, pub loop_count: LoopCount, pub timescale: u64, pub static_image: bool, } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub struct EncoderInfo { - pub frontend: EncoderFrontend, + pub name: Option, + pub format: OutputFormat, + pub frontend: EncoderBackend, pub width: usize, pub height: usize, pub loop_count: LoopCount, @@ -34,8 +41,26 @@ pub struct EncoderInfo { pub frame_count: usize, } -impl EncoderFrontend { - pub fn build(&self, settings: EncoderSettings) -> Result { +#[derive(Debug, thiserror::Error)] +pub enum EncoderError { + #[error("gifski: {0}")] + Gifski(#[from] ::gifski::Error), + #[error("thread panicked")] + Thread, + #[error("avif: {0}")] + Avif(#[from] AvifError), + #[error("no frames added")] + NoFrames, + #[error("static image has multiple frames")] + MultipleFrames, + #[error("webp: {0}")] + Webp(#[from] WebPError), + #[error("png: {0}")] + Png(#[from] ::png::EncodingError), +} + +impl EncoderBackend { + pub fn build(&self, settings: EncoderSettings) -> Result { match self { Self::Png => Ok(AnyEncoder::Png(png::PngEncoder::new(settings)?)), Self::Gifski => Ok(AnyEncoder::Gifski(gifski::GifskiEncoder::new(settings)?)), @@ -53,13 +78,13 @@ pub enum AnyEncoder { } pub trait Encoder { - fn info(&self) -> EncoderInfo; - fn add_frame(&mut self, frame: &Frame) -> Result<()>; - fn finish(self) -> Result>; + fn info(&self) -> &EncoderInfo; + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError>; + fn finish(self) -> Result, EncoderError>; } impl Encoder for AnyEncoder { - fn info(&self) -> EncoderInfo { + fn info(&self) -> &EncoderInfo { match self { Self::Gifski(encoder) => encoder.info(), Self::Png(encoder) => encoder.info(), @@ -68,7 +93,7 @@ impl Encoder for 
AnyEncoder { } } - fn add_frame(&mut self, frame: &Frame) -> Result<()> { + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError> { match self { Self::Gifski(encoder) => encoder.add_frame(frame), Self::Png(encoder) => encoder.add_frame(frame), @@ -77,7 +102,7 @@ impl Encoder for AnyEncoder { } } - fn finish(self) -> Result> { + fn finish(self) -> Result, EncoderError> { match self { Self::Gifski(encoder) => encoder.finish(), Self::Png(encoder) => encoder.finish(), diff --git a/platform/image_processor/src/processor/job/encoder/png.rs b/image-processor/src/worker/process/encoder/png.rs similarity index 50% rename from platform/image_processor/src/processor/job/encoder/png.rs rename to image-processor/src/worker/process/encoder/png.rs index bbcad0da0..c4d5b1f67 100644 --- a/platform/image_processor/src/processor/job/encoder/png.rs +++ b/image-processor/src/worker/process/encoder/png.rs @@ -1,9 +1,7 @@ -use anyhow::Context; use rgb::ComponentBytes; -use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; -use crate::processor::error::{ProcessorError, Result}; -use crate::processor::job::frame::Frame; +use super::{Encoder, EncoderBackend, EncoderError, EncoderInfo, EncoderSettings}; +use crate::worker::process::frame::FrameRef; pub struct PngEncoder { result: Option>, @@ -11,13 +9,16 @@ pub struct PngEncoder { } impl PngEncoder { - pub fn new(settings: EncoderSettings) -> Result { + #[tracing::instrument(skip(settings), fields(name = "PngEncoder::new"))] + pub fn new(settings: EncoderSettings) -> Result { Ok(Self { result: None, info: EncoderInfo { + name: settings.name, duration: 0, frame_count: 0, - frontend: EncoderFrontend::Png, + format: settings.format, + frontend: EncoderBackend::Png, height: 0, loop_count: settings.loop_count, timescale: settings.timescale, @@ -28,15 +29,14 @@ impl PngEncoder { } impl Encoder for PngEncoder { - fn info(&self) -> EncoderInfo { - self.info + fn info(&self) -> &EncoderInfo { + &self.info } - fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); - + #[tracing::instrument(skip(self), fields(name = "PngEncoder::add_frame"))] + fn add_frame(&mut self, frame: FrameRef) -> Result<(), EncoderError> { if self.result.is_some() { - return Err(ProcessorError::PngEncode(anyhow::anyhow!("encoder already finished"))); + return Err(EncoderError::MultipleFrames); } self.info.height = frame.image.height(); @@ -54,21 +54,15 @@ impl Encoder for PngEncoder { encoder.set_color(png::ColorType::Rgba); encoder.set_depth(png::BitDepth::Eight); - encoder - .write_header() - .context("failed to write png header") - .map_err(ProcessorError::PngEncode)? 
- .write_image_data(frame.image.buf().as_bytes()) - .context("failed to write png data") - .map_err(ProcessorError::PngEncode)?; + encoder.write_header()?.write_image_data(frame.image.buf().as_bytes())?; self.result = Some(result); Ok(()) } - fn finish(self) -> Result> { - self.result - .ok_or_else(|| ProcessorError::PngEncode(anyhow::anyhow!("encoder not finished"))) + #[tracing::instrument(skip(self), fields(name = "PngEncoder::finish"))] + fn finish(self) -> Result, EncoderError> { + self.result.ok_or(EncoderError::NoFrames) } } diff --git a/image-processor/src/worker/process/frame.rs b/image-processor/src/worker/process/frame.rs new file mode 100644 index 000000000..b65275176 --- /dev/null +++ b/image-processor/src/worker/process/frame.rs @@ -0,0 +1,47 @@ +use imgref::{Img, ImgVec}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Frame { + pub image: ImgVec, + pub duration_ts: u64, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct FrameRef<'a> { + pub image: Img<&'a [rgb::RGBA8]>, + pub duration_ts: u64, +} + +impl FrameRef<'_> { + pub fn to_owned(&self) -> Frame { + Frame { + image: Img::new(self.image.buf().to_vec(), self.image.width(), self.image.height()), + duration_ts: self.duration_ts, + } + } +} + +impl Frame { + pub fn new(width: usize, height: usize) -> Self { + Self { + image: ImgVec::new(vec![rgb::RGBA8::default(); width * height], width, height), + duration_ts: 0, + } + } + + pub fn as_ref(&self) -> FrameRef<'_> { + FrameRef { + image: self.image.as_ref(), + duration_ts: self.duration_ts, + } + } +} + +impl<'a> FrameRef<'a> { + pub fn new(buf: &'a [rgb::RGBA8], width: usize, height: usize, duration_ts: u64) -> Self { + Self { + duration_ts, + image: Img::new(buf, width, height), + } + } +} diff --git a/image-processor/src/worker/process/input_download.rs b/image-processor/src/worker/process/input_download.rs new file mode 100644 index 000000000..8ba349a4c --- /dev/null +++ b/image-processor/src/worker/process/input_download.rs @@ -0,0 +1,61 @@ +use std::sync::Arc; + +use bson::oid::ObjectId; +use bytes::Bytes; +use scuffle_image_processor_proto::{input, Input}; + +use crate::drive::Drive; +use crate::global::Global; + +#[derive(Debug, thiserror::Error)] +pub enum InputDownloadError { + #[error("missing public http drive")] + MissingPublicHttpDrive, + #[error("missing drive")] + MissingDrive, + #[error("missing input")] + MissingInput, + #[error("drive error: {0}")] + DriveError(#[from] crate::drive::DriveError), + #[error("strfmt error: {0}")] + StrFmtError(#[from] strfmt::FmtError), +} + +fn get_path(input: Option<&Input>) -> Option<&str> { + match input?.path.as_ref()? { + input::Path::DrivePath(drive) => Some(&drive.path), + input::Path::PublicUrl(url) => Some(url), + } +} + +fn get_drive(input: Option<&Input>) -> Option<&str> { + match input?.path.as_ref()? { + input::Path::DrivePath(drive) => Some(&drive.drive), + input::Path::PublicUrl(_) => None, + } +} + +#[tracing::instrument(skip(global, input), fields(input_path = get_path(input), input_drive = get_drive(input)))] +pub async fn download_input(global: &Arc, id: ObjectId, input: Option<&Input>) -> Result { + match input + .ok_or(InputDownloadError::MissingInput)? + .path + .as_ref() + .ok_or(InputDownloadError::MissingInput)? + { + input::Path::DrivePath(drive) => { + let path = strfmt::strfmt(&drive.path, &([("id".to_owned(), id.to_string())].into_iter().collect()))?; + + Ok(global + .drive(&drive.drive) + .ok_or(InputDownloadError::MissingDrive)? + .read(&path) + .await?) 
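+			// `drive.path` went through validate_drive_path with ["id"] as the
+			// only allowed variable, so e.g. "uploads/{id}/source" resolves to
+			// the job's ObjectId here (illustrative path, not a fixed layout).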
+ } + input::Path::PublicUrl(url) => Ok(global + .public_http_drive() + .ok_or(InputDownloadError::MissingPublicHttpDrive)? + .read(url) + .await?), + } +} diff --git a/platform/image_processor/src/processor/job/libavif.rs b/image-processor/src/worker/process/libavif.rs similarity index 92% rename from platform/image_processor/src/processor/job/libavif.rs rename to image-processor/src/worker/process/libavif.rs index 63d67fcdd..5c42d81b1 100644 --- a/platform/image_processor/src/processor/job/libavif.rs +++ b/image-processor/src/worker/process/libavif.rs @@ -1,8 +1,7 @@ -use imgref::ImgVec; use rgb::ComponentBytes; #[derive(Debug)] -pub struct AvifRgbImage(libavif_sys::avifRGBImage, imgref::ImgVec); +pub struct AvifRgbImage(libavif_sys::avifRGBImage, Vec); impl AvifRgbImage { pub fn new(dec: &libavif_sys::avifDecoder) -> Self { @@ -17,19 +16,15 @@ impl AvifRgbImage { assert_eq!(channels, 4, "unexpected channel count"); - let mut data = imgref::ImgVec::new( - vec![rgb::RGBA::default(); img.width as usize * img.height as usize], - img.width as usize, - img.height as usize, - ); + let mut data = vec![rgb::RGBA::default(); img.width as usize * img.height as usize]; - img.pixels = data.as_mut().buf_mut().as_bytes_mut().as_mut_ptr() as *mut _; + img.pixels = data.as_bytes_mut().as_mut_ptr(); img.rowBytes = img.width * 4; Self(img, data) } - pub fn data(&self) -> &ImgVec { + pub fn data(&self) -> &Vec { &self.1 } } diff --git a/platform/image_processor/src/processor/job/libwebp.rs b/image-processor/src/worker/process/libwebp.rs similarity index 86% rename from platform/image_processor/src/processor/job/libwebp.rs rename to image-processor/src/worker/process/libwebp.rs index f573db476..769cf8dba 100644 --- a/platform/image_processor/src/processor/job/libwebp.rs +++ b/image-processor/src/worker/process/libwebp.rs @@ -4,6 +4,8 @@ pub enum WebPError { UnknownError(&'static str), #[error("out of memory")] OutOfMemory, + #[error("invalid data")] + InvalidData, } pub fn zero_memory_default() -> T { diff --git a/image-processor/src/worker/process/mod.rs b/image-processor/src/worker/process/mod.rs new file mode 100644 index 000000000..cc686bec6 --- /dev/null +++ b/image-processor/src/worker/process/mod.rs @@ -0,0 +1,308 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use bson::oid::ObjectId; +use scuffle_foundations::context::Context; +use scuffle_image_processor_proto::{event_callback, ErrorCode, OutputFile, OutputFormat}; + +use self::blocking::JobOutput; +pub use self::decoder::DecoderFrontend; +use self::resize::ResizeError; +use crate::database::Job; +use crate::drive::{Drive, DriveWriteOptions}; +use crate::global::Global; + +mod blocking; +mod decoder; +mod encoder; +mod frame; +mod input_download; +mod libavif; +mod libwebp; +mod resize; +mod smart_object; + +#[derive(Debug, thiserror::Error)] +pub enum JobError { + #[error("resize: {0}")] + Resize(#[from] ResizeError), + #[error("encoder: {0}")] + Encoder(#[from] encoder::EncoderError), + #[error("decoder: {0}")] + Decoder(#[from] decoder::DecoderError), + #[error("input download: {0}")] + InputDownload(#[from] input_download::InputDownloadError), + #[error("output upload: {0}")] + OutputUpload(#[from] crate::drive::DriveError), + #[error("mongodb: {0}")] + Mongo(#[from] mongodb::error::Error), + #[error("join error: {0}")] + Join(#[from] tokio::task::JoinError), + #[error("mismatched dimensions: {width}x{height} != {expected_width}x{expected_height}")] + MismatchedDimensions { + width: usize, + height: usize, + expected_width: usize, + 
expected_height: usize, + }, + #[error("mismatched frame count: {frame_count} != {expected_frame_count}")] + MismatchedFrameCount { + frame_count: usize, + expected_frame_count: usize, + }, + #[error("static frame index out of bounds: {idx} >= {frame_count}")] + StaticFrameIndexOutOfBounds { idx: usize, frame_count: usize }, + #[error("invalid job")] + InvalidJob, + #[error("impossible output format, {0:?}, image is not animated")] + ImpossibleOutput(OutputFormat), + #[error("no possible outputs")] + NoPossibleOutputs, + #[error("{0}")] + Internal(&'static str), +} + +impl From<JobError> for scuffle_image_processor_proto::Error { + fn from(value: JobError) -> Self { + let message = format!("{:#}", value); + + Self { + code: match value { + JobError::Resize(_) => ErrorCode::Resize as i32, + JobError::Encoder(_) => ErrorCode::Encode as i32, + JobError::Decoder(_) => ErrorCode::Decode as i32, + JobError::InputDownload(_) => ErrorCode::InputDownload as i32, + JobError::Mongo(_) => ErrorCode::Internal as i32, + JobError::Join(_) => ErrorCode::Internal as i32, + JobError::MismatchedDimensions { .. } => ErrorCode::InvalidInput as i32, + JobError::MismatchedFrameCount { .. } => ErrorCode::InvalidInput as i32, + JobError::StaticFrameIndexOutOfBounds { .. } => ErrorCode::InvalidInput as i32, + JobError::InvalidJob => ErrorCode::InvalidInput as i32, + JobError::ImpossibleOutput(_) => ErrorCode::InvalidInput as i32, + JobError::Internal(_) => ErrorCode::Internal as i32, + JobError::NoPossibleOutputs => ErrorCode::InvalidInput as i32, + JobError::OutputUpload(_) => ErrorCode::OutputUpload as i32, + }, + message, + } + } +} + +#[derive(Debug)] +pub struct ProcessJob { + job: Job, + _ctx: Context, + permit: Arc<tokio::sync::OwnedSemaphorePermit>, +} + +pub async fn spawn(job: Job, global: Arc<Global>, ctx: Context, permit: tokio::sync::OwnedSemaphorePermit) { + let job = ProcessJob::new(job, ctx, permit); + job.process(global).await; } + +impl ProcessJob { + pub fn new(job: Job, ctx: Context, permit: tokio::sync::OwnedSemaphorePermit) -> Self { + Self { + job, + _ctx: ctx, + permit: Arc::new(permit), + } + } + + #[tracing::instrument(skip(global, self), fields(job_id = %self.job.id), name = "ProcessJob::process")] + pub async fn process(&self, global: Arc<Global>) { + tracing::info!("starting job"); + + let start = tokio::time::Instant::now(); + + crate::events::on_start(&global, &self.job).await; + + let mut future = self.process_inner(&global); + let mut future = std::pin::pin!(future); + + let mut timeout_fut = self + .job + .task + .limits + .as_ref() + .and_then(|l| l.max_input_duration_ms) + .map(|timeout| Box::pin(tokio::time::sleep(std::time::Duration::from_millis(timeout as u64)))); + + let result = loop { + tokio::select!
{ + _ = tokio::time::sleep(global.config().worker.refresh_interval) => { + match self.job.refresh(&global).await { + Ok(true) => {}, + Ok(false) => { + tracing::warn!("lost job"); + return; + } + Err(err) => { + tracing::error!("failed to refresh job: {err}"); + return; + } + } + } + Some(_) = async { + if let Some(fut) = timeout_fut.as_mut() { + Some(fut.await) + } else { + None + } + } => { + tracing::warn!("timeout"); + break Err(JobError::Internal("timeout")); + } + result = &mut future => break result, + } + }; + + match result { + Ok(success) => { + tracing::info!("job completed in {:?}", start.elapsed()); + crate::events::on_success(&global, &self.job, success).await; + } + Err(err) => { + tracing::error!("failed to process job: {err}"); + crate::events::on_failure(&global, &self.job, err).await; + } + } + + if let Err(err) = self.job.complete(&global).await { + tracing::error!("failed to complete job: {err}"); + } + } + + async fn process_inner(&self, global: &Arc<Global>) -> Result<event_callback::Success, JobError> { + let input = input_download::download_input(global, self.job.id, self.job.task.input.as_ref()).await?; + let output = self.job.task.output.as_ref().ok_or(JobError::InvalidJob)?; + + let output_drive_path = output.drive_path.as_ref().ok_or(JobError::InvalidJob)?; + + let output_drive = global.drive(&output_drive_path.drive).ok_or(JobError::InvalidJob)?; + + let job = self.job.clone(); + + let JobOutput { + output: output_results, + input: input_metadata, + } = blocking::spawn(job.task.clone(), input, self.permit.clone()).await?; + + let is_animated = output_results.iter().any(|r| r.frame_count > 1); + + let mut files = Vec::new(); + + for output_result in output_results { + let vars = setup_vars( + self.job.id, + output_result.format_name.clone(), + output_result.format, + output_result.scale, + output_result.width, + output_result.height, + output_result.format_idx, + output_result.resize_idx, + is_animated, + ); + + let file_path = strfmt::strfmt(&output_drive_path.path, &vars).map_err(|err| { + tracing::error!("failed to format path: {err}"); + JobError::Internal("failed to format path") + })?; + + let size = output_result.data.len(); + + output_drive + .write( + &file_path, + output_result.data.into(), + Some(DriveWriteOptions { + content_type: Some(content_type(output_result.format).to_owned()), + acl: output.acl_override.clone(), + ..Default::default() + }), + ) + .await?; + + files.push(OutputFile { + path: file_path, + size: size as u32, + format: output_result.format as i32, + frame_count: output_result.frame_count as u32, + height: output_result.height as u32, + width: output_result.width as u32, + duration_ms: output_result.duration_ms as u32, + content_type: content_type(output_result.format).to_owned(), + acl: output + .acl_override + .as_deref() + .or(output_drive.default_acl()) + .map(|s| s.to_owned()), + }); + } + + Ok(event_callback::Success { + drive: output_drive_path.drive.clone(), + input_metadata: Some(input_metadata), + files, + }) + } +} + +fn setup_vars( + id: ObjectId, + format_name: Option<String>, + format: OutputFormat, + scale: Option<u32>, + width: usize, + height: usize, + format_idx: usize, + resize_idx: usize, + is_animated: bool, +) -> HashMap<String, String> { + let format_name = format_name.unwrap_or_else(|| match format { + OutputFormat::AvifAnim => "avif_anim".to_owned(), + OutputFormat::AvifStatic => "avif_static".to_owned(), + OutputFormat::WebpAnim => "webp_anim".to_owned(), + OutputFormat::WebpStatic => "webp_static".to_owned(), + OutputFormat::PngStatic => "png_static".to_owned(),
OutputFormat::GifAnim => "gif_anim".to_owned(), + }); + + let scale = scale.map(|scale| scale.to_string()).unwrap_or_else(|| "".to_owned()); + + let static_ = match format { + OutputFormat::AvifStatic | OutputFormat::PngStatic | OutputFormat::WebpStatic if is_animated => "_static", + _ => "", + }; + + let ext = match format { + OutputFormat::AvifAnim | OutputFormat::AvifStatic => "avif", + OutputFormat::PngStatic => "png", + OutputFormat::WebpAnim | OutputFormat::WebpStatic => "webp", + OutputFormat::GifAnim => "gif", + }; + + [ + ("id".to_owned(), id.to_string()), + ("format".to_owned(), format_name), + ("scale".to_owned(), scale), + ("width".to_owned(), width.to_string()), + ("height".to_owned(), height.to_string()), + ("format_idx".to_owned(), format_idx.to_string()), + ("resize_idx".to_owned(), resize_idx.to_string()), + ("static".to_owned(), static_.to_owned()), + ("ext".to_owned(), ext.to_owned()), + ] + .into_iter() + .collect::<HashMap<_, _>>() +} + +fn content_type(format: OutputFormat) -> &'static str { + match format { + OutputFormat::AvifAnim | OutputFormat::AvifStatic => "image/avif", + OutputFormat::WebpAnim | OutputFormat::WebpStatic => "image/webp", + OutputFormat::PngStatic => "image/png", + OutputFormat::GifAnim => "image/gif", + } +} diff --git a/image-processor/src/worker/process/resize.rs b/image-processor/src/worker/process/resize.rs new file mode 100644 index 000000000..b95e1682b --- /dev/null +++ b/image-processor/src/worker/process/resize.rs @@ -0,0 +1,442 @@ +use fast_image_resize::{self as fr, images::{CroppedImage, CroppedImageMut}, ResizeOptions}; + +use rgb::ComponentBytes; +use scuffle_image_processor_proto::{output, scaling, Output, ResizeAlgorithm, ResizeMethod}; + +use super::decoder::DecoderInfo; +use super::frame::{Frame, FrameRef}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct Dimensions { + pub width: usize, + pub height: usize, +} + +impl Dimensions { + fn new(width: usize, height: usize) -> Self { + Self { width, height } + } + + fn aspect_ratio(&self) -> f64 { + self.width as f64 / self.height as f64 + } + + fn convert_aspect_ratio(&self, aspect_ratio: f64) -> Self { + if aspect_ratio > self.aspect_ratio() { + Self::new(self.width, (self.width as f64 / aspect_ratio) as usize) + } else { + Self::new((self.height as f64 * aspect_ratio) as usize, self.height) + } + } +} + +enum ImageRef<'a> { + Ref((&'a fr::images::Image<'a>, CropBox)), + Owned((fr::images::Image<'a>, CropBox)), +} + +impl ImageRef<'_> { + fn crop(&self) -> CropBox { + match self { + ImageRef::Owned((_, c)) => *c, + ImageRef::Ref((_, c)) => *c, + } + } +} + +impl<'a> std::ops::Deref for ImageRef<'a> { + type Target = fr::images::Image<'a>; + + fn deref(&self) -> &Self::Target { + match self { + ImageRef::Owned(o) => &o.0, + ImageRef::Ref(r) => r.0, + } + } +} + +/// Resizes images to the given target size.
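+/// Targets are processed largest-first: unless resize chaining is disabled, each +/// smaller output is resized from the previously produced, larger output rather +/// than from the full-size input, falling back to the original input whenever +/// the previous step was an upscale.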
+pub struct ImageResizer { + resizer: fr::Resizer, + input_dims: Dimensions, + cropped_dims: Dimensions, + crop: Option<CropBox>, + resize_dims: Vec<Dimensions>, + outputs: Vec<ResizeOutputTarget>, + resize_method: ResizeMethod, + output_frames: Vec<Frame>, + disable_resize_chaining: bool, + method: fr::ResizeAlg, +} + +#[derive(Debug, Clone, Copy)] +pub struct ResizeOutputTarget { + pub dimensions: Dimensions, + pub index: usize, + pub scale: Option<u32>, +} + +#[derive(Debug, Clone, Copy)] +struct CropBox { + left: u32, + top: u32, + width: u32, + height: u32, +} + +impl CropBox { + pub fn new(left: u32, top: u32, width: u32, height: u32) -> Self { + Self { left, top, width, height } + } +} + +#[derive(thiserror::Error, Debug)] +pub enum ResizeError { + #[error("crop: {0}")] + Crop(#[from] fr::CropBoxError), + #[error("resize: {0}")] + Resize(#[from] fr::ResizeError), + #[error("buffer: {0}")] + Buffer(#[from] fr::ImageBufferError), + #[error("crop dimensions are larger than the input dimensions")] + CropDimensions, + #[error("aspect ratio is too small")] + AspectTooSmall, + #[error("aspect ratio is too large")] + AspectTooLarge, + #[error("invalid crop")] + InvalidCrop, + #[error("missing resize")] + MissingResize, + #[error("no valid resize targets")] + NoValidResizeTargets, + #[error("impossible resize[{0}] {1}x{2} is larger than the input size ({3}x{4})")] + ImpossibleResize(usize, usize, usize, usize, usize), + #[error("input frame has mismatched dimensions")] + MismatchedDimensions, + #[error("{0}")] + Internal(&'static str), +} + +impl ImageResizer { + pub fn new(info: &DecoderInfo, output: &Output) -> Result<Self, ResizeError> { + let input_dims = Dimensions::new(info.width, info.height); + + // If there is a crop, we should use that aspect ratio instead. + let cropped_dims = if let Some(crop) = output.crop.as_ref() { + if crop.width == 0 || crop.height == 0 { + return Err(ResizeError::InvalidCrop); + } + + if crop.width + crop.x > info.width as u32 || crop.height + crop.y > info.height as u32 { + return Err(ResizeError::CropDimensions); + } + + Dimensions::new(crop.width as usize, crop.height as usize) + } else { + input_dims + }; + + let resize_method = output.resize_method(); + let mut target_aspect_ratio = cropped_dims.aspect_ratio(); + + if output + .min_aspect_ratio + .is_some_and(|min_aspect_ratio| target_aspect_ratio < min_aspect_ratio) + { + // If the resize method is one of these, we can't make the aspect ratio larger. + // Because we are not allowed to pad the left or right. + if matches!( + resize_method, + ResizeMethod::Fit | ResizeMethod::PadTop | ResizeMethod::PadBottom + ) { + return Err(ResizeError::AspectTooSmall); + } + + target_aspect_ratio = output.min_aspect_ratio(); + } else if output + .max_aspect_ratio + .is_some_and(|max_aspect_ratio| target_aspect_ratio > max_aspect_ratio) + { + // If the resize method is one of these, we can't make the aspect ratio smaller. + // Because we are not allowed to pad the top or bottom. + if matches!( + resize_method, + ResizeMethod::Fit | ResizeMethod::PadLeft | ResizeMethod::PadRight + ) { + return Err(ResizeError::AspectTooLarge); + } + + target_aspect_ratio = output.max_aspect_ratio(); + } + + let mut output_targets: Vec<_> = match output.resize.as_ref().ok_or(ResizeError::MissingResize)?
{ + output::Resize::Widths(widths) => widths + .values + .iter() + .copied() + .enumerate() + .map(|(index, width)| ResizeOutputTarget { + dimensions: Dimensions::new(width as usize, (width as f64 / target_aspect_ratio) as usize), + index, + scale: None, + }) + .collect(), + output::Resize::Heights(heights) => heights + .values + .iter() + .copied() + .enumerate() + .map(|(index, height)| ResizeOutputTarget { + dimensions: Dimensions::new((height as f64 * target_aspect_ratio) as usize, height as usize), + index, + scale: None, + }) + .collect(), + output::Resize::Scaling(scaling) => { + let (base_width, base_height) = match scaling.base.clone().ok_or(ResizeError::MissingResize)? { + scaling::Base::FixedBase(scale) => { + let input = cropped_dims.convert_aspect_ratio(target_aspect_ratio); + + (input.width / scale as usize, input.height / scale as usize) + } + scaling::Base::BaseWidth(width) => (width as usize, (width as f64 / target_aspect_ratio) as usize), + scaling::Base::BaseHeight(height) => ((height as f64 * target_aspect_ratio) as usize, height as usize), + }; + + scaling + .scales + .iter() + .copied() + .enumerate() + .map(|(index, scale)| ResizeOutputTarget { + dimensions: Dimensions::new(base_width * scale as usize, base_height * scale as usize), + index, + scale: Some(scale), + }) + .collect() + } + }; + + if !output.upscale { + let input_after_transforms = cropped_dims.convert_aspect_ratio(target_aspect_ratio); + + if output.skip_impossible_resizes { + output_targets.retain(|target| target.dimensions <= input_after_transforms); + } else if let Some(target) = output_targets + .iter() + .find(|target| target.dimensions > input_after_transforms) + { + return Err(ResizeError::ImpossibleResize( + target.index, + target.dimensions.width, + target.dimensions.height, + input_after_transforms.width, + input_after_transforms.height, + )); + } + } + + // Build the output frames. + // This is going to be in the target aspect ratio. + // It therefore needs to be done before we convert the aspect ratio back. + let output_frames = output_targets + .iter() + .map(|target| Frame::new(target.dimensions.width, target.dimensions.height)) + .collect(); + + // Convert the aspect ratios back to the original aspect ratio. + // This is because padding is added AFTER we resize the image. + // Thus we need to resize the image to the target aspect ratio. + // However, if we are stretching the image, we don't need to do this, + // because we want to warp the image.
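+ // Worked example: with a 4:3 source, a 1:1 output target of 100x100, and a + // pad resize method, the resize target becomes 100x75 (the source aspect + // ratio); resize() then writes the 100x75 result into a padded crop window + // of the 100x100 output frame.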
+ let resize_targets: Vec<_> = + if target_aspect_ratio != cropped_dims.aspect_ratio() && output.resize_method() != ResizeMethod::Stretch { + output_targets + .iter() + .map(|target| target.dimensions.convert_aspect_ratio(cropped_dims.aspect_ratio())) + .collect() + } else { + output_targets.iter().map(|target| target.dimensions).collect() + }; + + if resize_targets.is_empty() { + return Err(ResizeError::NoValidResizeTargets); + } + + Ok(Self { + resizer: fr::Resizer::new(), + input_dims, + cropped_dims, + method: match output.resize_algorithm() { + ResizeAlgorithm::Nearest => fr::ResizeAlg::Nearest, + ResizeAlgorithm::Box => fr::ResizeAlg::Convolution(fr::FilterType::Box), + ResizeAlgorithm::Bilinear => fr::ResizeAlg::Convolution(fr::FilterType::Bilinear), + ResizeAlgorithm::Hamming => fr::ResizeAlg::Convolution(fr::FilterType::Hamming), + ResizeAlgorithm::CatmullRom => fr::ResizeAlg::Convolution(fr::FilterType::CatmullRom), + ResizeAlgorithm::Mitchell => fr::ResizeAlg::Convolution(fr::FilterType::Mitchell), + ResizeAlgorithm::Lanczos3 => fr::ResizeAlg::Convolution(fr::FilterType::Lanczos3), + }, + crop: output.crop.as_ref().map(|crop| CropBox { + left: crop.x, + top: crop.y, + width: crop.width, + height: crop.height, + }), + resize_method: output.resize_method(), + resize_dims: resize_targets, + outputs: output_targets, + output_frames, + disable_resize_chaining: output.disable_resize_chaining, + }) + } + + pub fn outputs(&self) -> &[ResizeOutputTarget] { + &self.outputs + } + + /// Resize the given frame to the target sizes, returning a reference to the + /// resized frames. After this function returns, the original frame can be + /// dropped; the returned frames are valid for the lifetime of the resizer. + pub fn resize(&mut self, frame: FrameRef) -> Result<&[Frame], ResizeError> { + if frame.image.width() != self.input_dims.width || frame.image.height() != self.input_dims.height { + return Err(ResizeError::MismatchedDimensions); + } + + let input_image = fr::images::Image::from_slice_u8( + frame.image.width() as u32, + frame.image.height() as u32, + // Safety: The input_image type is non_mut which disallows mutable actions on the underlying buffer. + unsafe { + let buf = frame.image.buf().as_bytes(); + std::slice::from_raw_parts_mut(buf.as_ptr() as *mut u8, buf.len()) + }, + fr::PixelType::U8x4, + )?; + + let resize_dims = self.resize_dims.iter().rev().copied(); + let output_dims = self.outputs.iter().rev().map(|output| output.dimensions); + let output_frames = self.output_frames.iter_mut().rev(); + + let source_crop = self.crop.unwrap_or(CropBox { + left: 0, + top: 0, + width: input_image.width(), + height: input_image.height(), + }); + + let resize_options = ResizeOptions::new().resize_alg(self.method); + + let mut previous_image = ImageRef::Ref((&input_image, source_crop)); + + for ((resize_dims, output_dims), output_frame) in resize_dims.zip(output_dims).zip(output_frames) { + output_frame.duration_ts = frame.duration_ts; + + let mut target_image = fr::images::Image::from_slice_u8( + output_dims.width as u32, + output_dims.height as u32, + output_frame.image.buf_mut().as_mut_slice().as_bytes_mut(), + fr::PixelType::U8x4, + )?; + + let source_crop = previous_image.crop(); + let source_view = CroppedImage::new(&*previous_image, source_crop.left, source_crop.top, source_crop.width, source_crop.height)?; + + let target_crop = if resize_dims != output_dims { + resize_method_to_crop_dims(self.resize_method, output_dims, resize_dims)?
+ } else { + CropBox { + left: 0, + top: 0, + width: resize_dims.width as u32, + height: resize_dims.height as u32, + } + }; + let mut target_view = CroppedImageMut::new(&mut target_image, target_crop.left, target_crop.top, target_crop.width, target_crop.height)?; + + self.resizer.resize(&source_view, &mut target_view, Some(&resize_options))?; + + // If we are upscaling then we don't want to downscale from an upscaled image. + // Or if the user has explicitly disabled the resize chain. + if self.disable_resize_chaining || self.cropped_dims < resize_dims { + previous_image = ImageRef::Ref((&input_image, source_crop)); + } else { + previous_image = ImageRef::Owned((target_image, target_crop)); + } + } + + Ok(&self.output_frames) + } +} + +fn resize_method_to_crop_dims( + resize_method: ResizeMethod, + padded_dims: Dimensions, + target_dims: Dimensions, +) -> Result<CropBox, ResizeError> { + let check = |cmp: bool, msg: &'static str| if cmp { Ok(()) } else { Err(ResizeError::Internal(msg)) }; + + check(padded_dims.width >= target_dims.width, "padded width less than target width")?; + check( + padded_dims.height >= target_dims.height, + "padded height less than target height", + )?; + + let delta_x = (padded_dims.width - target_dims.width) as u32; + let delta_y = (padded_dims.height - target_dims.height) as u32; + let center_x = delta_x / 2; + let center_y = delta_y / 2; + + let width = target_dims.width as u32; + let height = target_dims.height as u32; + + if width == 0 || height == 0 { + return Err(ResizeError::Internal("width or height is zero")); + } + + let left = match resize_method { + ResizeMethod::PadLeft | ResizeMethod::PadTopLeft | ResizeMethod::PadBottomLeft => 0, + ResizeMethod::PadRight | ResizeMethod::PadTopRight | ResizeMethod::PadBottomRight => delta_x, + ResizeMethod::PadCenter | ResizeMethod::PadCenterLeft | ResizeMethod::PadCenterRight => center_x, + _ => 0, + }; + + let top = match resize_method { + ResizeMethod::PadTop | ResizeMethod::PadTopLeft | ResizeMethod::PadTopRight => 0, + ResizeMethod::PadBottom | ResizeMethod::PadBottomLeft | ResizeMethod::PadBottomRight => delta_y, + ResizeMethod::PadCenter | ResizeMethod::PadTopCenter | ResizeMethod::PadBottomCenter => center_y, + _ => 0, + }; + + match resize_method { + ResizeMethod::Fit => Err(ResizeError::Internal("fit should never be called here")), + ResizeMethod::Stretch => Err(ResizeError::Internal("stretch should never be called here")), + ResizeMethod::PadLeft => { + check( + target_dims.width != padded_dims.width, + "pad left should only be called for width padding", + ) + } + ResizeMethod::PadRight => { + check( + target_dims.width != padded_dims.width, + "pad right should only be called for width padding", + ) + } + ResizeMethod::PadBottom => { + check( + target_dims.height != padded_dims.height, + "pad bottom should only be called for height padding", + ) + } + ResizeMethod::PadTop => { + check( + target_dims.height != padded_dims.height, + "pad top should only be called for height padding", + ) + } + _ => Ok(()), + }?; + + Ok(CropBox::new(left, top, width, height)) +} diff --git a/platform/image_processor/src/processor/job/smart_object.rs b/image-processor/src/worker/process/smart_object.rs similarity index 68% rename from platform/image_processor/src/processor/job/smart_object.rs rename to image-processor/src/worker/process/smart_object.rs index aa59c1b55..50892194a 100644 --- a/platform/image_processor/src/processor/job/smart_object.rs +++ b/image-processor/src/worker/process/smart_object.rs @@ -4,24 +4,29 @@ pub type SmartPtr<T> =
SmartObject<std::ptr::NonNull<T>>; #[derive(Debug)] pub struct SmartObject<T> { - value: T, + value: Option<T>, destructor: fn(&mut T), } impl<T> SmartObject<T> { pub fn new(value: T, destructor: fn(&mut T)) -> Self { - Self { value, destructor } + Self { + value: Some(value), + destructor, + } } pub fn free(mut self) -> T { self.destructor = |_| {}; - std::mem::replace(&mut self.value, unsafe { std::mem::zeroed() }) + self.value.take().unwrap() } } impl<T> Drop for SmartObject<T> { fn drop(&mut self) { - (self.destructor)(&mut self.value); + if let Some(mut value) = self.value.take() { + (self.destructor)(&mut value); + } } } @@ -29,24 +34,24 @@ impl<T> std::ops::Deref for SmartObject<T> { type Target = T; fn deref(&self) -> &Self::Target { - &self.value + self.value.as_ref().unwrap() } } impl<T> std::ops::DerefMut for SmartObject<T> { fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.value + self.value.as_mut().unwrap() } } impl<T> AsRef<T> for SmartPtr<T> { fn as_ref(&self) -> &T { - unsafe { self.value.as_ref() } + unsafe { self.value.unwrap().as_ref() } } } impl<T> AsMut<T> for SmartPtr<T> { fn as_mut(&mut self) -> &mut T { - unsafe { self.value.as_mut() } + unsafe { self.value.unwrap().as_mut() } } } diff --git a/platform/api/Cargo.toml b/platform/api/Cargo.toml index 8b5449dd4..014d9ae06 100644 --- a/platform/api/Cargo.toml +++ b/platform/api/Cargo.toml @@ -11,13 +11,13 @@ tracing = "0.1" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0", features = ["derive"] } hyper = { version = "1.1", features = ["full"] } -utils = { workspace = true, features = ["all"] } -rustls = "0.22" +scuffle-utils = { workspace = true, features = ["all"] } +rustls = "0.23" rustls-pemfile = "2.0" -tokio-rustls = "0.25" +tokio-rustls = "0.26" path-tree = "0.7" serde_json = "1.0" -reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false} +reqwest = { version = "0.12", features = ["json", "rustls-tls"], default-features = false} chrono = { version = "0.4", default-features = false, features = ["serde", "clock"] } async-graphql = { version = "7.0.1", features = ["apollo_tracing", "apollo_persisted_queries", "tracing", "string_number"] } hyper-tungstenite = "0" @@ -36,7 +36,7 @@ argon2 = "0.5" ulid = { version = "1.1", features = ["uuid"] } rand = "0.8" tokio-stream = { version = "0.1", features = ["sync"] } -async-nats = "0.33" +async-nats = "0.34" async-trait = "0.1" bytes = "1.5" totp-rs = { version = "5.4", features = ["qr"] } @@ -49,7 +49,7 @@ http-body = "1.0" http-body-util = "0.1" hyper-util = "0.1" pin-project = "1.1" -base64 = "0.21" +base64 = "0.22" postgres-from-row = "0.5" postgres-types = "0.2" fred = { version = "8.0", features = ["enable-rustls", "sentinel-client", "dns"] } diff --git a/platform/api/src/api/auth.rs b/platform/api/src/api/auth.rs index f944055b1..7801b8311 100644 --- a/platform/api/src/api/auth.rs +++ b/platform/api/src/api/auth.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use std::sync::Arc; use hyper::StatusCode; +use scuffle_utils::http::RouteError; use ulid::Ulid; -use utils::http::RouteError; use super::error::ApiError; use crate::database::{Role, RolePermission, Session, User}; diff --git a/platform/api/src/api/error.rs b/platform/api/src/api/error.rs index c485f333c..a4fda5d18 100644 --- a/platform/api/src/api/error.rs +++ b/platform/api/src/api/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use super::auth::AuthError; use crate::turnstile::TurnstileError; @@ -18,11 +18,11 @@ pub enum ApiError { #[error("failed to query
turnstile: {0}")] Turnstile(#[from] TurnstileError), #[error("failed to query database: {0}")] - Database(#[from] utils::database::deadpool_postgres::PoolError), + Database(#[from] scuffle_utils::database::deadpool_postgres::PoolError), } impl From for ApiError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(value.into()) } } diff --git a/platform/api/src/api/middleware/auth.rs b/platform/api/src/api/middleware/auth.rs index 30fd791fa..272a86738 100644 --- a/platform/api/src/api/middleware/auth.rs +++ b/platform/api/src/api/middleware/auth.rs @@ -3,10 +3,10 @@ use std::sync::Arc; use binary_helper::global::RequestGlobalExt; use hyper::body::Incoming; use hyper::http::header; -use utils::http::ext::*; -use utils::http::router::ext::RequestExt; -use utils::http::router::middleware::{middleware_fn, Middleware}; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::middleware::{middleware_fn, Middleware}; +use scuffle_utils::http::RouteError; use crate::api::auth::{AuthData, AuthError}; use crate::api::error::ApiError; diff --git a/platform/api/src/api/mod.rs b/platform/api/src/api/mod.rs index 9ac043564..3329b06d6 100644 --- a/platform/api/src/api/mod.rs +++ b/platform/api/src/api/mod.rs @@ -9,14 +9,14 @@ use hyper::body::Incoming; use hyper::server::conn::http1; use hyper::service::service_fn; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use serde_json::json; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use self::error::ApiError; use crate::config::ApiConfig; diff --git a/platform/api/src/api/v1/gql/error.rs b/platform/api/src/api/v1/gql/error.rs index 5ea50f172..d43528315 100644 --- a/platform/api/src/api/v1/gql/error.rs +++ b/platform/api/src/api/v1/gql/error.rs @@ -74,7 +74,7 @@ pub enum GqlError { } impl From for GqlError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(Arc::new(value.into())) } } diff --git a/platform/api/src/api/v1/gql/handlers.rs b/platform/api/src/api/v1/gql/handlers.rs index b463e19b5..caa38da9f 100644 --- a/platform/api/src/api/v1/gql/handlers.rs +++ b/platform/api/src/api/v1/gql/handlers.rs @@ -14,11 +14,11 @@ use hyper_tungstenite::tungstenite::protocol::frame::coding::CloseCode; use hyper_tungstenite::tungstenite::protocol::CloseFrame; use hyper_tungstenite::tungstenite::Message; use hyper_tungstenite::HyperWebsocket; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::compat::BodyExt as _; +use scuffle_utils::http::router::ext::RequestExt; use serde_json::json; -use utils::context::ContextExt; -use utils::http::ext::*; -use utils::http::router::compat::BodyExt as _; -use utils::http::router::ext::RequestExt; use super::error::GqlError; use super::ext::RequestExt as _; diff --git
a/platform/api/src/api/v1/gql/mod.rs b/platform/api/src/api/v1/gql/mod.rs index cfe0d5945..68f3f33f4 100644 --- a/platform/api/src/api/v1/gql/mod.rs +++ b/platform/api/src/api/v1/gql/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use async_graphql::{extensions, Schema}; use hyper::body::Incoming; use hyper::Response; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use crate::api::error::ApiError; use crate::api::Body; diff --git a/platform/api/src/api/v1/gql/models/channel.rs b/platform/api/src/api/v1/gql/models/channel.rs index b39f70a59..158b53e7d 100644 --- a/platform/api/src/api/v1/gql/models/channel.rs +++ b/platform/api/src/api/v1/gql/models/channel.rs @@ -57,7 +57,7 @@ impl Channel { async fn followers_count(&self, ctx: &Context<'_>) -> Result { let global = ctx.get_global::(); - let followers = utils::database::query( + let followers = scuffle_utils::database::query( r#" SELECT COUNT(*) @@ -125,7 +125,7 @@ impl ChannelLive { .await .map_err_gql("failed to fetch playback session count")?; - utils::database::query( + scuffle_utils::database::query( "UPDATE users SET channel_live_viewer_count = $1, channel_live_viewer_count_updated_at = NOW() WHERE id = $2", ) .bind(live_viewer_count) diff --git a/platform/api/src/api/v1/gql/models/image_upload.rs b/platform/api/src/api/v1/gql/models/image_upload.rs index 0d0c61cf4..d67d73c49 100644 --- a/platform/api/src/api/v1/gql/models/image_upload.rs +++ b/platform/api/src/api/v1/gql/models/image_upload.rs @@ -23,7 +23,6 @@ pub struct ImageUpload { pub struct ImageUploadVariant { pub width: u32, pub height: u32, - pub scale: u32, pub url: String, pub format: ImageUploadFormat, pub byte_size: u32, @@ -76,7 +75,6 @@ impl From for Ima Self { width: value.width, height: value.height, - scale: value.scale, format: value.format().into(), byte_size: value.byte_size, url: value.path, diff --git a/platform/api/src/api/v1/gql/mutations/auth.rs b/platform/api/src/api/v1/gql/mutations/auth.rs index aad6c8bf0..5ad6d0cc4 100644 --- a/platform/api/src/api/v1/gql/mutations/auth.rs +++ b/platform/api/src/api/v1/gql/mutations/auth.rs @@ -90,7 +90,7 @@ impl AuthMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -149,7 +149,7 @@ impl AuthMutation { let request_context = ctx.get_req_context(); // TODO: Make this a dataloader - let request: database::TwoFaRequest = utils::database::query( + let request: database::TwoFaRequest = scuffle_utils::database::query( r#" SELECT * @@ -180,7 +180,7 @@ impl AuthMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM two_fa_requests @@ -242,7 +242,7 @@ impl AuthMutation { })?; // TODO: maybe look to batch this - let session: database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" UPDATE user_sessions @@ -355,7 +355,7 @@ impl AuthMutation { let tx = client.transaction().await?; // TODO: maybe look to batch this - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" INSERT INTO users ( id, @@ -394,7 +394,7 @@ impl AuthMutation { let expires_at = Utc::now() + Duration::seconds(login_duration as i64); // TODO: maybe look to batch this - let session: 
database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -476,7 +476,7 @@ impl AuthMutation { }; // TODO: maybe look to batch this - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/api/v1/gql/mutations/channel.rs b/platform/api/src/api/v1/gql/mutations/channel.rs index 4224d8ead..52879bf6c 100644 --- a/platform/api/src/api/v1/gql/mutations/channel.rs +++ b/platform/api/src/api/v1/gql/mutations/channel.rs @@ -29,7 +29,7 @@ impl ChannelMutation { .await? .map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/mutations/chat.rs b/platform/api/src/api/v1/gql/mutations/chat.rs index ac1c15076..df49f2607 100644 --- a/platform/api/src/api/v1/gql/mutations/chat.rs +++ b/platform/api/src/api/v1/gql/mutations/chat.rs @@ -41,7 +41,7 @@ impl ChatMutation { // TODO: Check if the user is allowed to send messages in this chat let message_id = Ulid::new(); - let chat_message: database::ChatMessage = utils::database::query( + let chat_message: database::ChatMessage = scuffle_utils::database::query( r#" INSERT INTO chat_messages ( id, diff --git a/platform/api/src/api/v1/gql/mutations/user.rs b/platform/api/src/api/v1/gql/mutations/user.rs index e4be6e233..d4aa2880a 100644 --- a/platform/api/src/api/v1/gql/mutations/user.rs +++ b/platform/api/src/api/v1/gql/mutations/user.rs @@ -50,7 +50,7 @@ impl UserMutation { .await? .map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -102,7 +102,7 @@ impl UserMutation { .into()); } - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -152,7 +152,7 @@ impl UserMutation { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -196,7 +196,7 @@ impl UserMutation { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( "UPDATE users SET profile_picture_id = NULL, pending_profile_picture_id = NULL WHERE id = $1 RETURNING *", ) .bind(auth.session.user_id) @@ -257,7 +257,7 @@ impl UserMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -311,7 +311,7 @@ impl UserMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" UPSERT INTO channel_user ( user_id, diff --git a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs index 5d629f41c..14ca69465 100644 --- a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs +++ b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs @@ -67,7 +67,7 @@ impl TwoFaMutation { let hex_backup_codes = backup_codes.iter().map(|c| format!("{:08x}", c)).collect(); // Save secret and backup codes to database. 
- utils::database::query( + scuffle_utils::database::query( r#" UPDATE users @@ -130,7 +130,7 @@ impl TwoFaMutation { } // Enable 2fa - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users @@ -179,7 +179,7 @@ impl TwoFaMutation { } // Disable 2fa, remove secret and backup codes. - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/queries/category.rs b/platform/api/src/api/v1/gql/queries/category.rs index 50860906a..a1cc681cd 100644 --- a/platform/api/src/api/v1/gql/queries/category.rs +++ b/platform/api/src/api/v1/gql/queries/category.rs @@ -61,7 +61,7 @@ impl CategoryQuery { ) -> Result { let global = ctx.get_global::(); - let categories: Vec> = utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let categories: Vec> = scuffle_utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) diff --git a/platform/api/src/api/v1/gql/queries/mod.rs b/platform/api/src/api/v1/gql/queries/mod.rs index 7bf34cbe3..2699dd58b 100644 --- a/platform/api/src/api/v1/gql/queries/mod.rs +++ b/platform/api/src/api/v1/gql/queries/mod.rs @@ -49,7 +49,7 @@ impl Query { ) -> Result> { let global = ctx.get_global::(); - let query_results: Vec = utils::database::query( + let query_results: Vec = scuffle_utils::database::query( r#" WITH CombinedResults AS ( SELECT diff --git a/platform/api/src/api/v1/gql/queries/user.rs b/platform/api/src/api/v1/gql/queries/user.rs index e2cefa958..758d51145 100644 --- a/platform/api/src/api/v1/gql/queries/user.rs +++ b/platform/api/src/api/v1/gql/queries/user.rs @@ -98,7 +98,7 @@ impl UserQuery { ) -> Result> { let global = ctx.get_global::(); - let users: Vec> = utils::database::query("SELECT users.*, similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let users: Vec> = scuffle_utils::database::query("SELECT users.*, similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) @@ -120,7 +120,7 @@ impl UserQuery { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following @@ -161,7 +161,7 @@ impl UserQuery { } // This query is not very good, we should have some paging mechanism with ids.
- let channels: Vec = utils::database::query( + let channels: Vec = scuffle_utils::database::query( r#" SELECT users.* diff --git a/platform/api/src/api/v1/gql/subscription/channel.rs b/platform/api/src/api/v1/gql/subscription/channel.rs index a23ab6ec9..5870edc31 100644 --- a/platform/api/src/api/v1/gql/subscription/channel.rs +++ b/platform/api/src/api/v1/gql/subscription/channel.rs @@ -88,7 +88,7 @@ impl ChannelSubscription { let stream = self.channel_follows(ctx, channel_id).await?; - let mut followers = utils::database::query( + let mut followers = scuffle_utils::database::query( r#" SELECT COUNT(*) diff --git a/platform/api/src/api/v1/gql/subscription/chat.rs b/platform/api/src/api/v1/gql/subscription/chat.rs index c8f792a33..85090fe93 100644 --- a/platform/api/src/api/v1/gql/subscription/chat.rs +++ b/platform/api/src/api/v1/gql/subscription/chat.rs @@ -52,7 +52,7 @@ impl ChatSubscription { // load old messages not older than 10 minutes, max 100 messages let not_older_than = chrono::Utc::now() - chrono::Duration::minutes(10); let not_older_than = ulid::Ulid::from_parts(not_older_than.timestamp() as u64, u128::MAX); - let messages: Vec<database::ChatMessage> = utils::database::query( + let messages: Vec<database::ChatMessage> = scuffle_utils::database::query( "SELECT * FROM chat_messages WHERE channel_id = $1 AND deleted_at IS NULL AND id >= $2 ORDER BY id LIMIT 100", ) .bind(channel_id.to_ulid()) diff --git a/platform/api/src/api/v1/gql/subscription/user.rs b/platform/api/src/api/v1/gql/subscription/user.rs index e02cf68f2..92e29b35c 100644 --- a/platform/api/src/api/v1/gql/subscription/user.rs +++ b/platform/api/src/api/v1/gql/subscription/user.rs @@ -231,7 +231,7 @@ impl UserSubscription { Ok(async_stream::stream!({ if let Some(channel_id) = channel_id { - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following diff --git a/platform/api/src/api/v1/mod.rs b/platform/api/src/api/v1/mod.rs index f9577edda..65e94ccad 100644 --- a/platform/api/src/api/v1/mod.rs +++ b/platform/api/src/api/v1/mod.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use hyper::body::Incoming; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use super::error::ApiError; use super::Body; diff --git a/platform/api/src/api/v1/upload/mod.rs b/platform/api/src/api/v1/upload/mod.rs index eb6c36997..cadd6933a 100644 --- a/platform/api/src/api/v1/upload/mod.rs +++ b/platform/api/src/api/v1/upload/mod.rs @@ -5,12 +5,12 @@ use bytes::Bytes; use hyper::body::Incoming; use hyper::{Request, Response, StatusCode}; use multer::{Constraints, SizeLimit}; -use utils::http::ext::{OptionExt, ResultExt}; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::compat::BodyExt; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::ext::{OptionExt, ResultExt}; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::compat::BodyExt; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use self::profile_picture::ProfilePicture; use crate::api::auth::AuthData; diff --git a/platform/api/src/api/v1/upload/profile_picture.rs b/platform/api/src/api/v1/upload/profile_picture.rs index 8bce17174..5e0374944 100644 ---
a/platform/api/src/api/v1/upload/profile_picture.rs +++ b/platform/api/src/api/v1/upload/profile_picture.rs @@ -6,11 +6,11 @@ use bytes::Bytes; use hyper::{Response, StatusCode}; use pb::scuffle::platform::internal::image_processor; use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ImageFormat, UploadedFileMetadata}; +use scuffle_utils::http::ext::ResultExt; +use scuffle_utils::http::RouteError; +use scuffle_utils::make_response; use serde_json::json; use ulid::Ulid; -use utils::http::ext::ResultExt; -use utils::http::RouteError; -use utils::make_response; use super::UploadType; use crate::api::auth::AuthData; @@ -23,8 +23,11 @@ use crate::global::ApiGlobal; fn create_task(file_id: Ulid, input_path: &str, config: &ImageUploaderConfig, owner_id: Ulid) -> image_processor::Task { image_processor::Task { input_path: input_path.to_string(), - base_height: 128, // 128, 256, 384, 512 - base_width: 128, // 128, 256, 384, 512 + aspect_ratio: Some(image_processor::task::Ratio { + numerator: 1, + denominator: 1, + }), + clamp_aspect_ratio: true, formats: vec![ ImageFormat::PngStatic as i32, ImageFormat::AvifStatic as i32, @@ -42,8 +45,9 @@ fn create_task(file_id: Ulid, input_path: &str, config: &ImageUploaderConfig, ow max_processing_time_ms: 60 * 1000, // 60 seconds }), resize_algorithm: image_processor::task::ResizeAlgorithm::Lanczos3 as i32, - upscale: true, // For profile pictures we want to have a consistent size - scales: vec![1, 2, 3, 4], + upscale: image_processor::task::Upscale::NoPreserveSource as i32, + input_image_scaling: true, + scales: vec![64, 128, 256, 384], resize_method: image_processor::task::ResizeMethod::PadCenter as i32, output_prefix: format!("{owner_id}/{file_id}"), } @@ -183,7 +187,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to start transaction"))?; - utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") .bind(file_id) .bind(config.profile_picture_task_priority) .bind(utils::database::Protobuf(create_task( @@ -197,7 +201,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert image job"))?; - utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") + scuffle_utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") .bind(file_id) // id .bind(auth.session.user_id) // owner_id .bind(auth.session.user_id) // uploader_id @@ -217,7 +221,7 @@ impl UploadType for ProfilePicture { .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert uploaded file"))?; if self.set_active { - utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") + scuffle_utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") .bind(file_id) .bind(auth.session.user_id) .build() diff --git a/platform/api/src/database/channel.rs b/platform/api/src/database/channel.rs index 9fb592da1..6d440f54c 100644 --- a/platform/api/src/database/channel.rs +++ b/platform/api/src/database/channel.rs @@ -1,7 +1,7 @@ use async_graphql::SimpleObject; use chrono::{DateTime, Utc}; +use scuffle_utils::database::json; use ulid::Ulid; -use
utils::database::json; #[derive(Debug, Clone, Default, postgres_from_row::FromRow)] pub struct Channel { diff --git a/platform/api/src/database/two_fa_request.rs b/platform/api/src/database/two_fa_request.rs index 7ffbecdd0..6b921ef13 100644 --- a/platform/api/src/database/two_fa_request.rs +++ b/platform/api/src/database/two_fa_request.rs @@ -4,8 +4,8 @@ use chrono::{Duration, Utc}; use pb::ext::UlidExt; use pb::scuffle::platform::internal::two_fa::two_fa_request_action::{ChangePassword, Login}; use pb::scuffle::platform::internal::two_fa::TwoFaRequestAction; +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{Session, User}; use crate::global::ApiGlobal; @@ -27,7 +27,7 @@ pub trait TwoFaRequestActionTrait { } impl TwoFaRequestActionTrait for Login { - type Result = Result<Session, utils::database::deadpool_postgres::PoolError>; + type Result = Result<Session, scuffle_utils::database::deadpool_postgres::PoolError>; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let expires_at = Utc::now() + Duration::seconds(self.login_duration as i64); @@ -36,7 +36,7 @@ impl TwoFaRequestActionTrait for Login { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let session = utils::database::query( + let session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -56,7 +56,7 @@ impl TwoFaRequestActionTrait for Login { .fetch_one(&tx) .await?; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE users SET @@ -76,13 +76,13 @@ impl TwoFaRequestActionTrait for Login { } impl TwoFaRequestActionTrait for ChangePassword { - type Result = Result<(), utils::database::deadpool_postgres::PoolError>; + type Result = Result<(), scuffle_utils::database::deadpool_postgres::PoolError>; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let user: User = utils::database::query( + let user: User = scuffle_utils::database::query( r#" UPDATE users @@ -100,7 +100,7 @@ impl TwoFaRequestActionTrait for ChangePassword { .await?; // Delete all sessions except current - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/database/uploaded_file.rs b/platform/api/src/database/uploaded_file.rs index 57fd25ba9..b29fcc37a 100644 --- a/platform/api/src/database/uploaded_file.rs +++ b/platform/api/src/database/uploaded_file.rs @@ -1,5 +1,5 @@ +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{FileType, UploadedFileStatus}; diff --git a/platform/api/src/dataloader/category.rs b/platform/api/src/dataloader/category.rs index 0b1f8877d..a0124a531 100644 --- a/platform/api/src/dataloader/category.rs +++ b/platform/api/src/dataloader/category.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Category; @@ -21,7 +21,7 @@ impl Loader for CategoryByIdLoader { type Value = Category; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<Category> = utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") + let results: Vec<Category> = scuffle_utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(&self.db) diff --git a/platform/api/src/dataloader/global_state.rs b/platform/api/src/dataloader/global_state.rs index 9aa84e238..636e350aa 100644 --- a/platform/api/src/dataloader/global_state.rs +++
b/platform/api/src/dataloader/global_state.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use std::sync::Arc; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::GlobalState; @@ -21,7 +21,7 @@ impl Loader for GlobalStateLoader { type Value = GlobalState; async fn load(&self, _: &[Self::Key]) -> LoaderOutput { - let state = utils::database::query("SELECT * FROM global_state") + let state = scuffle_utils::database::query("SELECT * FROM global_state") .build_query_as() .fetch_one(&self.db) .await diff --git a/platform/api/src/dataloader/role.rs b/platform/api/src/dataloader/role.rs index 11c3f083d..6b73b8c16 100644 --- a/platform/api/src/dataloader/role.rs +++ b/platform/api/src/dataloader/role.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Role; @@ -21,7 +21,7 @@ impl Loader for RoleByIdLoader { type Value = Role; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<Role> = utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") + let results: Vec<Role> = scuffle_utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/session.rs b/platform/api/src/dataloader/session.rs index 00ef276c6..8247fc45f 100644 --- a/platform/api/src/dataloader/session.rs +++ b/platform/api/src/dataloader/session.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Session; @@ -21,7 +21,7 @@ impl Loader for SessionByIdLoader { type Value = Session; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<Session> = utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") + let results: Vec<Session> = scuffle_utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/uploaded_file.rs b/platform/api/src/dataloader/uploaded_file.rs index 82c2996de..8fef1dbcf 100644 --- a/platform/api/src/dataloader/uploaded_file.rs +++ b/platform/api/src/dataloader/uploaded_file.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::UploadedFile; @@ -21,7 +21,7 @@ impl Loader for UploadedFileByIdLoader { type Value = UploadedFile; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<UploadedFile> = utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") + let results: Vec<UploadedFile> = scuffle_utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/user.rs b/platform/api/src/dataloader/user.rs index 7121e3a69..2c5117447 100644 --- a/platform/api/src/dataloader/user.rs +++ b/platform/api/src/dataloader/user.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::User; @@ -21,7 +21,7 @@ impl Loader for UserByUsernameLoader { type Value = User; async fn load(&self,
keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<User> = utils::database::query("SELECT * FROM users WHERE username = ANY($1)") + let results: Vec<User> = scuffle_utils::database::query("SELECT * FROM users WHERE username = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) @@ -50,7 +50,7 @@ impl Loader for UserByIdLoader { type Value = User; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec<User> = utils::database::query("SELECT * FROM users WHERE id = ANY($1)") + let results: Vec<User> = scuffle_utils::database::query("SELECT * FROM users WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/global.rs b/platform/api/src/global.rs index 4def750b8..ce02686c0 100644 --- a/platform/api/src/global.rs +++ b/platform/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::{ApiConfig, IgDbConfig, ImageUploaderConfig, JwtConfig, TurnstileConfig, VideoApiConfig}; use crate::dataloader::category::CategoryByIdLoader; diff --git a/platform/api/src/igdb_cron.rs b/platform/api/src/igdb_cron.rs index e1672ceab..b95951ef1 100644 --- a/platform/api/src/igdb_cron.rs +++ b/platform/api/src/igdb_cron.rs @@ -314,7 +314,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any uploaded_file_id: Ulid, } - utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") + scuffle_utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") .push_values(&image_ids, |mut sep, item| { sep.push_bind(item.0); sep.push_bind(item.1); @@ -325,13 +325,14 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("insert igdb_image")?; - let image_ids = - utils::database::query("SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])") - .bind(image_ids.iter().map(|x| x.1).collect::<Vec<_>>()) - .build_query_as::() - .fetch_all(&tx) - .await - .context("select igdb_image")?; + let image_ids = scuffle_utils::database::query( + "SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])", + ) + .bind(image_ids.iter().map(|x| x.1).collect::<Vec<_>>()) + .build_query_as::() + .fetch_all(&tx) + .await + .context("select igdb_image")?; let image_ids = image_ids .into_iter() @@ -387,22 +388,23 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::<Vec<_>>(); - let uploaded_files_ids = - utils::database::query("INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ") - .push_values(&uploaded_files, |mut sep, item| { - sep.push_bind(item.id); - sep.push_bind(&item.name); - sep.push_bind(item.ty); - sep.push_bind(utils::database::Protobuf(item.metadata.clone())); - sep.push_bind(item.total_size); - sep.push_bind(&item.path); - sep.push_bind(item.status); - }) - .push("ON CONFLICT (id) DO NOTHING RETURNING id;") - .build_query_single_scalar::<Ulid>() - .fetch_all(&tx) - .await - .context("insert uploaded_files")?; + let uploaded_files_ids = scuffle_utils::database::query( + "INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ", + ) + .push_values(&uploaded_files, |mut sep, item| { + sep.push_bind(item.id); + sep.push_bind(&item.name); + sep.push_bind(item.ty); + sep.push_bind(utils::database::Protobuf(item.metadata.clone())); + sep.push_bind(item.total_size); + sep.push_bind(&item.path); + sep.push_bind(item.status); + }) + .push("ON CONFLICT (id) DO NOTHING RETURNING id;") + .build_query_single_scalar::<Ulid>() +
.fetch_all(&tx) + .await + .context("insert uploaded_files")?; let resp = resp .into_iter() @@ -433,7 +435,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any offset += resp.len(); let count = resp.len(); - let categories = utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") + let categories = scuffle_utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") .push_values(&resp, |mut sep, item| { sep.push_bind(item.id); sep.push_bind(item.igdb_id); @@ -480,7 +482,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::>(); - utils::database::query("WITH updated(id, category) AS (") + scuffle_utils::database::query("WITH updated(id, category) AS (") .push_values(categories.iter().collect::>(), |mut sep, item| { sep.push_bind(item.0).push_unseparated("::UUID"); sep.push_bind(item.1).push_unseparated("::UUID"); @@ -505,7 +507,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("start transaction image_jobs")?; - let unqueued = utils::database::query( + let unqueued = scuffle_utils::database::query( "UPDATE uploaded_files SET status = 'queued' WHERE id = ANY($1::UUID[]) AND status = 'unqueued' RETURNING id, path;", ) .bind(uploaded_files_ids) @@ -515,7 +517,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .context("update uploaded_files")?; if !unqueued.is_empty() { - utils::database::query("INSERT INTO image_jobs (id, priority, task) ") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) ") .bind(image_processor_config.igdb_image_task_priority as i64) .push_values(unqueued, |mut sep, (id, path)| { sep.push_bind(id).push("$1").push_bind(utils::database::Protobuf(create_task( @@ -587,9 +589,10 @@ fn create_task( ) -> image_processor::Task { image_processor::Task { callback_subject: config.callback_subject.clone(), - upscale: false, + upscale: image_processor::task::Upscale::NoPreserveSource as i32, output_prefix: format!("categories/{category_id}/{id}"), - scales: vec![1], + scales: vec![720, 1080], + input_image_scaling: true, limits: Some(image_processor::task::Limits { max_processing_time_ms: 60000, ..Default::default() @@ -601,8 +604,11 @@ fn create_task( ], input_path: path, resize_method: image_processor::task::ResizeMethod::Fit as i32, - base_height: 1080, - base_width: 1920, + clamp_aspect_ratio: false, + aspect_ratio: Some(image_processor::task::Ratio { + numerator: 1, + denominator: 1, + }), resize_algorithm: image_processor::task::ResizeAlgorithm::Lanczos3 as i32, } } diff --git a/platform/api/src/image_processor_callback.rs b/platform/api/src/image_processor_callback.rs deleted file mode 100644 index ab5d245ca..000000000 --- a/platform/api/src/image_processor_callback.rs +++ /dev/null @@ -1,257 +0,0 @@ -use std::sync::Arc; -use std::time::Duration; - -use anyhow::Context; -use async_nats::jetstream::consumer::pull::MessagesErrorKind; -use async_nats::jetstream::stream::RetentionPolicy; -use async_nats::jetstream::AckKind; -use futures_util::StreamExt; -use pb::ext::UlidExt; -use pb::scuffle::platform::internal::events::{processed_image, ProcessedImage}; -use pb::scuffle::platform::internal::types::{uploaded_file_metadata, UploadedFileMetadata}; -use prost::Message; -use 
utils::context::ContextExt; - -use crate::config::ImageUploaderConfig; -use crate::database::{FileType, UploadedFile, UploadedFileStatus}; -use crate::global::ApiGlobal; -use crate::subscription::SubscriptionTopic; - -pub async fn run(global: Arc) -> anyhow::Result<()> { - let config = global.config::(); - - // It can't contain dots for some reason - let stream_name = config.callback_subject.replace('.', "-"); - - let stream = global - .jetstream() - .get_or_create_stream(async_nats::jetstream::stream::Config { - name: stream_name.clone(), - subjects: vec![config.callback_subject.clone()], - max_consumers: 1, - retention: RetentionPolicy::WorkQueue, - ..Default::default() - }) - .await - .context("stream")?; - - let consumer = stream - .get_or_create_consumer( - &stream_name, - async_nats::jetstream::consumer::pull::Config { - name: Some(stream_name.clone()), - ..Default::default() - }, - ) - .await - .context("consumer")?; - - let mut messages = consumer.messages().await.context("messages")?; - - while let Ok(message) = messages.next().context(global.ctx()).await { - handle_message(&global, message).await?; - } - - Ok(()) -} - -async fn handle_message( - global: &Arc, - message: Option>>, -) -> anyhow::Result<()> { - let message = match message { - Some(Ok(message)) => message, - Some(Err(err)) if matches!(err.kind(), MessagesErrorKind::MissingHeartbeat) => { - tracing::warn!("missing heartbeat"); - return Ok(()); - } - Some(Err(err)) => { - anyhow::bail!("message: {:#}", err) - } - None => { - anyhow::bail!("stream closed"); - } - }; - - let (job_id, job_result) = match ProcessedImage::decode(message.payload.as_ref()) { - Ok(ProcessedImage { - job_id, - result: Some(result), - }) => (job_id, result), - err => { - if let Err(err) = err { - tracing::warn!(error = %err, "failed to decode image upload job result"); - } else { - tracing::warn!("malformed image upload job result"); - } - message - .ack() - .await - .map_err(|err| anyhow::anyhow!(err)) - .context("failed to ack")?; - return Ok(()); - } - }; - tracing::trace!("received image upload job result: {:?}", job_result); - - let mut client = global.db().get().await.context("failed to get db connection")?; - let tx = client.transaction().await.context("failed to start transaction")?; - - let uploaded_file: UploadedFile = match utils::database::query("UPDATE uploaded_files SET status = $1, failed = $2, metadata = $3, updated_at = NOW() WHERE id = $4 AND status = 'queued' RETURNING *") - .bind(if matches!(job_result, processed_image::Result::Success(_)) { - UploadedFileStatus::Completed - } else { - UploadedFileStatus::Failed - }) - .bind(match &job_result { - processed_image::Result::Success(_) => None, - processed_image::Result::Failure(processed_image::Failure { reason, .. }) => { - Some(reason) - } - }) - .bind(utils::database::Protobuf(UploadedFileMetadata { - metadata: Some(uploaded_file_metadata::Metadata::Image(uploaded_file_metadata::Image { - versions: match &job_result { - processed_image::Result::Success(processed_image::Success { variants }) => variants.clone(), - processed_image::Result::Failure(_) => Vec::new(), - }, - })), - })) - .bind(job_id.into_ulid()) - .build_query_as() - .fetch_optional(&tx) - .await - .context("failed to get uploaded file")? 
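- // The UPDATE matches only rows still in 'queued', so None here means the job was already handled.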
{ - Some(uploaded_file) => uploaded_file, - None => { - tracing::warn!("uploaded file not found"); - message.ack().await.map_err(|err| anyhow::anyhow!(err)).context("failed to ack")?; - return Ok(()); - } - }; - - match job_result { - processed_image::Result::Success(_) => { - global - .nats() - .publish( - SubscriptionTopic::UploadedFileStatus(uploaded_file.id), - pb::scuffle::platform::internal::events::UploadedFileStatus { - file_id: Some(uploaded_file.id.into()), - status: Some( - pb::scuffle::platform::internal::events::uploaded_file_status::Status::Success( - pb::scuffle::platform::internal::events::uploaded_file_status::Success {}, - ), - ), - } - .encode_to_vec() - .into(), - ) - .await - .context("failed to publish file update event")?; - - let updated = match uploaded_file.ty { - FileType::ProfilePicture => { - let owner_id = uploaded_file - .owner_id - .ok_or_else(|| anyhow::anyhow!("uploaded file owner id is null"))?; - - if utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $3") - .bind(uploaded_file.id) - .bind(owner_id) - .bind(uploaded_file.id) - .build() - .execute(&tx) - .await - .context("failed to update user")? == 1 { - Some(( - SubscriptionTopic::UserProfilePicture(uploaded_file.owner_id.unwrap()), - pb::scuffle::platform::internal::events::UserProfilePicture { - user_id: Some(uploaded_file.owner_id.unwrap().into()), - profile_picture_id: Some(uploaded_file.id.into()), - } - .encode_to_vec() - .into(), - )) - } else { - None - } - } - FileType::CategoryCover => None, - FileType::CategoryArtwork => None, - }; - - if let Some((topic, payload)) = updated { - global - .nats() - .publish(topic, payload) - .await - .context("failed to publish image upload update event")?; - } - } - processed_image::Result::Failure(processed_image::Failure { - reason, - friendly_message, - }) => { - global - .nats() - .publish( - SubscriptionTopic::UploadedFileStatus(uploaded_file.id), - pb::scuffle::platform::internal::events::UploadedFileStatus { - file_id: Some(uploaded_file.id.into()), - status: Some( - pb::scuffle::platform::internal::events::uploaded_file_status::Status::Failure( - pb::scuffle::platform::internal::events::uploaded_file_status::Failure { - reason, - friendly_message, - }, - ), - ), - } - .encode_to_vec() - .into(), - ) - .await - .context("failed to publish file update event")?; - - match uploaded_file.ty { - FileType::ProfilePicture => { - let owner_id = uploaded_file - .owner_id - .ok_or_else(|| anyhow::anyhow!("uploaded file owner id is null"))?; - - utils::database::query( - "UPDATE users SET pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $1 AND pending_profile_picture_id = $2", - ) - .bind(owner_id) - .bind(uploaded_file.id) - .build() - .execute(&tx) - .await - .context("failed to update user")?; - } - FileType::CategoryCover => {} - FileType::CategoryArtwork => {} - } - } - } - - if let Err(err) = tx.commit().await.context("failed to commit transaction") { - tracing::warn!(error = %err, "failed to commit transaction"); - message - .ack_with(AckKind::Nak(Some(Duration::from_secs(5)))) - .await - .map_err(|err| anyhow::anyhow!(err)) - .context("failed to ack")?; - return Ok(()); - } - - message - .ack() - .await - .map_err(|err| anyhow::anyhow!(err)) - .context("failed to ack")?; - - tracing::debug!("processed image upload job result"); - Ok(()) -} diff --git a/platform/api/src/image_upload_callback.rs 
b/platform/api/src/image_upload_callback.rs new file mode 100644 index 000000000..087f59142 --- /dev/null +++ b/platform/api/src/image_upload_callback.rs @@ -0,0 +1,289 @@ +use std::sync::Arc; +use std::time::Duration; + +use anyhow::Context; +use async_nats::jetstream::stream::RetentionPolicy; +use async_nats::jetstream::AckKind; +use futures_util::StreamExt; +use pb::ext::UlidExt; +use pb::scuffle::platform::internal::events::{processed_image, ProcessedImage}; +use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ProcessedImageVariant, UploadedFileMetadata}; +use prost::Message; +use scuffle_utils::context::ContextExt; + +use crate::config::ImageUploaderConfig; +use crate::database::{FileType, UploadedFile}; +use crate::global::ApiGlobal; +use crate::subscription::SubscriptionTopic; + +const CONSUMER_NAME: &str = "image-upload-consumer"; + +pub async fn run<G: ApiGlobal>(global: Arc<G>) -> anyhow::Result<()> { + let config = global.config::<ImageUploaderConfig>(); + + let image_upload_callback = global + .jetstream() + .get_or_create_stream(async_nats::jetstream::stream::Config { + name: config.callback_subject.clone(), + subjects: vec![config.callback_subject.clone()], + max_consumers: 1, + retention: RetentionPolicy::WorkQueue, + ..Default::default() + }) + .await + .context("failed to create image upload stream")?; + + let image_upload_callback = image_upload_callback + .get_or_create_consumer( + CONSUMER_NAME, + async_nats::jetstream::consumer::pull::Config { + name: Some(CONSUMER_NAME.to_owned()), + ack_wait: Duration::from_secs(30), + ..Default::default() + }, + ) + .await + .context("failed to create image upload consumer")?; + + let mut image_upload_consumer = image_upload_callback + .messages() + .await + .context("failed to get image upload consumer messages")?; + + while let Ok(message) = image_upload_consumer.next().context(global.ctx()).await { + let message = message + .ok_or_else(|| anyhow::anyhow!("image upload consumer closed"))?
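+ // next() yields Option<Result<_, _>>: None means the message stream closed, Err a pull error.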
+ .context("failed to get profile picture consumer message")?; + let (job_id, job_result) = match ProcessedImage::decode(message.payload.as_ref()) { + Ok(ProcessedImage { + job_id, + result: Some(result), + }) => (job_id, result), + err => { + if let Err(err) = err { + tracing::warn!(error = %err, "failed to decode profile picture job result"); + } else { + tracing::warn!("malformed profile picture job result"); + } + message + .ack() + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + continue; + } + }; + tracing::debug!("received profile picture job result: {:?}", job_result); + + match job_result { + processed_image::Result::Success(processed_image::Success { variants }) => { + if let Err(err) = handle_success(&global, job_id.into_ulid(), variants).await { + tracing::warn!(error = %err, "failed to handle profile picture job success"); + message + .ack_with(AckKind::Nak(Some(Duration::from_secs(5)))) + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + } else { + message + .ack() + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + } + } + processed_image::Result::Failure(processed_image::Failure { + reason, + friendly_message, + }) => { + if let Err(err) = handle_failure(&global, job_id.into_ulid(), reason, friendly_message).await { + tracing::warn!(error = %err, "failed to handle profile picture job failure"); + message + .ack_with(AckKind::Nak(Some(Duration::from_secs(5)))) + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + } else { + message + .ack() + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + } + } + } + + message + .ack() + .await + .map_err(|err| anyhow::anyhow!(err)) + .context("failed to ack")?; + } + + Ok(()) +} + +async fn handle_success( + global: &Arc, + job_id: ulid::Ulid, + variants: Vec, +) -> anyhow::Result<()> { + let mut client = global.db().get().await.context("failed to get db connection")?; + let tx = client.transaction().await.context("failed to start transaction")?; + + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'completed', metadata = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + .bind(utils::database::Protobuf(UploadedFileMetadata { + metadata: Some(uploaded_file_metadata::Metadata::Image(uploaded_file_metadata::Image { + versions: variants, + })), + })) + .bind(job_id) + .build_query_as() + .fetch_optional(&tx) + .await + .context("failed to get uploaded file")? 
{ + Some(uploaded_file) => uploaded_file, + None => { + anyhow::bail!("uploaded file not found"); + } + }; + + global + .nats() + .publish( + SubscriptionTopic::UploadedFileStatus(uploaded_file.id), + pb::scuffle::platform::internal::events::UploadedFileStatus { + file_id: Some(uploaded_file.id.into()), + status: Some( + pb::scuffle::platform::internal::events::uploaded_file_status::Status::Success( + pb::scuffle::platform::internal::events::uploaded_file_status::Success {}, + ), + ), + } + .encode_to_vec() + .into(), + ) + .await + .context("failed to publish file update event")?; + + match uploaded_file.ty { + FileType::CategoryArtwork | FileType::CategoryCover => { + tx.commit().await.context("failed to commit transaction")?; + } + FileType::ProfilePicture => { + let user_updated = scuffle_utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $1") + .bind(uploaded_file.id) + .bind(uploaded_file.owner_id) + .build() + .execute(&tx) + .await + .context("failed to update user")? == 1; + + tx.commit().await.context("failed to commit transaction")?; + + let owner_id = uploaded_file + .owner_id + .ok_or_else(|| anyhow::anyhow!("uploaded file owner id is null"))?; + + if user_updated { + global + .nats() + .publish( + SubscriptionTopic::UserProfilePicture(owner_id), + pb::scuffle::platform::internal::events::UserProfilePicture { + user_id: Some(owner_id.into()), + profile_picture_id: Some(uploaded_file.id.into()), + } + .encode_to_vec() + .into(), + ) + .await + .context("failed to publish profile picture update event")?; + } + } + } + + Ok(()) +} + +async fn handle_failure<G: ApiGlobal>( + global: &Arc<G>, + job_id: ulid::Ulid, + reason: String, + friendly_message: String, +) -> anyhow::Result<()> { + let mut client = global.db().get().await.context("failed to get db connection")?; + let tx = client.transaction().await.context("failed to start transaction")?; + + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'failed', failed = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + .bind(reason.clone()) + .bind(job_id) + .build_query_as() + .fetch_optional(&tx) + .await + .context("failed to get uploaded file")? { + Some(uploaded_file) => uploaded_file, + None => { + anyhow::bail!("uploaded file not found"); + } + }; + + global + .nats() + .publish( + SubscriptionTopic::UploadedFileStatus(uploaded_file.id), + pb::scuffle::platform::internal::events::UploadedFileStatus { + file_id: Some(uploaded_file.id.into()), + status: Some( + pb::scuffle::platform::internal::events::uploaded_file_status::Status::Failure( + pb::scuffle::platform::internal::events::uploaded_file_status::Failure { + reason, + friendly_message, + }, + ), + ), + } + .encode_to_vec() + .into(), + ) + .await + .context("failed to publish file update event")?; + + let user_updated = match uploaded_file.ty { + FileType::CategoryArtwork | FileType::CategoryCover => false, + FileType::ProfilePicture => { + scuffle_utils::database::query( + "UPDATE users SET pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $1 AND pending_profile_picture_id = $2", + ) + .bind(uploaded_file.owner_id) + .bind(uploaded_file.id) + .build() + .execute(&tx) + .await + .context("failed to update user")?
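+ // execute() returns the number of rows affected; exactly one means this upload was still the user's pending picture.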
+ == 1 + } + }; + + tx.commit().await.context("failed to commit transaction")?; + + match (uploaded_file.ty, user_updated) { + (FileType::CategoryArtwork | FileType::CategoryCover, _) => {} + (FileType::ProfilePicture, true) => { + global + .nats() + .publish( + SubscriptionTopic::UserProfilePicture(uploaded_file.owner_id.unwrap()), + pb::scuffle::platform::internal::events::UserProfilePicture { + user_id: Some(uploaded_file.owner_id.unwrap().into()), + profile_picture_id: None, + } + .encode_to_vec() + .into(), + ) + .await + .context("failed to publish profile picture update event")?; + } + (FileType::ProfilePicture, false) => {} + } + + Ok(()) +} diff --git a/platform/api/src/lib.rs b/platform/api/src/lib.rs index bcb182969..4a4ff94a1 100644 --- a/platform/api/src/lib.rs +++ b/platform/api/src/lib.rs @@ -5,7 +5,7 @@ pub mod dataloader; pub mod global; pub mod grpc; pub mod igdb_cron; -pub mod image_processor_callback; +pub mod image_upload_callback; pub mod subscription; pub mod turnstile; pub mod video_api; diff --git a/platform/api/src/main.rs b/platform/api/src/main.rs index ae61ba3db..e328c5572 100644 --- a/platform/api/src/main.rs +++ b/platform/api/src/main.rs @@ -18,11 +18,11 @@ use platform_api::video_api::{ load_playback_keypair_private_key, setup_video_events_client, setup_video_playback_session_client, setup_video_room_client, VideoEventsClient, VideoPlaybackSessionClient, VideoRoomClient, }; -use platform_api::{igdb_cron, image_processor_callback, video_event_handler}; +use platform_api::{igdb_cron, image_upload_callback, video_event_handler}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; +use scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::grpc::TlsSettings; #[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] #[serde(default)] @@ -256,7 +256,7 @@ impl binary_helper::Global for GlobalState { None }; - let video_api_channel = utils::grpc::make_channel( + let video_api_channel = scuffle_utils::grpc::make_channel( vec![config.extra.video_api.address.clone()], Duration::from_secs(30), video_api_tls, @@ -321,7 +321,7 @@ pub async fn main() { let api_future = platform_api::api::run(global.clone()); let subscription_manager = global.subscription_manager.run(global.ctx.clone(), global.nats.clone()); let video_event_handler = video_event_handler::run(global.clone()); - let image_processor_callback = image_processor_callback::run(global.clone()); + let image_upload_callback = image_upload_callback::run(global.clone()); let igdb_cron = igdb_cron::run(global.clone()); select!
{ @@ -329,7 +329,7 @@ pub async fn main() { r = api_future => r.context("api server stopped unexpectedly")?, r = subscription_manager => r.context("subscription manager stopped unexpectedly")?, r = video_event_handler => r.context("video event handler stopped unexpectedly")?, - r = image_processor_callback => r.context("image processor callback handler stopped unexpectedly")?, + r = image_upload_callback => r.context("image upload callback handler stopped unexpectedly")?, r = igdb_cron => r.context("igdb cron stopped unexpectedly")?, } diff --git a/platform/api/src/subscription.rs b/platform/api/src/subscription.rs index 0fd7208f8..6c333e7e5 100644 --- a/platform/api/src/subscription.rs +++ b/platform/api/src/subscription.rs @@ -2,12 +2,12 @@ use std::collections::HashMap; use std::ops::{Deref, DerefMut}; use async_nats::Message; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; use tracing::{debug, error, warn}; use ulid::Ulid; -use utils::context::Context; #[derive(thiserror::Error, Debug)] pub enum SubscriptionManagerError { diff --git a/platform/api/src/video_event_handler.rs b/platform/api/src/video_event_handler.rs index ef7818ea1..15ce3a2b8 100644 --- a/platform/api/src/video_event_handler.rs +++ b/platform/api/src/video_event_handler.rs @@ -62,7 +62,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti .await .context("failed to fetch playback session count")?; - let channel_id = utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") + let channel_id = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") .bind(connection_id.into_ulid()) .bind(live_viewer_count) .bind(chrono::DateTime::from_timestamp_millis(timestamp)) @@ -89,7 +89,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti connection_id: Some(connection_id), ..
}) => { - let res = utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") + let res = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") .bind(room_id.into_ulid()) .bind(connection_id.into_ulid()) .build_query_single_scalar() diff --git a/platform/image_processor/APACHE2_LICENSE b/platform/image_processor/APACHE2_LICENSE deleted file mode 120000 index 4eaff7312..000000000 --- a/platform/image_processor/APACHE2_LICENSE +++ /dev/null @@ -1 +0,0 @@ -../../licenses/APACHE2_LICENSE \ No newline at end of file diff --git a/platform/image_processor/Cargo.toml b/platform/image_processor/Cargo.toml deleted file mode 100644 index 521824e71..000000000 --- a/platform/image_processor/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "platform-image-processor" -version = "0.0.1" -edition = "2021" -authors = ["Scuffle "] -description = "Scuffle Image Processor" -license = "MIT OR Apache-2.0" - -[dependencies] -tracing = "0.1" -tokio = { version = "1.34", features = ["full"] } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -ulid = { version = "1.1", features = ["uuid"] } -postgres-from-row = "0.5" -prost = "0.12" -aws-config = "1.1" -aws-sdk-s3 = { version = "1.12", features = ["behavior-version-latest"] } -async-trait = "0.1" -anyhow = "1.0" -async-nats = "0.33" -tonic = "0.11" -futures = "0.3" -thiserror = "1.0" -file-format = "0.24" -scopeguard = "1.2" -rgb = "0.8" -imgref = "1.10" -libavif-sys = { version = "0.16" } -libwebp-sys2 = { version = "0.1", features = ["1_2", "demux", "mux", "static"] } -sha2 = "0.10" -byteorder = "1.5" -gifski = "1.13" -png = "0.17" -num_cpus = "1.16" -bytes = "1.0" -reqwest = { version = "0.11", default-features = false, features = ["rustls-tls"] } -fast_image_resize = "3.0.4" - -utils = { workspace = true, features = ["all"] } -config = { workspace = true } -pb = { workspace = true } -binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["task-abort", "tracing"] } diff --git a/platform/image_processor/MIT_LICENSE b/platform/image_processor/MIT_LICENSE deleted file mode 120000 index 9b50c52e1..000000000 --- a/platform/image_processor/MIT_LICENSE +++ /dev/null @@ -1 +0,0 @@ -../../licenses/MIT_LICENSE \ No newline at end of file diff --git a/platform/image_processor/src/config.rs b/platform/image_processor/src/config.rs deleted file mode 100644 index adaeeb7ae..000000000 --- a/platform/image_processor/src/config.rs +++ /dev/null @@ -1,49 +0,0 @@ -use binary_helper::config::{S3BucketConfig, S3CredentialsConfig}; -use ulid::Ulid; - -#[derive(Debug, Clone, PartialEq, config::Config, serde::Deserialize)] -#[serde(default)] -pub struct ImageProcessorConfig { - /// The S3 Bucket which contains the source images - pub source_bucket: S3BucketConfig, - - /// The S3 Bucket which will contain the target images - pub target_bucket: S3BucketConfig, - - /// Concurrency limit, defaults to number of CPUs - pub concurrency: usize, - - /// Instance ID (defaults to a random ULID) - pub instance_id: Ulid, - - /// Allow http downloads - pub allow_http: bool, -} - -impl Default for ImageProcessorConfig { - fn default() -> Self { - Self { - source_bucket: S3BucketConfig { 
- name: "scuffle-image-processor".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: Some("scuffle123".to_owned()), - }, - }, - target_bucket: S3BucketConfig { - name: "scuffle-image-processor-public".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: Some("scuffle123".to_owned()), - }, - }, - concurrency: num_cpus::get(), - instance_id: Ulid::new(), - allow_http: true, - } - } -} diff --git a/platform/image_processor/src/database.rs b/platform/image_processor/src/database.rs deleted file mode 100644 index 1298a2b68..000000000 --- a/platform/image_processor/src/database.rs +++ /dev/null @@ -1,13 +0,0 @@ -use pb::scuffle::platform::internal::image_processor::Task; -use ulid::Ulid; -use utils::database::protobuf; - -// The actual table has more columns but we only need id and task to process a -// job - -#[derive(Debug, Clone, Default, postgres_from_row::FromRow)] -pub struct Job { - pub id: Ulid, - #[from_row(from_fn = "protobuf")] - pub task: Task, -} diff --git a/platform/image_processor/src/global.rs b/platform/image_processor/src/global.rs deleted file mode 100644 index 8dd7af01f..000000000 --- a/platform/image_processor/src/global.rs +++ /dev/null @@ -1,35 +0,0 @@ -use binary_helper::s3::Bucket; - -use crate::config::ImageProcessorConfig; - -pub trait ImageProcessorState { - fn s3_source_bucket(&self) -> &Bucket; - fn s3_target_bucket(&self) -> &Bucket; - fn http_client(&self) -> &reqwest::Client; -} - -pub trait ImageProcessorGlobal: - binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ -} - -impl ImageProcessorGlobal for T where - T: binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ -} diff --git a/platform/image_processor/src/grpc.rs b/platform/image_processor/src/grpc.rs deleted file mode 100644 index f770cd149..000000000 --- a/platform/image_processor/src/grpc.rs +++ /dev/null @@ -1,9 +0,0 @@ -use std::sync::Arc; - -use tonic::transport::server::Router; - -use crate::global::ImageProcessorGlobal; - -pub fn add_routes(_: &Arc, router: Router) -> Router { - router -} diff --git a/platform/image_processor/src/lib.rs b/platform/image_processor/src/lib.rs deleted file mode 100644 index b98fa0b13..000000000 --- a/platform/image_processor/src/lib.rs +++ /dev/null @@ -1,8 +0,0 @@ -pub mod config; -pub mod database; -pub mod global; -pub mod grpc; -pub mod processor; - -#[cfg(test)] -mod tests; diff --git a/platform/image_processor/src/main.rs b/platform/image_processor/src/main.rs deleted file mode 100644 index 51620261d..000000000 --- a/platform/image_processor/src/main.rs +++ /dev/null @@ -1,131 +0,0 @@ -#![allow(dead_code)] - -use std::sync::Arc; - -use anyhow::Context as _; -use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; -use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; -use platform_image_processor::config::ImageProcessorConfig; -use 
tokio::select; -use utils::context::Context; - -#[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] -#[serde(default)] -struct ExtConfig { - image_processor: ImageProcessorConfig, -} - -impl binary_helper::config::ConfigExtention for ExtConfig { - const APP_NAME: &'static str = "scuffle-image-processor"; -} - -// TODO: We don't need grpc and nats -type AppConfig = binary_helper::config::AppConfig; - -struct GlobalState { - ctx: Context, - db: Arc, - config: AppConfig, - nats: async_nats::Client, - jetstream: async_nats::jetstream::Context, - s3_source_bucket: binary_helper::s3::Bucket, - s3_target_bucket: binary_helper::s3::Bucket, - http_client: reqwest::Client, -} - -impl_global_traits!(GlobalState); - -impl binary_helper::global::GlobalConfigProvider for GlobalState { - #[inline(always)] - fn provide_config(&self) -> &ImageProcessorConfig { - &self.config.extra.image_processor - } -} - -impl platform_image_processor::global::ImageProcessorState for GlobalState { - #[inline(always)] - fn s3_source_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_source_bucket - } - - #[inline(always)] - fn s3_target_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_target_bucket - } - - #[inline(always)] - fn http_client(&self) -> &reqwest::Client { - &self.http_client - } -} - -impl binary_helper::Global for GlobalState { - async fn new(ctx: Context, config: AppConfig) -> anyhow::Result { - let db = setup_database(&config.database).await?; - let s3_source_bucket = config.extra.image_processor.source_bucket.setup(); - let s3_target_bucket = config.extra.image_processor.target_bucket.setup(); - - let (nats, jetstream) = setup_nats(&config.name, &config.nats).await?; - - let http_client = reqwest::Client::builder() - .user_agent(concat!("scuffle-image-processor/", env!("CARGO_PKG_VERSION"))) - .build()?; - - Ok(Self { - ctx, - db, - nats, - jetstream, - config, - s3_source_bucket, - s3_target_bucket, - http_client, - }) - } -} - -pub fn main() { - tokio::runtime::Builder::new_multi_thread() - .enable_all() - .max_blocking_threads( - std::env::var("TOKIO_MAX_BLOCKING_THREADS") - .ok() - .and_then(|v| v.parse().ok()) - .unwrap_or(2048), - ) - .build() - .expect("failed to create tokio runtime") - .block_on(async { - if let Err(err) = bootstrap::(|global| async move { - let grpc_future = { - let mut server = grpc_server(&global.config.grpc) - .await - .context("failed to create grpc server")?; - let router = server.add_service(grpc_health::HealthServer::new(&global, |global, _| async move { - !global.db().is_closed() - && global.nats().connection_state() == async_nats::connection::State::Connected - })); - - let router = platform_image_processor::grpc::add_routes(&global, router); - - router.serve_with_shutdown(global.config.grpc.bind_address, async { - global.ctx().done().await; - }) - }; - - let processor_future = platform_image_processor::processor::run(global.clone()); - - select! 
{ - r = grpc_future => r.context("grpc server stopped unexpectedly")?, - r = processor_future => r.context("processor stopped unexpectedly")?, - } - - Ok(()) - }) - .await - { - tracing::error!("{:#}", err); - std::process::exit(1); - } - }) -} diff --git a/platform/image_processor/src/processor/error.rs b/platform/image_processor/src/processor/error.rs deleted file mode 100644 index c393684cd..000000000 --- a/platform/image_processor/src/processor/error.rs +++ /dev/null @@ -1,124 +0,0 @@ -use aws_sdk_s3::operation::get_object::GetObjectError; -use aws_sdk_s3::operation::put_object::PutObjectError; -use file_format::FileFormat; - -#[derive(Debug, thiserror::Error)] -pub enum DecoderError { - #[error("input too long: {0}ms")] - TooLong(i64), - #[error("too many frames: {0}frms")] - TooManyFrames(i64), - #[error("input too high: {0}px")] - TooHigh(i32), - #[error("input too wide: {0}px")] - TooWide(i32), - #[error("{0}")] - Other(anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ProcessorError { - #[error("semaphore ticket acquire: {0}")] - SemaphoreAcquire(#[from] tokio::sync::AcquireError), - - #[error("database: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), - - #[error("database pool: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), - - #[error("lost job")] - LostJob, - - #[error("invalid job state")] - InvalidJobState, - - #[error("download source from s3: {0}")] - S3Download(aws_sdk_s3::error::SdkError), - - #[error("download source from s3: {0}")] - S3DownloadStream(aws_sdk_s3::primitives::ByteStreamError), - - #[error("upload target to s3: {0:?}")] - S3Upload(aws_sdk_s3::error::SdkError), - - #[error("publish to nats: {0}")] - NatsPublish(#[from] async_nats::PublishError), - - #[error("image: {0}")] - FileFormat(std::io::Error), - - #[error("unsupported input format: {0}")] - UnsupportedInputFormat(FileFormat), - - #[error("ffmpeg decode: {0}")] - FfmpegDecode(DecoderError), - - #[error("timelimit exceeded")] - TimeLimitExceeded, - - #[error("avif decode: {0}")] - AvifDecode(DecoderError), - - #[error("avif encode: {0}")] - AvifEncode(anyhow::Error), - - #[error("webp decode: {0}")] - WebPDecode(DecoderError), - - #[error("webp encode: {0}")] - WebPEncode(anyhow::Error), - - #[error("png encode: {0}")] - PngEncode(anyhow::Error), - - #[error("image resize: {0}")] - ImageResize(anyhow::Error), - - #[error("blocking task spawn")] - BlockingTaskSpawn, - - #[error("gifski encode: {0}")] - GifskiEncode(anyhow::Error), - - #[error("http download disabled")] - HttpDownloadDisabled, - - #[error("download timeout")] - DownloadTimeout, - - #[error("http download: {0}")] - HttpDownload(#[from] reqwest::Error), -} - -impl ProcessorError { - pub fn friendly_message(&self) -> String { - let msg = match self { - ProcessorError::LostJob => Some("The job was lost"), - ProcessorError::InvalidJobState => Some("The job is in an invalid state"), - ProcessorError::S3Download(_) => Some("Failed to download file"), - ProcessorError::S3Upload(_) => Some("Failed to upload file"), - ProcessorError::FileFormat(_) => Some("Failed to read file format"), - ProcessorError::UnsupportedInputFormat(_) => { - Some("Unsupported input format. 
Please use one of the supported formats.") - } - ProcessorError::TimeLimitExceeded => Some("The job took too long to process the file"), - ProcessorError::AvifEncode(_) => Some("Failed to reencode image to AVIF"), - ProcessorError::WebPEncode(_) => Some("Failed to reencode image to WebP"), - ProcessorError::PngEncode(_) => Some("Failed to reencode image to PNG"), - ProcessorError::ImageResize(_) => Some("Failed to resize image"), - ProcessorError::GifskiEncode(_) => Some("Failed to reencode image to GIF"), - ProcessorError::FfmpegDecode(e) | ProcessorError::AvifDecode(e) | ProcessorError::WebPDecode(e) => match e { - DecoderError::TooLong(_) => Some("The file is too long"), - DecoderError::TooManyFrames(_) => Some("The file has too many frames"), - DecoderError::TooWide(_) => Some("The image is too wide"), - DecoderError::TooHigh(_) => Some("The image is too high"), - DecoderError::Other(_) => None, - }, - _ => None, - }; - msg.map(|m| m.to_string()).unwrap_or_else(|| format!("{}", self)) - } -} - -pub type Result = std::result::Result; diff --git a/platform/image_processor/src/processor/job/decoder/ffmpeg.rs b/platform/image_processor/src/processor/job/decoder/ffmpeg.rs deleted file mode 100644 index 721903207..000000000 --- a/platform/image_processor/src/processor/job/decoder/ffmpeg.rs +++ /dev/null @@ -1,226 +0,0 @@ -use std::borrow::Cow; - -use anyhow::{anyhow, Context as _}; -use imgref::Img; -use rgb::RGBA8; - -use super::{Decoder, DecoderBackend, DecoderInfo, LoopCount}; -use crate::database::Job; -use crate::processor::error::{DecoderError, ProcessorError, Result}; -use crate::processor::job::frame::Frame; - -pub struct FfmpegDecoder<'data> { - input: ffmpeg::io::Input>>, - decoder: ffmpeg::decoder::VideoDecoder, - scaler: ffmpeg::scalar::Scalar, - info: DecoderInfo, - input_stream_index: i32, - average_frame_duration_ts: u64, - send_packet: bool, - eof: bool, - done: bool, -} - -const fn cast_bytes_to_rgba(bytes: &[u8]) -> &[rgb::RGBA8] { - unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const _, bytes.len() / 4) } -} - -static FFMPEG_LOGGING_INITIALIZED: std::sync::Once = std::sync::Once::new(); - -impl<'data> FfmpegDecoder<'data> { - pub fn new(job: &Job, data: Cow<'data, [u8]>) -> Result { - FFMPEG_LOGGING_INITIALIZED.call_once(|| { - ffmpeg::log::log_callback_tracing(); - }); - - let input = ffmpeg::io::Input::seekable(std::io::Cursor::new(data)) - .context("input") - .map_err(DecoderError::Other) - .map_err(ProcessorError::FfmpegDecode)?; - - let input_stream = input - .streams() - .best(ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) - .ok_or_else(|| ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!("no video stream"))))?; - - let input_stream_index = input_stream.index(); - - let input_stream_time_base = input_stream.time_base(); - let input_stream_duration = input_stream.duration().unwrap_or(0); - let input_stream_frames = input_stream - .nb_frames() - .ok_or_else(|| ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!("no frame count"))))? - .max(1); - - if input_stream_time_base.den == 0 || input_stream_time_base.num == 0 { - return Err(ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!( - "stream time base is 0" - )))); - } - - let decoder = match ffmpeg::decoder::Decoder::new(&input_stream) - .context("video decoder") - .map_err(DecoderError::Other) - .map_err(ProcessorError::FfmpegDecode)? 
- { - ffmpeg::decoder::Decoder::Video(decoder) => decoder, - _ => { - return Err(ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!( - "not a video decoder" - )))); - } - }; - - let max_input_width = job.task.limits.as_ref().map(|l| l.max_input_width).unwrap_or(0) as i32; - let max_input_height = job.task.limits.as_ref().map(|l| l.max_input_height).unwrap_or(0) as i32; - let max_input_frame_count = job.task.limits.as_ref().map(|l| l.max_input_frame_count).unwrap_or(0) as i32; - let max_input_duration_ms = job.task.limits.as_ref().map(|l| l.max_input_duration_ms).unwrap_or(0) as i32; - - if max_input_width > 0 && decoder.width() > max_input_width { - return Err(ProcessorError::FfmpegDecode(DecoderError::TooWide(decoder.width()))); - } - - if max_input_height > 0 && decoder.height() > max_input_height { - return Err(ProcessorError::FfmpegDecode(DecoderError::TooHigh(decoder.height()))); - } - - if max_input_frame_count > 0 && input_stream_frames > max_input_frame_count as i64 { - return Err(ProcessorError::FfmpegDecode(DecoderError::TooManyFrames(input_stream_frames))); - } - - // actual duration - // = duration * (time_base.num / time_base.den) * 1000 - // = (duration * time_base.num * 1000) / time_base.den - let duration = - (input_stream_duration * input_stream_time_base.num as i64 * 1000) / input_stream_time_base.den as i64; - if max_input_duration_ms > 0 && duration > max_input_duration_ms as i64 { - return Err(ProcessorError::FfmpegDecode(DecoderError::TooLong(duration))); - } - - let scaler = ffmpeg::scalar::Scalar::new( - decoder.width(), - decoder.height(), - decoder.pixel_format(), - decoder.width(), - decoder.height(), - ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, - ) - .context("scaler") - .map_err(DecoderError::Other) - .map_err(ProcessorError::FfmpegDecode)?; - - Ok(Self { - info: DecoderInfo { - width: decoder.width() as usize, - height: decoder.height() as usize, - frame_count: input_stream_frames as usize, - // TODO: Support loop count from ffmpeg. 
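- // (until then, every animated input is reported as looping forever)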
- loop_count: LoopCount::Infinite, - timescale: input_stream_time_base.den as u64, - }, - average_frame_duration_ts: (input_stream_duration / input_stream_frames) as u64, - input, - scaler, - decoder, - input_stream_index, - done: false, - eof: false, - send_packet: true, - }) - } -} - -impl Decoder for FfmpegDecoder<'_> { - fn backend(&self) -> DecoderBackend { - DecoderBackend::Ffmpeg - } - - fn decode(&mut self) -> Result> { - if self.done { - return Ok(None); - } - - loop { - if self.send_packet && !self.eof { - let packet = self - .input - .packets() - .find_map(|packet| match packet { - Ok(packet) => { - if packet.stream_index() == self.input_stream_index { - Some(Ok(packet)) - } else { - None - } - } - Err(err) => { - self.done = true; - Some(Err(err)) - } - }) - .transpose() - .context("receive packet") - .map_err(DecoderError::Other) - .map_err(ProcessorError::FfmpegDecode)?; - - if let Some(packet) = packet { - self.decoder.send_packet(&packet).context("send packet").map_err(|err| { - self.done = true; - ProcessorError::FfmpegDecode(DecoderError::Other(err)) - })?; - } else { - self.decoder.send_eof().context("send eof").map_err(|err| { - self.done = true; - ProcessorError::FfmpegDecode(DecoderError::Other(err)) - })?; - self.eof = true; - } - - self.send_packet = false; - } - - let frame = self.decoder.receive_frame().context("receive frame").map_err(|err| { - self.done = true; - ProcessorError::FfmpegDecode(DecoderError::Other(err)) - })?; - - if let Some(frame) = frame { - let frame = self.scaler.process(&frame).context("scaler run").map_err(|err| { - self.done = true; - ProcessorError::FfmpegDecode(DecoderError::Other(err)) - })?; - - let mut data = vec![RGBA8::default(); frame.width() * frame.height()]; - - // The frame has padding, so we need to copy the data. - let frame_data = frame.data(0).unwrap(); - let frame_linesize = frame.linesize(0).unwrap(); - - if frame_linesize == frame.width() as i32 * 4 { - // No padding, so we can just copy the data. - data.copy_from_slice(cast_bytes_to_rgba(frame_data)); - } else { - // The frame has padding, so we need to copy the data. 
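- // linesize is the row stride in bytes; with 4-byte RGBA pixels a padded stride is wider than width * 4, so each row is copied individually to skip the padding.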
- for (i, row) in data.chunks_exact_mut(frame.width()).enumerate() { - let row_data = &frame_data[i * frame_linesize as usize..][..frame.width() * 4]; - row.copy_from_slice(cast_bytes_to_rgba(row_data)); - } - } - - return Ok(Some(Frame { - image: Img::new(data, self.info.width, self.info.height), - duration_ts: self.average_frame_duration_ts, - })); - } else if self.eof { - self.done = true; - return Ok(None); - } else { - self.send_packet = true; - } - } - } - - fn info(&self) -> DecoderInfo { - self.info - } -} diff --git a/platform/image_processor/src/processor/job/decoder/libavif.rs b/platform/image_processor/src/processor/job/decoder/libavif.rs deleted file mode 100644 index b6fa116d9..000000000 --- a/platform/image_processor/src/processor/job/decoder/libavif.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::borrow::Cow; -use std::ptr::NonNull; - -use anyhow::Context; - -use super::{Decoder, DecoderBackend, DecoderInfo, LoopCount}; -use crate::database::Job; -use crate::processor::error::{DecoderError, ProcessorError, Result}; -use crate::processor::job::frame::Frame; -use crate::processor::job::libavif::{AvifError, AvifRgbImage}; -use crate::processor::job::smart_object::SmartPtr; - -#[derive(Debug)] -pub struct AvifDecoder<'data> { - decoder: SmartPtr, - info: DecoderInfo, - _data: Cow<'data, [u8]>, - img: AvifRgbImage, - total_duration: u64, - max_input_duration: u64, -} - -impl<'data> AvifDecoder<'data> { - pub fn new(job: &Job, data: Cow<'data, [u8]>) -> Result { - let mut decoder = SmartPtr::new( - NonNull::new(unsafe { libavif_sys::avifDecoderCreate() }) - .ok_or(AvifError::OutOfMemory) - .context("failed to create avif decoder") - .map_err(DecoderError::Other) - .map_err(ProcessorError::AvifDecode)?, - |ptr| { - // Safety: The decoder is valid. - unsafe { - libavif_sys::avifDecoderDestroy(ptr.as_ptr()); - } - }, - ); - - let max_input_width = job.task.limits.as_ref().map(|l| l.max_input_width).unwrap_or(0); - let max_input_height = job.task.limits.as_ref().map(|l| l.max_input_height).unwrap_or(0); - if max_input_height != 0 && max_input_width != 0 { - decoder.as_mut().imageDimensionLimit = max_input_width * max_input_height; - } - - let max_input_frame_count = job.task.limits.as_ref().map(|l| l.max_input_frame_count).unwrap_or(0); - if max_input_frame_count != 0 { - decoder.as_mut().imageCountLimit = max_input_frame_count; - } - - // Safety: The decoder is valid. - let io = NonNull::new(unsafe { libavif_sys::avifIOCreateMemoryReader(data.as_ptr(), data.len()) }) - .ok_or(AvifError::OutOfMemory) - .context("failed to create avif io") - .map_err(DecoderError::Other) - .map_err(ProcessorError::AvifDecode)?; - - // Set the io pointer. - decoder.as_mut().io = io.as_ptr(); - - // Parse the data. 
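- // avifDecoderParse reads only the container metadata; frames are decoded one at a time by avifDecoderNextImage below.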
- AvifError::from_code(unsafe { libavif_sys::avifDecoderParse(decoder.as_ptr()) }) - .context("failed to parse avif") - .map_err(DecoderError::Other) - .map_err(ProcessorError::AvifDecode)?; - - let image = AvifRgbImage::new(decoder.as_ref()); - - let info = DecoderInfo { - width: image.width as usize, - height: image.height as usize, - loop_count: if decoder.as_ref().repetitionCount <= 0 { - LoopCount::Infinite - } else { - LoopCount::Finite(decoder.as_ref().repetitionCount as usize) - }, - frame_count: decoder.as_ref().imageCount.max(0) as _, - timescale: decoder.as_ref().timescale, - }; - - let max_input_duration_ms = job.task.limits.as_ref().map(|l| l.max_input_duration_ms).unwrap_or(0); - - if max_input_width != 0 && info.width > max_input_width as usize { - return Err(ProcessorError::AvifDecode(DecoderError::TooWide(info.width as i32))); - } - - if max_input_height != 0 && info.height > max_input_height as usize { - return Err(ProcessorError::AvifDecode(DecoderError::TooHigh(info.height as i32))); - } - - if max_input_frame_count != 0 && info.frame_count > max_input_frame_count as usize { - return Err(ProcessorError::AvifDecode(DecoderError::TooManyFrames( - info.frame_count as i64, - ))); - } - - Ok(Self { - _data: data, - img: AvifRgbImage::new(decoder.as_ref()), - decoder, - max_input_duration: max_input_duration_ms as u64 * info.timescale / 1000, - total_duration: 0, - info, - }) - } -} - -impl Decoder for AvifDecoder<'_> { - fn backend(&self) -> DecoderBackend { - DecoderBackend::LibAvif - } - - fn info(&self) -> DecoderInfo { - self.info - } - - fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); - - if AvifError::from_code(unsafe { libavif_sys::avifDecoderNextImage(self.decoder.as_ptr()) }).is_err() { - return Ok(None); - } - - AvifError::from_code(unsafe { libavif_sys::avifImageYUVToRGB(self.decoder.as_ref().image, &mut *self.img) }) - .context("failed to convert YUV to RGB") - .map_err(DecoderError::Other) - .map_err(ProcessorError::AvifDecode)?; - - let duration_ts = self.decoder.as_ref().imageTiming.durationInTimescales; - self.total_duration += duration_ts; - - if self.max_input_duration != 0 && self.total_duration > self.max_input_duration { - return Err(ProcessorError::AvifDecode(DecoderError::TooLong(self.total_duration as i64))); - } - - Ok(Some(Frame { - image: self.img.data().clone(), - duration_ts, - })) - } -} diff --git a/platform/image_processor/src/processor/job/decoder/libwebp.rs b/platform/image_processor/src/processor/job/decoder/libwebp.rs deleted file mode 100644 index 24f2bae75..000000000 --- a/platform/image_processor/src/processor/job/decoder/libwebp.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::borrow::Cow; -use std::ptr::NonNull; - -use anyhow::{anyhow, Context}; -use imgref::Img; - -use super::{Decoder, DecoderBackend, DecoderInfo, LoopCount}; -use crate::database::Job; -use crate::processor::error::{DecoderError, ProcessorError, Result}; -use crate::processor::job::frame::Frame; -use crate::processor::job::libwebp::{zero_memory_default, WebPError}; -use crate::processor::job::smart_object::SmartPtr; - -pub struct WebpDecoder<'data> { - info: DecoderInfo, - decoder: SmartPtr, - _data: Cow<'data, [u8]>, - timestamp: i32, - total_duration: u64, - max_input_duration: u64, -} - -impl<'data> WebpDecoder<'data> { - pub fn new(job: &Job, data: Cow<'data, [u8]>) -> Result { - let max_input_width = job.task.limits.as_ref().map(|l| l.max_input_width).unwrap_or(0); - let max_input_height = job.task.limits.as_ref().map(|l| 
l.max_input_height).unwrap_or(0); - let max_input_frame_count = job.task.limits.as_ref().map(|l| l.max_input_frame_count).unwrap_or(0); - let max_input_duration_ms = job.task.limits.as_ref().map(|l| l.max_input_duration_ms).unwrap_or(0); - - let decoder = SmartPtr::new( - NonNull::new(unsafe { - libwebp_sys::WebPAnimDecoderNew( - &libwebp_sys::WebPData { - bytes: data.as_ptr(), - size: data.len(), - }, - std::ptr::null(), - ) - }) - .ok_or(WebPError::OutOfMemory) - .context("failed to create webp decoder") - .map_err(DecoderError::Other) - .map_err(ProcessorError::WebPDecode)?, - |decoder| { - // Safety: The decoder is valid. - unsafe { - libwebp_sys::WebPAnimDecoderDelete(decoder.as_ptr()); - } - }, - ); - - let mut info = zero_memory_default::(); - - // Safety: both pointers are valid and the decoder is valid. - if unsafe { libwebp_sys::WebPAnimDecoderGetInfo(decoder.as_ptr(), &mut info) } == 0 { - return Err(ProcessorError::WebPDecode(DecoderError::Other(anyhow!( - "failed to get webp info" - )))); - } - - if max_input_width != 0 && info.canvas_width > max_input_width { - return Err(ProcessorError::WebPDecode(DecoderError::TooWide(info.canvas_width as i32))); - } - - if max_input_height != 0 && info.canvas_height > max_input_height { - return Err(ProcessorError::WebPDecode(DecoderError::TooHigh(info.canvas_height as i32))); - } - - if max_input_frame_count != 0 && info.frame_count > max_input_frame_count { - return Err(ProcessorError::WebPDecode(DecoderError::TooManyFrames( - info.frame_count as i64, - ))); - } - - Ok(Self { - info: DecoderInfo { - width: info.canvas_width as _, - height: info.canvas_height as _, - loop_count: match info.loop_count { - 0 => LoopCount::Infinite, - _ => LoopCount::Finite(info.loop_count as _), - }, - frame_count: info.frame_count as _, - timescale: 1000, - }, - max_input_duration: max_input_duration_ms as u64, - decoder, - _data: data, - total_duration: 0, - timestamp: 0, - }) - } -} - -impl Decoder for WebpDecoder<'_> { - fn backend(&self) -> DecoderBackend { - DecoderBackend::LibWebp - } - - fn info(&self) -> DecoderInfo { - self.info - } - - fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); - - let mut buf = std::ptr::null_mut(); - let previous_timestamp = self.timestamp; - - // Safety: The buffer is a valid pointer to a null ptr, timestamp is a valid - // pointer to i32, and the decoder is valid. - let result = unsafe { libwebp_sys::WebPAnimDecoderGetNext(self.decoder.as_ptr(), &mut buf, &mut self.timestamp) }; - - // If 0 is returned, the animation is over. 
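- // WebPAnimDecoderGetNext reports each frame's end timestamp in milliseconds; the frame duration below is the delta from the previous timestamp.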
- if result == 0 { - return Ok(None); - } - - let buf = NonNull::new(buf) - .ok_or(WebPError::OutOfMemory) - .context("failed to get webp frame") - .map_err(DecoderError::Other) - .map_err(ProcessorError::WebPDecode)?; - - let image = - unsafe { std::slice::from_raw_parts(buf.as_ptr() as *const rgb::RGBA8, self.info.width * self.info.height) }; - - let duration_ts = (self.timestamp - previous_timestamp).max(0) as u64; - self.total_duration += duration_ts; - - if self.max_input_duration != 0 && self.total_duration > self.max_input_duration { - return Err(ProcessorError::WebPDecode(DecoderError::TooLong(self.total_duration as i64))); - } - - Ok(Some(Frame { - image: Img::new(image.to_vec(), self.info.width, self.info.height), - duration_ts: (self.timestamp - previous_timestamp).max(0) as _, - })) - } -} diff --git a/platform/image_processor/src/processor/job/encoder/gifski.rs b/platform/image_processor/src/processor/job/encoder/gifski.rs deleted file mode 100644 index 2dac130bd..000000000 --- a/platform/image_processor/src/processor/job/encoder/gifski.rs +++ /dev/null @@ -1,86 +0,0 @@ -use anyhow::Context; -use utils::task::Task; - -use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; -use crate::processor::error::{ProcessorError, Result}; -use crate::processor::job::decoder::LoopCount; -use crate::processor::job::frame::Frame; - -pub struct GifskiEncoder { - collector: gifski::Collector, - writer: Task>>, - info: EncoderInfo, -} - -impl GifskiEncoder { - pub fn new(settings: EncoderSettings) -> Result { - let (collector, writer) = gifski::new(gifski::Settings { - repeat: match settings.loop_count { - LoopCount::Infinite => gifski::Repeat::Infinite, - LoopCount::Finite(count) => gifski::Repeat::Finite(count as u16), - }, - fast: settings.fast, - ..Default::default() - }) - .context("failed to create gifski encoder") - .map_err(ProcessorError::GifskiEncode)?; - - Ok(Self { - collector, - writer: Task::spawn("gifski writer", move || { - let mut buffer = Vec::new(); - writer - .write(&mut buffer, &mut gifski::progress::NoProgress {}) - .context("failed to write gifski output") - .map_err(ProcessorError::GifskiEncode)?; - Ok(buffer) - }), - info: EncoderInfo { - duration: 0, - frame_count: 0, - frontend: EncoderFrontend::Gifski, - height: 0, - loop_count: settings.loop_count, - timescale: settings.timescale, - width: 0, - }, - }) - } - - fn duration(&mut self, duration: u64) -> f64 { - self.info.duration += duration; - self.info.duration as f64 / self.info.timescale as f64 - } -} - -impl Encoder for GifskiEncoder { - fn info(&self) -> EncoderInfo { - self.info - } - - fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); - - let frame = frame.to_owned(); - self.info.height = frame.image.height(); - self.info.width = frame.image.width(); - let duration = self.duration(frame.duration_ts); - self.collector - .add_frame_rgba(self.info.frame_count, frame.image, duration) - .context("failed to add frame to gifski") - .map_err(ProcessorError::GifskiEncode)?; - self.info.frame_count += 1; - Ok(()) - } - - fn finish(self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); - - drop(self.collector); - - self.writer - .join() - .map_err(|err| anyhow::anyhow!("failed to join gifski thread: {:?}", err)) - .map_err(ProcessorError::GifskiEncode)? 
- } -} diff --git a/platform/image_processor/src/processor/job/frame.rs b/platform/image_processor/src/processor/job/frame.rs deleted file mode 100644 index 9dd4409d5..000000000 --- a/platform/image_processor/src/processor/job/frame.rs +++ /dev/null @@ -1,7 +0,0 @@ -use imgref::ImgVec; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Frame { - pub image: ImgVec, - pub duration_ts: u64, -} diff --git a/platform/image_processor/src/processor/job/mod.rs b/platform/image_processor/src/processor/job/mod.rs deleted file mode 100644 index 128676c18..000000000 --- a/platform/image_processor/src/processor/job/mod.rs +++ /dev/null @@ -1,257 +0,0 @@ -use std::borrow::Cow; -use std::sync::Arc; -use std::time::Duration; - -use ::utils::prelude::FutureTimeout; -use ::utils::task::AsyncTask; -use aws_sdk_s3::types::ObjectCannedAcl; -use binary_helper::s3::PutObjectOptions; -use bytes::Bytes; -use file_format::FileFormat; -use futures::FutureExt; -use prost::Message; -use tokio::select; -use tracing::Instrument; - -use self::decoder::DecoderBackend; -use super::error::{ProcessorError, Result}; -use super::utils; -use crate::database; -use crate::global::ImageProcessorGlobal; -use crate::processor::utils::refresh_job; - -pub(crate) mod decoder; -pub(crate) mod encoder; -pub(crate) mod frame; -pub(crate) mod libavif; -pub(crate) mod libwebp; -pub(crate) mod process; -pub(crate) mod resize; -pub(crate) mod smart_object; - -pub(crate) struct Job<'a, G: ImageProcessorGlobal> { - pub(crate) global: &'a Arc, - pub(crate) job: database::Job, -} - -#[tracing::instrument(skip(global, job), fields(job_id = %job.id), level = "info")] -pub async fn handle_job(global: &Arc, job: database::Job) { - let job = Job { global, job }; - - tracing::info!("processing job"); - - if let Err(err) = job.process().in_current_span().await { - tracing::error!(err = %err, "job failed"); - } -} - -impl<'a, G: ImageProcessorGlobal> Job<'a, G> { - async fn download_source(&self) -> Result { - if self.job.task.input_path.starts_with("http://") || self.job.task.input_path.starts_with("https://") { - if !self.global.config().allow_http { - return Err(ProcessorError::HttpDownloadDisabled); - } - - tracing::debug!("downloading {}", self.job.task.input_path); - - Ok(self - .global - .http_client() - .get(&self.job.task.input_path) - .send() - .await - .map_err(ProcessorError::HttpDownload)? - .error_for_status() - .map_err(ProcessorError::HttpDownload)? - .bytes() - .await - .map_err(ProcessorError::HttpDownload)?) 
- } else { - tracing::debug!( - "downloading {}/{}", - self.global.config().source_bucket.name, - self.job.task.input_path - ); - - let response = self - .global - .s3_source_bucket() - .get_object(&self.job.task.input_path) - .await - .map_err(ProcessorError::S3Download)?; - - let body = response.body.collect().await.map_err(ProcessorError::S3DownloadStream)?; - Ok(body.into_bytes()) - } - } - - pub(crate) async fn process(self) -> Result<()> { - if let Err(e) = self.process_with_timeout().in_current_span().await { - tracing::warn!(err = %e, "job failed"); - tracing::debug!("publishing job failure event to {}", self.job.task.callback_subject); - self.global - .nats() - .publish( - self.job.task.callback_subject.clone(), - pb::scuffle::platform::internal::events::ProcessedImage { - job_id: Some(self.job.id.into()), - result: Some(pb::scuffle::platform::internal::events::processed_image::Result::Failure( - pb::scuffle::platform::internal::events::processed_image::Failure { - reason: e.to_string(), - friendly_message: e.friendly_message(), - }, - )), - } - .encode_to_vec() - .into(), - ) - .in_current_span() - .await - .map_err(|e| { - tracing::error!(err = %e, "failed to publish event"); - e - })?; - } - - // delete job - utils::delete_job(self.global, self.job.id).await?; - - Ok(()) - } - - async fn process_with_timeout(&self) -> Result<()> { - let mut interval = tokio::time::interval(std::time::Duration::from_secs(15)); - - let job_id = self.job.id; - let max_processing_time_ms = self.job.task.limits.as_ref().map(|l| l.max_processing_time_ms); - - let time_limit = async { - if let Some(max_processing_time_ms) = max_processing_time_ms { - tokio::time::sleep(std::time::Duration::from_millis(max_processing_time_ms as u64)).await; - Err(ProcessorError::TimeLimitExceeded) - } else { - Ok(()) - } - }; - - let global = self.global.clone(); - let mut process = std::pin::pin!(self.inner_process().in_current_span()); - let time_limit = std::pin::pin!(time_limit); - let mut time_limit = time_limit.fuse(); - - loop { - select! 
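- // Race the job-refresh ticker (which re-claims the job every 15 seconds), the optional processing deadline, and the processing future itself.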
diff --git a/platform/image_processor/src/processor/job/process.rs b/platform/image_processor/src/processor/job/process.rs
deleted file mode 100644
index 020ba7b8e..000000000
--- a/platform/image_processor/src/processor/job/process.rs
+++ /dev/null
@@ -1,283 +0,0 @@
-use std::borrow::Cow;
-use std::collections::{HashMap, HashSet};
-
-use bytes::Bytes;
-use pb::scuffle::platform::internal::types::ImageFormat;
-use rgb::ComponentBytes;
-use sha2::Digest;
-
-use super::decoder::{Decoder, DecoderBackend, LoopCount};
-use super::encoder::{AnyEncoder, Encoder, EncoderFrontend, EncoderSettings};
-use super::resize::{ImageResizer, ImageResizerTarget};
-use crate::database::Job;
-use crate::processor::error::{ProcessorError, Result};
-
-#[derive(Debug)]
-#[allow(dead_code)]
-pub struct Image {
-    pub width: usize,
-    pub height: usize,
-    pub frame_count: usize,
-    pub duration: f64,
-    pub encoder: EncoderFrontend,
-    pub data: Bytes,
-    pub loop_count: LoopCount,
-    pub request: (usize, ImageFormat),
-}
-
-impl Image {
-    pub fn file_extension(&self) -> &'static str {
-        match self.request.1 {
-            ImageFormat::Avif | ImageFormat::AvifStatic => "avif",
-            ImageFormat::Webp | ImageFormat::WebpStatic => "webp",
-            ImageFormat::Gif => "gif",
-            ImageFormat::PngStatic => "png",
-        }
-    }
-
-    pub fn content_type(&self) -> &'static str {
-        match self.request.1 {
-            ImageFormat::Avif | ImageFormat::AvifStatic => "image/avif",
-            ImageFormat::Webp | ImageFormat::WebpStatic => "image/webp",
-            ImageFormat::Gif => "image/gif",
-            ImageFormat::PngStatic => "image/png",
-        }
-    }
-
-    pub fn is_static(&self) -> bool {
-        matches!(
-            self.request.1,
-            ImageFormat::AvifStatic | ImageFormat::WebpStatic | ImageFormat::PngStatic
-        )
-    }
-
-    pub fn url(&self, prefix: &str) -> String {
-        format!(
-            "{}/{}{}x.{}",
-            prefix.trim_end_matches('/'),
-            self.is_static().then_some("static_").unwrap_or_default(),
-            self.request.0,
-            self.file_extension()
-        )
-    }
-}
-
-#[derive(Debug)]
-pub struct Images {
-    pub images: Vec<Image>,
-}
-
-pub fn process_job(backend: DecoderBackend, job: &Job, data: Cow<'_, [u8]>) -> Result<Images> {
-    let mut decoder = backend.build(job, data)?;
-
-    let info = decoder.info();
-
-    let formats = job.task.formats().collect::<HashSet<_>>();
-    let scales = job.task.scales.iter().map(|s| *s as usize).collect::<HashSet<_>>();
-
-    if formats.is_empty() || scales.is_empty() {
-        tracing::debug!("no formats or scales specified");
-        return Err(ProcessorError::InvalidJobState);
-    }
-
-    let static_formats = formats
-        .iter()
-        .filter_map(|f| match f {
-            ImageFormat::AvifStatic => Some(EncoderFrontend::LibAvif),
-            ImageFormat::WebpStatic => Some(EncoderFrontend::LibWebp),
-            ImageFormat::PngStatic => Some(EncoderFrontend::Png),
-            _ => None,
-        })
-        .collect::<Vec<_>>();
-
-    let animation_formats = formats
-        .iter()
-        .filter_map(|f| match f {
-            ImageFormat::Avif => Some(EncoderFrontend::LibAvif),
-            ImageFormat::Webp => Some(EncoderFrontend::LibWebp),
-            ImageFormat::Gif => Some(EncoderFrontend::Gifski),
-            _ => None,
-        })
-        .collect::<Vec<_>>();
-
-    if static_formats.is_empty() && animation_formats.is_empty() {
-        tracing::debug!("no static or animation formats specified");
-        return Err(ProcessorError::InvalidJobState);
-    }
-
-    let anim_settings = EncoderSettings {
-        fast: true,
-        loop_count: info.loop_count,
-        timescale: info.timescale,
-        static_image: false,
-    };
-
-    let static_settings = EncoderSettings {
-        fast: true,
-        loop_count: info.loop_count,
-        timescale: info.timescale,
-        static_image: true,
-    };
-
-    let (base_width, base_height) = if job.task.upscale {
-        (job.task.base_width as f64, job.task.base_height as f64)
-    } else {
-        let largest_scale = scales.iter().max().copied().unwrap_or(1);
-
-        let width = info.width as f64 / largest_scale as f64;
-        let height = info.height as f64 / largest_scale as f64;
-
-        if width > job.task.base_width as f64 && height > job.task.base_height as f64 {
-            (job.task.base_width as f64, job.task.base_height as f64)
-        } else {
-            (width, height)
-        }
-    };
-
-    let mut resizers = scales
-        .iter()
-        .map(|scale| {
-            (
-                *scale,
-                ImageResizer::new(ImageResizerTarget {
-                    height: base_height.ceil() as usize * scale,
-                    width: base_width.ceil() as usize * scale,
-                    algorithm: job.task.resize_algorithm(),
-                    method: job.task.resize_method(),
-                    upscale: job.task.upscale,
-                }),
-                Vec::with_capacity(info.frame_count),
-            )
-        })
-        .collect::<Vec<_>>();
-
-    let mut frame_hashes = HashMap::new();
-    let mut frame_order = Vec::with_capacity(info.frame_count);
-    let mut count = 0;
-
-    tracing::debug!("decoding frames");
-
-    while let Some(frame) = decoder.decode()? {
-        let hash = sha2::Sha256::digest(frame.image.buf().as_bytes());
-        if let Some(idx) = frame_hashes.get(&hash) {
-            if let Some((last_idx, last_duration)) = frame_order.last_mut() {
-                if last_idx == idx {
-                    *last_duration += frame.duration_ts;
-                } else {
-                    frame_order.push((*idx, frame.duration_ts));
-                }
-            } else {
-                frame_order.push((*idx, frame.duration_ts));
-            }
-        } else {
-            frame_hashes.insert(hash, count);
-            frame_order.push((count, frame.duration_ts));
-
-            count += 1;
-            for (_, resizer, frames) in resizers.iter_mut() {
-                frames.push(resizer.resize(&frame)?);
-            }
-        }
-    }
-
-    tracing::debug!("decoded frames: {count}");
-
-    // We no longer need the decoder so we can free it.
-    drop(decoder);
-
-    struct Stack {
-        scale: usize,
-        static_encoders: Vec<AnyEncoder>,
-        animation_encoders: Vec<AnyEncoder>,
-    }
-
-    let mut stacks = resizers
-        .iter_mut()
-        .map(|(scale, _, frames)| {
-            Ok(Stack {
-                scale: *scale,
-                static_encoders: static_formats
-                    .iter()
-                    .map(|&frontend| frontend.build(static_settings))
-                    .collect::<Result<Vec<_>>>()?,
-                animation_encoders: if frames.len() > 1 {
-                    animation_formats
-                        .iter()
-                        .map(|&frontend| frontend.build(anim_settings))
-                        .collect::<Result<Vec<_>>>()?
-                } else {
-                    Vec::new()
-                },
-            })
-        })
-        .collect::<Result<Vec<_>>>()?;
-
-    for (stack, frames) in stacks.iter_mut().zip(resizers.iter_mut().map(|(_, _, frames)| frames)) {
-        for encoder in stack.animation_encoders.iter_mut() {
-            for (idx, timing) in frame_order.iter() {
-                let frame = &mut frames[*idx];
-                frame.duration_ts = *timing;
-                encoder.add_frame(frame)?;
-            }
-
-            tracing::debug!("added frames to animation encoder: {count} => {:?}", encoder.info().frontend);
-        }
-
-        for encoder in stack.static_encoders.iter_mut() {
-            encoder.add_frame(&frames[0])?;
-            tracing::debug!("added frame to static encoder: 1 => {:?}", encoder.info().frontend);
-        }
-    }
-
-    let mut images = Vec::new();
-
-    for stack in stacks.into_iter() {
-        for encoder in stack.animation_encoders.into_iter() {
-            let info = encoder.info();
-            let output = encoder.finish()?;
-            images.push(Image {
-                width: info.width,
-                height: info.height,
-                frame_count: info.frame_count,
-                duration: info.duration as f64 / info.timescale as f64,
-                encoder: info.frontend,
-                data: output.into(),
-                loop_count: info.loop_count,
-                request: (
-                    stack.scale,
-                    match info.frontend {
-                        EncoderFrontend::Gifski => ImageFormat::Gif,
-                        EncoderFrontend::LibAvif => ImageFormat::Avif,
-                        EncoderFrontend::LibWebp => ImageFormat::Webp,
-                        EncoderFrontend::Png => unreachable!(),
-                    },
-                ),
-            });
-        }
-
-        for encoder in stack.static_encoders.into_iter() {
-            let info = encoder.info();
-            let output = encoder.finish()?;
-            images.push(Image {
-                width: info.width,
-                height: info.height,
-                frame_count: info.frame_count,
-                duration: info.duration as f64 / info.timescale as f64,
-                encoder: info.frontend,
-                data: output.into(),
-                loop_count: info.loop_count,
-                request: (
-                    stack.scale,
-                    match info.frontend {
-                        EncoderFrontend::LibAvif => ImageFormat::AvifStatic,
-                        EncoderFrontend::LibWebp => ImageFormat::WebpStatic,
-                        EncoderFrontend::Png => ImageFormat::PngStatic,
-                        EncoderFrontend::Gifski => unreachable!(),
-                    },
-                ),
-            });
-        }
-    }
-
-    Ok(Images { images })
-}
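A detail worth noting in the deleted `process_job`: frames are deduplicated before resizing by hashing the raw pixel buffer, and repeats are run-length merged into `frame_order`, so each unique frame is resized and encoded exactly once while playback timing is preserved. The same bookkeeping, reduced to a self-contained sketch (frames are modeled here as hypothetical `(pixels, duration)` pairs):

```rust
use std::collections::HashMap;

use sha2::{Digest, Sha256};

/// Returns the indices of frames that must actually be processed, plus the
/// playback order as (unique_index, accumulated_duration) pairs.
fn dedupe_frames(frames: &[(Vec<u8>, u64)]) -> (Vec<usize>, Vec<(usize, u64)>) {
    let mut seen: HashMap<[u8; 32], usize> = HashMap::new();
    let mut unique = Vec::new();
    let mut order: Vec<(usize, u64)> = Vec::new();

    for (i, (pixels, duration)) in frames.iter().enumerate() {
        let hash: [u8; 32] = Sha256::digest(pixels).into();
        match seen.get(&hash) {
            // Repeat of an earlier frame: extend the previous run if it is the
            // same frame, otherwise start a new (index, duration) run.
            Some(&idx) => match order.last_mut() {
                Some((last, dur)) if *last == idx => *dur += *duration,
                _ => order.push((idx, *duration)),
            },
            // First sighting: this frame gets resized/encoded for real.
            None => {
                let idx = unique.len();
                seen.insert(hash, idx);
                unique.push(i);
                order.push((idx, *duration));
            }
        }
    }

    (unique, order)
}
```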
diff --git a/platform/image_processor/src/processor/job/resize.rs b/platform/image_processor/src/processor/job/resize.rs
deleted file mode 100644
index df5b910f9..000000000
--- a/platform/image_processor/src/processor/job/resize.rs
+++ /dev/null
@@ -1,201 +0,0 @@
-use anyhow::Context;
-use fast_image_resize as fr;
-use imgref::Img;
-use pb::scuffle::platform::internal::image_processor::task::{ResizeAlgorithm, ResizeMethod};
-use rgb::{ComponentBytes, RGBA};
-
-use super::frame::Frame;
-use crate::processor::error::{ProcessorError, Result};
-
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
-pub struct ImageResizerTarget {
-    pub width: usize,
-    pub height: usize,
-    pub algorithm: ResizeAlgorithm,
-    pub method: ResizeMethod,
-    pub upscale: bool,
-}
-
-/// Resizes images to the given target size.
-pub struct ImageResizer {
-    resizer: fr::Resizer,
-    target: ImageResizerTarget,
-}
-
-impl std::fmt::Debug for ImageResizer {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("ImageResizer").field("target", &self.target).finish()
-    }
-}
-
-impl ImageResizer {
-    pub fn new(target: ImageResizerTarget) -> Self {
-        Self {
-            resizer: fr::Resizer::new(match target.algorithm {
-                ResizeAlgorithm::Nearest => fr::ResizeAlg::Nearest,
-                ResizeAlgorithm::Box => fr::ResizeAlg::Convolution(fr::FilterType::Box),
-                ResizeAlgorithm::Bilinear => fr::ResizeAlg::Convolution(fr::FilterType::Bilinear),
-                ResizeAlgorithm::Hamming => fr::ResizeAlg::Convolution(fr::FilterType::Hamming),
-                ResizeAlgorithm::CatmullRom => fr::ResizeAlg::Convolution(fr::FilterType::CatmullRom),
-                ResizeAlgorithm::Mitchell => fr::ResizeAlg::Convolution(fr::FilterType::Mitchell),
-                ResizeAlgorithm::Lanczos3 => fr::ResizeAlg::Convolution(fr::FilterType::Lanczos3),
-            }),
-            target,
-        }
-    }
-
-    /// Resize the given frame to the target size, returning a reference to the
-    /// resized frame. After this function returns, the original frame can be
-    /// dropped; the returned frame is valid for the lifetime of the Resizer.
-    pub fn resize(&mut self, frame: &Frame) -> Result<Frame> {
-        let _abort_guard = utils::task::AbortGuard::new();
-
-        let (width, height) = if self.target.method == ResizeMethod::Exact {
-            (self.target.width, self.target.height)
-        } else {
-            let (mut width, mut height) = if frame.image.width() > frame.image.height() {
-                let width = self.target.width as f64;
-                let height = frame.image.height() as f64 / frame.image.width() as f64 * width;
-                (width, height)
-            } else {
-                let height = self.target.height as f64;
-                let width = frame.image.width() as f64 / frame.image.height() as f64 * height;
-                (width, height)
-            };
-
-            if width > self.target.width as f64 {
-                height = height / width * self.target.width as f64;
-                width = self.target.width as f64;
-            } else if height > self.target.height as f64 {
-                width = width / height * self.target.height as f64;
-                height = self.target.height as f64;
-            }
-
-            let (width, height) = (width.round() as usize, height.round() as usize);
-
-            (width, height)
-        };
-
-        let (mut dst_image, crop_box) =
-            if self.target.method != ResizeMethod::Fit && (width != self.target.width || height != self.target.height) {
-                let height_delta = self.target.height - height;
-                let width_delta = self.target.width - width;
-
-                let (top, bottom, left, right) = match self.target.method {
-                    ResizeMethod::PadBottomLeft => (0, height_delta, width_delta, 0),
-                    ResizeMethod::PadBottomRight => (0, height_delta, 0, width_delta),
-                    ResizeMethod::PadTopLeft => (height_delta, 0, width_delta, 0),
-                    ResizeMethod::PadTopRight => (height_delta, 0, 0, width_delta),
-                    ResizeMethod::PadCenter => {
-                        let top = height_delta / 2;
-                        let bottom = height_delta - top;
-                        let left = width_delta / 2;
-                        let right = width_delta - left;
-                        (top, bottom, left, right)
-                    }
-                    ResizeMethod::PadCenterLeft => {
-                        let top = height_delta / 2;
-                        let bottom = height_delta - top;
-                        (top, bottom, width_delta, 0)
-                    }
-                    ResizeMethod::PadCenterRight => {
-                        let top = height_delta / 2;
-                        let bottom = height_delta - top;
-                        (top, bottom, 0, width_delta)
-                    }
-                    ResizeMethod::PadTopCenter => {
-                        let left = width_delta / 2;
-                        let right = width_delta - left;
-                        (height_delta, 0, left, right)
-                    }
-                    ResizeMethod::PadBottomCenter => {
-                        let left = width_delta / 2;
-                        let right = width_delta - left;
-                        (0, height_delta, left, right)
-                    }
-                    ResizeMethod::PadTop => (height_delta, 0, 0, 0),
-                    ResizeMethod::PadBottom => (0, height_delta, 0, 0),
-                    ResizeMethod::PadLeft => (0, 0, width_delta, 0),
-                    ResizeMethod::PadRight => (0, 0, 0, width_delta),
-                    ResizeMethod::Exact => unreachable!(),
-                    ResizeMethod::Fit => unreachable!(),
-                };
-
-                let total_width = width + left + right;
-                let total_height = height + top + bottom;
-
-                let dst_image = fr::Image::new(
-                    (total_width as u32).try_into().unwrap(),
-                    (total_height as u32).try_into().unwrap(),
-                    fr::pixels::PixelType::U8x4,
-                );
-                (
-                    dst_image,
-                    (
-                        left as u32,
-                        top as u32,
-                        (height as u32).try_into().unwrap(),
-                        (width as u32).try_into().unwrap(),
-                    ),
-                )
-            } else {
-                let dst_image = fr::Image::new(
-                    (width as u32).try_into().unwrap(),
-                    (height as u32).try_into().unwrap(),
-                    fr::pixels::PixelType::U8x4,
-                );
-                (
-                    dst_image,
-                    (
-                        0_u32,
-                        0_u32,
-                        (height as u32).try_into().unwrap(),
-                        (width as u32).try_into().unwrap(),
-                    ),
-                )
-            };
-
-        let mut cropped_dst_view = dst_image
-            .view_mut()
-            .crop(crop_box.0, crop_box.1, crop_box.2, crop_box.3)
-            .context("failed to crop image")
-            .map_err(ProcessorError::ImageResize)?;
-
-        let size = frame.image.buf().len();
-
-        let src = fr::Image::from_slice_u8(
-            (frame.image.width() as u32).try_into().unwrap(),
-            (frame.image.height() as u32).try_into().unwrap(),
-            unsafe { std::slice::from_raw_parts_mut(frame.image.buf().as_ptr() as *mut u8, size * 4) },
-            fr::pixels::PixelType::U8x4,
-        )
-        .unwrap();
-        self.resizer
-            .resize(&src.view(), &mut cropped_dst_view)
-            .context("failed to resize image")
-            .map_err(ProcessorError::ImageResize)?;
-        drop(src);
-
-        let width = dst_image.width().get() as usize;
-        let height = dst_image.height().get() as usize;
-        let buffer = dst_image.into_vec();
-
-        let buffer = unsafe {
-            let buf = buffer.into_boxed_slice();
-            let size = buf.len();
-            let ptr = Box::into_raw(buf) as *mut u8;
-
-            let new_size = size / 4;
-            assert!(new_size * 4 == size, "image buffer size mismatch");
-
-            Vec::from_raw_parts(ptr as *mut RGBA<u8>, new_size, new_size)
-        };
-
-        assert_eq!(buffer.as_bytes().len(), width * height * 4, "image buffer size mismatch");
-
-        Ok(Frame {
-            image: Img::new(buffer, width, height),
-            duration_ts: frame.duration_ts,
-        })
-    }
-}
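The deleted `resize` method splits the work into two stages: compute a fit size that preserves aspect ratio (unless the method is `ResizeMethod::Exact`), then distribute any leftover box space as padding according to the `Pad*` variant. The fit computation reduces to this small sketch:

```rust
/// Scale (w, h) to fit inside (max_w, max_h), preserving aspect ratio.
/// Mirrors the two-pass clamp in the deleted `resize` method.
fn fit(w: u32, h: u32, max_w: u32, max_h: u32) -> (u32, u32) {
    let (mut fw, mut fh) = if w > h {
        // Landscape: pin the width, derive the height.
        (max_w as f64, h as f64 / w as f64 * max_w as f64)
    } else {
        // Portrait or square: pin the height, derive the width.
        (w as f64 / h as f64 * max_h as f64, max_h as f64)
    };

    // The derived side may still overflow the box; clamp it and rescale
    // the other side proportionally.
    if fw > max_w as f64 {
        fh = fh / fw * max_w as f64;
        fw = max_w as f64;
    } else if fh > max_h as f64 {
        fw = fw / fh * max_h as f64;
        fh = max_h as f64;
    }

    (fw.round() as u32, fh.round() as u32)
}

#[test]
fn fits_inside_box() {
    // A 16:9 frame in a 30x30 box: 13 leftover rows for a Pad* method to fill.
    assert_eq!(fit(1920, 1080, 30, 30), (30, 17));
}
```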
diff --git a/platform/image_processor/src/processor/mod.rs b/platform/image_processor/src/processor/mod.rs
deleted file mode 100644
index 24d8bf238..000000000
--- a/platform/image_processor/src/processor/mod.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-use std::sync::atomic::AtomicUsize;
-use std::sync::Arc;
-
-use futures::StreamExt;
-use tokio::select;
-
-use self::error::Result;
-use crate::config::ImageProcessorConfig;
-use crate::global::ImageProcessorGlobal;
-use crate::processor::job::handle_job;
-
-pub(crate) mod error;
-pub(crate) mod job;
-pub(crate) mod utils;
-
-pub async fn run<G: ImageProcessorGlobal>(global: Arc<G>) -> Result<()> {
-    let config = global.config::<ImageProcessorConfig>();
-
-    let concurrency = AtomicUsize::new(config.concurrency);
-
-    let mut done = global.ctx().done();
-
-    let mut futures = futures::stream::FuturesUnordered::new();
-
-    let make_job_query = {
-        let global = &global;
-        let concurrency = &concurrency;
-        move |wait: bool| async move {
-            if wait {
-                tokio::time::sleep(std::time::Duration::from_secs(1)).await;
-            }
-
-            let concurrency = concurrency.load(std::sync::atomic::Ordering::Relaxed);
-
-            if concurrency == 0 {
-                tracing::debug!("concurrency limit reached, waiting for a slot");
-                None
-            } else {
-                tracing::debug!("querying for jobs: {concurrency}");
-                Some(utils::query_job(global, concurrency).await)
-            }
-        }
-    };
-
-    let mut job_query = Some(Box::pin(make_job_query(false)));
-
-    loop {
-        select! {
-            Some(jobs) = async {
-                if let Some(job_query_fut) = &mut job_query {
-                    let r = job_query_fut.await;
-                    job_query = None;
-                    r
-                } else {
-                    None
-                }
-            } => {
-                let jobs = jobs?;
-                tracing::debug!("got {} jobs", jobs.len());
-                job_query = Some(Box::pin(make_job_query(jobs.is_empty())));
-
-                for job in jobs {
-                    concurrency.fetch_sub(1, std::sync::atomic::Ordering::Relaxed);
-                    futures.push(handle_job(&global, job));
-                }
-            },
-            Some(_) = futures.next() => {
-                concurrency.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
-                if job_query.is_none() {
-                    job_query = Some(Box::pin(make_job_query(true)));
-                }
-            },
-            _ = &mut done => break,
-        }
-    }
-
-    Ok(())
-}
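`run` effectively hand-rolls a semaphore: the `AtomicUsize` counts free slots, `fetch_sub` takes a slot as each job future is pushed into the `FuturesUnordered` set, `fetch_add` returns it on completion, and the query future only asks the database for as many jobs as there are free slots. The same invariant expressed with `tokio::sync::Semaphore`, as a hedged sketch (`fetch_jobs` and `handle_job` are hypothetical stand-ins for the query and processing calls):

```rust
use std::sync::Arc;
use std::time::Duration;

use tokio::sync::Semaphore;

async fn fetch_jobs(limit: usize) -> Vec<u64> {
    let _ = limit;
    Vec::new() // stand-in for the database claim query
}

async fn handle_job(_job: u64) {}

async fn run(concurrency: usize) {
    let slots = Arc::new(Semaphore::new(concurrency));

    loop {
        // Like the atomic load in the original: only ask the queue for as
        // many jobs as can actually start right now.
        let free = slots.available_permits();
        if free == 0 {
            tokio::time::sleep(Duration::from_millis(100)).await;
            continue;
        }

        for job in fetch_jobs(free).await {
            let permit = slots.clone().acquire_owned().await.expect("semaphore closed");
            tokio::spawn(async move {
                handle_job(job).await;
                drop(permit); // frees the slot, like fetch_add in the original
            });
        }
    }
}
```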
diff --git a/platform/image_processor/src/processor/utils.rs b/platform/image_processor/src/processor/utils.rs
deleted file mode 100644
index c6a58b76a..000000000
--- a/platform/image_processor/src/processor/utils.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use std::sync::Arc;
-
-use ulid::Ulid;
-
-use super::error::ProcessorError;
-use crate::database::Job;
-use crate::global::ImageProcessorGlobal;
-use crate::processor::error::Result;
-
-pub async fn query_job<G: ImageProcessorGlobal>(global: &Arc<G>, limit: usize) -> Result<Vec<Job>> {
-    Ok(utils::database::query(
-        "UPDATE image_jobs
-        SET claimed_by = $1,
-            hold_until = NOW() + INTERVAL '30 seconds'
-        FROM (
-            SELECT id
-            FROM image_jobs
-            WHERE hold_until IS NULL OR hold_until < NOW()
-            ORDER BY priority DESC,
-                id DESC
-            LIMIT $2
-        ) AS job
-        WHERE image_jobs.id = job.id
-        RETURNING image_jobs.id, image_jobs.task",
-    )
-    .bind(global.config().instance_id)
-    .bind(limit as i64)
-    .build_query_as()
-    .fetch_all(global.db())
-    .await?)
-}
-
-pub async fn refresh_job<G: ImageProcessorGlobal>(global: &Arc<G>, job_id: Ulid) -> Result<()> {
-    let result = utils::database::query(
-        "UPDATE image_jobs
-        SET hold_until = NOW() + INTERVAL '30 seconds'
-        WHERE image_jobs.id = $1 AND image_jobs.claimed_by = $2",
-    )
-    .bind(job_id)
-    .bind(global.config().instance_id)
-    .build()
-    .execute(global.db())
-    .await?;
-
-    if result == 0 { Err(ProcessorError::LostJob) } else { Ok(()) }
-}
-
-pub async fn delete_job<G: ImageProcessorGlobal>(global: &Arc<G>, job_id: Ulid) -> Result<()> {
-    utils::database::query("DELETE FROM image_jobs WHERE id = $1")
-        .bind(job_id)
-        .build()
-        .execute(global.db())
-        .await?;
-
-    Ok(())
-}
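`query_job` claims work without explicit row locks: the `hold_until` lease column is the only mutual exclusion. A job becomes visible to other workers again 30 seconds after its last heartbeat, which is why `process_with_timeout` refreshes every 15 seconds (two chances to renew before the lease lapses) and why a crashed worker's jobs are reclaimed automatically. The claim statement's shape, annotated — the comments are editorial, not part of the deleted source:

```rust
// Annotated shape of the claim query deleted above.
const CLAIM_JOBS: &str = "
    UPDATE image_jobs
    SET claimed_by = $1,                           -- this worker's instance id
        hold_until = NOW() + INTERVAL '30 seconds' -- the lease
    FROM (
        SELECT id
        FROM image_jobs
        WHERE hold_until IS NULL OR hold_until < NOW() -- unclaimed, or lease lapsed
        ORDER BY priority DESC, id DESC
        LIMIT $2                                       -- one row per free slot
    ) AS job
    WHERE image_jobs.id = job.id
    RETURNING image_jobs.id, image_jobs.task";
```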
diff --git a/platform/image_processor/src/tests/global.rs b/platform/image_processor/src/tests/global.rs
deleted file mode 100644
index 19d6e0195..000000000
--- a/platform/image_processor/src/tests/global.rs
+++ /dev/null
@@ -1,99 +0,0 @@
-use std::sync::Arc;
-
-use utils::context::Context;
-
-use crate::config::ImageProcessorConfig;
-
-pub struct GlobalState {
-    ctx: Context,
-    config: ImageProcessorConfig,
-    nats: async_nats::Client,
-    jetstream: async_nats::jetstream::Context,
-    db: Arc<utils::database::Pool>,
-    s3_source_bucket: binary_helper::s3::Bucket,
-    s3_target_bucket: binary_helper::s3::Bucket,
-    http_client: reqwest::Client,
-}
-
-impl binary_helper::global::GlobalCtx for GlobalState {
-    fn ctx(&self) -> &Context {
-        &self.ctx
-    }
-}
-
-impl binary_helper::global::GlobalConfigProvider<ImageProcessorConfig> for GlobalState {
-    fn provide_config(&self) -> &ImageProcessorConfig {
-        &self.config
-    }
-}
-
-impl binary_helper::global::GlobalNats for GlobalState {
-    fn nats(&self) -> &async_nats::Client {
-        &self.nats
-    }
-
-    fn jetstream(&self) -> &async_nats::jetstream::Context {
-        &self.jetstream
-    }
-}
-
-impl binary_helper::global::GlobalDb for GlobalState {
-    fn db(&self) -> &Arc<utils::database::Pool> {
-        &self.db
-    }
-}
-
-impl binary_helper::global::GlobalConfig for GlobalState {}
-
-impl crate::global::ImageProcessorState for GlobalState {
-    fn s3_source_bucket(&self) -> &binary_helper::s3::Bucket {
-        &self.s3_source_bucket
-    }
-
-    fn s3_target_bucket(&self) -> &binary_helper::s3::Bucket {
-        &self.s3_target_bucket
-    }
-
-    fn http_client(&self) -> &reqwest::Client {
-        &self.http_client
-    }
-}
-
-// pub async fn mock_global_state(config: ImageProcessorConfig) ->
-// (Arc<GlobalState>, Handler) { let (ctx, handler) = Context::new();
-
-// dotenvy::dotenv().ok();
-
-// let logging_level = std::env::var("LOGGING_LEVEL").unwrap_or_else(|_|
-// "info".to_string());
-
-// logging::init(&logging_level, Default::default()).expect("failed to
-// initialize logging");
-
-// let database_uri =
-// std::env::var("PLATFORM_DATABASE_URL_TEST").expect("
-// PLATFORM_DATABASE_URL_TEST must be set"); let nats_addr =
-// std::env::var("NATS_ADDR").expect("NATS_URL must be set");
-
-// let nats = async_nats::connect(&nats_addr).await.expect("failed to connect to
-// nats"); let jetstream = async_nats::jetstream::new(nats.clone());
-
-// let db = Arc::new(
-// utils::database::Pool::connect(&database_uri)
-// .await
-// .expect("failed to connect to database"),
-// );
-
-// let global = Arc::new(GlobalState {
-// s3_source_bucket: config.source_bucket.setup().await.expect("failed to setup
-// source bucket"), s3_target_bucket:
-// config.target_bucket.setup().await.expect("failed to setup target bucket"),
-// config,
-// ctx,
-// nats,
-// jetstream,
-// db,
-// });
-
-// (global, handler)
-// }
diff --git a/platform/image_processor/src/tests/mod.rs b/platform/image_processor/src/tests/mod.rs
deleted file mode 100644
index 05ba721a9..000000000
--- a/platform/image_processor/src/tests/mod.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-mod global;
-mod processor;
-mod utils;
diff --git a/platform/image_processor/src/tests/processor/decoder.rs b/platform/image_processor/src/tests/processor/decoder.rs
deleted file mode 100644
index b0f13ba61..000000000
--- a/platform/image_processor/src/tests/processor/decoder.rs
+++ /dev/null
@@ -1,118 +0,0 @@
-use std::borrow::Cow;
-
-use imgref::ImgVec;
-
-use crate::processor::job::decoder::{Decoder, DecoderBackend, DecoderInfo, LoopCount};
-use crate::processor::job::frame::Frame;
-use crate::tests::utils::asset_bytes;
-
-fn decode(asset_name: &str, backend: DecoderBackend, expected_info: DecoderInfo, expected_frames: Vec<Frame>) {
-    let asset_bytes = asset_bytes(asset_name);
-
-    let start = std::time::Instant::now();
-
-    let mut decoder = backend
-        .build(&Default::default(), Cow::Owned(asset_bytes))
-        .expect("decoder build error");
-
-    let info = decoder.info();
-
-    assert_eq!(info.frame_count, expected_info.frame_count, "frame count mismatch");
-    assert_eq!(info.width, expected_info.width, "width mismatch");
-    assert_eq!(info.height, expected_info.height, "height mismatch");
-    assert_eq!(info.loop_count, expected_info.loop_count, "loop count mismatch");
-    assert_eq!(info.timescale, expected_info.timescale, "timescale mismatch");
-
-    let mut idx = 0;
-    while let Some(frame) = decoder.decode().expect("frame decode error") {
-        let expected = expected_frames.get(idx).expect("frame count mismatch");
-        assert_eq!(frame.duration_ts, expected.duration_ts, "frame duration_ts mismatch: {idx}",);
-        assert_eq!(frame.image.height(), expected.image.height(), "frame height mismatch: {idx}",);
-        assert_eq!(frame.image.width(), expected.image.width(), "frame width mismatch: {idx}",);
-        idx += 1;
-    }
-
-    assert_eq!(idx, expected_frames.len(), "frame count mismatch");
-
-    println!("decode time ({asset_name}): {:?}", start.elapsed());
-}
-
-#[test]
-fn decode_ffmpeg_gif_test() {
-    let expected_info = DecoderInfo {
-        timescale: 100,
-        frame_count: 93,
-        loop_count: LoopCount::Infinite,
-        height: 128,
-        width: 128,
-    };
-
-    let expected_frames = (0..93)
-        .map(|_| Frame {
-            duration_ts: 4,
-            image: ImgVec::new(vec![], 128, 128),
-        })
-        .collect();
-
-    decode("meow.gif", DecoderBackend::Ffmpeg, expected_info, expected_frames);
-}
-
-#[test]
-fn decode_ffmpeg_png_test() {
-    let expected_info = DecoderInfo {
-        timescale: 25,
-        frame_count: 1,
-        loop_count: LoopCount::Infinite,
-        height: 400,
-        width: 400,
-    };
-
-    let expected_frames = (0..1)
-        .map(|_| Frame {
-            duration_ts: 0,
-            image: ImgVec::new(vec![], 400, 400),
-        })
-        .collect();
-
-    decode("frog.png", DecoderBackend::Ffmpeg, expected_info, expected_frames);
-}
-
-#[test]
-fn decode_libwebp_webp_test() {
-    let expected_info = DecoderInfo {
-        timescale: 1000,
-        height: 128,
-        width: 128,
-        frame_count: 93,
-        loop_count: LoopCount::Infinite,
-    };
-
-    let expected_frames = (0..93)
-        .map(|_| Frame {
-            duration_ts: 40,
-            image: ImgVec::new(vec![], 128, 128),
-        })
-        .collect();
-
-    decode("meow.webp", DecoderBackend::LibWebp, expected_info, expected_frames);
-}
-
-#[test]
-fn decode_libavif_avif_test() {
-    let expected_info = DecoderInfo {
-        height: 128,
-        width: 128,
-        frame_count: 93,
-        loop_count: LoopCount::Infinite,
-        timescale: 100,
-    };
-
-    let expected_frames = (0..93)
-        .map(|_| Frame {
-            image: ImgVec::new(vec![], 128, 128),
-            duration_ts: 4,
-        })
-        .collect();
-
-    decode("meow.avif", DecoderBackend::LibAvif, expected_info, expected_frames);
-}
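These fixtures pin down the timescale convention shared by the decoders: `duration_ts` is measured in units of `1/timescale` seconds, so the same 93-frame animation decodes to `duration_ts = 4` at timescale 100 (GIF and AVIF) and `duration_ts = 40` at timescale 1000 (WebP) — 40 ms per frame either way:

```rust
/// duration_ts is expressed in units of 1/timescale seconds.
fn frame_ms(duration_ts: u64, timescale: u64) -> f64 {
    duration_ts as f64 * 1000.0 / timescale as f64
}

#[test]
fn fixtures_agree_on_frame_duration() {
    assert_eq!(frame_ms(4, 100), 40.0); // meow.gif / meow.avif above
    assert_eq!(frame_ms(40, 1000), 40.0); // meow.webp above
}
```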
diff --git a/platform/image_processor/src/tests/processor/encoder.rs b/platform/image_processor/src/tests/processor/encoder.rs
deleted file mode 100644
index 989d88a0f..000000000
--- a/platform/image_processor/src/tests/processor/encoder.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use std::borrow::Cow;
-use std::collections::HashMap;
-
-use rgb::ComponentBytes;
-use sha2::Digest;
-
-use crate::processor::job::decoder::{Decoder, DecoderBackend};
-use crate::processor::job::encoder::{Encoder, EncoderFrontend, EncoderSettings};
-use crate::processor::job::resize::{ImageResizer, ImageResizerTarget};
-use crate::tests::utils::asset_bytes;
-
-fn encode(asset_name: &str, backend: DecoderBackend, frontend: EncoderFrontend) {
-    let input_bytes = asset_bytes(asset_name);
-
-    let start = std::time::Instant::now();
-
-    let mut decoder = backend
-        .build(&Default::default(), Cow::Owned(input_bytes))
-        .expect("failed to build decoder");
-
-    let info = decoder.info();
-
-    let mut resizer = ImageResizer::new(ImageResizerTarget {
-        height: 30,
-        width: 30,
-        ..Default::default()
-    });
-
-    let mut frames = Vec::with_capacity(info.frame_count);
-    let mut frame_hashes = HashMap::new();
-    let mut frame_order = Vec::with_capacity(info.frame_count);
-    let mut count = 0;
-
-    while let Some(frame) = decoder.decode().expect("failed to decode") {
-        let hash = sha2::Sha256::digest(frame.image.buf().as_bytes());
-        if let Some(idx) = frame_hashes.get(&hash) {
-            if let Some((last_idx, last_duration)) = frame_order.last_mut() {
-                if last_idx == idx {
-                    *last_duration += frame.duration_ts;
-                } else {
-                    frame_order.push((*idx, frame.duration_ts));
-                }
-            } else {
-                frame_order.push((*idx, frame.duration_ts));
-            }
-        } else {
-            frame_hashes.insert(hash, count);
-            frame_order.push((count, frame.duration_ts));
-
-            count += 1;
-            frames.push(resizer.resize(&frame).expect("failed to resize"));
-        }
-    }
-
-    let mut encoder = frontend
-        .build(EncoderSettings {
-            fast: true,
-            loop_count: info.loop_count,
-            timescale: info.timescale,
-            static_image: false,
-        })
-        .expect("failed to build encoder");
-
-    for (idx, timing) in frame_order.into_iter() {
-        let resized = &mut frames[idx];
-        resized.duration_ts = timing;
-        encoder.add_frame(resized).expect("failed to add frame");
-    }
-
-    let info = encoder.info();
-    dbg!(&info);
-    let output = encoder.finish().expect("failed to finish");
-    let output_path = format!(
-        "/tmp/{}x{}.{}",
-        info.width,
-        info.height,
-        match info.frontend {
-            EncoderFrontend::Gifski => "gif",
-            EncoderFrontend::LibAvif => "avif",
-            EncoderFrontend::LibWebp => "webp",
-            EncoderFrontend::Png => "png",
-        }
-    );
-    std::fs::write(&output_path, output).expect("failed to write output");
-    println!("wrote output to {}", output_path);
-
-    println!("encode time ({asset_name}): {:?}", start.elapsed());
-}
-
-#[test]
-fn encode_test() {
-    encode("cat.gif", DecoderBackend::Ffmpeg, EncoderFrontend::LibWebp);
-    encode("meow.webp", DecoderBackend::LibWebp, EncoderFrontend::LibAvif);
-    encode("meow.avif", DecoderBackend::LibAvif, EncoderFrontend::Gifski);
-}
diff --git a/platform/image_processor/src/tests/processor/mod.rs b/platform/image_processor/src/tests/processor/mod.rs
deleted file mode 100644
index 114e509e6..000000000
--- a/platform/image_processor/src/tests/processor/mod.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-mod decoder;
-mod encoder;
-mod resize;
diff --git a/platform/image_processor/src/tests/processor/resize.rs b/platform/image_processor/src/tests/processor/resize.rs
deleted file mode 100644
index 0f692b106..000000000
--- a/platform/image_processor/src/tests/processor/resize.rs
+++ /dev/null
@@ -1,45 +0,0 @@
-use std::borrow::Cow;
-
-use crate::processor::job::decoder::{Decoder, DecoderBackend};
-use crate::processor::job::resize::{ImageResizer, ImageResizerTarget};
-use crate::tests::utils::asset_bytes;
-
-fn resize(asset_name: &str, backend: DecoderBackend) {
-    let input_bytes = asset_bytes(asset_name);
-
-    let start = std::time::Instant::now();
-
-    let mut decoder = backend
-        .build(&Default::default(), Cow::Owned(input_bytes))
-        .expect("decoder build error");
-
-    let mut resizer = ImageResizer::new(ImageResizerTarget {
-        height: 30,
-        width: 30,
-        ..Default::default()
-    });
-
-    while let Some(frame) = decoder.decode().expect("frame decode error") {
-        let resized = resizer.resize(&frame).expect("resize error");
-
-        assert_eq!(resized.image.width(), 30, "width mismatch");
-        assert_eq!(resized.image.height(), 30, "height mismatch");
-    }
-
-    println!("decode time ({asset_name}): {:?}", start.elapsed());
-}
-
-#[test]
-fn resize_gif_test() {
-    resize("meow.gif", DecoderBackend::Ffmpeg);
-}
-
-#[test]
-fn resize_webp_test() {
-    resize("meow.webp", DecoderBackend::LibWebp);
-}
-
-#[test]
-fn resize_avif_test() {
-    resize("meow.avif", DecoderBackend::LibAvif);
-}
diff --git a/platform/image_processor/src/tests/utils.rs b/platform/image_processor/src/tests/utils.rs
deleted file mode 100644
index 31e209366..000000000
--- a/platform/image_processor/src/tests/utils.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-use std::path::PathBuf;
-// use std::sync::Arc;
-
-// use utils::context::Handler;
-
-// use super::global::GlobalState;
-
-// pub async fn teardown(global: Arc<GlobalState>, handler: Handler) {
-// drop(global);
-// handler.cancel().await;
-// }
-
-pub fn asset_path(name: &str) -> PathBuf {
-    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
-        .parent()
-        .unwrap()
-        .join("assets")
-        .join(name)
-}
-
-pub fn asset_bytes(name: &str) -> Vec<u8> {
-    std::fs::read(asset_path(name)).unwrap()
-}
diff --git a/proto/scuffle/platform/internal/events/processed_image.proto b/proto/scuffle/platform/internal/events/processed_image.proto
deleted file mode 100644
index a2ab6f413..000000000
--- a/proto/scuffle/platform/internal/events/processed_image.proto
+++ /dev/null
@@ -1,24 +0,0 @@
-syntax = "proto3";
-
-package scuffle.platform.internal.events;
-
-import "scuffle/types/ulid.proto";
-import "scuffle/platform/internal/types/processed_image_variant.proto";
-
-message ProcessedImage {
-  message Success {
-    repeated scuffle.platform.internal.types.ProcessedImageVariant variants = 1;
-  }
-
-  message Failure {
-    string reason = 1;
-    string friendly_message = 2;
-  }
-
-  scuffle.types.Ulid job_id = 1;
-
-  oneof result {
-    Success success = 2;
-    Failure failure = 3;
-  }
-}
diff --git a/proto/scuffle/platform/internal/image_processor.proto b/proto/scuffle/platform/internal/image_processor.proto
deleted file mode 100644
index 7356908b5..000000000
--- a/proto/scuffle/platform/internal/image_processor.proto
+++ /dev/null
@@ -1,61 +0,0 @@
-syntax = "proto3";
-
-package scuffle.platform.internal.image_processor;
-
-import "scuffle/platform/internal/types/image_format.proto";
-
-message Task {
-  enum ResizeMethod {
-    Fit = 0;
-    Exact = 1;
-    PadBottomLeft = 2;
-    PadBottomRight = 3;
-    PadTopLeft = 4;
-    PadTopRight = 5;
-    PadCenter = 6;
-    PadCenterRight = 7;
-    PadCenterLeft = 8;
-    PadTopCenter = 9;
-    PadBottomCenter = 10;
-    PadTop = 11;
-    PadBottom = 12;
-    PadLeft = 13;
-    PadRight = 14;
-  }
-
-  enum ResizeAlgorithm {
-    Nearest = 0;
-    Box = 1;
-    Bilinear = 2;
-    Hamming = 3;
-    CatmullRom = 4;
-    Mitchell = 5;
-    Lanczos3 = 6;
-  }
-
-  string input_path = 1;
-
-  uint32 base_width = 2;
-  uint32 base_height = 3;
-
-  repeated scuffle.platform.internal.types.ImageFormat formats = 4;
-  ResizeMethod resize_method = 5;
-  ResizeAlgorithm resize_algorithm = 6;
-  repeated uint32 scales = 7;
-
-  bool upscale = 8;
-
-  string output_prefix = 9;
-
-  message Limits {
-    uint32 max_processing_time_ms = 1;
-    uint32 max_input_frame_count = 2;
-    uint32 max_input_width = 3;
-    uint32 max_input_height = 4;
-    uint32 max_input_duration_ms = 5;
-  }
-
-  optional Limits limits = 10;
-
-  string callback_subject = 11;
-}
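For orientation: `prost` compiles this `Task` message into a plain Rust struct under `pb::scuffle::platform::internal::image_processor`, with the nested enums stored as `i32` fields — that is what the deleted processor code matched on via `job.task.resize_method()` and friends. An illustrative construction; the literal path, prefix, and subject values below are made up for the example, not taken from the source:

```rust
use pb::scuffle::platform::internal::image_processor::task::{ResizeAlgorithm, ResizeMethod};
use pb::scuffle::platform::internal::image_processor::Task;
use pb::scuffle::platform::internal::types::ImageFormat;

fn example_task() -> Task {
    Task {
        input_path: "uploads/example.png".into(), // hypothetical path
        base_width: 128,
        base_height: 128,
        // prost stores proto enums as i32 in generated structs
        formats: vec![ImageFormat::Webp as i32, ImageFormat::AvifStatic as i32],
        resize_method: ResizeMethod::PadCenter as i32,
        resize_algorithm: ResizeAlgorithm::Lanczos3 as i32,
        scales: vec![1, 2, 4],
        upscale: false,
        output_prefix: "images/example".into(), // hypothetical prefix
        limits: None,
        callback_subject: "image_processor.results".into(), // hypothetical subject
    }
}
```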
diff --git a/proto/scuffle/platform/internal/types/image_format.proto b/proto/scuffle/platform/internal/types/image_format.proto
deleted file mode 100644
index 619e66959..000000000
--- a/proto/scuffle/platform/internal/types/image_format.proto
+++ /dev/null
@@ -1,12 +0,0 @@
-syntax = "proto3";
-
-package scuffle.platform.internal.types;
-
-enum ImageFormat {
-  WEBP = 0;
-  AVIF = 1;
-  GIF = 2;
-  WEBP_STATIC = 3;
-  AVIF_STATIC = 4;
-  PNG_STATIC = 5;
-}
diff --git a/proto/scuffle/platform/internal/types/processed_image_variant.proto b/proto/scuffle/platform/internal/types/processed_image_variant.proto
deleted file mode 100644
index d522251b5..000000000
--- a/proto/scuffle/platform/internal/types/processed_image_variant.proto
+++ /dev/null
@@ -1,14 +0,0 @@
-syntax = "proto3";
-
-package scuffle.platform.internal.types;
-
-import "scuffle/platform/internal/types/image_format.proto";
-
-message ProcessedImageVariant {
-  uint32 width = 1;
-  uint32 height = 2;
-  ImageFormat format = 3;
-  uint32 scale = 4;
-  uint32 byte_size = 5;
-  string path = 6;
-}
diff --git a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto b/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto
deleted file mode 100644
index 8221c5ab7..000000000
--- a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto
+++ /dev/null
@@ -1,15 +0,0 @@
-syntax = "proto3";
-
-import "scuffle/platform/internal/types/processed_image_variant.proto";
-
-package scuffle.platform.internal.types;
-
-message UploadedFileMetadata {
-  message Image {
-    repeated ProcessedImageVariant versions = 1;
-  }
-
-  oneof metadata {
-    Image image = 1;
-  }
-}
diff --git a/utils/Cargo.toml b/utils/Cargo.toml
index 0461f476e..59da4696f 100644
--- a/utils/Cargo.toml
+++ b/utils/Cargo.toml
@@ -54,7 +54,7 @@ const_format = "0.2"
 tokio-postgres = { version = "0.7", optional = true }
 postgres-types = { version = "0.2", optional = true, features = ["with-serde_json-1", "with-chrono-0_4", "derive"] }
-deadpool-postgres = { version = "0.12", optional = true }
+deadpool-postgres = { version = "0.13", optional = true }
 postgres-from-row = { version = "0.5", optional = true }
 prost = { version = "0.12", optional = true }
 ulid = { version = "1.1", optional = true }
diff --git a/video/api/Cargo.toml b/video/api/Cargo.toml
index 4aa79b5c5..c21d9d1c1 100644
--- a/video/api/Cargo.toml
+++ b/video/api/Cargo.toml
@@ -27,10 +27,10 @@ jwt-next = "0.17"
 hmac = "0.12"
 sha2 = "0.10"
 rand = "0.8"
-async-nats = "0.33"
+async-nats = "0.34"
 ulid = "1.1"
 hex = "0.4"
-base64 = "0.21"
+base64 = "0.22"
 serde_json = "1.0"
 fred = { version = "8.0.0", features = ["enable-rustls", "sentinel-client", "dns"] }
 url = "2.5"
@@ -40,7 +40,7 @@ http = "=0.2"
 hyper = "=0.14"
 postgres-from-row = "0.5"
-utils = { workspace = true, features = ["all"] }
+scuffle-utils = { workspace = true, features = ["all"] }
 config = { workspace = true }
 pb = { workspace = true }
 video-common = { workspace = true }
diff --git a/video/api/src/api/access_token/create.rs b/video/api/src/api/access_token/create.rs
index b33905542..8f7fb7db8 100644
--- a/video/api/src/api/access_token/create.rs
+++ b/video/api/src/api/access_token/create.rs
@@ -48,7 +48,7 @@ pub fn build_query(
 access_token: &AccessToken,
 permissions: RequiredScope,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("INSERT INTO ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/access_token/delete.rs b/video/api/src/api/access_token/delete.rs
index a9b11b52a..f4bfef4a4 100644
--- a/video/api/src/api/access_token/delete.rs
+++ b/video/api/src/api/access_token/delete.rs
@@ -63,7 +63,7 @@ impl ApiRequest for tonic::Request
-	let deleted_ids: Vec<Ulid> = utils::database::query("DELETE FROM ")
+	let deleted_ids: Vec<Ulid> = scuffle_utils::database::query("DELETE FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE id = ANY(")
 	.push_bind(ids_to_delete.iter().copied().collect::<Vec<_>>())
diff --git a/video/api/src/api/access_token/get.rs b/video/api/src/api/access_token/get.rs
index 6d3b61011..6dad9b347 100644
--- a/video/api/src/api/access_token/get.rs
+++ b/video/api/src/api/access_token/get.rs
@@ -21,7 +21,7 @@ pub fn build_query(
 req: &AccessTokenGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
diff --git a/video/api/src/api/mod.rs b/video/api/src/api/mod.rs
index 25989fb25..6fc26f23d 100644
--- a/video/api/src/api/mod.rs
+++ b/video/api/src/api/mod.rs
@@ -21,7 +21,7 @@ pub(crate) mod s3_bucket;
 pub(crate) mod transcoding_config;
 pub(crate) mod utils;
-pub use utils::{ApiRequest, RequiredScope, ResourcePermission};
+pub use scuffle_utils::{ApiRequest, RequiredScope, ResourcePermission};

 fn global_middleware(
 global: &Arc,
diff --git a/video/api/src/api/playback_key_pair/create.rs b/video/api/src/api/playback_key_pair/create.rs
index 816ba9da0..fabf2f87f 100644
--- a/video/api/src/api/playback_key_pair/create.rs
+++ b/video/api/src/api/playback_key_pair/create.rs
@@ -33,7 +33,7 @@ pub fn build_query(
 ) -> tonic::Result> {
 let (cert, fingerprint) = jwt;
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("INSERT INTO ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/playback_key_pair/delete.rs b/video/api/src/api/playback_key_pair/delete.rs
index 03bf45786..7100d341b 100644
--- a/video/api/src/api/playback_key_pair/delete.rs
+++ b/video/api/src/api/playback_key_pair/delete.rs
@@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request
 >();
-	let deleted_ids: Vec<Ulid> = utils::database::query("DELETE FROM ")
+	let deleted_ids: Vec<Ulid> = scuffle_utils::database::query("DELETE FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE id = ANY(")
 	.push_bind(ids_to_delete.iter().copied().collect::<Vec<_>>())
diff --git a/video/api/src/api/playback_key_pair/get.rs b/video/api/src/api/playback_key_pair/get.rs
index a7c48c8f6..ea6f2072e 100644
--- a/video/api/src/api/playback_key_pair/get.rs
+++ b/video/api/src/api/playback_key_pair/get.rs
@@ -21,7 +21,7 @@ pub fn build_query(
 req: &PlaybackKeyPairGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
diff --git a/video/api/src/api/playback_key_pair/modify.rs b/video/api/src/api/playback_key_pair/modify.rs
index 52bed5c0b..510db76d7 100644
--- a/video/api/src/api/playback_key_pair/modify.rs
+++ b/video/api/src/api/playback_key_pair/modify.rs
@@ -30,7 +30,7 @@ pub fn build_query(
 req: &PlaybackKeyPairModifyRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("UPDATE ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/playback_session/count.rs b/video/api/src/api/playback_session/count.rs
index 2751d33b8..328e0673a 100644
--- a/video/api/src/api/playback_session/count.rs
+++ b/video/api/src/api/playback_session/count.rs
@@ -26,7 +26,7 @@ pub fn build_query<'a>(
 req: &'a PlaybackSessionCountRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 let filter = req
 	.filter
diff --git a/video/api/src/api/playback_session/get.rs b/video/api/src/api/playback_session/get.rs
index fdec3dd30..942ed15f6 100644
--- a/video/api/src/api/playback_session/get.rs
+++ b/video/api/src/api/playback_session/get.rs
@@ -20,7 +20,7 @@ pub fn build_query<'a>(
 req: &'a PlaybackSessionGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
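Nearly every hunk in this half of the diff is the same mechanical substitution: the `build_query` helpers all start from `QueryBuilder::default()`, push static SQL, and bind values, so the crate rename touches each one exactly once. The recurring shape, reduced to a sketch against the renamed path — table and column names here are illustrative, and only calls that appear in the hunks themselves (`push`, `push_bind`) are used:

```rust
fn build_query<'a>(
    organization_id: ulid::Ulid,
    ids: &[ulid::Ulid],
) -> scuffle_utils::database::QueryBuilder<'a> {
    let mut qb = scuffle_utils::database::QueryBuilder::default();

    qb.push("SELECT * FROM rooms WHERE organization_id = ")
        .push_bind(organization_id);

    if !ids.is_empty() {
        // Bind the whole list as one array parameter, as the delete.rs hunks do.
        qb.push(" AND id = ANY(").push_bind(ids.to_vec()).push(")");
    }

    qb
}
```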
diff --git a/video/api/src/api/playback_session/revoke.rs b/video/api/src/api/playback_session/revoke.rs
index d965544c7..c0b567a72 100644
--- a/video/api/src/api/playback_session/revoke.rs
+++ b/video/api/src/api/playback_session/revoke.rs
@@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 let req = self.get_ref();
@@ -114,7 +114,7 @@ impl ApiRequest for tonic::Request
 chrono::Utc::now() - chrono::Duration::minutes(10)
 }) {
-	utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)")
+	scuffle_utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)")
 	.bind(access_token.organization_id)
 	.bind(req.target.and_then(|t| match t.target {
 		Some(playback_session_target::Target::RoomId(room_id)) => Some(room_id.into_ulid()),
diff --git a/video/api/src/api/recording/delete.rs b/video/api/src/api/recording/delete.rs
index 5e1181be7..b77c9f9be 100644
--- a/video/api/src/api/recording/delete.rs
+++ b/video/api/src/api/recording/delete.rs
@@ -8,9 +8,9 @@ use pb::scuffle::video::v1::types::access_token_scope::Permission;
 use pb::scuffle::video::v1::types::{FailedResource, Resource};
 use pb::scuffle::video::v1::{RecordingDeleteRequest, RecordingDeleteResponse};
 use prost::Message;
+use scuffle_utils::database::ClientLike;
 use tonic::Status;
 use ulid::Ulid;
-use utils::database::ClientLike;
 use video_common::database::{AccessToken, DatabaseTable, Rendition};

 use crate::api::utils::{impl_request_scopes, ApiRequest, TonicRequest};
@@ -161,7 +161,7 @@ async fn handle_query(
 client: impl ClientLike,
 deleted_recordings: &HashMap,
 batch: &mut RecordingDeleteBatchTask,
-	qb: &mut utils::database::QueryBuilder<'_>,
+	qb: &mut scuffle_utils::database::QueryBuilder<'_>,
 ) -> Option<()>
 where
 B: UpdateBatch + postgres_from_row::FromRow + Send + Unpin,
@@ -227,7 +227,7 @@ impl ApiRequest for tonic::Request
-	let deleted_recordings: Vec = utils::database::query("UPDATE ")
+	let deleted_recordings: Vec = scuffle_utils::database::query("UPDATE ")
 	.push(::Table::NAME)
 	.push(" SET deleted_at = NOW(), room_id = NULL, recording_config_id = NULL")
 	.push(" WHERE id = ANY(")
@@ -258,7 +258,7 @@ impl ApiRequest for tonic::Request
 	.push(::NAME)
 	.push(" WHERE recording_id = ANY(")
 	.push_bind(&deleted_ids)
@@ -269,7 +269,7 @@ impl ApiRequest for tonic::Request
 	.push(::FRIENDLY_NAME))
 })?;
-	utils::database::query("DELETE FROM ")
+	scuffle_utils::database::query("DELETE FROM ")
 	.push(::NAME)
 	.push(" WHERE recording_id = ANY(")
 	.push_bind(&deleted_ids)
@@ -302,7 +302,7 @@ impl ApiRequest for tonic::Request
 	.push(::NAME)
 	.push(" WHERE recording_id = ANY(")
 	.push_bind(&deleted_ids)
@@ -319,7 +319,7 @@ impl ApiRequest for tonic::Request
 	.push(::NAME)
 	.push(" WHERE recording_id = ANY(")
 	.push_bind(&deleted_ids)
diff --git a/video/api/src/api/recording/get.rs b/video/api/src/api/recording/get.rs
index 8ce4d8ee5..2f139f01f 100644
--- a/video/api/src/api/recording/get.rs
+++ b/video/api/src/api/recording/get.rs
@@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request {
 ) -> tonic::Result> {
 let req = self.get_ref();
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
diff --git a/video/api/src/api/recording/modify.rs b/video/api/src/api/recording/modify.rs
index 57e7a48fc..a282372e2 100644
--- a/video/api/src/api/recording/modify.rs
+++ b/video/api/src/api/recording/modify.rs
@@ -31,7 +31,7 @@ impl ApiRequest for tonic::Request
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("UPDATE ")
 	.push(::Table::NAME)
@@ -45,7 +45,7 @@
 impl ApiRequest for tonic::Request for tonic::Request
 tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("INSERT INTO ")
 	.push(::Table::NAME)
@@ -62,14 +62,14 @@ pub async fn build_query(
 }

 let bucket: S3Bucket = if let Some(s3_bucket_id) = &req.s3_bucket_id {
-	utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2")
 	.bind(s3_bucket_id.into_ulid())
 	.bind(access_token.organization_id)
 	.build_query_as()
 	.fetch_optional(client)
 	.await
 } else {
-	utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1")
+	scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1")
 	.bind(access_token.organization_id)
 	.build_query_as()
 	.fetch_optional(client)
diff --git a/video/api/src/api/recording_config/delete.rs b/video/api/src/api/recording_config/delete.rs
index a892b6742..4b93ca2ba 100644
--- a/video/api/src/api/recording_config/delete.rs
+++ b/video/api/src/api/recording_config/delete.rs
@@ -27,7 +27,7 @@ impl ApiRequest for tonic::Request
 tonic::Result> {
 // Check if any rooms are using the recording config
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 let req = self.get_ref();
@@ -78,7 +78,7 @@ impl ApiRequest for tonic::Request
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("DELETE FROM ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/recording_config/get.rs b/video/api/src/api/recording_config/get.rs
index 7a16e08c5..e981ed881 100644
--- a/video/api/src/api/recording_config/get.rs
+++ b/video/api/src/api/recording_config/get.rs
@@ -20,7 +20,7 @@ pub fn build_query(
 req: &RecordingConfigGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
diff --git a/video/api/src/api/recording_config/modify.rs b/video/api/src/api/recording_config/modify.rs
index bf7344d6c..4a4c6de01 100644
--- a/video/api/src/api/recording_config/modify.rs
+++ b/video/api/src/api/recording_config/modify.rs
@@ -6,8 +6,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target;
 use pb::scuffle::video::v1::types::access_token_scope::Permission;
 use pb::scuffle::video::v1::types::{event, Resource};
 use pb::scuffle::video::v1::{RecordingConfigModifyRequest, RecordingConfigModifyResponse};
+use scuffle_utils::database::ClientLike;
 use tonic::Status;
-use utils::database::ClientLike;
 use video_common::database::{AccessToken, DatabaseTable, Rendition};

 use crate::api::errors::MODIFY_NO_FIELDS;
@@ -32,7 +32,7 @@ pub async fn build_query<'a>(
 client: impl ClientLike,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("UPDATE ")
 	.push(::Table::NAME)
@@ -74,7 +74,7 @@ pub async fn build_query<'a>(
 }

 if let Some(s3_bucket_id) = &req.s3_bucket_id {
-	utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2")
 	.bind(s3_bucket_id.into_ulid())
 	.bind(access_token.organization_id)
 	.build()
diff --git a/video/api/src/api/room/create.rs b/video/api/src/api/room/create.rs
index 41ad9c3d1..61d2f4745 100644
--- a/video/api/src/api/room/create.rs
+++ b/video/api/src/api/room/create.rs
@@ -4,9 +4,9 @@ use pb::scuffle::video::v1::events_fetch_request::Target;
 use pb::scuffle::video::v1::types::access_token_scope::Permission;
 use pb::scuffle::video::v1::types::{event, Resource};
 use pb::scuffle::video::v1::{RoomCreateRequest, RoomCreateResponse};
+use scuffle_utils::database::ClientLike;
 use tonic::Status;
 use ulid::Ulid;
-use utils::database::ClientLike;
 use video_common::database::{AccessToken, DatabaseTable, Visibility};

 use super::utils::create_stream_key;
@@ -31,7 +31,7 @@ pub async fn build_query(
 client: impl ClientLike,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("INSERT INTO ")
 	.push(::Table::NAME)
@@ -50,7 +50,7 @@ pub async fn build_query(
 qb.push(") VALUES (");

 let transcoding_config_id = if let Some(transcoding_config_id) = &req.transcoding_config_id {
-	utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2")
 	.bind(transcoding_config_id.into_ulid())
 	.bind(access_token.organization_id)
 	.build()
@@ -68,7 +68,7 @@ pub async fn build_query(
 };

 let recording_config_id = if let Some(recording_config_id) = &req.recording_config_id {
-	utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2")
 	.bind(recording_config_id.into_ulid())
 	.bind(access_token.organization_id)
 	.build()
diff --git a/video/api/src/api/room/delete.rs b/video/api/src/api/room/delete.rs
index b998f4aae..1e7ac612b 100644
--- a/video/api/src/api/room/delete.rs
+++ b/video/api/src/api/room/delete.rs
@@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request {
 .map(pb::scuffle::types::Ulid::into_ulid)
 .collect::>();
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("SELECT DISTINCT room_id AS id FROM ")
 	.push(::NAME)
@@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request {
 .collect::>();

 let deleted_ids = if !ids_to_delete.is_empty() {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("DELETE FROM ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/room/get.rs b/video/api/src/api/room/get.rs
index 27c9102fc..5577944ef 100644
--- a/video/api/src/api/room/get.rs
+++ b/video/api/src/api/room/get.rs
@@ -21,7 +21,7 @@ pub fn build_query(
 req: &RoomGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
 	.push(::Table::NAME)
 	.push(" WHERE ");
diff --git a/video/api/src/api/room/modify.rs b/video/api/src/api/room/modify.rs
index 330e348de..beb28f203 100644
--- a/video/api/src/api/room/modify.rs
+++ b/video/api/src/api/room/modify.rs
@@ -5,8 +5,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target;
 use pb::scuffle::video::v1::types::access_token_scope::Permission;
 use pb::scuffle::video::v1::types::{event, Resource};
 use pb::scuffle::video::v1::{RoomModifyRequest, RoomModifyResponse};
+use scuffle_utils::database::ClientLike;
 use tonic::Status;
-use utils::database::ClientLike;
 use video_common::database::{AccessToken, DatabaseTable, Visibility};

 use crate::api::errors::MODIFY_NO_FIELDS;
@@ -31,7 +31,7 @@ pub async fn build_query<'a>(
 client: impl ClientLike,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("UPDATE ")
 	.push(::Table::NAME)
@@ -44,7 +44,7 @@ pub async fn build_query<'a>(
 if transcoding_config_id.is_nil() {
 	seperated.push("transcoding_config_id = NULL");
 } else {
-	utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2")
 	.bind(transcoding_config_id)
 	.bind(access_token.organization_id)
 	.build()
@@ -67,7 +67,7 @@ pub async fn build_query<'a>(
 if recording_config_id.is_nil() {
 	seperated.push("recording_config_id = NULL");
 } else {
-	utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2")
+	scuffle_utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2")
 	.bind(recording_config_id)
 	.bind(access_token.organization_id)
 	.build()
diff --git a/video/api/src/api/room/reset_key.rs b/video/api/src/api/room/reset_key.rs
index a743b623b..559c9a910 100644
--- a/video/api/src/api/room/reset_key.rs
+++ b/video/api/src/api/room/reset_key.rs
@@ -52,7 +52,7 @@ impl ApiRequest for tonic::Request {
 let data = ids_to_reset.iter().copied().map(|id| (id, create_stream_key()));
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("WITH updated_values AS (SELECT * FROM (")
 	.push_values(data.clone(), |mut b, data| {
diff --git a/video/api/src/api/s3_bucket/create.rs b/video/api/src/api/s3_bucket/create.rs
index 26476abcc..b834fdab6 100644
--- a/video/api/src/api/s3_bucket/create.rs
+++ b/video/api/src/api/s3_bucket/create.rs
@@ -32,7 +32,7 @@ pub fn build_query<'a>(
 req: &'a S3BucketCreateRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("INSERT INTO ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/s3_bucket/delete.rs b/video/api/src/api/s3_bucket/delete.rs
index f7f837f17..c424c90d8 100644
--- a/video/api/src/api/s3_bucket/delete.rs
+++ b/video/api/src/api/s3_bucket/delete.rs
@@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request
 >();
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("(SELECT DISTINCT s3_bucket_id AS id FROM ")
 	.push(::NAME)
@@ -77,7 +77,7 @@ impl ApiRequest for tonic::Request
 >();

 let deleted_ids = if !ids_to_delete.is_empty() {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();

 qb.push("DELETE FROM ")
 	.push(::Table::NAME)
diff --git a/video/api/src/api/s3_bucket/get.rs b/video/api/src/api/s3_bucket/get.rs
index 5362d3f01..f0856b2ce 100644
--- a/video/api/src/api/s3_bucket/get.rs
+++ b/video/api/src/api/s3_bucket/get.rs
@@ -20,7 +20,7 @@ pub fn build_query(
 req: &S3BucketGetRequest,
 access_token: &AccessToken,
 ) -> tonic::Result> {
-	let mut qb = utils::database::QueryBuilder::default();
+	let mut qb = scuffle_utils::database::QueryBuilder::default();
 qb.push("SELECT * FROM ")
.push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/s3_bucket/modify.rs b/video/api/src/api/s3_bucket/modify.rs index 560f624c3..65cbd078f 100644 --- a/video/api/src/api/s3_bucket/modify.rs +++ b/video/api/src/api/s3_bucket/modify.rs @@ -32,7 +32,7 @@ pub fn build_query<'a>( req: &'a S3BucketModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/create.rs b/video/api/src/api/transcoding_config/create.rs index 89195e0af..e66fee9ac 100644 --- a/video/api/src/api/transcoding_config/create.rs +++ b/video/api/src/api/transcoding_config/create.rs @@ -29,7 +29,7 @@ pub fn build_query( req: &TranscodingConfigCreateRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/delete.rs b/video/api/src/api/transcoding_config/delete.rs index e6711cd19..061421fb1 100644 --- a/video/api/src/api/transcoding_config/delete.rs +++ b/video/api/src/api/transcoding_config/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT DISTINCT transcoding_config_id AS id FROM ") .push(::NAME) @@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request>(); let deleted_ids = if !ids_to_delete.is_empty() { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/get.rs b/video/api/src/api/transcoding_config/get.rs index c1433288a..a5917d754 100644 --- a/video/api/src/api/transcoding_config/get.rs +++ b/video/api/src/api/transcoding_config/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &TranscodingConfigGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/transcoding_config/modify.rs b/video/api/src/api/transcoding_config/modify.rs index 6c13e0e93..bdd191ebd 100644 --- a/video/api/src/api/transcoding_config/modify.rs +++ b/video/api/src/api/transcoding_config/modify.rs @@ -30,7 +30,7 @@ pub fn build_query<'a>( req: &'a TranscodingConfigModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/utils/get.rs b/video/api/src/api/utils/get.rs index 5decd203d..d84e55c0c 100644 --- a/video/api/src/api/utils/get.rs +++ b/video/api/src/api/utils/get.rs @@ -4,12 +4,12 @@ use ulid::Ulid; use super::tags::validate_tags; -pub fn organization_id(seperated: &mut utils::database::Separated<'_, '_>, organization_id: Ulid) { +pub fn organization_id(seperated: &mut scuffle_utils::database::Separated<'_, '_>, organization_id: Ulid) { seperated.push("organization_id = "); seperated.push_bind_unseparated(organization_id); } -pub fn ids(seperated: &mut 
utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { +pub fn ids(seperated: &mut scuffle_utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { if !ids.is_empty() { seperated.push("id = ANY("); seperated.push_bind_unseparated( @@ -23,7 +23,7 @@ pub fn ids(seperated: &mut utils::database::Separated<'_, '_>, ids: &[pb::scuffl } pub fn search_options( - seperated: &mut utils::database::Separated<'_, '_>, + seperated: &mut scuffle_utils::database::Separated<'_, '_>, search_options: Option<&SearchOptions>, ) -> tonic::Result<()> { if let Some(options) = search_options { diff --git a/video/api/src/api/utils/ratelimit.rs b/video/api/src/api/utils/ratelimit.rs index 3bd4a6b55..341b7e03f 100644 --- a/video/api/src/api/utils/ratelimit.rs +++ b/video/api/src/api/utils/ratelimit.rs @@ -3,11 +3,11 @@ use std::time::Duration; use fred::interfaces::KeysInterface; use futures_util::Future; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use tonic::metadata::AsciiMetadataValue; use tonic::{Response, Status}; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use super::RequiredScope; use crate::config::ApiConfig; @@ -109,10 +109,12 @@ pub async fn ratelimit_scoped(global: &Arc, options: &RateLimiterOptions) -> tonic::Result { let redis = global.redis(); - let resp = utils::ratelimiter::ratelimit(redis.as_ref(), options).await.map_err(|err| { - tracing::error!(err = %err, "failed to rate limit"); - Status::internal("Unable to process request, failed to rate limit") - })?; + let resp = scuffle_utils::ratelimiter::ratelimit(redis.as_ref(), options) + .await + .map_err(|err| { + tracing::error!(err = %err, "failed to rate limit"); + Status::internal("Unable to process request, failed to rate limit") + })?; if resp.banned || resp.remaining == -1 { let mut status = Status::resource_exhausted("rate limit exceeded");
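The hunk above is the one place the API consults the shared Redis rate limiter, so the shape of its response drives the error handling. A minimal sketch of how a caller consumes it, mirroring the handler in this patch — the `RedisClient` parameter type and the exact `ratelimit` signature are assumptions; the `banned`/`remaining` fields and the `-1` sentinel are as used above:

```rust
use fred::clients::RedisClient;
use scuffle_utils::ratelimiter::{ratelimit, RateLimiterOptions};
use tonic::Status;

// Run the shared rate-limiter script and convert the outcome into a tonic
// error, following the pattern in ratelimit.rs above.
async fn check_rate_limit(redis: &RedisClient, options: &RateLimiterOptions) -> tonic::Result<()> {
	let resp = ratelimit(redis, options).await.map_err(|err| {
		tracing::error!(err = %err, "failed to rate limit");
		Status::internal("Unable to process request, failed to rate limit")
	})?;

	// `remaining == -1` is the sentinel the handler above treats as exhausted.
	if resp.banned || resp.remaining == -1 {
		return Err(Status::resource_exhausted("rate limit exceeded"));
	}

	Ok(())
}
```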
diff --git a/video/api/src/api/utils/tags.rs b/video/api/src/api/utils/tags.rs index a7af9cc50..d71559f2f 100644 --- a/video/api/src/api/utils/tags.rs +++ b/video/api/src/api/utils/tags.rs @@ -68,7 +68,7 @@ pub fn validate_tags_array(tags: &[String]) -> tonic::Result<()> { #[derive(postgres_from_row::FromRow)] pub struct TagExt { - pub tags: utils::database::Json>, + pub tags: scuffle_utils::database::Json>, pub status: i64, } @@ -97,8 +97,8 @@ pub fn add_tag_query( tags: &HashMap, id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH mt AS (SELECT id, tags || ") .push_bind(utils::database::Json(tags)) @@ -126,8 +126,8 @@ pub fn remove_tag_query( tags: &[String], id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH rt AS (SELECT id, tags - ") .push_bind(tags) diff --git a/video/api/src/dataloaders/access_token.rs b/video/api/src/dataloaders/access_token.rs index 3537d62e6..0bb86f65f 100644 --- a/video/api/src/dataloaders/access_token.rs +++ b/video/api/src/dataloaders/access_token.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct AccessTokenLoader { db: Arc, } @@ -20,7 +20,7 @@ impl Loader for AccessTokenLoader { async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { let results: Vec = - utils::database::query("SELECT * FROM access_tokens WHERE (organization_id, id) IN ") + scuffle_utils::database::query("SELECT * FROM access_tokens WHERE (organization_id, id) IN ") .push_tuples(keys, |mut qb, (organization_id, access_token_id)| { qb.push_bind(organization_id).push_bind(access_token_id); }) diff --git a/video/api/src/dataloaders/recording_state.rs b/video/api/src/dataloaders/recording_state.rs index 453cb5854..63d77bf4b 100644 --- a/video/api/src/dataloaders/recording_state.rs +++ b/video/api/src/dataloaders/recording_state.rs @@ -1,8 +1,8 @@ use std::sync::Arc; use itertools::Itertools; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use video_common::database::{Recording, Rendition}; pub struct RecordingStateLoader { @@ -53,7 +53,7 @@ impl Loader for RecordingStateLoader { type Value = RecordingState; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") + let results: Vec = scuffle_utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") .push_tuples(keys, |mut qb, (organization_id, recording_id)| { qb.push_bind(organization_id).push_bind(recording_id); }).push(" GROUP BY organization_id, recording_id, rendition ORDER BY organization_id, recording_id").build_query_as().fetch_all(&self.db).await.map_err(|err| { diff --git a/video/api/src/dataloaders/room.rs b/video/api/src/dataloaders/room.rs index 7bb3eb22b..099e3836e 100644 --- a/video/api/src/dataloaders/room.rs +++ b/video/api/src/dataloaders/room.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct RoomLoader { db: Arc, } @@ -19,16 +19,17 @@ impl Loader for RoomLoader { type Value = video_common::database::Room; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") - .push_tuples(keys, |mut qb, (organization_id, room_id)| { - qb.push_bind(organization_id).push_bind(room_id); - }) - .build_query_as() - .fetch_all(&self.db) - .await - .map_err(|err| { - tracing::error!(error = %err, "failed to load rooms"); - })?; + let results: Vec = + scuffle_utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") + .push_tuples(keys, |mut qb, (organization_id, room_id)| { + qb.push_bind(organization_id).push_bind(room_id); + }) + .build_query_as() + .fetch_all(&self.db) + .await + .map_err(|err| { + tracing::error!(error = %err, "failed to load rooms"); + })?; Ok(results.into_iter().map(|v| ((v.organization_id, v.id), v)).collect()) }
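All three dataloaders above reduce to the same batched-lookup shape: one composite-key `IN` query per batch, re-keyed into a map so the `DataLoader` can fan results back out to callers. A condensed sketch following the `RoomLoader` body shown here (the standalone function signature and `Pool` reference are simplifications of the loader struct):

```rust
use std::collections::HashMap;

use ulid::Ulid;

// One round trip per batch of keys, then re-key the rows the same way the
// requests came in — the contract the DataLoader relies on.
async fn load_rooms(
	db: &scuffle_utils::database::Pool,
	keys: &[(Ulid, Ulid)], // (organization_id, room_id)
) -> Result<HashMap<(Ulid, Ulid), video_common::database::Room>, ()> {
	let results: Vec<video_common::database::Room> =
		scuffle_utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ")
			.push_tuples(keys, |mut qb, (organization_id, room_id)| {
				qb.push_bind(organization_id).push_bind(room_id);
			})
			.build_query_as()
			.fetch_all(db)
			.await
			.map_err(|err| {
				tracing::error!(error = %err, "failed to load rooms");
			})?;

	Ok(results.into_iter().map(|v| ((v.organization_id, v.id), v)).collect())
}
```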
diff --git a/video/api/src/global.rs b/video/api/src/global.rs index 7c48972ee..c8e57f5c5 100644 --- a/video/api/src/global.rs +++ b/video/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; diff --git a/video/api/src/main.rs b/video/api/src/main.rs index ec3de8a7f..072d82f75 100644 --- a/video/api/src/main.rs +++ b/video/api/src/main.rs @@ -5,9 +5,9 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::config::RedisConfig; use binary_helper::global::{setup_database, setup_nats, setup_redis, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; use tokio::select; -use utils::context::Context; -use utils::dataloader::DataLoader; use video_api::config::ApiConfig; use video_api::dataloaders; @@ -88,7 +88,7 @@ impl binary_helper::Global for GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?;
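Every test file touched below follows the same harness lifecycle, so the repetitive rename is easiest to read against a skeleton. A minimal sketch, using the setup/teardown helper names as they appear after this rename (the config argument and the returned tuple follow the call sites in this patch; the request body in the middle is elided):

```rust
// Skeleton of the test lifecycle used throughout these files: `setup` builds
// a mock global state, a context handler, and a seeded access token;
// `teardown` cancels the context so background tasks shut down cleanly.
#[tokio::test]
async fn test_lifecycle_sketch() {
	let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await;

	// ... drive one API request against `global` using `access_token`,
	// then assert on the response ...
	let _ = &access_token;

	scuffle_utils::teardown(global, handler).await;
}
```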
diff --git a/video/api/src/tests/api/access_token.rs b/video/api/src/tests/api/access_token.rs index 70ef1b8c9..b73c2de65 100644 --- a/video/api/src/tests/api/access_token.rs +++ b/video/api/src/tests/api/access_token.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_access_token_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -48,12 +48,12 @@ async fn test_access_token_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenCreateRequest { @@ -78,12 +78,12 @@ async fn test_access_token_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenTagRequest { @@ -105,12 +105,12 @@ async fn test_access_token_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenUntagRequest { @@ -128,12 +128,12 @@ async fn test_access_token_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let tag_request = AccessTokenTagRequest { id: Some(access_token.id.into()), @@ -148,12 +148,12 @@ async fn test_access_token_tag() { let tags = response.tags.unwrap(); assert_eq!(tags.tags.get("key").unwrap(), &"value"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; // Tag the token first let tag_request = AccessTokenTagRequest { @@ -179,12 +179,12 @@ async fn test_access_token_untag() { let tags = response.tags.unwrap(); assert!(tags.tags.is_empty(), "Tags should be empty after untagging"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; // Test case: Create a basic access token let req = AccessTokenCreateRequest { @@ -231,16 +231,16 @@ async fn test_access_token_create() { "tag_value" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create multiple access tokens with different tags for testing let created_tokens = vec![ - utils::create_access_token( + scuffle_utils::create_access_token( &global, &main_access_token.organization_id, vec![], @@ -252,7 +252,7 @@ async fn test_access_token_get() { .collect(), ) .await, - utils::create_access_token( + scuffle_utils::create_access_token( &global, &main_access_token.organization_id, vec![], @@ -335,16 +335,16 @@ async fn test_access_token_get() { // Assertions for limit and reverse options assert_eq!(limited_tokens.len(), 1, "Should fetch only one token due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create access tokens to be deleted let token_to_delete = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; // Delete request with a token the caller should have permission to delete let delete_request = AccessTokenDeleteRequest { @@ -390,15 +390,15 @@ async fn test_access_token_delete() { "Failed deletion reason should be correct" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = AccessTokenServer::::new(); @@ -579,5 +579,5 @@ async fn 
test_access_token_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: access_token:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/events.rs b/video/api/src/tests/api/events.rs index 56736da68..2f4432406 100644 --- a/video/api/src/tests/api/events.rs +++ b/video/api/src/tests/api/events.rs @@ -14,7 +14,7 @@ use crate::tests::utils; #[tokio::test] async fn test_events() { - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = scuffle_utils::setup(ApiConfig { events: EventsConfig { stream_name: Ulid::new().to_string(), fetch_request_min_delay: Duration::from_secs(0), @@ -87,5 +87,5 @@ async fn test_events() { .await .expect("failed to process request"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_key_pair.rs b/video/api/src/tests/api/playback_key_pair.rs index 4ca878af0..87fa24713 100644 --- a/video/api/src/tests/api/playback_key_pair.rs +++ b/video/api/src/tests/api/playback_key_pair.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_playback_key_pair_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -57,12 +57,12 @@ async fn test_playback_key_pair_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( PlaybackKeyPairCreateRequest { @@ -80,12 +80,12 @@ async fn test_playback_key_pair_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -135,12 +135,12 @@ async fn test_playback_key_pair_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -169,12 +169,12 @@ async fn test_playback_key_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -199,12 +199,12 @@ 
assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -228,12 +228,12 @@ async fn test_playback_key_pair_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -260,12 +260,12 @@ async fn test_playback_key_pair_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let (_, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); @@ -293,12 +293,12 @@ async fn test_playback_key_pair_create() { let created = response.playback_key_pair.as_ref().unwrap(); assert_eq!(created.tags.as_ref().unwrap().tags.get("tag_key").unwrap(), "tag_value"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -357,12 +357,12 @@ async fn test_playback_key_pair_modify() { "Fingerprint should not change" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create multiple playback key pair with different tags for testing let created = vec![ @@ -447,12 +447,12 @@ async fn test_playback_key_pair_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create access tokens to be deleted let keypair_to_delete = create_playback_keypair( &global, @@ -479,15 +479,15 @@ async fn test_playback_key_pair_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_boiler_plate() 
{ - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackKeyPairServer::::new(); @@ -721,5 +721,5 @@ async fn test_playback_key_pair_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_key_pair:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_session.rs b/video/api/src/tests/api/playback_session.rs index 95c3439e5..34a79af1f 100644 --- a/video/api/src/tests/api/playback_session.rs +++ b/video/api/src/tests/api/playback_session.rs @@ -22,7 +22,7 @@ use crate::tests::utils::{self, teardown}; #[tokio::test] async fn test_playback_session_count_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -60,12 +60,12 @@ async fn test_playback_session_count_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -204,12 +204,12 @@ async fn test_playback_session_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_count() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -295,12 +295,12 @@ async fn test_playback_session_count() { assert_eq!(response.count, 300); assert_eq!(response.deduplicated_count, 200); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -416,12 +416,12 @@ async fn test_playback_session_revoke() { "revoke_before should be within 5 seconds of now" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke_2() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -478,12 +478,12 @@ async fn test_playback_session_revoke_2() { // Half of them are 
authorized, so 50 should be revoked assert_eq!(response.revoked, 50); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -564,10 +564,10 @@ #[tokio::test] async fn test_playback_session_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackSessionServer::::new(); @@ -703,5 +703,5 @@ async fn test_playback_session_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_session:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; }
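The `_qb` tests above and below all share one table-driven shape: a vector of (request, expected SQL) pairs fed through the module's `build_query` and compared with `assert_query_matches`. A distilled sketch of that pattern — `build_query` here is the `RoomGetRequest` variant shown earlier in this patch, and the expected SQL string is invented for illustration:

```rust
// Hypothetical single-case version of the `_qb` test pattern; the real
// tests enumerate many request/SQL pairs in `test_cases`.
#[tokio::test]
async fn test_example_get_qb() {
	let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await;

	let test_cases = vec![(
		RoomGetRequest::default(),
		Ok("SELECT * FROM rooms WHERE organization_id = $1 ORDER BY id"), // illustrative only
	)];

	for (req, expected) in test_cases {
		// `build_query` only assembles SQL, so the comparison needs no database.
		let result = build_query(&req, &access_token);
		assert_query_matches(result, expected);
	}

	scuffle_utils::teardown(global, handler).await;
}
```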
diff --git a/video/api/src/tests/api/recording.rs b/video/api/src/tests/api/recording.rs index 62626994a..1b417e756 100644 --- a/video/api/src/tests/api/recording.rs +++ b/video/api/src/tests/api/recording.rs @@ -25,7 +25,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -89,12 +89,12 @@ async fn test_recording_get() { .unwrap(); assert_eq!(resp.recordings.len(), 0, "expected 0 recording"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -202,12 +202,12 @@ async fn test_recording_modify() { "expected tag to match" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -267,12 +267,12 @@ async fn test_recording_tag() { "expected 1 tags" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -323,14 +323,14 @@ async fn test_recording_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "expected 0 tags"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_delete() { let recording_delete_stream = Ulid::new().to_string(); - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = scuffle_utils::setup(ApiConfig { recording_delete_stream: recording_delete_stream.clone(), ..Default::default() }) @@ -433,15 +433,15 @@ async fn test_recording_delete() { assert!(thumbnails.is_empty(), "expected all thumbnails to be deleted"); assert!(segments.is_empty(), "expected all segments to be deleted"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingServer::::new(); @@ -630,5 +630,5 @@ async fn test_recording_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/recording_config.rs b/video/api/src/tests/api/recording_config.rs index 9603c56c3..360f98f93 100644 --- a/video/api/src/tests/api/recording_config.rs +++ b/video/api/src/tests/api/recording_config.rs @@ -20,7 +20,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_config_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -55,12 +55,12 @@ async fn test_recording_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -85,12 +85,12 @@ async fn test_recording_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -174,12 +174,12 @@ async fn test_recording_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, 
handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RecordingConfigTagRequest { @@ -201,12 +201,12 @@ async fn test_recording_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RecordingConfigUntagRequest { @@ -224,12 +224,12 @@ async fn test_recording_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -257,12 +257,12 @@ async fn test_recording_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -292,12 +292,12 @@ async fn test_recording_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -359,12 +359,12 @@ async fn test_recording_config_create() { } ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -440,12 +440,12 @@ async fn test_recording_config_modify() { } ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = 
create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -546,12 +546,12 @@ async fn test_recording_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -583,15 +583,15 @@ async fn test_recording_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingConfigServer::::new(); @@ -837,5 +837,5 @@ async fn test_recording_config_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording_config:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/room.rs b/video/api/src/tests/api/room.rs index 1fcbd8fbc..5dd48b81e 100644 --- a/video/api/src/tests/api/room.rs +++ b/video/api/src/tests/api/room.rs @@ -22,7 +22,7 @@ use crate::tests::utils; #[tokio::test] async fn test_room_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -116,12 +116,12 @@ async fn test_room_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -159,12 +159,12 @@ async fn test_room_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -209,12 +209,12 @@ async fn test_room_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_pair_tag_qb() { - let (global, handler, access_token) = 
utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RoomTagRequest { @@ -236,12 +236,12 @@ async fn test_room_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RoomUntagRequest { @@ -259,12 +259,12 @@ async fn test_room_pair_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -341,12 +341,12 @@ async fn test_room_create() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -518,12 +518,12 @@ async fn test_room_get() { assert_eq!(resp.rooms.len(), 1, "should return 1 room"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -619,12 +619,12 @@ async fn test_room_modify() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -704,12 +704,12 @@ async fn test_room_tag() { "tags should match" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -748,12 +748,12 @@ async fn test_room_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "tags should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_delete() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; 
@@ -785,12 +785,12 @@ async fn test_room_delete() { assert_eq!(resp.failed_deletes[0].id, Some(room.id.into()), "failed delete should match"); assert_eq!(resp.failed_deletes[0].reason, "room not found", "failed delete should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_disconnect() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -841,12 +841,12 @@ async fn test_room_disconnect() { ); assert!(msg.payload.is_empty(), "payload should be empty"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_reset_keys() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -875,15 +875,15 @@ async fn test_room_reset_keys() { assert_eq!(resp.rooms[0].id, Some(room.id.into()), "room should match"); assert_eq!(resp.rooms[0].key, key, "room should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let room = create_room(&global, main_access_token.organization_id).await; @@ -1189,5 +1189,5 @@ async fn test_room_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: room:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/s3_bucket.rs b/video/api/src/tests/api/s3_bucket.rs index 8a2e7e6bd..a83723a28 100644 --- a/video/api/src/tests/api/s3_bucket.rs +++ b/video/api/src/tests/api/s3_bucket.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_s3_bucket_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -50,12 +50,12 @@ async fn test_s3_bucket_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( S3BucketCreateRequest { @@ -78,12 +78,12 @@ async fn test_s3_bucket_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = 
scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -119,12 +119,12 @@ async fn test_s3_bucket_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( S3BucketTagRequest { @@ -146,12 +146,12 @@ async fn test_s3_bucket_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( S3BucketUntagRequest { @@ -169,12 +169,12 @@ async fn test_s3_bucket_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -200,12 +200,12 @@ async fn test_s3_bucket_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -230,12 +230,12 @@ async fn test_s3_bucket_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let response: S3BucketCreateResponse = process_request( &global, @@ -281,12 +281,12 @@ async fn test_s3_bucket_create() { assert_eq!(created.endpoint, None); assert_eq!(created.public_url, None); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -342,12 +342,12 @@ async fn test_s3_bucket_modify() { assert_eq!(created.endpoint, Some("https://endpoint.com".to_string())); assert_eq!(created.public_url, Some("https://public_url.com".to_string())); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let created 
= vec![ create_s3_bucket( @@ -443,12 +443,12 @@ async fn test_s3_bucket_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one s3 bucket due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -472,15 +472,15 @@ async fn test_s3_bucket_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = S3BucketServer::::new(); @@ -700,5 +700,5 @@ async fn test_s3_bucket_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: s3_bucket:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/transcoding_config.rs b/video/api/src/tests/api/transcoding_config.rs index d8de88c1f..5302cc21e 100644 --- a/video/api/src/tests/api/transcoding_config.rs +++ b/video/api/src/tests/api/transcoding_config.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_transcoding_config_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -54,12 +54,12 @@ async fn test_transcoding_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( TranscodingConfigCreateRequest { @@ -78,12 +78,12 @@ async fn test_transcoding_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -150,12 +150,12 @@ async fn test_transcoding_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; 
let test_cases = vec![( TranscodingConfigTagRequest { @@ -177,12 +177,12 @@ async fn test_transcoding_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( TranscodingConfigUntagRequest { @@ -200,12 +200,12 @@ async fn test_transcoding_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -231,12 +231,12 @@ async fn test_transcoding_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -264,12 +264,12 @@ async fn test_transcoding_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let response: TranscodingConfigCreateResponse = process_request( &global, @@ -314,12 +314,12 @@ async fn test_transcoding_config_create() { ] ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -381,12 +381,12 @@ async fn test_transcoding_config_modify() { ] ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let created = vec![ create_transcoding_config( @@ -482,12 +482,12 @@ async fn test_transcoding_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; 
let transcoding_config = create_transcoding_config( &global, @@ -516,15 +516,15 @@ async fn test_transcoding_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = TranscodingConfigServer::::new(); @@ -771,5 +771,5 @@ async fn test_transcoding_config_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: transcoding_config:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/utils.rs b/video/api/src/tests/api/utils.rs index fb3725e84..4b143bb8b 100644 --- a/video/api/src/tests/api/utils.rs +++ b/video/api/src/tests/api/utils.rs @@ -44,7 +44,7 @@ pub async fn create_playback_session( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO playback_sessions (id, organization_id, room_id, recording_id, user_id, ip_address) "); @@ -66,7 +66,7 @@ pub async fn create_playback_session( } pub async fn create_room(global: &Arc, organization_id: Ulid) -> video_common::database::Room { - utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(create_stream_key()) @@ -84,7 +84,7 @@ pub async fn create_recording( recording_config_id: Option, tags: HashMap, ) -> video_common::database::Recording { - utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() + scuffle_utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(scuffle_utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() } pub async fn create_recording_thumbnail( @@ -98,7 +98,7 @@ pub async fn create_recording_thumbnail( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO recording_thumbnails (organization_id, recording_id, idx, id, start_time) "); @@ -129,7 +129,7 @@ pub async fn create_recording_segment( let client =
global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 14) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push( "INSERT INTO recording_rendition_segments (organization_id, recording_id, rendition, idx, id, start_time, end_time) ", @@ -159,7 +159,7 @@ pub async fn create_recording_config( s3_bucket_id: Ulid, tags: HashMap, ) -> video_common::database::RecordingConfig { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO recording_configs (id, organization_id, s3_bucket_id, tags) VALUES ($1, $2, $3, $4) RETURNING *", ) .bind(Ulid::new()) @@ -177,14 +177,16 @@ pub async fn create_transcoding_config( organization_id: Ulid, tags: HashMap, ) -> video_common::database::TranscodingConfig { - utils::database::query("INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *") - .bind(Ulid::new()) - .bind(organization_id) - .bind(utils::database::Json(tags)) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *", + ) + .bind(Ulid::new()) + .bind(organization_id) + .bind(scuffle_utils::database::Json(tags)) + .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_s3_bucket( @@ -192,7 +194,7 @@ pub async fn create_s3_bucket( organization_id: Ulid, tags: HashMap, ) -> video_common::database::S3Bucket { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO s3_buckets (id, organization_id, name, region, access_key_id, secret_access_key, managed, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *", ) .bind(Ulid::new()) @@ -216,7 +218,7 @@ pub async fn create_playback_keypair( ) -> video_common::database::PlaybackKeyPair { let (key, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); - utils::database::query( + scuffle_utils::database::query( "INSERT INTO playback_key_pairs (id, organization_id, public_key, fingerprint, updated_at, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *", ) .bind(Ulid::new()) diff --git a/video/api/src/tests/global.rs b/video/api/src/tests/global.rs index 34e06b9b2..74f6a7200 100644 --- a/video/api/src/tests/global.rs +++ b/video/api/src/tests/global.rs @@ -4,11 +4,11 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::logging; use fred::interfaces::ClientLike; use postgres_from_row::tokio_postgres::NoTls; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; @@ -127,7 +127,7 @@ pub async fn mock_global_state(config: ApiConfig) -> (Arc, Handler) .expect("failed to connect to redis") .expect("failed to connect to redis"); - utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .expect("failed to load rate limiter script"); diff --git a/video/api/src/tests/utils.rs b/video/api/src/tests/utils.rs
index 751cace3b..2e85c9f3a 100644 --- a/video/api/src/tests/utils.rs +++ b/video/api/src/tests/utils.rs @@ -4,9 +4,9 @@ use std::sync::Arc; use std::time::Duration; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; +use scuffle_utils::context::Handler; +use scuffle_utils::prelude::FutureTimeout; use ulid::Ulid; -use utils::context::Handler; -use utils::prelude::FutureTimeout; use video_common::database::AccessToken; use super::global::{mock_global_state, GlobalState}; @@ -14,15 +14,17 @@ use crate::config::ApiConfig; use crate::global::ApiGlobal; pub async fn create_organization(global: &Arc) -> video_common::database::Organization { - utils::database::query("INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *") - .bind(Ulid::new()) - .bind("test") - .bind(chrono::Utc::now()) - .bind(utils::database::Json(std::collections::HashMap::::default())) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *", + ) + .bind(Ulid::new()) + .bind("test") + .bind(chrono::Utc::now()) + .bind(scuffle_utils::database::Json(std::collections::HashMap::::default())) + .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_access_token( @@ -31,7 +33,7 @@ pub async fn create_access_token( scopes: Vec>, tags: std::collections::HashMap, ) -> video_common::database::AccessToken { - utils::database::query("INSERT INTO access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") + scuffle_utils::database::query("INSERT INTO access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(Ulid::new()) diff --git a/video/cli/Cargo.toml b/video/cli/Cargo.toml index 05aaea45e..621a2976b 100644 --- a/video/cli/Cargo.toml +++ b/video/cli/Cargo.toml @@ -10,7 +10,7 @@ clap = { version = "4.4", features = ["derive", "env"] } ulid = "1.1" chrono = { version = "0.4", features = ["serde"] } fred = { version = "8.0.0", features = ["enable-rustls", "sentinel-client", "dns"] } -async-nats = "0.33" +async-nats = "0.34" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" async-trait = "0.1" @@ -19,11 +19,11 @@ tonic = "0.11" futures = "0.3" futures-util = "0.3" serde_yaml = "0.9" -base64 = "0.21" +base64 = "0.22" pb = { workspace = true } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } video-api = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } diff --git a/video/cli/src/invoker/direct.rs b/video/cli/src/invoker/direct.rs index bfc3d179e..0c2409090 100644 --- a/video/cli/src/invoker/direct.rs +++ b/video/cli/src/invoker/direct.rs @@ -9,10 +9,10 @@ use binary_helper::{impl_global_traits, logging}; use futures_util::stream::BoxStream; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use ulid::Ulid; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; use video_api::api::ApiRequest; use
video_api::config::ApiConfig; use video_api::dataloaders; @@ -34,7 +34,7 @@ impl DirectBackend { logging::init(&global.config.logging.level, global.config.logging.mode).expect("failed to init logging"); let access_token = if let Some(organization_id) = organization_id { - utils::database::query("SELECT * FROM organizations WHERE id = $1") + scuffle_utils::database::query("SELECT * FROM organizations WHERE id = $1") .bind(organization_id) .build() .fetch_optional(global.db()) @@ -76,7 +76,7 @@ impl DirectBackend { async fn create_organization(&self, req: OrganizationCreateRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(req.name) .bind(scuffle_utils::database::Json(req.tags)) @@ -130,7 +130,7 @@ impl DirectBackend { } async fn get_organization(&self, req: OrganizationGetRequest) -> anyhow::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM organizations"); @@ -183,7 +183,7 @@ impl DirectBackend { } async fn modify_organization(&self, req: OrganizationModifyRequest) -> anyhow::Result { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE organizations SET "); @@ -223,7 +223,7 @@ impl DirectBackend { async fn tag_organization(&self, req: OrganizationTagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") .bind(scuffle_utils::database::Json(req.tags)) .bind(req.id) .build_query_as() @@ -239,7 +239,7 @@ impl DirectBackend { async fn untag_organization(&self, req: OrganizationUntagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") .bind(req.tags) .bind(req.id) .build_query_as() @@ -353,7 +353,7 @@ impl GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?; diff --git a/video/cli/src/invoker/grpc.rs b/video/cli/src/invoker/grpc.rs index a0904f122..f2ac25355 100644 --- a/video/cli/src/invoker/grpc.rs +++ b/video/cli/src/invoker/grpc.rs @@ -2,10 +2,10 @@ use anyhow::Context as _; use base64::Engine; use futures_util::stream::BoxStream; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; use tonic::service::interceptor; use tonic::transport::Channel; use ulid::Ulid; -use utils::context::Context; use crate::cli::display::{DeleteResponse, TagResponse}; pub use crate::invoker::request::*; diff --git a/video/cli/src/invoker/mod.rs b/video/cli/src/invoker/mod.rs index 42f6739fe..611af3c66 100644 --- a/video/cli/src/invoker/mod.rs +++ b/video/cli/src/invoker/mod.rs @@ -1,5 +1,5 @@ use anyhow::Context as _; -use
utils::context::Context; +use scuffle_utils::context::Context; use self::direct::DirectBackend; use self::grpc::GrpcBackend; diff --git a/video/cli/src/main.rs b/video/cli/src/main.rs index 777c3846d..e17060282 100644 --- a/video/cli/src/main.rs +++ b/video/cli/src/main.rs @@ -4,8 +4,8 @@ use anyhow::Context as _; use clap::Parser; use cli::Invokable; use invoker::Invoker; -use utils::context::Context; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; mod cli; mod invoker; diff --git a/video/common/Cargo.toml b/video/common/Cargo.toml index b8d469347..667abdf92 100644 --- a/video/common/Cargo.toml +++ b/video/common/Cargo.toml @@ -19,7 +19,7 @@ futures = "0.3" futures-util = "0.3" bytes = "1.5" async-trait = "0.1" -async-nats = "0.33" +async-nats = "0.34" pb = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } diff --git a/video/common/src/database/access_token.rs b/video/common/src/database/access_token.rs index 660e6a7fb..14d6b76f1 100644 --- a/video/common/src/database/access_token.rs +++ b/video/common/src/database/access_token.rs @@ -3,8 +3,8 @@ use std::collections::HashMap; use chrono::Utc; use pb::scuffle::video::v1::types::AccessTokenScope; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::DatabaseTable; diff --git a/video/common/src/database/organization.rs b/video/common/src/database/organization.rs index 394deedeb..075d20f3c 100644 --- a/video/common/src/database/organization.rs +++ b/video/common/src/database/organization.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/playback_key_pair.rs b/video/common/src/database/playback_key_pair.rs index 346e0bb7e..304807440 100644 --- a/video/common/src/database/playback_key_pair.rs +++ b/video/common/src/database/playback_key_pair.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/recording.rs b/video/common/src/database/recording.rs index a2f7e8b9d..428fbbf32 100644 --- a/video/common/src/database/recording.rs +++ b/video/common/src/database/recording.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition, Visibility}; diff --git a/video/common/src/database/recording_config.rs b/video/common/src/database/recording_config.rs index dad5d71cc..05d4d0d1c 100644 --- a/video/common/src/database/recording_config.rs +++ b/video/common/src/database/recording_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::{RecordingLifecyclePolicy, Rendition as PbRendition}; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::{DatabaseTable, Rendition}; diff --git a/video/common/src/database/room.rs b/video/common/src/database/room.rs index 3efb04277..628439208 100644 --- a/video/common/src/database/room.rs +++ b/video/common/src/database/room.rs @@ -2,8 +2,8 @@ use 
std::collections::HashMap; use pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, TranscodingConfig, VideoConfig}; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_opt, protobuf_vec_opt}; use ulid::Ulid; -use utils::database::{json, protobuf_opt, protobuf_vec_opt}; use super::{DatabaseTable, RoomStatus, Visibility}; diff --git a/video/common/src/database/s3_bucket.rs b/video/common/src/database/s3_bucket.rs index 7d4e5743f..aba90a7e6 100644 --- a/video/common/src/database/s3_bucket.rs +++ b/video/common/src/database/s3_bucket.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/transcoding_config.rs b/video/common/src/database/transcoding_config.rs index 3258faf1d..9a5b12af2 100644 --- a/video/common/src/database/transcoding_config.rs +++ b/video/common/src/database/transcoding_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::Rendition as PbRendition; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition}; diff --git a/video/edge/Cargo.toml b/video/edge/Cargo.toml index eb986360a..0e39e6736 100644 --- a/video/edge/Cargo.toml +++ b/video/edge/Cargo.toml @@ -11,9 +11,9 @@ path = "src/main.rs" [dependencies] anyhow = "1.0" tracing = "0.1" -rustls = "0.22" +rustls = "0.23" rustls-pemfile = "2.0" -tokio-rustls = "0.25" +tokio-rustls = "0.26" postgres-from-row = "0.5" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0", features = ["derive"] } @@ -32,7 +32,7 @@ tokio-stream = "0.1" serde_json = "1.0" uuid = { version = "1.6", features = ["v4"] } url = "2.5" -async-nats = "0.33" +async-nats = "0.34" hmac = "0.12" jwt-next = "0.17" ulid = { version = "1.1", features = ["uuid", "serde"] } @@ -41,7 +41,7 @@ thiserror = "1.0" http-body-util = "0.1" hyper-util = "0.1" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } diff --git a/video/edge/src/edge/error.rs b/video/edge/src/edge/error.rs index be911e386..42fde5c32 100644 --- a/video/edge/src/edge/error.rs +++ b/video/edge/src/edge/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use crate::subscription::SubscriptionError; @@ -13,9 +13,9 @@ pub enum EdgeError { #[error("internal server error: {0}")] InternalServer(&'static str), #[error("database error: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), + Database(#[from] scuffle_utils::database::tokio_postgres::Error), #[error("database pool error: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), + DatabasePool(#[from] scuffle_utils::database::deadpool_postgres::PoolError), #[error("json error: {0}")] ParseJson(#[from] serde_json::Error), #[error("prost error: {0}")] diff --git a/video/edge/src/edge/mod.rs b/video/edge/src/edge/mod.rs index 75ea20858..996317420 100644 --- a/video/edge/src/edge/mod.rs +++ b/video/edge/src/edge/mod.rs @@ -10,12 +10,12 @@ use hyper::server::conn::http1; use hyper::service::service_fn; use hyper::StatusCode; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions}; +use 
scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions}; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::prelude::FutureTimeout; use crate::config::EdgeConfig; use crate::global::EdgeGlobal; diff --git a/video/edge/src/edge/stream/hls_config.rs b/video/edge/src/edge/stream/hls_config.rs index 5f5748636..709a44fbb 100644 --- a/video/edge/src/edge/stream/hls_config.rs +++ b/video/edge/src/edge/stream/hls_config.rs @@ -1,7 +1,7 @@ use hyper::{Request, StatusCode}; use pb::scuffle::video::internal::live_rendition_manifest::RenditionInfo; -use utils::http::ext::*; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::RouteError; use super::block_style::BlockStyle; use crate::edge::error::Result; diff --git a/video/edge/src/edge/stream/mod.rs b/video/edge/src/edge/stream/mod.rs index ded283d62..938cd7c67 100644 --- a/video/edge/src/edge/stream/mod.rs +++ b/video/edge/src/edge/stream/mod.rs @@ -10,17 +10,17 @@ use itertools::Itertools; use pb::scuffle::video::internal::{LiveManifest, LiveRenditionManifest}; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; use prost::Message; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use tokio::io::AsyncReadExt; use tokio::time::Instant; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use video_common::database::{Rendition, Room, RoomStatus, Visibility}; use video_common::keys; use video_player_types::SessionRefresh; @@ -84,7 +84,7 @@ async fn room_playlist(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * @@ -145,7 +145,7 @@ async fn room_playlist(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let recording: Option = scuffle_utils::database::query( r#" WITH filtered_recordings AS ( SELECT @@ -346,7 +346,7 @@ async fn recording_playlist(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/playlist.rs b/video/edge/src/edge/stream/playlist.rs index eb9b202bc..4e3059e0a 100644 --- a/video/edge/src/edge/stream/playlist.rs +++ b/video/edge/src/edge/stream/playlist.rs @@ -4,9 +4,9 @@ use hyper::StatusCode; use pb::ext::UlidExt; use pb::scuffle::video::internal::LiveRenditionManifest; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; use video_common::database::{Recording, RecordingThumbnail, Rendition, Visibility}; use video_player_types::{ RenditionPlaylist, RenditionPlaylistRendition, RenditionPlaylistSegment,
RenditionPlaylistSegmentPart, @@ -192,7 +192,7 @@ pub async fn rendition_playlist( }; let recording_data = if let Some((recording_id, skip, active_idx)) = recording_data { - utils::database::query( + scuffle_utils::database::query( r#" SELECT s.public_url, @@ -239,7 +239,7 @@ pub async fn rendition_playlist( ); if !*skip { - let recording_rendition: RecordingRenditionExt = utils::database::query( + let recording_rendition: RecordingRenditionExt = scuffle_utils::database::query( r#" WITH filtered_renditions AS ( SELECT recording_id, rendition @@ -271,7 +271,7 @@ pub async fn rendition_playlist( .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to query database"))? .ok_or((StatusCode::NOT_FOUND, "recording no longer exists"))?; - let recording_thumbnails: Vec = utils::database::query( + let recording_thumbnails: Vec = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/tokens.rs b/video/edge/src/edge/stream/tokens.rs index c340208f3..052c3219d 100644 --- a/video/edge/src/edge/stream/tokens.rs +++ b/video/edge/src/edge/stream/tokens.rs @@ -5,9 +5,9 @@ use hmac::{Hmac, Mac}; use hyper::StatusCode; use jwt_next::asymmetric::VerifyingKey; use jwt_next::{asymmetric, AlgorithmType, SignWithKey, Token, VerifyWithKey}; +use scuffle_utils::http::ext::*; use sha2::Sha256; use ulid::Ulid; -use utils::http::ext::*; use video_common::database::{PlaybackKeyPair, Rendition}; use crate::config::EdgeConfig; @@ -131,7 +131,7 @@ impl TokenClaims { return Err((StatusCode::BAD_REQUEST, "invalid token, iat is too far in the past").into()); } - let keypair: Option = utils::database::query( + let keypair: Option = scuffle_utils::database::query( r#" SELECT * @@ -162,7 +162,7 @@ impl TokenClaims { .verify_with_key(&verifier) .map_err(|_| (StatusCode::BAD_REQUEST, "invalid token, failed to verify"))?; - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT 1 FROM playback_session_revocations WHERE organization_id = ") .push_bind(organization_id) @@ -201,7 +201,7 @@ impl TokenClaims { } if let Some(id) = token.claims().id.as_ref() { - if utils::database::query( + if scuffle_utils::database::query( "INSERT INTO playback_session_revocations(organization_id, sso_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", ) .bind(organization_id) diff --git a/video/edge/src/main.rs b/video/edge/src/main.rs index 8d21e41a6..9522e4066 100644 --- a/video/edge/src/main.rs +++ b/video/edge/src/main.rs @@ -5,8 +5,8 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; -use utils::context::Context; use video_edge::config::EdgeConfig; use video_edge::global::EdgeState; use video_edge::subscription; diff --git a/video/edge/src/subscription/mod.rs b/video/edge/src/subscription/mod.rs index f13f29c0c..ef9868502 100644 --- a/video/edge/src/subscription/mod.rs +++ b/video/edge/src/subscription/mod.rs @@ -1,10 +1,10 @@ use std::sync::Arc; use async_nats::jetstream::kv::Entry; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; -use utils::context::Context; pub use self::recv::SubscriberReceiver; use self::topics::TopicMap; diff --git a/video/ingest/Cargo.toml 
b/video/ingest/Cargo.toml index fb0088757..ff7da6b92 100644 --- a/video/ingest/Cargo.toml +++ b/video/ingest/Cargo.toml @@ -11,9 +11,9 @@ path = "src/main.rs" [dependencies] anyhow = "1.0" tracing = "0.1" -rustls = "0.22" +rustls = "0.23" rustls-pemfile = "2.0" -tokio-rustls = "0.25" +tokio-rustls = "0.26" async-trait = "0.1" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0", features = ["derive"] } @@ -28,13 +28,13 @@ serde_json = "1.0" uuid = "1.6" ulid = { version = "1.1", features = ["uuid"] } async-stream = "0.3" -async-nats = "0.33" -base64 = "0.21" +async-nats = "0.34" +base64 = "0.22" tokio-stream = "0.1" default-net = "0.22" postgres-from-row = "0.5" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } rtmp = { workspace = true } bytesio = { workspace = true } flv = { workspace = true } diff --git a/video/ingest/src/grpc/ingest.rs b/video/ingest/src/grpc/ingest.rs index 4b6b3f59e..041627bb4 100644 --- a/video/ingest/src/grpc/ingest.rs +++ b/video/ingest/src/grpc/ingest.rs @@ -6,8 +6,8 @@ use async_stream::try_stream; use futures_util::Stream; use pb::ext::UlidExt; use pb::scuffle::video::internal::{ingest_server, ingest_watch_request, IngestWatchRequest, IngestWatchResponse}; +use scuffle_utils::prelude::FutureTimeout; use tonic::{async_trait, Request, Response, Status, Streaming}; -use utils::prelude::FutureTimeout; use crate::global::{IncomingTranscoder, IngestGlobal}; diff --git a/video/ingest/src/ingest/connection.rs b/video/ingest/src/ingest/connection.rs index c16ca2d2f..369595dac 100644 --- a/video/ingest/src/ingest/connection.rs +++ b/video/ingest/src/ingest/connection.rs @@ -16,14 +16,14 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Rendition}; use prost::Message as _; use rtmp::{ChannelData, PublishRequest, Session, SessionError}; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::select; use tokio::sync::mpsc; use tokio::time::Instant; use tonic::{Status, Streaming}; use transmuxer::{AudioSettings, MediaSegment, TransmuxResult, Transmuxer, VideoSettings}; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use video_common::database::RoomStatus; use video_common::{events, keys}; @@ -176,7 +176,7 @@ impl Connection { let id = Ulid::new(); - let result: Option = utils::database::query( + let result: Option = scuffle_utils::database::query( r#" UPDATE rooms as new SET @@ -492,7 +492,7 @@ impl Connection { WhichTranscoder::Current => { self.current_transcoder = None; self.current_transcoder_id = Ulid::nil(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -707,7 +707,7 @@ impl Connection { } .encode_to_vec(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -1087,7 +1087,7 @@ impl Connection { ) .await; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/ingest/mod.rs b/video/ingest/src/ingest/mod.rs index 04e5bbef4..a383a8ccc 100644 --- a/video/ingest/src/ingest/mod.rs +++ b/video/ingest/src/ingest/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use std::time::Duration; use anyhow::Result; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use crate::config::IngestConfig; use 
crate::global::IngestGlobal; diff --git a/video/ingest/src/ingest/update.rs b/video/ingest/src/ingest/update.rs index 5a849f1f4..336650fd5 100644 --- a/video/ingest/src/ingest/update.rs +++ b/video/ingest/src/ingest/update.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::mpsc; use ulid::Ulid; -use utils::prelude::FutureTimeout; use crate::global::IngestGlobal; @@ -22,7 +22,7 @@ pub async fn update_db( let mut success = false; for _ in 0..5 { - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/main.rs b/video/ingest/src/main.rs index c2d636559..660dea991 100644 --- a/video/ingest/src/main.rs +++ b/video/ingest/src/main.rs @@ -5,10 +5,10 @@ use std::sync::Arc; use anyhow::Context as _; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::Context; use video_ingest::config::IngestConfig; use video_ingest::global::IncomingTranscoder; diff --git a/video/ingest/src/tests/global.rs b/video/ingest/src/tests/global.rs index fe9b9eef4..48255a7fd 100644 --- a/video/ingest/src/tests/global.rs +++ b/video/ingest/src/tests/global.rs @@ -3,11 +3,11 @@ use std::sync::Arc; use binary_helper::logging; use postgres_from_row::tokio_postgres::NoTls; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; use crate::config::IngestConfig; use crate::global::IncomingTranscoder; diff --git a/video/ingest/src/tests/ingest.rs b/video/ingest/src/tests/ingest.rs index c0ea414f3..fe7467049 100644 --- a/video/ingest/src/tests/ingest.rs +++ b/video/ingest/src/tests/ingest.rs @@ -18,13 +18,13 @@ use pb::scuffle::video::internal::{ingest_watch_request, ingest_watch_response, use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Event, Rendition}; use prost::Message; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::AsyncWriteExt; use tokio::process::Command; use tokio::sync::mpsc; use tokio::task::JoinHandle; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use uuid::Uuid; use video_common::database::Room; use video_common::keys::{self, event_subject}; @@ -126,7 +126,7 @@ impl Watcher { tracing::info!("connecting to ingest server at {}", advertise_addr); - let channel = utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); + let channel = scuffle_utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); let mut client = IngestClient::new(channel); @@ -153,7 +153,7 @@ struct TestState { pub org_id: Ulid, pub room_id: Ulid, pub global: Arc, - pub handler: utils::context::Handler, + pub handler: scuffle_utils::context::Handler, pub transcoder_requests: Pin>>, pub events: Pin>>, pub ingest_handle: JoinHandle>, @@ -237,7 +237,7 @@ impl TestState { }) }; - utils::database::query("INSERT INTO organizations (id, name)
VALUES ($1, $2)") + scuffle_utils::database::query("INSERT INTO organizations (id, name) VALUES ($1, $2)") .bind(org_id) .bind("test") .build() @@ -247,7 +247,7 @@ impl TestState { let room_id = Ulid::new(); - utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") .bind(org_id) .bind(room_id) .bind(room_id.to_string()) @@ -321,7 +321,7 @@ async fn test_ingest_stream() { } let room: video_common::database::Room = - utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -508,7 +508,7 @@ async fn test_ingest_stream() { tracing::info!("waiting for transcoder to exit"); - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -710,7 +710,7 @@ async fn test_ingest_stream_shutdown() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -751,7 +751,7 @@ async fn test_ingest_stream_transcoder_full() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() diff --git a/video/lib/bytesio/Cargo.toml b/video/lib/bytesio/Cargo.toml index 39636ef71..355bef148 100644 --- a/video/lib/bytesio/Cargo.toml +++ b/video/lib/bytesio/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" license = "MIT OR Apache-2.0" [features] -tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:utils"] +tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:scuffle-utils"] default = ["tokio"] [dependencies] @@ -16,7 +16,7 @@ futures = { version = "0.3", optional = true } tokio-util = { version = "0.7", features = ["codec"], optional = true } tokio-stream = { version = "0.1", optional = true } tokio = { version = "1.36", optional = true } -utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } +scuffle-utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/bytesio/src/bytesio.rs b/video/lib/bytesio/src/bytesio.rs index 8ddffd61a..4c2c7705d 100644 --- a/video/lib/bytesio/src/bytesio.rs +++ b/video/lib/bytesio/src/bytesio.rs @@ -2,10 +2,10 @@ use std::time::Duration; use bytes::{Bytes, BytesMut}; use futures::SinkExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::{AsyncRead, AsyncWrite}; use tokio_stream::StreamExt; use tokio_util::codec::{BytesCodec, Framed}; -use utils::prelude::FutureTimeout; use super::bytesio_errors::BytesIOError; diff --git a/video/lib/rtmp/Cargo.toml b/video/lib/rtmp/Cargo.toml index 0e57c82fc..0544d23ab 100644 --- 
a/video/lib/rtmp/Cargo.toml +++ b/video/lib/rtmp/Cargo.toml @@ -21,7 +21,7 @@ tracing = "0.1" bytesio = { workspace = true, features = ["default"] } amf0 = { workspace = true } -utils = { workspace = true } +scuffle-utils = { workspace = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/rtmp/src/session/server_session.rs b/video/lib/rtmp/src/session/server_session.rs index 000258a56..631949d16 100644 --- a/video/lib/rtmp/src/session/server_session.rs +++ b/video/lib/rtmp/src/session/server_session.rs @@ -6,8 +6,8 @@ use bytes::Bytes; use bytesio::bytes_writer::BytesWriter; use bytesio::bytesio::{AsyncReadWrite, BytesIO}; use bytesio::bytesio_errors::BytesIOError; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::oneshot; -use utils::prelude::FutureTimeout; use super::define::RtmpCommand; use super::errors::SessionError; diff --git a/video/lib/rtmp/src/tests/rtmp.rs b/video/lib/rtmp/src/tests/rtmp.rs index b20f525a3..c9bcf0289 100644 --- a/video/lib/rtmp/src/tests/rtmp.rs +++ b/video/lib/rtmp/src/tests/rtmp.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; -use utils::prelude::FutureTimeout; use crate::channels::{ChannelData, UniqueID}; use crate::Session; diff --git a/video/transcoder/Cargo.toml b/video/transcoder/Cargo.toml index 24d764209..b8502c784 100644 --- a/video/transcoder/Cargo.toml +++ b/video/transcoder/Cargo.toml @@ -27,21 +27,21 @@ tokio-util = { version = "0.7", features = ["compat"] } tokio-stream = "0.1" ulid = { version = "1.1", features = ["uuid"] } uuid = { version = "1.6", features = ["serde", "v4"] } -async-nats = "0.33" +async-nats = "0.34" thiserror = "1.0" aws-config = "1.1" aws-sdk-s3 = { version = "1.12", features = ["behavior-version-latest"] } -image = "0.24" +image = "0.25" aac = { workspace = true } mp4 = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } bytesio = { workspace = true, features = ["default"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["tokio-channel", "tracing", "task-abort"] } +scuffle-ffmpeg = { workspace = true, features = ["tokio-channel", "tracing"] } [dev-dependencies] dotenvy = "0.15" diff --git a/video/transcoder/src/global.rs b/video/transcoder/src/global.rs index 909ed4cfd..5f03c810c 100644 --- a/video/transcoder/src/global.rs +++ b/video/transcoder/src/global.rs @@ -1,4 +1,4 @@ -use utils::grpc::TlsSettings; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/main.rs b/video/transcoder/src/main.rs index a814f362b..43b827c2b 100644 --- a/video/transcoder/src/main.rs +++ b/video/transcoder/src/main.rs @@ -5,9 +5,9 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::grpc::TlsSettings; use video_transcoder::config::TranscoderConfig; #[derive(Debug, Clone, Default, serde::Deserialize, config::Config)] diff --git a/video/transcoder/src/tests/global.rs
b/video/transcoder/src/tests/global.rs index 9d9100103..26283e1b2 100644 --- a/video/transcoder/src/tests/global.rs +++ b/video/transcoder/src/tests/global.rs @@ -1,11 +1,11 @@ use std::sync::Arc; use binary_helper::logging; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::grpc::TlsSettings; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/tests/transcoder/mod.rs b/video/transcoder/src/tests/transcoder/mod.rs index c1aab61dc..fc5c9b748 100644 --- a/video/transcoder/src/tests/transcoder/mod.rs +++ b/video/transcoder/src/tests/transcoder/mod.rs @@ -19,6 +19,7 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, AudioConfig, Event, Rendition, VideoConfig}; use prost::Message; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; use tokio_stream::wrappers::ReceiverStream; @@ -26,7 +27,6 @@ use tokio_stream::StreamExt; use tonic::Response; use transmuxer::{TransmuxResult, Transmuxer}; use ulid::Ulid; -use utils::prelude::FutureTimeout; use video_common::database::{Room, RoomStatus}; use video_common::ext::AsyncReadExt as _; @@ -114,7 +114,7 @@ async fn test_transcode() { let room_id = Ulid::new(); let connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -131,7 +131,7 @@ .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( id, @@ -545,7 +545,7 @@ async fn test_transcode() { assert_eq!(json["streams"][0]["duration_ts"], 48128); assert_eq!(json["streams"][0]["time_base"], "1/48000"); - let room: Room = utils::database::query( + let room: Room = scuffle_utils::database::query( "SELECT * FROM rooms WHERE organization_id = $1 AND id = $2 AND active_ingest_connection_id = $3", ) .bind(org_id) @@ -651,7 +651,7 @@ async fn test_transcode_reconnect() { let room_id = Ulid::new(); let connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -668,7 +668,7 @@ .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( organization_id, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs index ba8755b99..25a4cea8d 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs @@ -1,14 +1,14 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; -use ffmpeg::packet::Packet; use mp4::codec::AudioCodec; use pb::scuffle::video::v1::types::AudioConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use
scuffle_ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, AVPictureType}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::packet::Packet; use tokio::sync::mpsc; use super::{muxer_options, Transcoder}; @@ -16,8 +16,8 @@ use super::{muxer_options, Transcoder}; pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Dictionary)> { Ok(match codec { AudioCodec::Aac { object_type } => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) .ok_or(FfmpegError::NoEncoder) .context("failed to find aac encoder")?; @@ -38,8 +38,8 @@ pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Diction ) } AudioCodec::Opus => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libopus") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libopus") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) .ok_or(FfmpegError::NoEncoder) .context("failed to find opus encoder")?; @@ -56,7 +56,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), diff --git a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs index 13db1584d..8207fb5ad 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs @@ -4,15 +4,15 @@ use std::time::{Duration, Instant}; use anyhow::Context; use bytes::Bytes; -use ffmpeg::decoder::Decoder; -use ffmpeg::dict::Dictionary; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVMediaType, AVPixelFormat}; -use ffmpeg::frame::Frame; -use ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; -use ffmpeg::io::OutputOptions; -use ffmpeg::log::LogLevel; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_ffmpeg::decoder::Decoder; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVMediaType, AVPixelFormat}; +use scuffle_ffmpeg::frame::Frame; +use scuffle_ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::log::LogLevel; use tokio::sync::mpsc; use video_common::database::Rendition; @@ -23,16 +23,16 @@ mod video; const MP4_FLAGS: &str = "frag_keyframe+frag_every_frame+empty_moov+delay_moov+default_base_moof"; -type ChannelCompatRecv = ffmpeg::io::channel::ChannelCompat>; -type ChannelCompatSend = ffmpeg::io::channel::ChannelCompat>>; +type ChannelCompatRecv = scuffle_ffmpeg::io::channel::ChannelCompat>; +type ChannelCompatSend = scuffle_ffmpeg::io::channel::ChannelCompat>>; -type Input = ffmpeg::io::Input; -type Output = ffmpeg::io::Output; -type VideoDecoder = ffmpeg::decoder::VideoDecoder; -type AudioDecoder = ffmpeg::decoder::AudioDecoder; -type Encoder = ffmpeg::encoder::MuxerEncoder; -type Scalar = ffmpeg::scalar::Scalar; -type Limiter = 
ffmpeg::limiter::FrameRateLimiter; +type Input = scuffle_ffmpeg::io::Input; +type Output = scuffle_ffmpeg::io::Output; +type VideoDecoder = scuffle_ffmpeg::decoder::VideoDecoder; +type AudioDecoder = scuffle_ffmpeg::decoder::AudioDecoder; +type Encoder = scuffle_ffmpeg::encoder::MuxerEncoder; +type Scalar = scuffle_ffmpeg::scalar::Scalar; +type Limiter = scuffle_ffmpeg::limiter::FrameRateLimiter; static SETUP_LOGGING: std::sync::Once = std::sync::Once::new(); @@ -90,11 +90,11 @@ impl Transcoder { mut audio_outputs: Vec, ) -> anyhow::Result { SETUP_LOGGING.call_once(|| { - ffmpeg::log::set_log_level(LogLevel::Trace); - ffmpeg::log::log_callback_tracing(); + scuffle_ffmpeg::log::set_log_level(LogLevel::Trace); + scuffle_ffmpeg::log::log_callback_tracing(); }); - let input = ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; + let input = scuffle_ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; let video_stream = input .streams() @@ -108,14 +108,15 @@ impl Transcoder { .ok_or(FfmpegError::NoStream) .context("failed to find video stream")?; - let video_decoder = match ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { - Decoder::Video(decoder) => decoder, - _ => anyhow::bail!("expected video decoder"), - }; + let video_decoder = + match scuffle_ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { + Decoder::Video(decoder) => decoder, + _ => anyhow::bail!("expected video decoder"), + }; let (screenshot_width, screenshot_height) = screenshot_size(video_decoder.width(), video_decoder.height()); - let screenshot_scalar = ffmpeg::scalar::Scalar::new( + let screenshot_scalar = scuffle_ffmpeg::scalar::Scalar::new( video_decoder.width(), video_decoder.height(), video_decoder.pixel_format(), @@ -148,7 +149,7 @@ impl Transcoder { .remove(&Rendition::AudioSource) .ok_or_else(|| anyhow::anyhow!("missing audio source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -175,7 +176,7 @@ impl Transcoder { .remove(&Rendition::VideoSource) .ok_or_else(|| anyhow::anyhow!("missing video source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -227,7 +228,7 @@ impl Transcoder { .context("failed to find video stream")?; this.audio_decoder = Some( - match ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac decoder")? { + match scuffle_ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac decoder")? 
{ Decoder::Audio(decoder) => decoder, _ => anyhow::bail!("expected audio decoder"), }, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/video.rs b/video/transcoder/src/transcoder/job/ffmpeg/video.rs index ad2610b72..3a04c92a1 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/video.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/video.rs @@ -1,13 +1,13 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; use mp4::codec::VideoCodec; use pb::scuffle::video::v1::types::VideoConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; use tokio::sync::mpsc; use super::{muxer_options, Limiter, Scalar, Transcoder}; @@ -59,8 +59,8 @@ pub fn codec_options(config: &TranscoderConfig, codec: VideoCodec) -> anyhow::Re config .h264_encoder .as_ref() - .map(|name| ffmpeg::codec::EncoderCodec::by_name(name)) - .unwrap_or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) + .map(|name| scuffle_ffmpeg::codec::EncoderCodec::by_name(name)) + .unwrap_or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) .ok_or(FfmpegError::NoEncoder) .context("failed to find h264 encoder")?, options, @@ -83,7 +83,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -144,7 +144,7 @@ impl Transcoder { Ok(()) } - pub fn handle_video_packet(&mut self, mut packet: ffmpeg::packet::Packet) -> anyhow::Result<()> { + pub fn handle_video_packet(&mut self, mut packet: scuffle_ffmpeg::packet::Packet) -> anyhow::Result<()> { packet.set_pos(Some(-1)); for copy in self.video_copies.iter_mut() { copy.write_interleaved_packet(packet.clone()).context("copy")?; diff --git a/video/transcoder/src/transcoder/job/mod.rs b/video/transcoder/src/transcoder/job/mod.rs index f8c18920a..6691c26ab 100644 --- a/video/transcoder/src/transcoder/job/mod.rs +++ b/video/transcoder/src/transcoder/job/mod.rs @@ -19,12 +19,12 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::event; use prost::Message as _; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use tokio::{select, try_join}; use tokio_util::sync::CancellationToken; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::task::AsyncTask; use video_common::database::Rendition; use self::recording::Recording; @@ -215,7 +215,7 @@ impl Job { let tls = global.ingest_tls(); - let channel = utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; + let channel = scuffle_utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; let mut client = IngestClient::new(channel); diff --git a/video/transcoder/src/transcoder/job/recording.rs
b/video/transcoder/src/transcoder/job/recording.rs index ecf5aa984..c5ad145c5 100644 --- a/video/transcoder/src/transcoder/job/recording.rs +++ b/video/transcoder/src/transcoder/job/recording.rs @@ -9,10 +9,10 @@ use pb::ext::UlidExt; use pb::scuffle::video::internal::live_rendition_manifest::recording_data::RecordingThumbnail; use pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, Rendition as PbRendition, VideoConfig}; use prost::Message; +use scuffle_utils::database::tokio_postgres::Transaction; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use ulid::Ulid; -use utils::database::tokio_postgres::Transaction; -use utils::task::AsyncTask; use video_common::database::{Rendition, S3Bucket, Visibility}; use super::task::recording::{recording_task, recording_thumbnail_task, RecordingTask, RecordingThumbnailTask}; @@ -68,7 +68,7 @@ impl Recording { let allow_dvr = recording_renditions.len() == video_outputs.len() + audio_outputs.len(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO recordings ( id, @@ -100,17 +100,19 @@ impl Recording { .execute(tx) .await?; - utils::database::query("INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)") - .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { - b.push_bind(organization_id); - b.push_bind(id); - b.push_bind(rendition); - b.push_bind(config); - }) - .push("ON CONFLICT DO NOTHING") - .build() - .execute(tx) - .await?; + scuffle_utils::database::query( + "INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)", + ) + .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { + b.push_bind(organization_id); + b.push_bind(id); + b.push_bind(rendition); + b.push_bind(config); + }) + .push("ON CONFLICT DO NOTHING") + .build() + .execute(tx) + .await?; let mut tasks = Vec::new(); let mut uploaders = HashMap::new(); diff --git a/video/transcoder/src/transcoder/job/screenshot.rs b/video/transcoder/src/transcoder/job/screenshot.rs index d930a6578..26c7bb9c8 100644 --- a/video/transcoder/src/transcoder/job/screenshot.rs +++ b/video/transcoder/src/transcoder/job/screenshot.rs @@ -1,13 +1,13 @@ use anyhow::Context; use bytes::Bytes; -use ffmpeg::ffi::AVPixelFormat; -use ffmpeg::frame::Frame; use image::codecs::jpeg::JpegEncoder; +use scuffle_ffmpeg::ffi::AVPixelFormat; +use scuffle_ffmpeg::frame::Frame; use tokio::sync::mpsc; pub fn screenshot_task(mut recv: mpsc::Receiver, send: mpsc::Sender<(Bytes, f64)>) -> anyhow::Result<()> { while let Some(frame) = recv.blocking_recv() { - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); let frame = frame.video(); @@ -22,7 +22,7 @@ pub fn screenshot_task(mut recv: mpsc::Receiver, send: mpsc::Sender<(Byte } encoder - .encode(data, width, height, image::ColorType::Rgba8) + .encode(data, width, height, image::ExtendedColorType::Rgba8) .context("failed to encode jpeg")?; let data = Bytes::from(writer); diff --git a/video/transcoder/src/transcoder/job/sql_operations.rs b/video/transcoder/src/transcoder/job/sql_operations.rs index 56cdaeb5f..40226b529 100644 --- a/video/transcoder/src/transcoder/job/sql_operations.rs +++ b/video/transcoder/src/transcoder/job/sql_operations.rs @@ -29,7 +29,7 @@ pub async fn perform_sql_operations( ) -> anyhow::Result { let mut client = global.db().get().await.context("failed to get database connection")?; - let room: Option = match utils::database::query( + let room: Option = match 
scuffle_utils::database::query( r#" SELECT * @@ -69,7 +69,7 @@ pub async fn perform_sql_operations( Some(recording_config) } else if let Some(recording_config_id) = &room.recording_config_id { Some( - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -101,7 +101,7 @@ pub async fn perform_sql_operations( Some(( recording_config, - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -131,7 +131,7 @@ pub async fn perform_sql_operations( let transcoding_config = if let Some(transcoding_config) = room.active_transcoding_config { transcoding_config } else if let Some(transcoding_config_id) = &room.transcoding_config_id { - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -164,7 +164,7 @@ pub async fn perform_sql_operations( let tx = client.transaction().await.context("failed to start transaction")?; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/generic.rs b/video/transcoder/src/transcoder/job/task/generic.rs index 42084704d..74f4eaa9c 100644 --- a/video/transcoder/src/transcoder/job/task/generic.rs +++ b/video/transcoder/src/transcoder/job/task/generic.rs @@ -45,7 +45,7 @@ pub async fn generic_task( .context("upload manifest")?; } GenericTask::RoomReady {} => { - if utils::database::query( + if scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/recording.rs b/video/transcoder/src/transcoder/job/task/recording.rs index 6f278a3db..f30aed4e4 100644 --- a/video/transcoder/src/transcoder/job/task/recording.rs +++ b/video/transcoder/src/transcoder/job/task/recording.rs @@ -73,7 +73,7 @@ pub async fn recording_task( .await .context("upload segment")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_rendition_segments ( organization_id, @@ -168,7 +168,7 @@ pub async fn recording_thumbnail_task( .await .context("upload thumbnail")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_thumbnails ( organization_id, diff --git a/video/transcoder/src/transcoder/mod.rs b/video/transcoder/src/transcoder/mod.rs index 699ce1d52..c1de7e058 100644 --- a/video/transcoder/src/transcoder/mod.rs +++ b/video/transcoder/src/transcoder/mod.rs @@ -6,8 +6,8 @@ use async_nats::jetstream::consumer::pull::Config; use async_nats::jetstream::consumer::DeliverPolicy; use async_nats::jetstream::stream::RetentionPolicy; use futures::StreamExt; +use scuffle_utils::context::ContextExt; use tokio_util::sync::CancellationToken; -use utils::context::ContextExt; use crate::config::TranscoderConfig; use crate::global::TranscoderGlobal;