diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index f14cee6268..2a99503b52 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -88,7 +88,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "getrandom 0.2.10", "once_cell", "version_check", @@ -96,9 +96,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -196,12 +196,6 @@ dependencies = [ "password-hash", ] -[[package]] -name = "arrayref" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" - [[package]] name = "arraystring" version = "0.3.0" @@ -240,24 +234,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" dependencies = [ "concurrent-queue", - "event-listener 2.5.3", + "event-listener", "futures-core", ] [[package]] name = "async-dup" -version = "1.2.3" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "865d94538a2d4f7197f9e08daf94203c06be78fe87a9d293ba4dd718028c5783" +checksum = "7427a12b8dc09291528cfb1da2447059adb4a257388c2acd6497a79d55cf6f7c" dependencies = [ "futures-io", + "simple-mutex", ] [[package]] name = "async-executor" -version = "1.5.4" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1da3ae8dabd9c00f453a329dfe1fb28da3c0a72e2478cdcd93171740c20499" +checksum = "4b0c4a4f319e45986f347ee47fef8bf5e81c9abc3f6f58dc2391439f30df65f0" dependencies = [ "async-lock", "async-task", @@ -284,14 +279,15 @@ dependencies = [ [[package]] name = "async-h1" -version = "2.3.3" +version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8101020758a4fc3a7c326cb42aa99e9fa77cbfb76987c128ad956406fe1f70a7" +checksum = "5d1d1dae8cb2c4258a79d6ed088b7fb9b4763bf4e9b22d040779761e046a2971" dependencies = [ "async-channel", "async-dup", - "async-std", - "futures-core", + "async-global-executor", + "async-io", + "futures-lite", "http-types", "httparse", "log", @@ -306,15 +302,15 @@ checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock", "autocfg", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "futures-lite", "log", "parking", "polling", - "rustix 0.37.24", + "rustix 0.37.25", "slab", - "socket2", + "socket2 0.4.9", "waker-fn", ] @@ -324,78 +320,7 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-process" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf012553ce51eb7aa6dc2143804cc8252bd1cb681a1c5cb7fa94ca88682dee1d" -dependencies = [ - "async-io", - "async-lock", - "async-signal", - "blocking", - "cfg-if 1.0.0", - "event-listener 3.0.0", - "futures-lite", - "rustix 0.38.15", - "windows-sys", -] - -[[package]] -name = "async-session" -version = "2.0.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "345022a2eed092cd105cc1b26fd61c341e100bd5fcbbd792df4baf31c2cc631f" -dependencies = [ - "anyhow", - "async-std", - "async-trait", - "base64 0.12.3", - "bincode", - "blake3", - "chrono", - "hmac 0.8.1", - "kv-log-macro", - "rand 0.7.3", - "serde", - "serde_json", - "sha2 0.9.9", -] - -[[package]] -name = "async-signal" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c99f3cb3f9ff89f7d718fbb942c9eb91bedff12e396adf09a622dfe7ffec2bc2" -dependencies = [ - "async-io", - "async-lock", - "atomic-waker", - "cfg-if 1.0.0", - "concurrent-queue", - "futures-core", - "futures-io", - "libc", - "signal-hook-registry", - "slab", - "windows-sys", -] - -[[package]] -name = "async-sse" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53bba003996b8fd22245cd0c59b869ba764188ed435392cf2796d03b805ade10" -dependencies = [ - "async-channel", - "async-std", - "http-types", - "log", - "memchr", - "pin-project-lite 0.1.12", + "event-listener", ] [[package]] @@ -409,7 +334,6 @@ dependencies = [ "async-global-executor", "async-io", "async-lock", - "async-process", "crossbeam-utils", "futures-channel", "futures-core", @@ -420,7 +344,7 @@ dependencies = [ "log", "memchr", "once_cell", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "slab", "wasm-bindgen-futures", @@ -434,7 +358,7 @@ checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" dependencies = [ "async-stream-impl", "futures-core", - "pin-project-lite 0.2.13", + "pin-project-lite", ] [[package]] @@ -445,7 +369,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -469,13 +393,13 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -510,15 +434,15 @@ dependencies = [ "serde", "serde-xml-rs", "thiserror", - "time 0.3.29", + "time 0.3.30", "url", ] [[package]] name = "aws-region" -version = "0.25.3" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" +checksum = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22" dependencies = [ "thiserror", ] @@ -531,7 +455,7 @@ checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -578,10 +502,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] -name = "bincode" -version = "1.3.3" +name = "beef" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" dependencies = [ "serde", ] @@ -594,9 +518,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version 
= "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" dependencies = [ "serde", ] @@ -622,21 +546,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "blake3" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if 0.1.10", - "constant_time_eq", - "crypto-mac 0.8.0", - "digest 0.9.0", -] - [[package]] name = "block-buffer" version = "0.9.0" @@ -657,9 +566,9 @@ dependencies = [ [[package]] name = "blocking" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94c4ef1f913d78636d78d538eec1f18de81e481f44b1be0a81060090530846e1" +checksum = "8c36a4d0d48574b3dd360b4b7d95cc651d2b6557b6402848a27d4b228a473e2a" dependencies = [ "async-channel", "async-lock", @@ -673,9 +582,9 @@ dependencies = [ [[package]] name = "built" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462f4ab147e1378c64dacd28f03a56d4771d93eab6c325265a35355ce47213d" +checksum = "38d17f4d6e4dc36d1a02fbedc2753a096848e7c1b0772f7654eab8e2c927dd53" dependencies = [ "chrono", "git2", @@ -711,9 +620,9 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" @@ -731,12 +640,6 @@ dependencies = [ "libc", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -751,10 +654,8 @@ checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ "android-tzdata", "iana-time-zone", - "js-sys", "num-traits", "serde", - "wasm-bindgen", "windows-targets", ] @@ -862,12 +763,6 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935" -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - [[package]] name = "convert_case" version = "0.4.0" @@ -891,6 +786,16 @@ dependencies = [ "version_check", ] +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -927,36 +832,13 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if 1.0.0", -] - 
-[[package]] -name = "crossbeam" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-epoch", - "crossbeam-queue", - "crossbeam-utils", -] - [[package]] name = "crossbeam-channel" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", ] @@ -966,7 +848,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] @@ -978,7 +860,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", "memoffset", "scopeguard", @@ -990,7 +872,7 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", ] @@ -1000,18 +882,7 @@ version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crossfire" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27e3e43d24a1359c5f1ba9ed36bcded70f0226fc5ad535826ee6cb1a7c07db5c" -dependencies = [ - "async-trait", - "crossbeam", - "futures", + "cfg-if", ] [[package]] @@ -1030,16 +901,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array", - "subtle", -] - [[package]] name = "crypto-mac" version = "0.10.0" @@ -1074,7 +935,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -1110,7 +971,7 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "hashbrown 0.14.1", "lock_api", "once_cell", @@ -1148,21 +1009,22 @@ dependencies = [ [[package]] name = "deepwell" -version = "2023.10.6" +version = "2023.10.19" dependencies = [ "anyhow", "argon2", "arraystring", "async-std", + "async-trait", "built", - "cfg-if 1.0.0", + "cfg-if", "clap", "color-backtrace", - "crossfire", "cuid2", "data-encoding", "dotenvy", "either", + "femme", "filemagic", "fluent", "ftml", @@ -1170,6 +1032,8 @@ dependencies = [ "hex", "hostname", "intl-memoizer", + "jsonrpsee", + "log", "notify", "once_cell", "otp", @@ -1177,6 +1041,7 @@ dependencies = [ "rand 0.8.5", "ref-map", "regex", + "reqwest", "rust-s3", "sea-orm", "sea-query", @@ -1189,14 +1054,13 @@ dependencies = [ "strum_macros", "subtle", "thiserror", - "tide", - "time 0.3.29", + "time 0.3.30", "tiny-keccak", + "tokio", "toml", 
"typenum", "unic-langid", "unicase", - "ureq", "wikidot-normalize 0.12.0", "wikidot-path", ] @@ -1214,10 +1078,11 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" dependencies = [ + "powerfmt", "serde", ] @@ -1300,7 +1165,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -1345,7 +1210,7 @@ version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1371,7 +1236,7 @@ checksum = "04d0b288e3bb1d861c4403c1774a6f7a798781dfc519b3647df2a3dd4ae95f25" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -1405,32 +1270,21 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136526188508e25c6fef639d7927dfb3e0e3084488bf202267829cf7fc23dbdd" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ - "errno-dragonfly", "libc", "windows-sys", ] -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "etcetera" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "home", "windows-sys", ] @@ -1441,17 +1295,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "event-listener" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29e56284f00d94c1bc7fd3c77027b4623c88c1f53d8d2394c6199f2921dea325" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - [[package]] name = "fastrand" version = "1.9.0" @@ -1473,7 +1316,7 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc04871e5ae3aa2952d552dae6b291b3099723bf779a8054281c1366a54613ef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "log", "serde", @@ -1499,7 +1342,7 @@ version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.3.5", "windows-sys", @@ -1511,16 +1354,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" -[[package]] -name = "flate2" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - [[package]] name = "fluent" version = "0.16.0" @@ -1607,7 +1440,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "66be61386787a47353524bd03d4828d3fe33950b368adf007739363d549b26d6" dependencies = [ "built", - "cfg-if 1.0.0", + "cfg-if", "entities", "enum-map", "getrandom 0.2.10", @@ -1630,7 +1463,7 @@ dependencies = [ "str-macro", "strum", "strum_macros", - "time 0.3.29", + "time 0.3.30", "tinyvec", "unicase", "wasm-bindgen", @@ -1714,7 +1547,7 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.13", + "pin-project-lite", "waker-fn", ] @@ -1726,7 +1559,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -1754,7 +1587,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "slab", ] @@ -1784,7 +1617,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] @@ -1795,7 +1628,7 @@ version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", @@ -1824,7 +1657,7 @@ version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf97ba92db08df386e10c8ede66a2a0369bd277090afd8710e19e38de9ec0cd" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "libc", "libgit2-sys", "log", @@ -1843,6 +1676,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "h2" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 1.9.3", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1891,6 +1743,9 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "hkdf" @@ -1911,23 +1766,13 @@ dependencies = [ "hmac 0.12.1", ] -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac 0.8.0", - "digest 0.9.0", -] - [[package]] name = "hmac" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" dependencies = [ - "crypto-mac 0.10.0", + "crypto-mac", "digest 0.9.0", ] @@ -1971,6 +1816,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + [[package]] name = "http-client" version = "6.5.3" @@ -1981,7 +1837,7 @@ dependencies = [ "async-std", "async-tls", "async-trait", - "cfg-if 1.0.0", + "cfg-if", "dashmap", "deadpool", "futures", @@ -2003,7 +1859,7 @@ dependencies = [ 
"cookie", "futures-lite", "infer", - "pin-project-lite 0.2.13", + "pin-project-lite", "rand 0.7.3", "serde", "serde_json", @@ -2018,6 +1874,50 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.4.9", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls 0.21.7", + "tokio", + "tokio-rustls", +] + [[package]] name = "iana-time-zone" version = "0.1.57" @@ -2051,6 +1951,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.0.2" @@ -2075,7 +1985,7 @@ checksum = "ce243b1bfa62ffc028f1cc3b6034ec63d649f3031bc8a4fbbb004e1ac17d1f68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -2104,7 +2014,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2137,6 +2047,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "ipnet" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" + [[package]] name = "itertools" version = "0.10.5" @@ -2147,36 +2063,123 @@ dependencies = [ ] [[package]] -name = "itertools" -version = "0.11.0" +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "jobserver" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpsee" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"de902baa44bf34a58b1a4906f8b840d7d60dcec5f41fe08b4dbc14cf9efa821c" +dependencies = [ + "jsonrpsee-core", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f45d37af23707750136379f6799e76ebfcf2d425ec4e36d0deb7921da5e65c" +dependencies = [ + "anyhow", + "async-trait", + "beef", + "futures-util", + "hyper", + "jsonrpsee-types", + "parking_lot", + "rand 0.8.5", + "rustc-hash", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "f26b3675a943d083d0bf6e367ec755dccec56c41888afa13b191c1c4ff87c652" dependencies = [ - "either", + "heck", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", ] [[package]] -name = "itoa" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" - -[[package]] -name = "jobserver" -version = "0.1.26" +name = "jsonrpsee-server" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +checksum = "2ed2bec9c76cee118c27138cc1c877938bcaa01207a5d902b80dbfc60466bc9c" dependencies = [ - "libc", + "futures-util", + "http", + "hyper", + "jsonrpsee-core", + "jsonrpsee-types", + "route-recognizer", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tracing", ] [[package]] -name = "js-sys" -version = "0.3.64" +name = "jsonrpsee-types" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "05eaff23af19f10ba6fbb76519bed6da4d3b9bbaef13d39b7c2b6c14e532d27e" dependencies = [ - "wasm-bindgen", + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", ] [[package]] @@ -2240,16 +2243,16 @@ checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" dependencies = [ "arrayvec", "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "ryu", "static_assertions", ] [[package]] name = "libc" -version = "0.2.148" +version = "0.2.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libgit2-sys" @@ -2265,9 +2268,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libsqlite3-sys" @@ -2300,9 +2303,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.7" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a9bad9f94746442c783ca431b22403b519cd7fbeed0533fdd6328b2f2212128" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" 
[[package]] name = "lock_api" @@ -2359,7 +2362,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "digest 0.10.7", ] @@ -2371,9 +2374,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.6.3" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memoffset" @@ -2454,7 +2457,7 @@ version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "crossbeam-channel", "filetime", "fsevent-sys", @@ -2553,9 +2556,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", "libm", @@ -2594,9 +2597,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "ordered-float" -version = "3.9.1" +version = "3.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a54938017eacd63036332b4ae5c8a49fc8c0c1d6d629893057e4f13609edd06" +checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" dependencies = [ "num-traits", ] @@ -2643,7 +2646,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -2725,7 +2728,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.3.5", "smallvec", @@ -2795,7 +2798,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -2908,15 +2911,9 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.13" @@ -2975,11 +2972,11 @@ checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ "autocfg", "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "libc", "log", - "pin-project-lite 0.2.13", + "pin-project-lite", "windows-sys", ] @@ -2994,6 +2991,12 @@ dependencies = [ "universal-hash", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3006,6 +3009,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = 
"proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3038,9 +3051,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.67" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" dependencies = [ "unicode-ident", ] @@ -3218,9 +3231,9 @@ checksum = "d22b73985e369f260445a5e08ad470117b30e522c91b4820585baa2e0cbf7075" [[package]] name = "regex" -version = "1.9.6" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", @@ -3230,9 +3243,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.9" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" dependencies = [ "aho-corasick", "memchr", @@ -3241,9 +3254,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.5" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "rend" @@ -3254,6 +3267,46 @@ dependencies = [ "bytecheck", ] +[[package]] +name = "reqwest" +version = "0.11.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +dependencies = [ + "base64 0.21.4", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls 0.21.7", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "system-configuration", + "tokio", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.25.2", + "winreg", +] + [[package]] name = "ring" version = "0.16.20" @@ -3299,9 +3352,9 @@ dependencies = [ [[package]] name = "route-recognizer" -version = "0.2.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56770675ebc04927ded3e60633437841581c285dc6236109ea25fbf3beb7b59e" +checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" [[package]] name = "rsa" @@ -3331,7 +3384,7 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ordered-multimap", ] @@ -3346,7 +3399,7 @@ dependencies = [ "aws-creds", "aws-region", "base64 0.13.1", - "cfg-if 1.0.0", + "cfg-if", "futures-io", "futures-util", "hex", @@ -3362,7 +3415,7 @@ dependencies = [ "sha2 0.10.8", 
"surf", "thiserror", - "time 0.3.29", + "time 0.3.30", "url", ] @@ -3393,14 +3446,14 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.19", + "semver 1.0.20", ] [[package]] name = "rustix" -version = "0.37.24" +version = "0.37.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4279d76516df406a8bd37e7dff53fd37d1a093f997a3c34a5c21658c126db06d" +checksum = "d4eb579851244c2c03e7c24f501c3432bed80b8f720af1d6e5b0e0f01555a035" dependencies = [ "bitflags 1.3.2", "errno", @@ -3412,14 +3465,14 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.15" +version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531" +checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", - "linux-raw-sys 0.4.7", + "linux-raw-sys 0.4.10", "windows-sys", ] @@ -3524,7 +3577,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -3547,7 +3600,7 @@ dependencies = [ "sqlx", "strum", "thiserror", - "time 0.3.29", + "time 0.3.30", "tracing", "url", "uuid", @@ -3563,7 +3616,7 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.37", + "syn 2.0.38", "unicode-ident", ] @@ -3578,7 +3631,7 @@ dependencies = [ "ordered-float", "sea-query-derive", "serde_json", - "time 0.3.29", + "time 0.3.30", ] [[package]] @@ -3590,7 +3643,7 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time 0.3.29", + "time 0.3.30", ] [[package]] @@ -3635,9 +3688,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "semver-parser" @@ -3647,9 +3700,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.188" +version = "1.0.189" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537" dependencies = [ "serde_derive", ] @@ -3679,13 +3732,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.188" +version = "1.0.189" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -3727,7 +3780,7 @@ checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -3751,6 +3804,19 @@ dependencies = [ "serde", ] +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + [[package]] name = "sha1" 
version = "0.6.1" @@ -3766,7 +3832,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -3784,7 +3850,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -3796,7 +3862,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -3845,6 +3911,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +[[package]] +name = "simple-mutex" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38aabbeafa6f6dead8cebf246fe9fae1f9215c8d29b3a69f93bd62a9e4a3dcd6" +dependencies = [ + "event-listener", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -3876,6 +3951,32 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes", + "futures", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha-1", +] + [[package]] name = "spin" version = "0.5.2" @@ -3941,7 +4042,7 @@ dependencies = [ "crossbeam-queue", "dotenvy", "either", - "event-listener 2.5.3", + "event-listener", "futures-channel", "futures-core", "futures-intrusive", @@ -3949,7 +4050,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap", + "indexmap 2.0.2", "log", "memchr", "once_cell", @@ -3963,7 +4064,7 @@ dependencies = [ "smallvec", "sqlformat", "thiserror", - "time 0.3.29", + "time 0.3.30", "tracing", "url", "webpki-roots 0.24.0", @@ -4016,7 +4117,7 @@ checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" dependencies = [ "atoi", "base64 0.21.4", - "bitflags 2.4.0", + "bitflags 2.4.1", "byteorder", "bytes", "crc", @@ -4046,7 +4147,7 @@ dependencies = [ "sqlx-core", "stringprep", "thiserror", - "time 0.3.29", + "time 0.3.30", "tracing", "whoami", ] @@ -4059,7 +4160,7 @@ checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" dependencies = [ "atoi", "base64 0.21.4", - "bitflags 2.4.0", + "bitflags 2.4.1", "byteorder", "crc", "dotenvy", @@ -4086,7 +4187,7 @@ dependencies = [ "sqlx-core", "stringprep", "thiserror", - "time 0.3.29", + "time 0.3.30", "tracing", "whoami", ] @@ -4109,7 +4210,7 @@ dependencies = [ "percent-encoding", "serde", "sqlx-core", - "time 0.3.29", + "time 0.3.30", "tracing", "url", ] @@ -4209,15 +4310,15 @@ checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" [[package]] name = "strum_macros" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] @@ -4234,7 +4335,7 @@ checksum = "718b1ae6b50351982dedff021db0def601677f2120938b070eadb10ba4038dd7" dependencies = [ "async-std", "async-trait", - "cfg-if 1.0.0", + "cfg-if", "futures-util", "getrandom 0.2.10", "http-client", @@ -4242,7 +4343,7 @@ dependencies = [ "log", "mime_guess", "once_cell", - "pin-project-lite 0.2.13", + "pin-project-lite", "rustls 0.18.1", "serde", "serde_json", @@ -4250,15 +4351,15 @@ dependencies = [ [[package]] name = "sval" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53219a43817adbb1d53e6d0dd8ba8242f9a8f144b876945f3cbca6282cc0f603" +checksum = "e55089b73dfa822e1eb6b635f8795215512cca94bfae11aee3a1a06228bc88bb" [[package]] name = "sval_buffer" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ea974e4cf17f8eedd11c8bcbdddad28a65f9faadeea62e0f092f031518a6e01" +checksum = "df307823073d63f1fb126895439fead41afc493ea35d636cceedef9f6b32ba81" dependencies = [ "sval", "sval_ref", @@ -4266,18 +4367,18 @@ dependencies = [ [[package]] name = "sval_dynamic" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb8bf9bfe769973b5ab924d3da1bc0e0f6b40c877c25cdd8eadba7b9bd32319a" +checksum = "e5f8e4c4d6d028d3cbff66c2bb3d98181d031d312b7df4550eea7142d7036f37" dependencies = [ "sval", ] [[package]] name = "sval_fmt" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "856e81b22a368aa8be4ba32c13301d2f350f0026b94126ebc41b0fcf14a4d89d" +checksum = "ad53f8eb502b0a3051fea001ae2e3723044699868ebfe06ea81b45545db392c2" dependencies = [ "itoa", "ryu", @@ -4286,9 +4387,9 @@ dependencies = [ [[package]] name = "sval_json" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afe2fd273a8b25b7f924ed8bfaaf54812d81d27b300d5c4f890f41bba96f7e5" +checksum = "f913253c9f6cd27645ba9a0b6788039b5d4338eae0833c64b42ef178168d2862" dependencies = [ "itoa", "ryu", @@ -4297,18 +4398,18 @@ dependencies = [ [[package]] name = "sval_ref" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416a04f4aa4be4fea8badd65a92a18f7a1082447513616866c0bd2c84c7d86aa" +checksum = "66a9661412d06740ebe81512a527b3d9220460eb7685f4399232c0e670108cb7" dependencies = [ "sval", ] [[package]] name = "sval_serde" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e14464ea11b8e017b21481f7f5982d49a150bb8c27bf4edc1eb8694a61cf50" +checksum = "b8d077e98c1c8dfa466837ae0ec1e03c78138d42ac75662dac05e1bf0aebae20" dependencies = [ "serde", "sval", @@ -4329,9 +4430,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.37" +version = "2.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" +checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" dependencies = [ "proc-macro2", "quote", @@ -4350,6 +4451,27 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "system-configuration" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -4362,10 +4484,10 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand 2.0.1", "redox_syscall 0.3.5", - "rustix 0.38.15", + "rustix 0.38.19", "windows-sys", ] @@ -4395,30 +4517,7 @@ checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", -] - -[[package]] -name = "tide" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c459573f0dd2cc734b539047f57489ea875af8ee950860ded20cf93a79a1dee0" -dependencies = [ - "async-h1", - "async-session", - "async-sse", - "async-std", - "async-trait", - "femme", - "futures-util", - "http-client", - "http-types", - "kv-log-macro", - "log", - "pin-project-lite 0.2.13", - "route-recognizer", - "serde", - "serde_json", + "syn 2.0.38", ] [[package]] @@ -4438,12 +4537,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", + "powerfmt", "serde", "time-core", "time-macros 0.2.15", @@ -4498,9 +4598,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b07bb54ef1f8ff27564b08b861144d3b8d40263efe07684f64987f4c0d044e3e" +checksum = "d5d0e245e80bdc9b4e5356fc45a72184abbc3861992603f515270e9340f5a219" dependencies = [ "displaydoc", ] @@ -4522,12 +4622,68 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.32.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" +checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" dependencies = [ "backtrace", - "pin-project-lite 0.2.13", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.5.4", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.38", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.7", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", ] [[package]] @@ -4539,7 +4695,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.20.2", ] [[package]] @@ -4551,48 +4707,81 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.0.2", + "toml_datetime", + "winnow", +] + [[package]] name = "toml_edit" version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap", + "indexmap 2.0.2", "serde", "serde_spanned", "toml_datetime", "winnow", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9" dependencies = [ - "cfg-if 1.0.0", "log", - "pin-project-lite 0.2.13", + "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", ] @@ -4603,6 +4792,12 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + [[package]] name = "type-map" version = "0.4.0" @@ -4706,25 
+4901,6 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" -[[package]] -name = "ureq" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3" -dependencies = [ - "base64 0.21.4", - "encoding_rs", - "flate2", - "log", - "once_cell", - "rustls 0.21.7", - "rustls-webpki", - "serde", - "serde_json", - "url", - "webpki-roots 0.25.2", -] - [[package]] name = "url" version = "2.4.1" @@ -4822,6 +4998,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" @@ -4840,7 +5025,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "serde", "serde_json", "wasm-bindgen-macro", @@ -4857,7 +5042,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", "wasm-bindgen-shared", ] @@ -4867,7 +5052,7 @@ version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -4891,7 +5076,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.38", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5096,13 +5281,23 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winnow" -version = "0.5.15" +version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" +checksum = "a3b801d0e0a6726477cc207f60162da452f3a95adb368399bef20a946e06f65c" dependencies = [ "memchr", ] +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys", +] + [[package]] name = "wyz" version = "0.5.1" diff --git a/deepwell/Cargo.toml b/deepwell/Cargo.toml index a81eca39a2..4f42518559 100644 --- a/deepwell/Cargo.toml +++ b/deepwell/Cargo.toml @@ -8,7 +8,7 @@ keywords = ["wikijump", "api", "backend", "wiki"] categories = ["asynchronous", "database", "web-programming::http-server"] exclude = [".gitignore", ".editorconfig"] -version = "2023.10.6" +version = "2023.10.19" authors = ["Emmie Maeda "] edition = "2021" # this is *not* the same as the current year @@ -20,21 +20,24 @@ anyhow = "1" argon2 = "0.5" arraystring = "0.3" async-std = { version = "1", features = ["attributes"] } +async-trait = "0.1" cfg-if = "1" clap = "4" color-backtrace = "0.6" -crossfire = "1.0" cuid2 = "0.1" data-encoding = "2" dotenvy = "0.15" either = "1" +femme = "2" filemagic = "0.12" fluent = "0.16" ftml = { version = "1.22", features = ["mathml"] } futures = { version = "0.3", features = ["async-await"], default-features = false } -hex = "0.4" +hex = { version = "0.4", features = ["serde"] } 
hostname = "0.3" intl-memoizer = "0.5" +jsonrpsee = { version = "0.20", features = ["macros", "server"] } +log = "0.4" notify = { version = "6", optional = true } once_cell = "1" otp = { git = "https://github.com/TimDumol/rust-otp" } @@ -42,6 +45,7 @@ paste = "1" rand = "0.8" ref-map = "0.1" regex = "1" +reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false } rust-s3 = { version = "0.32", features = ["with-async-std"], default-features = false } sea-orm = { version = "0.12", features = ["sqlx-postgres", "runtime-async-std-rustls", "postgres-array", "macros", "with-json", "with-time"], default-features = false } sea-query = "0.30" @@ -54,12 +58,11 @@ strum = "0.25" strum_macros = "0.25" subtle = "2.4" thiserror = "1" -tide = "0.16" time = { version = "0.3", features = ["parsing", "serde", "serde-human-readable"], default-features = false } tiny-keccak = { version = "2", features = ["k12"] } toml = { version = "0.8", features = ["parse"] } +tokio = { version = "1", features = ["full"] } typenum = "1" -ureq = { version = "2.8.0", features = ["charset", "json", "tls"] } unic-langid = "0.9" unicase = "2" wikidot-normalize = "0.12" diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 492f80ca49..72cd13706a 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -431,6 +431,7 @@ CREATE TABLE file ( from_wikidot BOOLEAN NOT NULL DEFAULT false, name TEXT NOT NULL, page_id BIGINT NOT NULL REFERENCES page(page_id), + site_id BIGINT NOT NULL REFERENCES site(site_id), UNIQUE (page_id, name, deleted_at) ); @@ -442,6 +443,7 @@ CREATE TABLE file_revision ( revision_number INTEGER NOT NULL, file_id BIGINT NOT NULL REFERENCES file(file_id), page_id BIGINT NOT NULL REFERENCES page(page_id), + site_id BIGINT NOT NULL REFERENCES site(site_id), user_id BIGINT NOT NULL REFERENCES "user"(user_id), name TEXT NOT NULL, s3_hash BYTEA NOT NULL, diff --git a/deepwell/scripts/generate-models.sh b/deepwell/scripts/generate-models.sh index 997c5eb0d2..cf2fcd343d 100755 --- a/deepwell/scripts/generate-models.sh +++ b/deepwell/scripts/generate-models.sh @@ -13,6 +13,5 @@ sea-orm-cli generate entity \ --date-time-crate time \ --with-copy-enums \ --with-serde both \ - --model-extra-attributes 'serde(rename_all = "camelCase")' \ --database-url postgres://wikijump:wikijump@localhost/wikijump \ --output-dir src/models diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 807db56193..36bfc4aaa5 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -29,48 +29,53 @@ use crate::config::{Config, Secrets}; use crate::database; use crate::endpoints::{ - auth::*, category::*, email::*, file::*, file_revision::*, link::*, locale::*, - message::*, misc::*, page::*, page_revision::*, parent::*, site::*, site_member::*, - text::*, user::*, user_bot::*, view::*, vote::*, + auth::*, category::*, domain::*, email::*, file::*, file_revision::*, link::*, + locale::*, message::*, misc::*, page::*, page_revision::*, parent::*, site::*, + site_member::*, text::*, user::*, user_bot::*, view::*, vote::*, }; use crate::locales::Localizations; -use crate::services::blob::spawn_magic_thread; -use crate::services::job::JobRunner; -use crate::utils::error_response; -use anyhow::Result; +use crate::services::blob::MimeAnalyzer; +use crate::services::job::JobQueue; +use crate::services::{into_rpc_error, ServiceContext}; +use jsonrpsee::server::{RpcModule, Server, ServerHandle}; +use 
jsonrpsee::types::error::ErrorObjectOwned; use s3::bucket::Bucket; -use sea_orm::DatabaseConnection; +use sea_orm::{DatabaseConnection, TransactionTrait}; use std::sync::Arc; use std::time::Duration; -use tide::StatusCode; -pub type ApiServerState = Arc; -pub type ApiServer = tide::Server; -pub type ApiRequest = tide::Request; -pub type ApiResponse = tide::Result; +pub type ServerState = Arc; #[derive(Debug)] -pub struct ServerState { +pub struct ServerStateInner { pub config: Config, pub database: DatabaseConnection, pub localizations: Localizations, + pub mime_analyzer: MimeAnalyzer, + pub job_queue: JobQueue, pub s3_bucket: Bucket, } pub async fn build_server_state( config: Config, secrets: Secrets, -) -> Result { +) -> anyhow::Result { // Connect to database - tide::log::info!("Connecting to PostgreSQL database"); + info!("Connecting to PostgreSQL database"); let database = database::connect(&secrets.database_url).await?; // Load localization data - tide::log::info!("Loading localization data"); + info!("Loading localization data"); let localizations = Localizations::open(&config.localization_path).await?; + // Set up job queue + let (job_queue, job_state_sender) = JobQueue::spawn(&config); + + // Load magic data and start MIME thread + let mime_analyzer = MimeAnalyzer::spawn(); + // Create S3 bucket - tide::log::info!("Opening S3 bucket"); + info!("Opening S3 bucket"); let s3_bucket = { let mut bucket = Bucket::new( @@ -87,176 +92,199 @@ pub async fn build_server_state( bucket }; - // Return server state - Ok(Arc::new(ServerState { + // Build server state + let state = Arc::new(ServerStateInner { config, database, localizations, + mime_analyzer, + job_queue, s3_bucket, - })) -} + }); -pub fn build_server(state: ApiServerState) -> ApiServer { - macro_rules! new { - () => { - tide::Server::with_state(Arc::clone(&state)) - }; - } - - // Start main job executor task - // (and ancillary repeated tasks) - JobRunner::spawn(&state); + // Start the job queue (requires ServerState) + job_state_sender + .send(Arc::clone(&state)) + .expect("Unable to send ServerState"); - // Start MIME evaluator thread - spawn_magic_thread(); + // Return server state + Ok(state) +} - // Create server and add routes - // - // Prefix is present to avoid ambiguity about what this - // API is meant to be and the fact that it's not to be publicly-facing. - let mut app = new!(); - app.at("/api/trusted").nest(build_routes(new!())); - app +pub async fn build_server(app_state: ServerState) -> anyhow::Result { + let socket_address = app_state.config.address; + let server = Server::builder().build(socket_address).await?; + let module = build_module(app_state).await?; + let handle = server.start(module); + Ok(handle) } -fn build_routes(mut app: ApiServer) -> ApiServer { +async fn build_module(app_state: ServerState) -> anyhow::Result> { + let mut module = RpcModule::new(app_state); + + macro_rules! register { + ($name:expr, $method:ident $(,)?) => {{ + // Register async method. + // + // Contains a wrapper around each to set up state, convert error types, + // and produce a transaction used in ServiceContext, passed in. + module.register_async_method($name, |params, state| async move { + // NOTE: We have our own Arc because we need to share it in some places + // before setting up, but RpcModule insists on adding its own. + // So we need to "unwrap it" before each method invocation. + // Oh well. 
+ let state = Arc::clone(&*state); + + // Wrap each call in a transaction, which commits or rolls back + // automatically based on whether the Result is Ok or Err. + // + // At this level, we take the database-or-RPC error and make it just an RPC error. + let db_state = Arc::clone(&state); + db_state + .database + .transaction(move |txn| { + Box::pin(async move { + // Run the endpoint's implementation, and convert from + // ServiceError to an RPC error. + let ctx = ServiceContext::new(&state, &txn); + $method(&ctx, params).await.map_err(ErrorObjectOwned::from) + }) + }) + .await + .map_err(into_rpc_error) + })?; + }}; + } + // Miscellaneous - app.at("/ping").all(ping); - app.at("/version").get(version); - app.at("/version/full").get(full_version); - app.at("/hostname").get(hostname); - app.at("/config").get(config_dump); - app.at("/config/path").get(config_path); - app.at("/normalize/:input").all(normalize_method); - app.at("/teapot") - .all(|_| async { error_response(StatusCode::ImATeapot, "🫖") }); + register!("ping", ping); + register!("error", yield_error); + register!("version", version); + register!("version_full", full_version); + register!("hostname", hostname); + register!("config", config_dump); + register!("config_path", config_path); + register!("normalize", normalize_method); // Localization - app.at("/locale/:locale").get(locale_get); - app.at("/translate/:locale").put(translate_put); + register!("locale", locale_info); + register!("translate", translate_strings); - // Routes for web server - app.at("/view/page").put(view_page); + // Web server + register!("page_view", page_view); // Authentication - app.at("/auth/login").post(auth_login); - app.at("/auth/logout").delete(auth_logout); - app.at("/auth/mfa").post(auth_mfa_verify); // Is part of the login process, - // which is why it's up here. 
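(As an aside, a minimal sketch of how one of these registered methods could be invoked from a jsonrpsee client. The address/port and the client-side jsonrpsee feature are assumptions for illustration only and are not part of this diff:)

    use jsonrpsee::core::client::ClientT;
    use jsonrpsee::http_client::HttpClientBuilder;
    use jsonrpsee::rpc_params;

    async fn call_ping() -> anyhow::Result<()> {
        // Hypothetical address; in practice it comes from the configured `address`.
        let client = HttpClientBuilder::default().build("http://localhost:2747")?;
        // "ping" is registered above and returns the string "Pong!".
        let pong: String = client.request("ping", rpc_params![]).await?;
        assert_eq!(pong, "Pong!");
        Ok(())
    }
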
- - app.at("/auth/session/get").put(auth_session_retrieve); - app.at("/auth/session/renew").post(auth_session_renew); - app.at("/auth/session/others") - .delete(auth_session_invalidate_others); - app.at("/auth/session/others/get") - .put(auth_session_retrieve_others); - app.at("/auth/mfa/install") - .post(auth_mfa_setup) - .delete(auth_mfa_disable); - app.at("/auth/mfa/resetRecovery") - .post(auth_mfa_reset_recovery); + register!("login", auth_login); + register!("logout", auth_logout); + register!("session_get", auth_session_get); + register!("session_get_others", auth_session_get_others); + register!("session_invalidate_others", auth_session_invalidate_others); + register!("session_renew", auth_session_renew); + register!("mfa_verify", auth_mfa_verify); + register!("mfa_setup", auth_mfa_setup); + register!("mfa_disable", auth_mfa_disable); + register!("mfa_reset_recovery", auth_mfa_reset_recovery); // Site - app.at("/site").put(site_put); - app.at("/site/get").put(site_retrieve); - app.at("/site/create").post(site_create); - app.at("/site/domain/custom") - .post(site_custom_domain_post) - .delete(site_custom_domain_delete); - app.at("/site/domain/custom/get") - .put(site_custom_domain_retrieve); - app.at("/site/fromDomain/:domain").get(site_get_from_domain); - - // Site Membership - app.at("/site/member") - .put(membership_put) - .delete(membership_delete); - app.at("/site/member/get").put(membership_retrieve); + register!("site_create", site_create); + register!("site_get", site_get); + register!("site_update", site_update); + register!("site_from_domain", site_get_from_domain); + + // Site custom domain + register!("custom_domain_create", site_custom_domain_create); + register!("custom_domain_get", site_custom_domain_get); + register!("custom_domain_delete", site_custom_domain_delete); + + // Site membership + register!("member_set", membership_set); + register!("member_get", membership_get); + register!("member_delete", membership_delete); // Category - app.at("/category").get(category_get); - app.at("/category/site").get(category_all_get); + register!("category_get", category_get); + register!("category_get_all", category_get_all); // Page - app.at("/page").post(page_edit).delete(page_delete); - app.at("/page/get").put(page_retrieve); - app.at("/page/create").post(page_create); - app.at("/page/direct/:page_id").get(page_get_direct); - app.at("/page/move").post(page_move); - app.at("/page/rerender").put(page_rerender); - app.at("/page/restore").post(page_restore); + register!("page_create", page_create); + register!("page_get", page_get); + register!("page_get_direct", page_get_direct); + register!("page_edit", page_edit); + register!("page_delete", page_delete); + register!("page_move", page_move); + register!("page_rollback", page_rollback); + register!("page_rerender", page_rerender); + register!("page_restore", page_restore); // Page revisions - app.at("/page/revision").put(page_revision_put); - app.at("/page/revision/get").put(page_revision_retrieve); - app.at("/page/revision/count").get(page_revision_count); - app.at("/page/revision/rollback").post(page_rollback); - app.at("/page/revision/range") - .put(page_revision_range_retrieve); + register!("page_revision_create", page_revision_edit); + register!("page_revision_get", page_revision_get); + register!("page_revision_count", page_revision_count); + register!("page_revision_range", page_revision_range); // Page links - app.at("/page/links/from").put(page_links_from_retrieve); - 
app.at("/page/links/to").put(page_links_to_retrieve); - app.at("/page/links/to/missing") - .put(page_links_to_missing_retrieve); - app.at("/page/urls/from").put(page_links_external_from); - app.at("/page/urls/to").put(page_links_external_to); + register!("page_get_links_from", page_links_from_get); + register!("page_get_links_to", page_links_to_get); + register!("page_get_links_to_missing", page_links_to_missing_get); + register!("page_get_urls_from", page_links_external_from); + register!("page_get_urls_to", page_links_external_to); // Page parents - app.at("/page/parent").put(parent_put).delete(parent_delete); - app.at("/page/parent/get").put(parent_retrieve); - app.at("/page/parent/:relationship_type") - .put(parent_relationships_retrieve); + register!("parent_set", parent_set); + register!("parent_get", parent_get); + register!("parent_remove", parent_remove); + register!("parent_relationships_get", parent_relationships_get); // Files - app.at("/file").post(file_edit).delete(file_delete); - app.at("/file/get").put(file_retrieve); - app.at("/file/upload").post(file_create); - app.at("/file/move").post(file_move); - app.at("/file/restore").post(file_restore); + register!("file_upload", file_upload); + register!("file_get", file_get); + register!("file_edit", file_edit); + register!("file_delete", file_delete); + register!("file_move", file_move); + register!("file_restore", file_restore); + register!("file_hard_delete", file_hard_delete); // File revisions - app.at("/file/revision").put(file_revision_put); - app.at("/file/revision/get").put(file_revision_retrieve); - app.at("/file/revision/count").put(file_revision_count); - app.at("/file/revision/range/:direction") - .put(file_revision_range_retrieve); + register!("file_revision_get", file_revision_get); + register!("file_revision_edit", file_revision_edit); + register!("file_revision_count", file_revision_count); + register!("file_revision_range", file_revision_range); // Text - app.at("/text").put(text_put); - app.at("/text/:hash").get(text_get); + register!("text_create", text_create); + register!("text_get", text_get); // User - app.at("/user").put(user_put).delete(user_delete); - app.at("/user/get").put(user_retrieve); - app.at("/user/avatar").put(user_avatar_put); - app.at("/user/create").post(user_create); - app.at("/user/import").post(user_import); - app.at("/user/addNameChange").post(user_add_name_change); - - // User bot information - app.at("/user/bot/get").put(user_bot_retrieve); - app.at("/user/bot/create").post(user_bot_create); - app.at("/user/bot/owner") - .put(user_bot_owner_put) - .delete(user_bot_owner_delete); - - // Message - app.at("/message/draft") - .post(message_draft_create) - .put(message_draft_update) - .delete(message_draft_delete); - app.at("/message").post(message_draft_send); + register!("user_create", user_create); + register!("user_import", user_import); + register!("user_get", user_get); + register!("user_edit", user_edit); + register!("user_delete", user_delete); + register!("user_add_name_change", user_add_name_change); + + // Bot user + register!("bot_user_create", bot_user_create); + register!("bot_user_get", bot_user_get); + register!("bot_user_owner_set", bot_user_owner_set); + register!("bot_user_owner_remove", bot_user_owner_remove); + + // Direct messages + register!("message_draft_create", message_draft_create); + register!("message_draft_edit", message_draft_edit); + register!("message_draft_delete", message_draft_delete); + register!("message_draft_send", message_draft_send); // Email - 
app.at("/email/validate").put(validate_email); + register!("email_validate", validate_email); // Votes - app.at("/vote").put(vote_put).delete(vote_delete); - app.at("/vote/get").put(vote_retrieve); - app.at("/vote/action").put(vote_action); - app.at("/vote/list").put(vote_list_retrieve); - app.at("/vote/count").put(vote_count_retrieve); - - app + register!("vote_set", vote_set); + register!("vote_get", vote_get); + register!("vote_remove", vote_remove); + register!("vote_action", vote_action); + register!("vote_list", vote_list_get); + register!("vote_list_count", vote_list_count); + + // Return + Ok(module) } diff --git a/deepwell/src/config/file.rs b/deepwell/src/config/file.rs index d2f8f90897..f610d52694 100644 --- a/deepwell/src/config/file.rs +++ b/deepwell/src/config/file.rs @@ -20,13 +20,13 @@ use super::Config; use anyhow::Result; +use femme::LevelFilter; use std::convert::TryFrom; use std::fs::File; use std::io::Read; use std::net::SocketAddr; use std::path::PathBuf; use std::time::Duration as StdDuration; -use tide::log::LevelFilter; use time::Duration as TimeDuration; /// Structure representing a configuration file. diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index a4d85cd4f9..c8d0886f68 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -20,11 +20,11 @@ use super::file::ConfigFile; use anyhow::Result; +use femme::LevelFilter; use std::env; use std::net::SocketAddr; use std::path::PathBuf; use std::time::Duration as StdDuration; -use tide::log::LevelFilter; use time::Duration as TimeDuration; /// Primary configuration structure. @@ -182,17 +182,17 @@ impl Config { } } - tide::log::info!("Configuration details:"); - tide::log::info!("Serving on {}", self.address); - tide::log::info!( + info!("Configuration details:"); + info!("Serving on {}", self.address); + info!( "Auto-restart on config change: {}", bool_str(self.watch_files), ); - tide::log::info!("Migrations: {}", bool_str(self.run_migrations)); - tide::log::info!("Seeder: {}", bool_str(self.run_seeder)); - tide::log::info!("Localization path: {}", self.localization_path.display()); - tide::log::info!("Seeder path: {}", self.seeder_path.display()); - tide::log::info!( + info!("Migrations: {}", bool_str(self.run_migrations)); + info!("Seeder: {}", bool_str(self.run_seeder)); + info!("Localization path: {}", self.localization_path.display()); + info!("Seeder path: {}", self.seeder_path.display()); + info!( "Current working directory: {}", env::current_dir() .expect("Cannot get current working directory") diff --git a/deepwell/src/database/mod.rs b/deepwell/src/database/mod.rs index 5a3ed3a280..2786a0c971 100644 --- a/deepwell/src/database/mod.rs +++ b/deepwell/src/database/mod.rs @@ -43,7 +43,7 @@ pub async fn connect>(database_uri: S) -> Result Result<()> { let pool = Pool::::connect(database_uri).await?; - tide::log::info!("Running migrations..."); + info!("Running migrations..."); sqlx::migrate!("./migrations").run(&pool).await?; Ok(()) } diff --git a/deepwell/src/database/seeder/data.rs b/deepwell/src/database/seeder/data.rs index a41fac4cde..2c7b1ffe0d 100644 --- a/deepwell/src/database/seeder/data.rs +++ b/deepwell/src/database/seeder/data.rs @@ -64,7 +64,7 @@ impl SeedData { { path.set_file_name(filename); path.set_extension("json"); - tide::log::debug!("Loading JSON from {}", path.display()); + debug!("Loading JSON from {}", path.display()); let mut file = File::open(&path)?; let data = serde_json::from_reader(&mut file)?; @@ -74,7 +74,7 @@ impl SeedData { 
fn load_wikitext(path: &mut PathBuf, filename: &Path) -> Result { path.set_file_name(filename); path.set_extension("ftml"); - tide::log::debug!("Loading wikitext from {}", path.display()); + debug!("Loading wikitext from {}", path.display()); let wikitext = fs::read_to_string(&path)?; Ok(wikitext) diff --git a/deepwell/src/database/seeder/mod.rs b/deepwell/src/database/seeder/mod.rs index cfdbf5ae32..95735b7850 100644 --- a/deepwell/src/database/seeder/mod.rs +++ b/deepwell/src/database/seeder/mod.rs @@ -21,7 +21,7 @@ mod data; use self::data::{SeedData, SitePages}; -use crate::api::ApiServerState; +use crate::api::ServerState; use crate::constants::{ADMIN_USER_ID, SYSTEM_USER_ID}; use crate::models::sea_orm_active_enums::AliasType; use crate::services::alias::{AliasService, CreateAlias}; @@ -37,16 +37,16 @@ use sea_orm::{ }; use std::borrow::Cow; -pub async fn seed(state: &ApiServerState) -> Result<()> { - tide::log::info!("Running seeder..."); +pub async fn seed(state: &ServerState) -> Result<()> { + info!("Running seeder..."); // Set up context let txn = state.database.begin().await?; - let ctx = ServiceContext::from_raw(state, &txn); + let ctx = ServiceContext::new(state, &txn); // Ensure seeding has not already been done if UserService::exists(&ctx, Reference::from(ADMIN_USER_ID)).await? { - tide::log::info!("Seeding has already been done"); + info!("Seeding has already been done"); return Ok(()); } @@ -56,7 +56,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { restart_sequence(&txn, "site_site_id_seq").await?; // Load seed data - tide::log::info!( + info!( "Loading seed data from {}", state.config.seeder_path.display(), ); @@ -71,7 +71,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { // Seed user data for user in users { - tide::log::info!("Creating seed user '{}' (ID {})", user.name, user.id); + info!("Creating seed user '{}' (ID {})", user.name, user.id); // Create users let CreateUserOutput { user_id, slug } = UserService::create( @@ -110,17 +110,17 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { // by the "system" user, which may not have been created yet. 
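(Throughout these files the `tide::log::*` macros are swapped for the bare `log` crate macros, with `femme` supplying `LevelFilter` in the config modules. A minimal sketch of how that logger might be initialized, assuming a hypothetical helper name and call site not shown in this diff:)

    use femme::LevelFilter;

    // Hypothetical helper; the actual initialization happens wherever main()
    // configures logging from the loaded Config.
    fn setup_logger(level: LevelFilter) {
        femme::with_level(level);
        log::info!("Logger initialized at level {level}");
    }
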
user_aliases.push((user_id, user.aliases)); - tide::log::debug!("User created with slug '{}'", slug); + debug!("User created with slug '{}'", slug); assert_eq!(user_id, user.id, "Specified user ID doesn't match created"); assert_eq!(slug, user.slug, "Specified user slug doesn't match created"); } // Seed user alias data for (user_id, aliases) in user_aliases { - tide::log::info!("Creating aliases for user ID {user_id}"); + info!("Creating aliases for user ID {user_id}"); for alias in aliases { - tide::log::info!("Creating user alias '{alias}'"); + info!("Creating user alias '{alias}'"); AliasService::create( &ctx, @@ -143,7 +143,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { pages, } in site_pages { - tide::log::info!("Creating seed site '{}' (slug {})", site.name, site.slug); + info!("Creating seed site '{}' (slug {})", site.name, site.slug); let CreateSiteOutput { site_id, slug: _ } = SiteService::create( &ctx, @@ -158,7 +158,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { .await?; for site_alias in site_aliases { - tide::log::info!("Creating site alias '{}'", site_alias); + info!("Creating site alias '{}'", site_alias); AliasService::create( &ctx, @@ -174,7 +174,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { } for page in pages { - tide::log::info!("Creating page '{}' (slug {})", page.title, page.slug); + info!("Creating page '{}' (slug {})", page.title, page.slug); PageService::create( &ctx, @@ -204,21 +204,17 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { SiteService::get(&ctx, Reference::Slug(slug)).await? }; - tide::log::info!( + info!( "Creating site filter '{}' ('{}') for site '{}' (ID {})", - filter.regex, - filter.description, - slug, - site.site_id, + filter.regex, filter.description, slug, site.site_id, ); Some(site.site_id) } None => { - tide::log::info!( + info!( "Creating platform filter '{}' ('{}')", - filter.regex, - filter.description, + filter.regex, filter.description, ); None @@ -268,7 +264,7 @@ pub async fn seed(state: &ApiServerState) -> Result<()> { */ txn.commit().await?; - tide::log::info!("Finished running seeder."); + info!("Finished running seeder."); Ok(()) } @@ -276,7 +272,7 @@ async fn restart_sequence( txn: &DatabaseTransaction, sequence_name: &'static str, ) -> Result<()> { - tide::log::debug!("Restarting sequence {sequence_name}"); + debug!("Restarting sequence {sequence_name}"); // SAFETY: We cannot parameterize the sequence name here, so we have to use format!() // However, by requiring that sequence_name be &'static str, we ensure that it @@ -290,9 +286,7 @@ async fn restart_sequence_with( sequence_name: &'static str, new_start_value: i64, ) -> Result<()> { - tide::log::debug!( - "Restarting sequence {sequence_name} to start with {new_start_value}", - ); + debug!("Restarting sequence {sequence_name} to start with {new_start_value}",); assert!( new_start_value > 0, "New sequence start value {new_start_value} is not positive", diff --git a/deepwell/src/endpoints/auth.rs b/deepwell/src/endpoints/auth.rs index 1a704a4c6d..8e25cded47 100644 --- a/deepwell/src/endpoints/auth.rs +++ b/deepwell/src/endpoints/auth.rs @@ -19,11 +19,14 @@ */ use super::prelude::*; +use crate::models::session::Model as SessionModel; use crate::services::authentication::{ AuthenticateUserOutput, AuthenticationService, LoginUser, LoginUserMfa, LoginUserOutput, MultiFactorAuthenticateUser, }; -use crate::services::mfa::MultiFactorConfigure; +use crate::services::mfa::{ + MultiFactorConfigure, MultiFactorResetOutput, 
MultiFactorSetupOutput, +}; use crate::services::session::{ CreateSession, GetOtherSessions, GetOtherSessionsOutput, InvalidateOtherSessions, RenewSession, @@ -31,14 +34,15 @@ use crate::services::session::{ use crate::services::user::GetUser; use crate::services::Error; -pub async fn auth_login(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn auth_login( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let LoginUser { authenticate, ip_address, user_agent, - } = req.body_json().await?; + } = params.parse()?; // Don't allow empty passwords. // @@ -46,45 +50,40 @@ pub async fn auth_login(mut req: ApiRequest) -> ApiResponse { // entering the password in the name field instead, which we do // *not* want to be logging. if authenticate.password.is_empty() { - tide::log::error!("User submitted empty password in auth request"); - return Err(TideError::from_str(StatusCode::BadRequest, "")); + error!("User submitted empty password in auth request"); + return Err(Error::EmptyPassword); } // All authentication issue should return the same error. // // If anything went wrong, only allow a generic backend failure - // to avoid leaking internal state. + // to avoid leaking internal state. However since we are an internal + // API // // The only three possible responses to this method should be: // * success // * invalid authentication // * server error - let result = AuthenticationService::auth_password(&ctx, authenticate).await; + let result = AuthenticationService::auth_password(ctx, authenticate).await; let AuthenticateUserOutput { needs_mfa, user_id } = match result { Ok(output) => output, - Err(error) => { - let status_code = match error { - Error::InvalidAuthentication => StatusCode::Forbidden, - _ => { - tide::log::error!( - "Unexpected error during user authentication: {error}", - ); - - StatusCode::InternalServerError - } - }; + Err(mut error) => { + if !matches!(error, Error::InvalidAuthentication) { + error!("Unexpected error during user authentication: {error}"); + error = Error::AuthenticationBackend(Box::new(error)); + } - return Err(TideError::from_str(status_code, "")); + return Err(error); } }; let login_complete = !needs_mfa; - tide::log::info!( + info!( "Password authentication for user ID {user_id} succeeded (login complete: {login_complete})", ); let session_token = SessionService::create( - &ctx, + ctx, CreateSession { user_id, ip_address, @@ -94,127 +93,98 @@ pub async fn auth_login(mut req: ApiRequest) -> ApiResponse { ) .await?; - let body = Body::from_json(&LoginUserOutput { + Ok(LoginUserOutput { session_token, needs_mfa, - })?; + }) +} - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) +pub async fn auth_logout( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let session_token: String = params.one()?; + SessionService::invalidate(ctx, session_token).await } /// Gets the information associated with a particular session token. /// /// This is how framerail determines the user ID this user is acting as, /// among other information. 
-pub async fn auth_session_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let session_token = req.body_string().await?; - let session = SessionService::get(&ctx, &session_token).await?; - - let body = Body::from_json(&session)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) +pub async fn auth_session_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let session_token: String = params.one()?; + SessionService::get_optional(ctx, &session_token).await } -pub async fn auth_session_renew(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - let input: RenewSession = req.body_json().await?; - - let new_session_token = SessionService::renew(&ctx, input).await?; - - let body = Body::from_string(new_session_token); - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) +pub async fn auth_session_renew( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: RenewSession = params.parse()?; + SessionService::renew(ctx, input).await } -pub async fn auth_session_retrieve_others(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn auth_session_get_others( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let GetOtherSessions { user_id, session_token, - } = req.body_json().await?; + } = params.parse()?; // Produce output struct, which extracts the current session and // places it in its own location. - let output = { - let mut sessions = SessionService::get_all(&ctx, user_id).await?; - let current = match sessions - .iter() - .position(|session| session.session_token == session_token) - { - Some(index) => sessions.remove(index), - None => { - tide::log::error!("Cannot find own session token in list of all sessions, must be invalid"); - return Ok(Response::new(StatusCode::NotFound)); - } - }; - - GetOtherSessionsOutput { - current, - others: sessions, + let mut sessions = SessionService::get_all(ctx, user_id).await?; + let current = match sessions + .iter() + .position(|session| session.session_token == session_token) + { + Some(index) => sessions.remove(index), + None => { + error!( + "Cannot find own session token in list of all sessions, must be invalid", + ); + return Err(Error::InvalidSessionToken); } }; - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) + Ok(GetOtherSessionsOutput { + current, + others: sessions, + }) } -pub async fn auth_session_invalidate_others(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn auth_session_invalidate_others( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let InvalidateOtherSessions { session_token, user_id, - } = req.body_json().await?; - - let invalidated = - SessionService::invalidate_others(&ctx, &session_token, user_id).await?; - - let body = Body::from_json(&invalidated)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) -} - -pub async fn auth_logout(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = 
ServiceContext::new(&req, &txn); - - let session_token = req.body_string().await?; - SessionService::invalidate(&ctx, session_token).await?; + } = params.parse()?; - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) + SessionService::invalidate_others(ctx, &session_token, user_id).await } -pub async fn auth_mfa_verify(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn auth_mfa_verify( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let LoginUserMfa { session_token, totp_or_code, ip_address, user_agent, - } = req.body_json().await?; + } = params.parse()?; - tide::log::info!( - "Verifying user's MFA for login (temporary session token {session_token})" - ); + info!("Verifying user's MFA for login (temporary session token {session_token})",); let user = AuthenticationService::auth_mfa( - &ctx, + ctx, MultiFactorAuthenticateUser { session_token: &session_token, totp_or_code: &totp_or_code, @@ -222,8 +192,8 @@ pub async fn auth_mfa_verify(mut req: ApiRequest) -> ApiResponse { ) .await?; - let new_session_token = SessionService::renew( - &ctx, + SessionService::renew( + ctx, RenewSession { old_session_token: session_token, user_id: user.user_id, @@ -231,78 +201,64 @@ pub async fn auth_mfa_verify(mut req: ApiRequest) -> ApiResponse { user_agent, }, ) - .await?; - - let body = Body::from_string(new_session_token); - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) + .await } -pub async fn auth_mfa_setup(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.body_json().await?; - let user = UserService::get(&ctx, reference).await?; - let output = MfaService::setup(&ctx, &user).await?; - - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) +pub async fn auth_mfa_setup( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetUser { user: reference } = params.parse()?; + let user = UserService::get(ctx, reference).await?; + MfaService::setup(ctx, &user).await } -pub async fn auth_mfa_disable(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn auth_mfa_disable( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { let MultiFactorConfigure { user_id, session_token, - } = req.body_json().await?; - - let user = SessionService::get_user(&ctx, &session_token, false).await?; + } = params.parse()?; + let user = SessionService::get_user(ctx, &session_token, false).await?; if user.user_id != user_id { - tide::log::error!( + error!( "Passed user ID ({}) does not match session token ({})", - user_id, - user.user_id, + user_id, user.user_id, ); - return Ok(Response::new(StatusCode::Forbidden)); - } - MfaService::disable(&ctx, user.user_id).await?; + return Err(Error::SessionUserId { + active_user_id: user_id, + session_user_id: user.user_id, + }); + } - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) + MfaService::disable(ctx, user.user_id).await } -pub async fn auth_mfa_reset_recovery(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn auth_mfa_reset_recovery( + ctx: &ServiceContext<'_>, + 
params: Params<'static>, +) -> Result { let MultiFactorConfigure { user_id, session_token, - } = req.body_json().await?; - - let user = SessionService::get_user(&ctx, &session_token, false).await?; + } = params.parse()?; + let user = SessionService::get_user(ctx, &session_token, false).await?; if user.user_id != user_id { - tide::log::error!( + error!( "Passed user ID ({}) does not match session token ({})", - user_id, - user.user_id, + user_id, user.user_id, ); - return Ok(Response::new(StatusCode::Forbidden)); - } - let output = MfaService::reset_recovery_codes(&ctx, &user).await?; + return Err(Error::SessionUserId { + active_user_id: user_id, + session_user_id: user.user_id, + }); + } - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) + MfaService::reset_recovery_codes(ctx, &user).await } diff --git a/deepwell/src/endpoints/category.rs b/deepwell/src/endpoints/category.rs index 882f0e0611..d3ec9cce95 100644 --- a/deepwell/src/endpoints/category.rs +++ b/deepwell/src/endpoints/category.rs @@ -20,38 +20,25 @@ use super::prelude::*; use crate::models::page_category::Model as PageCategoryModel; -use crate::services::category::{CategoryOutput, GetCategory}; +use crate::services::category::GetCategory; use crate::services::site::GetSite; -pub async fn category_get(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetCategory { site, category } = req.body_json().await?; - let site_id = SiteService::get_id(&ctx, site).await?; - tide::log::info!("Getting page category {category:?} in site ID {site_id}"); - - let category = CategoryService::get(&ctx, site_id, category).await?; - - let output: CategoryOutput = category.into(); - let body = Body::from_json(&output)?; - Ok(body.into()) +pub async fn category_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetCategory { site, category } = params.parse()?; + let site_id = SiteService::get_id(ctx, site).await?; + info!("Getting page category {category:?} in site ID {site_id}"); + CategoryService::get_optional(ctx, site_id, category).await } -pub async fn category_all_get(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetSite { site } = req.body_json().await?; - let site_id = SiteService::get_id(&ctx, site).await?; - tide::log::info!("Getting all page categories in site ID {site_id}"); - - let categories: Vec = CategoryService::get_all(&ctx, site_id) - .await? 
- .into_iter() - .map(PageCategoryModel::into) - .collect(); - - let body = Body::from_json(&categories)?; - Ok(body.into()) +pub async fn category_get_all( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetSite { site } = params.parse()?; + let site_id = SiteService::get_id(ctx, site).await?; + info!("Getting all page categories in site ID {site_id}"); + CategoryService::get_all(ctx, site_id).await } diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs new file mode 100644 index 0000000000..9f1079da0b --- /dev/null +++ b/deepwell/src/endpoints/domain.rs @@ -0,0 +1,56 @@ +/* + * endpoints/domain.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2023 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use super::prelude::*; +use crate::models::site::Model as SiteModel; +use crate::services::domain::CreateCustomDomain; + +pub async fn site_get_from_domain( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let domain: String = params.one()?; + DomainService::site_from_domain_optional(ctx, &domain).await +} + +pub async fn site_custom_domain_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let input: CreateCustomDomain = params.parse()?; + DomainService::create_custom(ctx, input).await +} + +pub async fn site_custom_domain_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let domain: String = params.one()?; + DomainService::site_from_domain_optional(ctx, &domain).await +} + +// TODO rename +pub async fn site_custom_domain_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let domain: String = params.one()?; + DomainService::remove_custom(ctx, domain).await +} diff --git a/deepwell/src/endpoints/email.rs b/deepwell/src/endpoints/email.rs index f04873fdbc..db1987da2b 100644 --- a/deepwell/src/endpoints/email.rs +++ b/deepwell/src/endpoints/email.rs @@ -19,14 +19,14 @@ */ use super::prelude::*; -use crate::services::email::EmailService; +use crate::services::email::{EmailService, EmailValidationOutput}; -pub async fn validate_email(mut req: ApiRequest) -> ApiResponse { - tide::log::info!("Validating user email"); - let email = req.body_string().await?; - let output = EmailService::validate(&email)?; - - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) +pub async fn validate_email( + _ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let email: String = params.one()?; + info!("Validating user email: {email}"); + let output = EmailService::validate(&email).await?; + Ok(output) } diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index ff411af7b7..0a43b29364 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -21,69 +21,137 @@ use 
super::prelude::*; use crate::models::file::Model as FileModel; use crate::models::file_revision::Model as FileRevisionModel; -use crate::services::file::{GetFile, GetFileOutput}; +use crate::services::file::{ + DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, GetFileDetails, + GetFileOutput, MoveFile, MoveFileOutput, RestoreFile, RestoreFileOutput, UploadFile, + UploadFileOutput, +}; use crate::services::Result; -use crate::web::FileDetailsQuery; +use crate::web::{Bytes, FileDetails}; -pub async fn file_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let details: FileDetailsQuery = req.query()?; - let GetFile { - site_id, - page_id, - file: file_reference, - } = req.body_json().await?; +pub async fn file_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetFileDetails { input, details } = params.parse()?; - tide::log::info!( - "Getting file {file_reference:?} from page ID {page_id} in site ID {site_id}", + info!( + "Getting file {:?} from page ID {} in site ID {}", + input.file, input.page_id, input.site_id, ); // We cannot use get_id() because we need File for build_file_response(). - let file = FileService::get(&ctx, page_id, file_reference).await?; - - let revision = FileRevisionService::get_latest(&ctx, page_id, file.file_id).await?; + match FileService::get_optional(ctx, input).await? { + None => Ok(None), + Some(file) => { + let revision = FileRevisionService::get_latest( + ctx, + file.site_id, + file.page_id, + file.file_id, + ) + .await?; + + let output = build_file_response(ctx, file, revision, details).await?; + Ok(Some(output)) + } + } +} - let response = - build_file_response(&ctx, &file, &revision, details, StatusCode::Ok).await?; +pub async fn file_upload( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: UploadFile = params.parse()?; + + info!( + "Uploading file '{}' ({} bytes) to page ID {} in site ID {}", + input.name, + input.data.len(), + input.page_id, + input.site_id, + ); - txn.commit().await?; - Ok(response) + FileService::upload(ctx, input).await } -pub async fn file_create(_req: ApiRequest) -> ApiResponse { - todo!() +pub async fn file_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: EditFile = params.parse()?; + + info!( + "Editing file ID {} in page ID {} in site ID {}", + input.file_id, input.page_id, input.site_id, + ); + + FileService::edit(ctx, input).await } -pub async fn file_edit(_req: ApiRequest) -> ApiResponse { - todo!() +pub async fn file_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: DeleteFile = params.parse()?; + + info!( + "Deleting file {:?} in page ID {} in site ID {}", + input.file, input.page_id, input.site_id, + ); + + FileService::delete(ctx, input).await } -pub async fn file_delete(_req: ApiRequest) -> ApiResponse { - todo!() +pub async fn file_restore( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: RestoreFile = params.parse()?; + + info!( + "Restoring deleted file ID {} in page ID {} in site ID {}", + input.file_id, input.page_id, input.site_id, + ); + + FileService::restore(ctx, input).await } -pub async fn file_move(_req: ApiRequest) -> ApiResponse { - todo!() +pub async fn file_move( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: MoveFile = params.parse()?; + + info!( + "Moving file ID {} from page ID {} to page ID {} in 
site ID {}", + input.file_id, input.current_page_id, input.destination_page_id, input.site_id, + ); + + FileService::r#move(ctx, input).await } -pub async fn file_restore(_req: ApiRequest) -> ApiResponse { - todo!() +pub async fn file_hard_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let file_id: i64 = params.one()?; + + info!( + "Hard deleting file ID {file_id} and all duplicates, including underlying data", + ); + + FileService::hard_delete_all(ctx, file_id).await } async fn build_file_response( ctx: &ServiceContext<'_>, - file: &FileModel, - revision: &FileRevisionModel, - details: FileDetailsQuery, - status: StatusCode, -) -> Result { - // Get blob data, if requested + file: FileModel, + revision: FileRevisionModel, + details: FileDetails, +) -> Result { let data = BlobService::get_maybe(ctx, details.data, &revision.s3_hash).await?; - - // Build result struct - let output = GetFileOutput { + Ok(GetFileOutput { file_id: file.file_id, file_created_at: file.created_at, file_updated_at: file.updated_at, @@ -94,16 +162,12 @@ async fn build_file_response( revision_created_at: revision.created_at, revision_number: revision.revision_number, revision_user_id: revision.user_id, - name: &file.name, - data, - mime: &revision.mime_hint, + name: file.name, + data: data.map(Bytes::from), + mime: revision.mime_hint, size: revision.size_hint, - licensing: &revision.licensing, - revision_comments: &revision.comments, - hidden_fields: &revision.hidden, - }; - - let body = Body::from_json(&output)?; - let response = Response::builder(status).body(body).into(); - Ok(response) + licensing: revision.licensing, + revision_comments: revision.comments, + hidden_fields: revision.hidden, + }) } diff --git a/deepwell/src/endpoints/file_revision.rs b/deepwell/src/endpoints/file_revision.rs index 079bac40a5..3a50c420bb 100644 --- a/deepwell/src/endpoints/file_revision.rs +++ b/deepwell/src/endpoints/file_revision.rs @@ -19,92 +19,66 @@ */ use super::prelude::*; +use crate::models::file_revision::Model as FileRevisionModel; use crate::services::file::GetFile; use crate::services::file_revision::{ FileRevisionCountOutput, GetFileRevision, GetFileRevisionRange, UpdateFileRevision, }; -pub async fn file_revision_count(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn file_revision_count( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let GetFile { site_id, page_id, file: file_reference, - } = req.body_json().await?; - - tide::log::info!( - "Getting latest revision for file ID {page_id} in site ID {site_id}", - ); + } = params.parse()?; - let file_id = FileService::get_id(&ctx, site_id, file_reference).await?; + info!("Getting latest revision for file ID {page_id} in site ID {site_id}",); - let revision_count = FileRevisionService::count(&ctx, page_id, file_id).await?; + let file_id = FileService::get_id(ctx, site_id, file_reference).await?; + let revision_count = FileRevisionService::count(ctx, page_id, file_id).await?; - txn.commit().await?; - let output = FileRevisionCountOutput { + Ok(FileRevisionCountOutput { revision_count, first_revision: 0, last_revision: revision_count.get() - 1, - }; - - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) + }) } -pub async fn file_revision_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = 
ServiceContext::new(&req, &txn); +pub async fn file_revision_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: GetFileRevision = params.parse()?; - let GetFileRevision { - page_id, - file_id, - revision_number, - } = req.body_json().await?; - - tide::log::info!( - "Getting file revision {revision_number} for file ID {file_id} on page ID {page_id}", + info!( + "Getting file revision {} for file ID {} on page ID {}", + input.revision_number, input.file_id, input.page_id, ); - let revision = - FileRevisionService::get(&ctx, page_id, file_id, revision_number).await?; - - txn.commit().await?; - let body = Body::from_json(&revision)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) + FileRevisionService::get_optional(ctx, input).await } -pub async fn file_revision_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn file_revision_range( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: GetFileRevisionRange = params.parse()?; + FileRevisionService::get_range(ctx, input).await +} - let input: UpdateFileRevision = req.body_json().await?; +pub async fn file_revision_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: UpdateFileRevision = params.parse()?; - tide::log::info!( + info!( "Editing file revision ID {} for file ID {} on page {}", - input.revision_id, - input.file_id, - input.page_id, + input.revision_id, input.file_id, input.page_id, ); - FileRevisionService::update(&ctx, input).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -pub async fn file_revision_range_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: GetFileRevisionRange = req.body_json().await?; - let revisions = FileRevisionService::get_range(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&revisions)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) + FileRevisionService::update(ctx, input).await } diff --git a/deepwell/src/endpoints/link.rs b/deepwell/src/endpoints/link.rs index 5f108a21c5..c3f3c8b7ab 100644 --- a/deepwell/src/endpoints/link.rs +++ b/deepwell/src/endpoints/link.rs @@ -20,95 +20,69 @@ use super::prelude::*; use crate::services::link::{ - GetLinksExternalFrom, GetLinksExternalTo, GetLinksFrom, GetLinksTo, GetLinksToMissing, + GetLinksExternalFrom, GetLinksExternalFromOutput, GetLinksExternalTo, + GetLinksExternalToOutput, GetLinksFrom, GetLinksFromOutput, GetLinksTo, + GetLinksToMissing, GetLinksToMissingOutput, GetLinksToOutput, }; -pub async fn page_links_from_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn page_links_from_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let GetLinksFrom { site_id, page: reference, - } = req.body_json().await?; - - tide::log::info!("Getting page links for page {reference:?} in site ID {site_id}"); + } = params.parse()?; - let page_id = PageService::get_id(&ctx, site_id, reference).await?; - let output = LinkService::get_from(&ctx, page_id).await?; - let body = Body::from_json(&output)?; - txn.commit().await?; - - Ok(body.into()) + info!("Getting page links for page {reference:?} in site ID {site_id}"); + let page_id = 
PageService::get_id(ctx, site_id, reference).await?; + LinkService::get_from(ctx, page_id).await } -pub async fn page_links_to_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn page_links_to_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let GetLinksTo { site_id, page: reference, - } = req.body_json().await?; + } = params.parse()?; - tide::log::info!("Getting page links from page {reference:?} in site ID {site_id}"); - - let page_id = PageService::get_id(&ctx, site_id, reference).await?; - let output = LinkService::get_to(&ctx, page_id, None).await?; - - let body = Body::from_json(&output)?; - txn.commit().await?; - Ok(body.into()) + info!("Getting page links from page {reference:?} in site ID {site_id}"); + let page_id = PageService::get_id(ctx, site_id, reference).await?; + LinkService::get_to(ctx, page_id, None).await } -pub async fn page_links_to_missing_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetLinksToMissing { site_id, page_slug } = req.body_json().await?; - tide::log::info!( - "Getting missing page links from page slug {page_slug} in site ID {site_id}", - ); +pub async fn page_links_to_missing_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetLinksToMissing { site_id, page_slug } = params.parse()?; + info!("Getting missing page links from page slug {page_slug} in site ID {site_id}",); - let output = LinkService::get_to_missing(&ctx, site_id, &page_slug, None).await?; - - let body = Body::from_json(&output)?; - txn.commit().await?; - Ok(body.into()) + LinkService::get_to_missing(ctx, site_id, &page_slug, None).await } -pub async fn page_links_external_from(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn page_links_external_from( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let GetLinksExternalFrom { site_id, page: reference, - } = req.body_json().await?; - - tide::log::info!( - "Getting external links from page {reference:?} in site ID {site_id}", - ); - - let page_id = PageService::get_id(&ctx, site_id, reference).await?; + } = params.parse()?; - let output = LinkService::get_external_from(&ctx, page_id).await?; + info!("Getting external links from page {reference:?} in site ID {site_id}",); - let body = Body::from_json(&output)?; - txn.commit().await?; - Ok(body.into()) + let page_id = PageService::get_id(ctx, site_id, reference).await?; + LinkService::get_external_from(ctx, page_id).await } -pub async fn page_links_external_to(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetLinksExternalTo { site_id, url } = req.body_json().await?; - tide::log::info!("Getting external links to URL {url} in site ID {site_id}"); - - let output = LinkService::get_external_to(&ctx, site_id, &url).await?; - - let body = Body::from_json(&output)?; - txn.commit().await?; - Ok(body.into()) +pub async fn page_links_external_to( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetLinksExternalTo { site_id, url } = params.parse()?; + info!("Getting external links to URL {url} in site ID {site_id}"); + LinkService::get_external_to(ctx, site_id, &url).await } diff --git a/deepwell/src/endpoints/locale.rs 
b/deepwell/src/endpoints/locale.rs index 2308014245..aefc783bbb 100644 --- a/deepwell/src/endpoints/locale.rs +++ b/deepwell/src/endpoints/locale.rs @@ -20,63 +20,70 @@ use super::prelude::*; use crate::locales::MessageArguments; -use ref_map::*; use std::collections::HashMap; use unic_langid::LanguageIdentifier; -#[derive(Serialize, Debug)] -struct LocaleOutput<'a> { - language: &'a str, - script: Option<&'a str>, - region: Option<&'a str>, +#[derive(Serialize, Debug, Clone)] +pub struct LocaleOutput { + language: String, + script: Option, + region: Option, variants: Vec, } -type TranslateInput<'a> = HashMap>; -type TranslateOutput = HashMap; +#[derive(Deserialize, Debug, Clone)] +pub struct TranslateInput<'a> { + locale: &'a str, + messages: HashMap>, +} -pub async fn locale_get(req: ApiRequest) -> ApiResponse { - let locale_str = req.param("locale")?; - tide::log::info!("Getting locale information for {locale_str}"); +type TranslateOutput = HashMap; +pub async fn locale_info( + _ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let locale_str: String = params.one()?; + info!("Getting locale information for {locale_str}"); let locale = LanguageIdentifier::from_bytes(locale_str.as_bytes())?; - let output = LocaleOutput { - language: locale.language.as_str(), - script: locale.script.ref_map(|s| s.as_str()), - region: locale.region.ref_map(|s| s.as_str()), - variants: locale.variants().map(|v| v.as_str().into()).collect(), - }; - - let body = Body::from_json(&output)?; - Ok(body.into()) + Ok(LocaleOutput { + language: str!(locale.language), + script: locale.script.map(|s| str!(s)), + region: locale.region.map(|s| str!(s)), + variants: locale.variants().map(|v| str!(v)).collect(), + }) } -pub async fn translate_put(mut req: ApiRequest) -> ApiResponse { - let input: TranslateInput = req.body_json().await?; - let locale_str = req.param("locale")?; - let localizations = &req.state().localizations; - tide::log::info!( +pub async fn translate_strings( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let TranslateInput { + locale: locale_str, + messages, + } = params.parse()?; + + info!( "Translating {} message keys in locale {locale_str}", - input.len(), + messages.len(), ); let locale = LanguageIdentifier::from_bytes(locale_str.as_bytes())?; let mut output: TranslateOutput = HashMap::new(); - for (message_key, arguments_raw) in input { - tide::log::info!( + for (message_key, arguments_raw) in messages { + info!( "Formatting message key {message_key} ({} arguments)", arguments_raw.len(), ); let arguments = arguments_raw.into_fluent_args(); - match localizations.translate(&locale, &message_key, &arguments) { - Ok(translation) => output.insert(message_key, translation.to_string()), - Err(error) => return Err(ServiceError::from(error).into_tide_error()), - }; + let translation = + ctx.localization() + .translate(&locale, &message_key, &arguments)?; + + output.insert(message_key, translation.to_string()); } - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) + Ok(output) } diff --git a/deepwell/src/endpoints/message.rs b/deepwell/src/endpoints/message.rs index 5788fc965c..cb9a723261 100644 --- a/deepwell/src/endpoints/message.rs +++ b/deepwell/src/endpoints/message.rs @@ -19,57 +19,47 @@ */ use super::prelude::*; +use crate::models::message_draft::Model as MessageDraftModel; +use crate::models::message_record::Model as MessageRecordModel; use crate::services::message::{ 
CreateMessageDraft, DeleteMessageDraft, SendMessageDraft, UpdateMessageDraft, }; -pub async fn message_draft_create(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateMessageDraft = req.body_json().await?; - tide::log::info!("Creating new message draft for user ID {}", input.user_id); - - let output = MessageService::create_draft(&ctx, input).await?; - txn.commit().await?; - build_json_response(&output, StatusCode::Ok) +pub async fn message_draft_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: CreateMessageDraft = params.parse()?; + info!("Creating new message draft for user ID {}", input.user_id); + MessageService::create_draft(ctx, input).await } -pub async fn message_draft_update(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: UpdateMessageDraft = req.body_json().await?; - tide::log::info!( +pub async fn message_draft_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: UpdateMessageDraft = params.parse()?; + info!( "Updating message draft for draft ID {}", - input.message_draft_id + input.message_draft_id, ); - - let output = MessageService::update_draft(&ctx, input).await?; - txn.commit().await?; - build_json_response(&output, StatusCode::Ok) + MessageService::update_draft(ctx, input).await } -pub async fn message_draft_send(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let SendMessageDraft { message_draft_id } = req.body_json().await?; - tide::log::info!("Sending message draft with ID {message_draft_id}"); - - let output = MessageService::send(&ctx, &message_draft_id).await?; - txn.commit().await?; - build_json_response(&output, StatusCode::Ok) +pub async fn message_draft_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let DeleteMessageDraft { message_draft_id } = params.parse()?; + info!("Deleting message draft with ID {message_draft_id}"); + MessageService::delete_draft(ctx, message_draft_id).await } -pub async fn message_draft_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let DeleteMessageDraft { message_draft_id } = req.body_json().await?; - tide::log::info!("Deleting message draft with ID {message_draft_id}"); - - MessageService::delete_draft(&ctx, message_draft_id).await?; - txn.commit().await?; - Ok(Response::new(StatusCode::Ok)) +pub async fn message_draft_send( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let SendMessageDraft { message_draft_id } = params.parse()?; + info!("Sending message draft with ID {message_draft_id}"); + MessageService::send(ctx, &message_draft_id).await } diff --git a/deepwell/src/endpoints/misc.rs b/deepwell/src/endpoints/misc.rs index 74ae3e8138..cfdfb3cb42 100644 --- a/deepwell/src/endpoints/misc.rs +++ b/deepwell/src/endpoints/misc.rs @@ -21,14 +21,17 @@ use super::prelude::*; use crate::info; use sea_orm::{ConnectionTrait, DatabaseBackend, Statement}; +use std::path::PathBuf; use wikidot_normalize::normalize; -pub async fn ping(req: ApiRequest) -> ApiResponse { - tide::log::info!("Ping request"); +pub async fn ping( + ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result<&'static str> { + info!("Ping request"); // Ensure the database is 
connected - req.state() - .database + ctx.transaction() .execute(Statement::from_string( DatabaseBackend::Postgres, str!("SELECT 1"), @@ -36,44 +39,65 @@ pub async fn ping(req: ApiRequest) -> ApiResponse { .await?; // Seems good, respond to user - Ok("Pong!".into()) + Ok("Pong!") } -pub async fn version(_: ApiRequest) -> ApiResponse { - tide::log::info!("Getting DEEPWELL version"); - Ok(info::VERSION.as_str().into()) +/// Method which always returns an error. +/// For testing. +pub async fn yield_error( + _ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result<()> { + info!("Returning DEEPWELL error for testing"); + Err(ServiceError::BadRequest) } -pub async fn full_version(_: ApiRequest) -> ApiResponse { - tide::log::info!("Getting DEEPWELL version (full)"); - Ok(info::FULL_VERSION.as_str().into()) +pub async fn version( + _ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result<&'static str> { + info!("Getting DEEPWELL version"); + Ok(info::VERSION.as_str()) } -pub async fn hostname(_: ApiRequest) -> ApiResponse { - tide::log::info!("Getting DEEPWELL hostname"); - Ok(info::HOSTNAME.as_str().into()) +pub async fn full_version( + _ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result<&'static str> { + info!("Getting DEEPWELL version (full)"); + Ok(info::FULL_VERSION.as_str()) } -pub async fn config_dump(req: ApiRequest) -> ApiResponse { - tide::log::info!("Dumping raw DEEPWELL configuration for debugging"); - let toml_config = &req.state().config.raw_toml; - let mut body = Body::from_string(str!(toml_config)); - body.set_mime("text/toml;charset=utf-8"); - Ok(body.into()) +pub async fn hostname( + _ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result<&'static str> { + info!("Getting DEEPWELL hostname"); + Ok(info::HOSTNAME.as_str()) } -pub async fn config_path(req: ApiRequest) -> ApiResponse { - tide::log::info!("Dumping DEEPWELL configuration path for debugging"); - let toml_path = &req.state().config.raw_toml_path; - let body = Body::from_string(toml_path.display().to_string()); - Ok(body.into()) +pub async fn config_dump( + ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result { + info!("Dumping raw DEEPWELL configuration for debugging"); + Ok(ctx.config().raw_toml.to_string()) } -pub async fn normalize_method(req: ApiRequest) -> ApiResponse { - let input = req.param("input")?; - tide::log::info!("Running normalize as utility web method: {input}"); +pub async fn config_path( + ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result { + info!("Dumping DEEPWELL configuration path for debugging"); + Ok(ctx.config().raw_toml_path.to_path_buf()) +} - let mut value = str!(input); +pub async fn normalize_method( + _ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let mut value: String = params.one()?; + info!("Running normalize on string: {value:?}"); normalize(&mut value); - Ok(value.into()) + Ok(value) } diff --git a/deepwell/src/endpoints/mod.rs b/deepwell/src/endpoints/mod.rs index 38e50740ae..a159ba91fa 100644 --- a/deepwell/src/endpoints/mod.rs +++ b/deepwell/src/endpoints/mod.rs @@ -1,5 +1,5 @@ /* - * endpoints/endpoints.rs + * endpoints/mod.rs * * DEEPWELL - Wikijump API provider and database manager * Copyright (C) 2019-2023 Wikijump Team @@ -28,35 +28,22 @@ //! around service calls, or possibly perform modest data conversion for HTTP. 
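// Reader's note (not part of the patch): every converted handler in this diff
// follows the same shape described by the module comment above, so a minimal
// sketch of the new convention is included here for orientation. `ExampleInput`
// and `example_stub` are hypothetical names; `ServiceContext`, `Params`,
// `Result`, and `info!` are the same prelude items the real handlers import.

#[derive(serde::Deserialize, Debug)]
struct ExampleInput {
    site_id: i64,
    slug: String,
}

pub async fn example_stub(
    ctx: &ServiceContext<'_>,
    params: Params<'static>,
) -> Result<String> {
    // 1. Deserialize the JSON-RPC params into the endpoint's input struct.
    let ExampleInput { site_id, slug } = params.parse()?;

    // 2. Log, call the relevant service with the borrowed per-request context,
    //    and return a serializable value; no manual Response building.
    info!("Example endpoint called for site ID {site_id}, slug {slug:?}");
    let _ = ctx; // a real handler would pass `ctx` into a *Service call here
    Ok(slug)
}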
mod prelude { - pub use crate::api::{ApiRequest, ApiResponse}; + pub use crate::api::ServerState; pub use crate::services::{ AliasService, BlobService, CategoryService, DomainService, Error as ServiceError, FileRevisionService, FileService, InteractionService, LinkService, MessageReportService, MessageService, MfaService, PageRevisionService, - PageService, ParentService, RenderService, RequestFetchService, ScoreService, - ServiceContext, SessionService, SiteService, TextService, UserService, - ViewService, VoteService, + PageService, ParentService, RenderService, Result, ScoreService, ServiceContext, + SessionService, SiteService, StdResult, TextService, UserService, ViewService, + VoteService, }; - pub use crate::utils::error_response; - pub use crate::web::HttpUnwrap; - pub use sea_orm::{ConnectionTrait, TransactionTrait}; + pub use jsonrpsee::types::params::Params; pub use std::convert::TryFrom; - pub use tide::{Body, Error as TideError, Request, Response, StatusCode}; - - use serde::Serialize; - - pub fn build_json_response( - data: &T, - status: StatusCode, - ) -> ApiResponse { - let body = Body::from_json(data)?; - let response = Response::builder(status).body(body).into(); - Ok(response) - } } pub mod auth; pub mod category; +pub mod domain; pub mod email; pub mod file; pub mod file_revision; diff --git a/deepwell/src/endpoints/page.rs b/deepwell/src/endpoints/page.rs index 5e7d94bf7e..75e6eba811 100644 --- a/deepwell/src/endpoints/page.rs +++ b/deepwell/src/endpoints/page.rs @@ -20,176 +20,134 @@ use super::prelude::*; use crate::models::page::Model as PageModel; -use crate::models::page_revision::Model as PageRevisionModel; use crate::services::page::{ - CreatePage, DeletePage, EditPage, GetPage, GetPageOutput, MovePage, RestorePage, - RollbackPage, + CreatePage, CreatePageOutput, DeletePage, DeletePageOutput, EditPage, EditPageOutput, + GetPageDirect, GetPageDirectDetails, GetPageOutput, GetPageReferenceDetails, + MovePage, MovePageOutput, RestorePage, RestorePageOutput, RollbackPage, }; use crate::services::{Result, TextService}; -use crate::web::{PageDetailsQuery, Reference}; -use ref_map::*; +use crate::web::{PageDetails, Reference}; -pub async fn page_create(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreatePage = req.body_json().await?; - tide::log::info!("Creating new page in site ID {}", input.site_id); - - let output = PageService::create(&ctx, input).await?; - build_json_response(&output, StatusCode::Ok) +pub async fn page_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: CreatePage = params.parse()?; + info!("Creating new page in site ID {}", input.site_id); + PageService::create(ctx, input).await } -pub async fn page_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let details: PageDetailsQuery = req.query()?; - let GetPage { +pub async fn page_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetPageReferenceDetails { site_id, page: reference, - } = req.body_json().await?; - - tide::log::info!("Getting page {reference:?} in site ID {site_id}"); - let page = PageService::get(&ctx, site_id, reference).await?; - - let revision = PageRevisionService::get_latest(&ctx, site_id, page.page_id).await?; - - let response = - build_page_response(&ctx, &page, &revision, details, StatusCode::Ok).await?; - - 
txn.commit().await?; - Ok(response) + details, + } = params.parse()?; + + info!("Getting page {reference:?} in site ID {site_id}"); + match PageService::get_optional(ctx, site_id, reference).await? { + Some(page) => build_page_output(ctx, page, details).await, + None => Ok(None), + } } -pub async fn page_get_direct(req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let page_id = req.param("page_id")?.parse()?; - tide::log::info!("Getting page ID {page_id}"); - - let details: PageDetailsQuery = req.query()?; - let page = PageService::get_direct(&ctx, page_id).await?; - let revision = - PageRevisionService::get_latest(&ctx, page.site_id, page.page_id).await?; - - let response = - build_page_response(&ctx, &page, &revision, details, StatusCode::Ok).await?; - - txn.commit().await?; - Ok(response) +pub async fn page_get_direct( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetPageDirectDetails { + site_id, + page_id, + details, + } = params.parse()?; + + info!("Getting page ID {page_id} in site ID {site_id}"); + match PageService::get_direct_optional(ctx, site_id, page_id).await? { + Some(page) => build_page_output(ctx, page, details).await, + None => Ok(None), + } } -pub async fn page_edit(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: EditPage = req.body_json().await?; - tide::log::info!("Editing page {:?} in site ID {}", input.page, input.site_id); - - let output = PageService::edit(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&output)?; - Ok(body.into()) +pub async fn page_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: EditPage = params.parse()?; + info!("Editing page {:?} in site ID {}", input.page, input.site_id); + PageService::edit(ctx, input).await } -pub async fn page_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: DeletePage = req.body_json().await?; - tide::log::info!( +pub async fn page_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: DeletePage = params.parse()?; + info!( "Deleting page {:?} in site ID {}", - input.page, - input.site_id, + input.page, input.site_id, ); - - let output = PageService::delete(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&output)?; - Ok(body.into()) + PageService::delete(ctx, input).await } -pub async fn page_move(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: MovePage = req.body_json().await?; - tide::log::info!( +pub async fn page_move( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: MovePage = params.parse()?; + info!( "Moving page {:?} in site ID {} to {}", - input.page, - input.site_id, - input.new_slug, + input.page, input.site_id, input.new_slug, ); - - let output = PageService::r#move(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&output)?; - Ok(body.into()) + PageService::r#move(ctx, input).await } -pub async fn page_rerender(req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let site_id = req.param("site_id")?.parse()?; - let page_id = req.param("page_id")?.parse()?; - 
tide::log::info!("Re-rendering page ID {page_id} in site ID {site_id}"); - - PageRevisionService::rerender(&ctx, site_id, page_id).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) +pub async fn page_rerender( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let GetPageDirect { site_id, page_id } = params.parse()?; + info!("Re-rendering page ID {page_id} in site ID {site_id}"); + PageRevisionService::rerender(ctx, site_id, page_id).await } -pub async fn page_restore(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: RestorePage = req.body_json().await?; - tide::log::info!( +pub async fn page_restore( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: RestorePage = params.parse()?; + info!( "Un-deleting page ID {} in site ID {}", - input.page_id, - input.site_id, + input.page_id, input.site_id, ); - - let output = PageService::restore(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&output)?; - Ok(body.into()) + PageService::restore(ctx, input).await } -pub async fn page_rollback(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn page_rollback( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: RollbackPage = params.parse()?; - let input: RollbackPage = req.body_json().await?; - tide::log::info!( + info!( "Rolling back page {:?} in site ID {} to revision number {}", - input.page, - input.site_id, - input.revision_number, + input.page, input.site_id, input.revision_number, ); - let output = PageService::rollback(&ctx, input).await?; - - txn.commit().await?; - let body = Body::from_json(&output)?; - Ok(body.into()) + PageService::rollback(ctx, input).await } -async fn build_page_response( +async fn build_page_output( ctx: &ServiceContext<'_>, - page: &PageModel, - revision: &PageRevisionModel, - details: PageDetailsQuery, - status: StatusCode, -) -> Result { + page: PageModel, + details: PageDetails, +) -> Result> { + // Get page revision + let revision = + PageRevisionService::get_latest(ctx, page.site_id, page.page_id).await?; + // Get category slug from ID let category = CategoryService::get(ctx, page.site_id, Reference::from(page.page_category_id)) @@ -205,7 +163,7 @@ async fn build_page_response( let rating = ScoreService::score(ctx, page.page_id).await?; // Build result struct - let output = GetPageOutput { + Ok(Some(GetPageOutput { page_id: page.page_id, page_created_at: page.created_at, page_updated_at: page.updated_at, @@ -213,7 +171,7 @@ async fn build_page_response( page_revision_count: revision.revision_number + 1, site_id: page.site_id, page_category_id: category.category_id, - page_category_slug: &category.slug, + page_category_slug: category.slug, discussion_thread_id: page.discussion_thread_id, revision_id: revision.revision_id, revision_type: revision.revision_type, @@ -223,17 +181,13 @@ async fn build_page_response( wikitext, compiled_html, compiled_at: revision.compiled_at, - compiled_generator: &revision.compiled_generator, - revision_comments: &revision.comments, - hidden_fields: &revision.hidden, - title: &revision.title, - alt_title: revision.alt_title.ref_map(|s| s.as_str()), - slug: &revision.slug, - tags: &revision.tags, + compiled_generator: revision.compiled_generator, + revision_comments: revision.comments, + hidden_fields: revision.hidden, + 
title: revision.title, + alt_title: revision.alt_title, + slug: revision.slug, + tags: revision.tags, rating, - }; - - let body = Body::from_json(&output)?; - let response = Response::builder(status).body(body).into(); - Ok(response) + })) } diff --git a/deepwell/src/endpoints/page_revision.rs b/deepwell/src/endpoints/page_revision.rs index 196d4e608e..792630b60e 100644 --- a/deepwell/src/endpoints/page_revision.rs +++ b/deepwell/src/endpoints/page_revision.rs @@ -20,115 +20,100 @@ use super::prelude::*; use crate::models::page_revision::Model as PageRevisionModel; -use crate::services::page::GetPage; +use crate::services::page::GetPageReferenceDetails; use crate::services::page_revision::{ - GetPageRevision, GetPageRevisionRange, PageRevisionCountOutput, - PageRevisionModelFiltered, UpdatePageRevision, + GetPageRevision, GetPageRevisionDetails, GetPageRevisionRangeDetails, + PageRevisionCountOutput, PageRevisionModelFiltered, UpdatePageRevisionDetails, }; use crate::services::{Result, TextService}; -use crate::web::PageDetailsQuery; +use crate::web::PageDetails; -pub async fn page_revision_count(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetPage { +pub async fn page_revision_count( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetPageReferenceDetails { site_id, page: reference, - } = req.body_json().await?; - - tide::log::info!( - "Getting latest revision for page {reference:?} in site ID {site_id}", - ); + details: _, + } = params.parse()?; - let page_id = PageService::get_id(&ctx, site_id, reference).await?; + info!("Getting latest revision for page {reference:?} in site ID {site_id}",); - let revision_count = PageRevisionService::count(&ctx, site_id, page_id).await?; - - txn.commit().await?; - let output = PageRevisionCountOutput { + let page_id = PageService::get_id(ctx, site_id, reference).await?; + let revision_count = PageRevisionService::count(ctx, site_id, page_id).await?; + Ok(PageRevisionCountOutput { revision_count, first_revision: 0, last_revision: revision_count.get() - 1, - }; - - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - Ok(response) + }) } -pub async fn page_revision_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let details: PageDetailsQuery = req.query()?; - let GetPageRevision { - site_id, - page_id, - revision_number, - } = req.body_json().await?; - - tide::log::info!( +pub async fn page_revision_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetPageRevisionDetails { + input: + GetPageRevision { + site_id, + page_id, + revision_number, + }, + details, + } = params.parse()?; + + info!( "Getting revision {revision_number} for page ID {page_id} in site ID {site_id}", ); let revision = - PageRevisionService::get(&ctx, site_id, page_id, revision_number).await?; - - let response = - build_revision_response(&ctx, revision, details, StatusCode::Ok).await?; + PageRevisionService::get_optional(ctx, site_id, page_id, revision_number).await?; - txn.commit().await?; - Ok(response) + match revision { + None => Ok(None), + Some(revision) => { + let revision = filter_and_populate_revision(ctx, revision, details).await?; + Ok(Some(revision)) + } + } } -pub async fn page_revision_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = 
ServiceContext::new(&req, &txn); - - let details: PageDetailsQuery = req.query()?; - let input: UpdatePageRevision = req.body_json().await?; +pub async fn page_revision_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let UpdatePageRevisionDetails { input, details } = params.parse()?; - tide::log::info!( + info!( "Editing revision ID {} for page ID {} in site ID {}", - input.revision_id, - input.page_id, - input.site_id, + input.revision_id, input.page_id, input.site_id, ); let revision_id = input.revision_id; let (_, revision) = try_join!( - PageRevisionService::update(&ctx, input), - PageRevisionService::get_direct(&ctx, revision_id), + PageRevisionService::update(ctx, input), + PageRevisionService::get_direct(ctx, revision_id), )?; - let response = - build_revision_response(&ctx, revision, details, StatusCode::Ok).await?; - - txn.commit().await?; - Ok(response) + filter_and_populate_revision(ctx, revision, details).await } -pub async fn page_revision_range_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let details: PageDetailsQuery = req.query()?; - let input: GetPageRevisionRange = req.body_json().await?; - let revisions = PageRevisionService::get_range(&ctx, input).await?; - - let response = - build_revision_list_response(&ctx, revisions, details, StatusCode::Ok).await?; - - txn.commit().await?; - Ok(response) +pub async fn page_revision_range( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetPageRevisionRangeDetails { input, details } = params.parse()?; + let revisions = PageRevisionService::get_range(ctx, input).await?; + filter_and_populate_revisions(ctx, revisions, details).await } // Helper functions + async fn filter_and_populate_revision( ctx: &ServiceContext<'_>, model: PageRevisionModel, - mut details: PageDetailsQuery, + mut details: PageDetails, ) -> Result { let PageRevisionModel { revision_id, @@ -202,36 +187,17 @@ async fn filter_and_populate_revision( }) } -async fn build_revision_response( - ctx: &ServiceContext<'_>, - revision: PageRevisionModel, - details: PageDetailsQuery, - status: StatusCode, -) -> Result { - let filtered_revision = filter_and_populate_revision(ctx, revision, details).await?; - let body = Body::from_json(&filtered_revision)?; - let response = Response::builder(status).body(body).into(); - Ok(response) -} - -async fn build_revision_list_response( +async fn filter_and_populate_revisions( ctx: &ServiceContext<'_>, revisions: Vec, - details: PageDetailsQuery, - status: StatusCode, -) -> Result { - let filtered_revisions = { - let mut f_revisions = Vec::new(); - - for revision in revisions { - let f_revision = filter_and_populate_revision(ctx, revision, details).await?; - f_revisions.push(f_revision); - } + details: PageDetails, +) -> Result> { + let mut f_revisions = Vec::new(); - f_revisions - }; + for revision in revisions { + let f_revision = filter_and_populate_revision(ctx, revision, details).await?; + f_revisions.push(f_revision) + } - let body = Body::from_json(&filtered_revisions)?; - let response = Response::builder(status).body(body).into(); - Ok(response) + Ok(f_revisions) } diff --git a/deepwell/src/endpoints/parent.rs b/deepwell/src/endpoints/parent.rs index fcba4ba879..745b4da9e1 100644 --- a/deepwell/src/endpoints/parent.rs +++ b/deepwell/src/endpoints/parent.rs @@ -19,108 +19,69 @@ */ use super::prelude::*; -use crate::services::page::GetPage; -use 
crate::services::parent::{ParentDescription, ParentalRelationshipType}; -use serde::Serialize; - -pub async fn parent_relationships_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let relationship_type: ParentalRelationshipType = - req.param("relationship_type")?.parse()?; - - let GetPage { +use crate::models::page_parent::Model as PageParentModel; +use crate::services::parent::{ + GetParentRelationships, ParentDescription, RemoveParentOutput, +}; + +pub async fn parent_relationships_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetParentRelationships { site_id, page: reference, - } = req.body_json().await?; + relationship_type, + } = params.parse()?; - tide::log::info!( + info!( "Getting all {} pages from {:?} in site ID {}", relationship_type.name(), reference, site_id, ); - let models = - ParentService::get_relationships(&ctx, site_id, reference, relationship_type) - .await?; - - txn.commit().await?; - build_parent_response(&models, StatusCode::Ok) + ParentService::get_relationships(ctx, site_id, reference, relationship_type).await } -pub async fn parent_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn parent_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: ParentDescription = params.parse()?; - let input: ParentDescription = req.body_json().await?; - - tide::log::info!( + info!( "Getting parental relationship {:?} -> {:?} in site ID {}", - input.parent, - input.child, - input.site_id, + input.parent, input.child, input.site_id, ); - let model = ParentService::get(&ctx, input).await?; - - txn.commit().await?; - build_parent_response(&model, StatusCode::Ok) + ParentService::get_optional(ctx, input).await } -pub async fn parent_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: ParentDescription = req.body_json().await?; +pub async fn parent_set( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: ParentDescription = params.parse()?; - tide::log::info!( + info!( "Creating parental relationship {:?} -> {:?} in site ID {}", - input.parent, - input.child, - input.site_id, + input.parent, input.child, input.site_id, ); - let model = ParentService::create(&ctx, input).await?; - - let status = if model.is_some() { - StatusCode::Created - } else { - StatusCode::NoContent - }; - - txn.commit().await?; - Ok(Response::new(status)) + ParentService::create(ctx, input).await } -pub async fn parent_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn parent_remove( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: ParentDescription = params.parse()?; - let input: ParentDescription = req.body_json().await?; - - tide::log::info!( - "Deleting parental relationship {:?} -> {:?} in site ID {}", - input.parent, - input.child, - input.site_id, + info!( + "Removing parental relationship {:?} -> {:?} in site ID {}", + input.parent, input.child, input.site_id, ); - let was_deleted = ParentService::remove(&ctx, input).await?; - - let status = if was_deleted { - StatusCode::NoContent - } else { - StatusCode::Gone - }; - - txn.commit().await?; - Ok(Response::new(status)) -} - -fn 
build_parent_response(data: &T, status: StatusCode) -> ApiResponse { - let body = Body::from_json(data)?; - let response = Response::builder(status).body(body).into(); - Ok(response) + ParentService::remove(ctx, input).await } diff --git a/deepwell/src/endpoints/site.rs b/deepwell/src/endpoints/site.rs index d007472af7..288810911f 100644 --- a/deepwell/src/endpoints/site.rs +++ b/deepwell/src/endpoints/site.rs @@ -19,119 +19,53 @@ */ use super::prelude::*; -use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::site::Model as SiteModel; -use crate::models::site_domain::Model as SiteDomainModel; -use crate::services::domain::CreateCustomDomain; -use crate::services::site::{CreateSite, GetSite, GetSiteOutput, UpdateSite}; - -pub async fn site_create(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateSite = req.body_json().await?; - let output = SiteService::create(&ctx, input).await?; - txn.commit().await?; - - let body = Body::from_json(&output)?; - let response = Response::builder(StatusCode::Created).body(body).into(); - Ok(response) +use crate::services::site::{ + CreateSite, CreateSiteOutput, GetSite, GetSiteOutput, UpdateSite, +}; + +pub async fn site_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: CreateSite = params.parse()?; + SiteService::create(ctx, input).await } -pub async fn site_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetSite { site } = req.body_json().await?; - tide::log::info!("Getting site {:?}", site); - - let site = SiteService::get(&ctx, site).await?; - let (aliases, domains) = try_join!( - AliasService::get_all(&ctx, AliasType::Site, site.site_id), - DomainService::list_custom(&ctx, site.site_id), - )?; - - build_site_response(site, aliases, domains, StatusCode::Ok) +pub async fn site_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetSite { site } = params.parse()?; + info!("Getting site {:?}", site); + match SiteService::get_optional(ctx, site).await? 
{ + None => Ok(None), + Some(site) => { + let (aliases, domains) = try_join!( + AliasService::get_all(ctx, AliasType::Site, site.site_id), + DomainService::list_custom(ctx, site.site_id), + )?; + + Ok(Some(GetSiteOutput { + site, + aliases, + domains, + })) + } + } } -pub async fn site_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn site_update( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let UpdateSite { site, body, user_id, - } = req.body_json().await?; - - tide::log::info!("Updating site {:?}", site); - - SiteService::update(&ctx, site, body, user_id).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -pub async fn site_custom_domain_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let domain = req.body_string().await?; - let model = DomainService::site_from_domain(&ctx, &domain).await?; - - let body = Body::from_json(&model)?; - txn.commit().await?; - Ok(body.into()) -} - -pub async fn site_custom_domain_post(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateCustomDomain = req.body_json().await?; - DomainService::create_custom(&ctx, input).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -pub async fn site_custom_domain_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let domain = req.body_string().await?; - DomainService::delete_custom(&ctx, domain).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -pub async fn site_get_from_domain(req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let domain = req.param("domain")?; - let model = DomainService::site_from_domain(&ctx, domain).await?; - - let body = Body::from_json(&model)?; - txn.commit().await?; - Ok(body.into()) -} - -fn build_site_response( - site: SiteModel, - aliases: Vec, - domains: Vec, - status: StatusCode, -) -> ApiResponse { - let output = GetSiteOutput { - site, - aliases, - domains, - }; + } = params.parse()?; - let body = Body::from_json(&output)?; - let response = Response::builder(status).body(body).into(); - Ok(response) + info!("Updating site {:?}", site); + SiteService::update(ctx, site, body, user_id).await } diff --git a/deepwell/src/endpoints/site_member.rs b/deepwell/src/endpoints/site_member.rs index ae3b5cc8ea..ab00360387 100644 --- a/deepwell/src/endpoints/site_member.rs +++ b/deepwell/src/endpoints/site_member.rs @@ -19,37 +19,29 @@ */ use super::prelude::*; +use crate::models::interaction::Model as InteractionModel; use crate::services::interaction::{CreateSiteMember, GetSiteMember, RemoveSiteMember}; -pub async fn membership_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: GetSiteMember = req.body_json().await?; - let output = InteractionService::get_site_member(&ctx, input).await?; - - txn.commit().await?; - build_json_response(&output, StatusCode::Ok) +pub async fn membership_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: GetSiteMember = params.parse()?; + InteractionService::get_optional_site_member(ctx, 
input).await } -pub async fn membership_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateSiteMember = req.body_json().await?; - InteractionService::create_site_member(&ctx, input).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::Created)) +pub async fn membership_set( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + let input: CreateSiteMember = params.parse()?; + InteractionService::create_site_member(ctx, input).await } -pub async fn membership_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: RemoveSiteMember = req.body_json().await?; - let output = InteractionService::remove_site_member(&ctx, input).await?; - - txn.commit().await?; - build_json_response(&output, StatusCode::Ok) +pub async fn membership_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: RemoveSiteMember = params.parse()?; + InteractionService::remove_site_member(ctx, input).await } diff --git a/deepwell/src/endpoints/text.rs b/deepwell/src/endpoints/text.rs index 4f8f2ee471..725c96647d 100644 --- a/deepwell/src/endpoints/text.rs +++ b/deepwell/src/endpoints/text.rs @@ -19,44 +19,23 @@ */ use super::prelude::*; -use crate::hash::TextHash; - -pub async fn text_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let contents = req.body_string().await?; - tide::log::info!("Inserting new stored text (bytes {})", contents.len()); - - let hash = TextService::create(&ctx, contents).await?; - let hash_hex = hex::encode(hash); - let body = Body::from_string(hash_hex); - txn.commit().await?; - - Ok(body.into()) +use crate::web::Bytes; + +pub async fn text_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let contents: String = params.one()?; + info!("Inserting new stored text (bytes {})", contents.len()); + let hash = TextService::create(ctx, contents).await?; + Ok(Bytes::from(hash)) } -pub async fn text_get(req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - tide::log::info!("Getting stored text"); - let hash = read_hash(&req)?; - let contents = TextService::get(&ctx, &hash).await?; - let body = Body::from_string(contents); - txn.commit().await?; - - Ok(body.into()) -} - -fn read_hash(req: &ApiRequest) -> Result { - let hash_hex = req.param("hash")?; - tide::log::debug!("Text hash: {hash_hex}"); - - let mut hash = [0; 16]; - - hex::decode_to_slice(hash_hex, &mut hash) - .map_err(|error| TideError::new(StatusCode::UnprocessableEntity, error))?; - - Ok(hash) +pub async fn text_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + info!("Getting stored text"); + let hash: Bytes = params.one()?; + TextService::get(ctx, hash.as_ref()).await } diff --git a/deepwell/src/endpoints/user.rs b/deepwell/src/endpoints/user.rs index 830d3bce12..a5ef4d9a7c 100644 --- a/deepwell/src/endpoints/user.rs +++ b/deepwell/src/endpoints/user.rs @@ -19,134 +19,74 @@ */ use super::prelude::*; -use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::user::Model as UserModel; use crate::services::user::{ - CreateUser, GetUser, GetUserOutput, UpdateUser, UpdateUserBody, + CreateUser, CreateUserOutput, GetUser, 
GetUserOutput, UpdateUser, }; -use crate::web::ProvidedValue; -pub async fn user_create(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - tide::log::info!("Creating new regular user"); - let input: CreateUser = req.body_json().await?; - let output = UserService::create(&ctx, input).await?; - - let body = Body::from_json(&output)?; - txn.commit().await?; - - let response = Response::builder(StatusCode::Created).body(body).into(); - Ok(response) +pub async fn user_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + info!("Creating new regular user"); + let input: CreateUser = params.parse()?; + UserService::create(ctx, input).await } -pub async fn user_import(_req: ApiRequest) -> ApiResponse { +pub async fn user_import( + _ctx: &ServiceContext<'_>, + _params: Params<'static>, +) -> Result { // TODO implement importing user from Wikidot todo!() } -pub async fn user_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.body_json().await?; - tide::log::info!("Getting user {:?}", reference); - - let user = UserService::get(&ctx, reference).await?; - let aliases = AliasService::get_all(&ctx, AliasType::User, user.user_id).await?; - - txn.commit().await?; - build_user_response(user, aliases, StatusCode::Ok) +pub async fn user_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetUser { user: reference } = params.parse()?; + info!("Getting user {:?}", reference); + + match UserService::get_optional(ctx, reference).await? { + None => Ok(None), + Some(user) => { + let aliases = + AliasService::get_all(ctx, AliasType::User, user.user_id).await?; + + Ok(Some(GetUserOutput { user, aliases })) + } + } } -pub async fn user_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn user_edit( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let UpdateUser { user: reference, body, - } = req.body_json().await?; - - tide::log::info!("Updating user {:?}", reference); - - UserService::update(&ctx, reference, body).await?; + } = params.parse()?; - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) + info!("Updating user {:?}", reference); + UserService::update(ctx, reference, body).await } -pub async fn user_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.body_json().await?; - tide::log::info!("Deleting user {:?}", reference); - - UserService::delete(&ctx, reference).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -// Separate route because a JSON-encoded byte list is very inefficient. 
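// Reader's note (not part of the patch): the handlers above rely on two
// jsonrpsee params helpers. `params.parse()` deserializes the whole params
// payload into an input struct, while `params.one()` extracts a single
// positional value, as used by `normalize_method` and the text endpoints.
// A rough, self-contained illustration with serde_json; `GetThing` is a
// hypothetical stand-in, since the real inputs (e.g. `GetUser`) use types
// such as `Reference` whose JSON shape is defined elsewhere in the crate.

use serde::Deserialize;
use serde_json::json;

#[derive(Deserialize, Debug, PartialEq)]
struct GetThing {
    site_id: i64,
    slug: String,
}

fn main() {
    // Roughly what `params.parse::<GetThing>()` accepts: named params.
    let by_name: GetThing =
        serde_json::from_value(json!({ "site_id": 1, "slug": "start" })).unwrap();
    assert_eq!(by_name, GetThing { site_id: 1, slug: "start".into() });

    // Roughly what `params.one::<String>()` accepts: one positional value,
    // i.e. a single-element params array such as ["some text"].
    let [value]: [String; 1] =
        serde_json::from_value(json!(["some text"])).unwrap();
    assert_eq!(value, "some text");
}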
-pub async fn user_avatar_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.query()?; - let bytes = req.body_bytes().await?; - - let avatar = if bytes.is_empty() { - // An empty body means delete the avatar - tide::log::info!("Remove avatar for user {reference:?}"); - None - } else { - // Upload file contents from body - tide::log::info!("Uploading avatar for user {reference:?}"); - Some(bytes) - }; - - UserService::update( - &ctx, - reference, - UpdateUserBody { - avatar: ProvidedValue::Set(avatar), - ..Default::default() - }, - ) - .await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) -} - -pub async fn user_add_name_change(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.body_json().await?; - tide::log::info!("Adding user name change token to {:?}", reference); - - let name_changes = UserService::add_name_change_token(&ctx, reference).await?; - - let body = Body::from_json(&name_changes)?; - let response = Response::builder(StatusCode::Ok).body(body).into(); - txn.commit().await?; - Ok(response) +pub async fn user_delete( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetUser { user: reference } = params.parse()?; + info!("Deleting user {:?}", reference); + UserService::delete(ctx, reference).await } -fn build_user_response( - user: UserModel, - aliases: Vec, - status: StatusCode, -) -> ApiResponse { - // TODO add user profile picture to output - // flag like wikitext/compiledHtml - let output = GetUserOutput { user, aliases }; - let body = Body::from_json(&output)?; - let response = Response::builder(status).body(body).into(); - Ok(response) +pub async fn user_add_name_change( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let GetUser { user: reference } = params.parse()?; + info!("Adding user name change token to {:?}", reference); + UserService::add_name_change_token(ctx, reference).await } diff --git a/deepwell/src/endpoints/user_bot.rs b/deepwell/src/endpoints/user_bot.rs index 4a37b29e46..f1ee36222f 100644 --- a/deepwell/src/endpoints/user_bot.rs +++ b/deepwell/src/endpoints/user_bot.rs @@ -21,17 +21,17 @@ use super::prelude::*; use crate::models::sea_orm_active_enums::UserType; use crate::models::user_bot_owner::Model as UserBotOwnerModel; -use crate::services::user::{CreateUser, GetUser, UpdateUserBody}; +use crate::services::user::{CreateUser, CreateUserOutput, GetUser, UpdateUserBody}; use crate::services::user_bot_owner::{ - BotOwner, BotUserOutput, CreateBotOwner, CreateBotUser, DeleteBotOwner, - UserBotOwnerService, + BotOwner, BotUserOutput, CreateBotOwner, CreateBotUser, RemoveBotOwner, + RemoveBotOwnerOutput, UserBotOwnerService, }; use crate::web::{ProvidedValue, Reference}; -pub async fn user_bot_create(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn bot_user_create( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let CreateBotUser { name, email, @@ -41,21 +41,22 @@ pub async fn user_bot_create(mut req: ApiRequest) -> ApiResponse { authorization_token, bypass_filter, bypass_email_verification, - } = req.body_json().await?; - tide::log::info!("Creating new bot user with name '{}'", name); + } = params.parse()?; + + info!("Creating new bot user 
with name '{}'", name); // TODO verify auth token let _ = authorization_token; // Create bot user let output = UserService::create( - &ctx, + ctx, CreateUser { user_type: UserType::Bot, name, email, locale, - password: String::new(), // TODO + password: String::new(), // TODO configure user-bot password bypass_filter, bypass_email_verification, }, @@ -66,7 +67,7 @@ pub async fn user_bot_create(mut req: ApiRequest) -> ApiResponse { // Set description UserService::update( - &ctx, + ctx, Reference::Id(bot_user_id), UpdateUserBody { biography: ProvidedValue::Set(Some(purpose)), @@ -76,16 +77,16 @@ pub async fn user_bot_create(mut req: ApiRequest) -> ApiResponse { .await?; // Add bot owners - tide::log::debug!("Adding human owners for bot user ID {}", bot_user_id); + debug!("Adding human owners for bot user ID {}", bot_user_id); for owner in owners { let BotOwner { user_id: human_user_id, description, } = owner; - tide::log::debug!("Adding human user ID {} as bot owner", human_user_id); + debug!("Adding human user ID {} as bot owner", human_user_id); UserBotOwnerService::add( - &ctx, + ctx, CreateBotOwner { human: Reference::Id(human_user_id), bot: Reference::Id(bot_user_id), @@ -95,74 +96,58 @@ pub async fn user_bot_create(mut req: ApiRequest) -> ApiResponse { .await?; } - // Build and return response - let body = Body::from_json(&output)?; - txn.commit().await?; - - let response = Response::builder(StatusCode::Created).body(body).into(); - Ok(response) + // Return + Ok(output) } -pub async fn user_bot_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let GetUser { user: reference } = req.body_json().await?; - tide::log::info!("Getting bot user {reference:?}"); - - let user = UserService::get(&ctx, reference).await?; - let owners = UserBotOwnerService::get_all(&ctx, user.user_id).await?; - - let output = BotUserOutput { - user, - owners: owners - .into_iter() - .map( - |UserBotOwnerModel { - human_user_id: user_id, - description, - .. - }| BotOwner { - user_id, - description, - }, - ) - .collect(), - }; - - let body = Body::from_json(&output)?; - txn.commit().await?; - - Ok(body.into()) +pub async fn bot_user_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let GetUser { user: reference } = params.parse()?; + info!("Getting bot user {reference:?}"); + match UserService::get_optional(ctx, reference).await? { + None => Ok(None), + Some(user) => { + let owners = UserBotOwnerService::get_all(ctx, user.user_id).await?; + let owners = owners + .into_iter() + .map( + |UserBotOwnerModel { + human_user_id: user_id, + description, + .. 
+ }| BotOwner { + user_id, + description, + }, + ) + .collect(); + + Ok(Some(BotUserOutput { user, owners })) + } + } } -pub async fn user_bot_owner_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateBotOwner = req.body_json().await?; +pub async fn bot_user_owner_set( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: CreateBotOwner = params.parse()?; - tide::log::info!( + info!( "Adding or updating bot owner ({:?} <- {:?})", - input.bot, - input.human, + input.bot, input.human, ); - UserBotOwnerService::add(&ctx, input).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) + UserBotOwnerService::add(ctx, input).await } -pub async fn user_bot_owner_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: DeleteBotOwner = req.body_json().await?; - tide::log::info!("Remove bot owner ({:?} <- {:?})", input.bot, input.human,); - - UserBotOwnerService::delete(&ctx, input).await?; - - txn.commit().await?; - Ok(Response::new(StatusCode::NoContent)) +pub async fn bot_user_owner_remove( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: RemoveBotOwner = params.parse()?; + info!("Remove bot owner ({:?} <- {:?})", input.bot, input.human,); + UserBotOwnerService::remove(ctx, input).await } diff --git a/deepwell/src/endpoints/view.rs b/deepwell/src/endpoints/view.rs index 0783b4a445..13861260fa 100644 --- a/deepwell/src/endpoints/view.rs +++ b/deepwell/src/endpoints/view.rs @@ -19,15 +19,13 @@ */ use super::prelude::*; -use crate::services::view::GetPageView; +use crate::services::view::{GetPageView, GetPageViewOutput}; /// Returns relevant context for rendering a page from a processed web request. 
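// Reader's note (not part of the patch): several of the converted read
// endpoints (user_get, bot_user_get, vote_get, site_get, page_get) now call a
// get_optional service variant and map a missing row to Ok(None) rather than
// an error. On the wire, an Option output serializes to either the value or
// null, so "not found" reaches the caller as a null result instead of a
// JSON-RPC error. A self-contained serde_json illustration; `Output` is a
// hypothetical struct.

use serde::Serialize;
use serde_json::json;

#[derive(Serialize)]
struct Output {
    id: i64,
}

fn main() {
    // Some(value) serializes as the value itself...
    let found = serde_json::to_value(Some(Output { id: 7 })).unwrap();
    assert_eq!(found, json!({ "id": 7 }));

    // ...and None serializes as null, the "not found" case.
    let missing = serde_json::to_value(None::<Output>).unwrap();
    assert_eq!(missing, json!(null));
}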
-pub async fn view_page(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: GetPageView = req.body_json().await?; - let output = ViewService::page(&ctx, input).await?; - let body = Body::from_json(&output)?; - Ok(body.into()) +pub async fn page_view( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: GetPageView = params.parse()?; + ViewService::page(ctx, input).await } diff --git a/deepwell/src/endpoints/vote.rs b/deepwell/src/endpoints/vote.rs index 619e64747d..e1c42aedf9 100644 --- a/deepwell/src/endpoints/vote.rs +++ b/deepwell/src/endpoints/vote.rs @@ -19,107 +19,80 @@ */ use super::prelude::*; +use crate::models::page_vote::Model as PageVoteModel; use crate::services::vote::{ CountVoteHistory, CreateVote, GetVote, GetVoteHistory, VoteAction, }; -use serde::Serialize; -pub async fn vote_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); +pub async fn vote_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: GetVote = params.parse()?; - let input: GetVote = req.body_json().await?; - - tide::log::info!( + info!( "Getting vote cast by {} on page {}", - input.user_id, - input.page_id, + input.user_id, input.page_id, ); - let model = VoteService::get(&ctx, input).await?; - txn.commit().await?; - build_vote_response(&model, StatusCode::Ok) + VoteService::get_optional(ctx, input).await } -pub async fn vote_put(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CreateVote = req.body_json().await?; +pub async fn vote_set( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: CreateVote = params.parse()?; - tide::log::info!( + info!( "Casting vote cast by {} on page {}", - input.user_id, - input.page_id, + input.user_id, input.page_id, ); - let created = VoteService::add(&ctx, input).await?; - txn.commit().await?; - match created { - Some(model) => build_vote_response(&model, StatusCode::Created), - None => Ok(Response::new(StatusCode::NoContent)), - } + VoteService::add(ctx, input).await } -pub async fn vote_delete(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: GetVote = req.body_json().await?; +pub async fn vote_remove( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: GetVote = params.parse()?; - tide::log::info!( + info!( "Removing vote cast by {} on page {}", - input.user_id, - input.page_id, + input.user_id, input.page_id, ); - let vote = VoteService::remove(&ctx, input).await?; - txn.commit().await?; - build_vote_response(&vote, StatusCode::Ok) + VoteService::remove(ctx, input).await } -pub async fn vote_action(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - +pub async fn vote_action( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { let VoteAction { page_id, user_id, enable, acting_user_id, - } = req.body_json().await?; + } = params.parse()?; let key = GetVote { page_id, user_id }; - let vote = VoteService::action(&ctx, key, enable, acting_user_id).await?; - - txn.commit().await?; - build_vote_response(&vote, StatusCode::Ok) + VoteService::action(ctx, key, enable, acting_user_id).await } -pub async fn 
vote_list_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: GetVoteHistory = req.body_json().await?; - let votes = VoteService::get_history(&ctx, input).await?; - - txn.commit().await?; - build_vote_response(&votes, StatusCode::Ok) -} - -pub async fn vote_count_retrieve(mut req: ApiRequest) -> ApiResponse { - let txn = req.database().begin().await?; - let ctx = ServiceContext::new(&req, &txn); - - let input: CountVoteHistory = req.body_json().await?; - let count = VoteService::count_history(&ctx, input).await?; - - txn.commit().await?; - build_vote_response(&count, StatusCode::Ok) +pub async fn vote_list_get( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result> { + let input: GetVoteHistory = params.parse()?; + VoteService::get_history(ctx, input).await } -fn build_vote_response(data: &T, status: StatusCode) -> ApiResponse { - let body = Body::from_json(data)?; - let response = Response::builder(status).body(body).into(); - Ok(response) +pub async fn vote_list_count( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: CountVoteHistory = params.parse()?; + VoteService::count_history(ctx, input).await } diff --git a/deepwell/src/locales/error.rs b/deepwell/src/locales/error.rs index 836f2f8679..b97c8485ed 100644 --- a/deepwell/src/locales/error.rs +++ b/deepwell/src/locales/error.rs @@ -43,23 +43,6 @@ pub enum LocalizationLoadError { /// this simply logs whatever we get and then returns the /// generic `LocalizationLoadError::Fluent` error variant. pub fn fluent_load_err(item: T) -> LocalizationLoadError { - tide::log::error!("Fluent error: {:#?}", item); - + error!("Fluent error: {:#?}", item); LocalizationLoadError::Fluent } - -#[derive(ThisError, Debug)] -#[allow(clippy::enum_variant_names)] -pub enum LocalizationTranslateError { - #[error("No messages are available for this locale")] - NoLocale, - - #[error("Message key not found for this locale")] - NoMessage, - - #[error("Message key was found, but has no value")] - NoMessageValue, - - #[error("Message key was found, but does not have this attribute")] - NoMessageAttribute, -} diff --git a/deepwell/src/locales/fluent.rs b/deepwell/src/locales/fluent.rs index 01f5cfbcdc..541ed1ecdc 100644 --- a/deepwell/src/locales/fluent.rs +++ b/deepwell/src/locales/fluent.rs @@ -18,7 +18,8 @@ * along with this program. If not, see . 
*/ -use super::error::{fluent_load_err, LocalizationLoadError, LocalizationTranslateError}; +use super::error::{fluent_load_err, LocalizationLoadError}; +use crate::services::Error as ServiceError; use async_std::fs; use async_std::path::{Path, PathBuf}; use async_std::prelude::*; @@ -39,7 +40,7 @@ impl Localizations { pub async fn open>( directory: P, ) -> Result { - tide::log::debug!("Reading Fluent localization directory..."); + debug!("Reading Fluent localization directory..."); let directory = { let mut path = directory.into(); @@ -63,7 +64,7 @@ impl Localizations { bundles: &mut HashMap, directory: &Path, ) -> Result<(), LocalizationLoadError> { - tide::log::debug!("Reading component at {}", directory.display()); + debug!("Reading component at {}", directory.display()); let mut entries = fs::read_dir(directory).await?; while let Some(result) = entries.next().await { @@ -77,7 +78,7 @@ impl Localizations { .to_str() .expect("Path is not valid UTF-8"); - tide::log::debug!("Loading locale {locale_name}"); + debug!("Loading locale {locale_name}"); let locale: LanguageIdentifier = locale_name.parse()?; // Read and parse localization strings @@ -115,12 +116,12 @@ impl Localizations { &self, locale: &LanguageIdentifier, key: &str, - ) -> Result<(&FluentBundle, FluentMessage), LocalizationTranslateError> { + ) -> Result<(&FluentBundle, FluentMessage), ServiceError> { match self.bundles.get(locale) { - None => Err(LocalizationTranslateError::NoLocale), + None => Err(ServiceError::LocaleMissing), Some(bundle) => match bundle.get_message(key) { Some(message) => Ok((bundle, message)), - None => Err(LocalizationTranslateError::NoMessage), + None => Err(ServiceError::LocaleMessageMissing), }, } } @@ -130,12 +131,12 @@ impl Localizations { locale: &LanguageIdentifier, key: &str, args: &'a FluentArgs<'a>, - ) -> Result, LocalizationTranslateError> { + ) -> Result, ServiceError> { // Get appropriate message and bundle let (path, attribute) = Self::parse_selector(key); let (bundle, message) = self.get_message(locale, path)?; - tide::log::info!( + info!( "Translating for locale {}, message path {}, attribute {}", locale, path, @@ -146,11 +147,11 @@ impl Localizations { let pattern = match attribute { Some(attribute) => match message.get_attribute(attribute) { Some(attrib) => attrib.value(), - None => return Err(LocalizationTranslateError::NoMessageAttribute), + None => return Err(ServiceError::LocaleMessageAttributeMissing), }, None => match message.value() { Some(pattern) => pattern, - None => return Err(LocalizationTranslateError::NoMessageValue), + None => return Err(ServiceError::LocaleMessageValueMissing), }, }; @@ -160,16 +161,14 @@ impl Localizations { // Log any errors if !errors.is_empty() { - tide::log::warn!( - "Errors formatting message for locale {locale}, message key {key}", - ); + warn!("Errors formatting message for locale {locale}, message key {key}",); for (key, value) in args.iter() { - tide::log::warn!("Passed formatting argument: {key} -> {value:?}"); + warn!("Passed formatting argument: {key} -> {value:?}"); } for error in errors { - tide::log::warn!("Message formatting error: {error}"); + warn!("Message formatting error: {error}"); } } diff --git a/deepwell/src/main.rs b/deepwell/src/main.rs index c979675cd2..0633d4900e 100644 --- a/deepwell/src/main.rs +++ b/deepwell/src/main.rs @@ -22,7 +22,10 @@ #![deny(missing_debug_implementations)] #![allow(clippy::large_enum_variant)] -//! A web server to expose Wikijump operations via an internal REST API. +//! 
A server to expose Wikijump operations via an internal JSON RPC API. + +#[macro_use] +extern crate log; #[macro_use] extern crate futures; @@ -62,20 +65,19 @@ use std::fs::File; use std::io::Write; use std::process; -#[async_std::main] +#[tokio::main] async fn main() -> Result<()> { // Load the configuration so we can set up let SetupConfig { secrets, config } = SetupConfig::load(); // Copy fields we need - let socket_address = config.address; let run_migrations = config.run_migrations; let run_seeder = config.run_seeder; // Configure the logger if config.logger { - tide::log::with_level(config.logger_level); - tide::log::info!("Loaded server configuration:"); + femme::with_level(config.logger_level); + info!("Loaded server configuration:"); config.log(); color_backtrace::install(); @@ -83,7 +85,7 @@ async fn main() -> Result<()> { // Write PID file, if enabled if let Some(ref path) = config.pid_file { - tide::log::info!( + info!( "Writing process ID ({}) to {}", process::id(), path.display(), @@ -102,7 +104,7 @@ async fn main() -> Result<()> { if #[cfg(feature = "watch")] { _watcher = setup_autorestart(&config)?; } else { - tide::log::error!("The --watch-files option requires the 'watch' feature"); + error!("The --watch-files option requires the 'watch' feature"); process::exit(1); } } @@ -122,9 +124,8 @@ async fn main() -> Result<()> { } // Build and run server - tide::log::info!("Building server and listening..."); - let app = api::build_server(app_state); - app.listen(socket_address).await?; - + info!("Building server and listening..."); + let server = api::build_server(app_state).await?; + server.stopped().await; Ok(()) } diff --git a/deepwell/src/models/alias.rs b/deepwell/src/models/alias.rs index a1d21d492e..143d9440f9 100644 --- a/deepwell/src/models/alias.rs +++ b/deepwell/src/models/alias.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "alias")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub alias_id: i64, diff --git a/deepwell/src/models/file.rs b/deepwell/src/models/file.rs index 75bcc7166c..39e75f7daf 100644 --- a/deepwell/src/models/file.rs +++ b/deepwell/src/models/file.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "file")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub file_id: i64, @@ -16,6 +15,7 @@ pub struct Model { #[sea_orm(column_type = "Text")] pub name: String, pub page_id: i64, + pub site_id: i64, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] @@ -30,6 +30,14 @@ pub enum Relation { on_delete = "NoAction" )] Page, + #[sea_orm( + belongs_to = "super::site::Entity", + from = "Column::SiteId", + to = "super::site::Column::SiteId", + on_update = "NoAction", + on_delete = "NoAction" + )] + Site, } impl Related for Entity { diff --git a/deepwell/src/models/file_revision.rs b/deepwell/src/models/file_revision.rs index 473d8f6dc9..88991619ad 100644 --- a/deepwell/src/models/file_revision.rs +++ b/deepwell/src/models/file_revision.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "file_revision")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub revision_id: i64, @@ -15,6 +14,7 @@ pub struct Model { pub revision_number: i32, 
pub file_id: i64, pub page_id: i64, + pub site_id: i64, pub user_id: i64, #[sea_orm(column_type = "Text")] pub name: String, @@ -48,6 +48,14 @@ pub enum Relation { on_delete = "NoAction" )] Page, + #[sea_orm( + belongs_to = "super::site::Entity", + from = "Column::SiteId", + to = "super::site::Column::SiteId", + on_update = "NoAction", + on_delete = "NoAction" + )] + Site, #[sea_orm( belongs_to = "super::user::Entity", from = "Column::UserId", diff --git a/deepwell/src/models/filter.rs b/deepwell/src/models/filter.rs index 847e0c22d1..74084ffde7 100644 --- a/deepwell/src/models/filter.rs +++ b/deepwell/src/models/filter.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "filter")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub filter_id: i64, diff --git a/deepwell/src/models/interaction.rs b/deepwell/src/models/interaction.rs index 9e67989dc9..1f80d5e2c7 100644 --- a/deepwell/src/models/interaction.rs +++ b/deepwell/src/models/interaction.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "interaction")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub interaction_id: i64, diff --git a/deepwell/src/models/message.rs b/deepwell/src/models/message.rs index fe5d82f0f9..92da86ffcb 100644 --- a/deepwell/src/models/message.rs +++ b/deepwell/src/models/message.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "message")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub internal_id: i64, diff --git a/deepwell/src/models/message_recipient.rs b/deepwell/src/models/message_recipient.rs index 9d27125ae1..e171d39428 100644 --- a/deepwell/src/models/message_recipient.rs +++ b/deepwell/src/models/message_recipient.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "message_recipient")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false, column_type = "Text")] pub record_id: String, diff --git a/deepwell/src/models/message_record.rs b/deepwell/src/models/message_record.rs index ef241f067d..405c59d790 100644 --- a/deepwell/src/models/message_record.rs +++ b/deepwell/src/models/message_record.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "message_record")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false, column_type = "Text")] pub external_id: String, diff --git a/deepwell/src/models/message_report.rs b/deepwell/src/models/message_report.rs index 29a91fcc2e..4f06c1490c 100644 --- a/deepwell/src/models/message_report.rs +++ b/deepwell/src/models/message_report.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "message_report")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub message_id: i64, diff --git a/deepwell/src/models/page.rs b/deepwell/src/models/page.rs index 8aa93bcc18..6b9bbd568f 
100644 --- a/deepwell/src/models/page.rs +++ b/deepwell/src/models/page.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub page_id: i64, diff --git a/deepwell/src/models/page_attribution.rs b/deepwell/src/models/page_attribution.rs index ce9aa5d1e8..b3b7e84bdd 100644 --- a/deepwell/src/models/page_attribution.rs +++ b/deepwell/src/models/page_attribution.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_attribution")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub page_id: i64, diff --git a/deepwell/src/models/page_category.rs b/deepwell/src/models/page_category.rs index 0978c83a13..74da3d689e 100644 --- a/deepwell/src/models/page_category.rs +++ b/deepwell/src/models/page_category.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_category")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub category_id: i64, diff --git a/deepwell/src/models/page_connection.rs b/deepwell/src/models/page_connection.rs index 68fcf63652..f8a3a38f5d 100644 --- a/deepwell/src/models/page_connection.rs +++ b/deepwell/src/models/page_connection.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_connection")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub from_page_id: i64, diff --git a/deepwell/src/models/page_connection_missing.rs b/deepwell/src/models/page_connection_missing.rs index e79cf9f111..cb067c70a6 100644 --- a/deepwell/src/models/page_connection_missing.rs +++ b/deepwell/src/models/page_connection_missing.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_connection_missing")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub from_page_id: i64, diff --git a/deepwell/src/models/page_link.rs b/deepwell/src/models/page_link.rs index c0e7fb9deb..89b6131a0e 100644 --- a/deepwell/src/models/page_link.rs +++ b/deepwell/src/models/page_link.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_link")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub page_id: i64, diff --git a/deepwell/src/models/page_lock.rs b/deepwell/src/models/page_lock.rs index ed72efa2c0..7fc37dc592 100644 --- a/deepwell/src/models/page_lock.rs +++ b/deepwell/src/models/page_lock.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_lock")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub page_lock_id: i64, diff --git a/deepwell/src/models/page_parent.rs b/deepwell/src/models/page_parent.rs index d24faf3cc0..6d93954c5f 100644 --- a/deepwell/src/models/page_parent.rs 
+++ b/deepwell/src/models/page_parent.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_parent")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub parent_page_id: i64, diff --git a/deepwell/src/models/page_revision.rs b/deepwell/src/models/page_revision.rs index 0564654910..e89c0755bb 100644 --- a/deepwell/src/models/page_revision.rs +++ b/deepwell/src/models/page_revision.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_revision")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub revision_id: i64, @@ -19,8 +18,10 @@ pub struct Model { pub from_wikidot: bool, pub changes: Vec, #[sea_orm(column_type = "Binary(BlobSize::Blob(None))")] + #[serde(skip)] pub wikitext_hash: Vec, #[sea_orm(column_type = "Binary(BlobSize::Blob(None))")] + #[serde(skip)] pub compiled_hash: Vec, pub compiled_at: TimeDateTimeWithTimeZone, #[sea_orm(column_type = "Text")] diff --git a/deepwell/src/models/page_vote.rs b/deepwell/src/models/page_vote.rs index 15930bceac..6a9d56e8d7 100644 --- a/deepwell/src/models/page_vote.rs +++ b/deepwell/src/models/page_vote.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "page_vote")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub page_vote_id: i64, diff --git a/deepwell/src/models/session.rs b/deepwell/src/models/session.rs index 4eee10b9e8..464e8204c1 100644 --- a/deepwell/src/models/session.rs +++ b/deepwell/src/models/session.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "session")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false, column_type = "Text")] pub session_token: String, diff --git a/deepwell/src/models/site.rs b/deepwell/src/models/site.rs index 2906a53019..b1e2b2d4a9 100644 --- a/deepwell/src/models/site.rs +++ b/deepwell/src/models/site.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "site")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub site_id: i64, diff --git a/deepwell/src/models/site_domain.rs b/deepwell/src/models/site_domain.rs index 94dda78613..e94ad4c981 100644 --- a/deepwell/src/models/site_domain.rs +++ b/deepwell/src/models/site_domain.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "site_domain")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false, column_type = "Text")] pub domain: String, diff --git a/deepwell/src/models/text.rs b/deepwell/src/models/text.rs index 7a09abec8d..f618cfd79b 100644 --- a/deepwell/src/models/text.rs +++ b/deepwell/src/models/text.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "text")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm( primary_key, diff --git 
a/deepwell/src/models/user.rs b/deepwell/src/models/user.rs index 7ffd54b683..d09c16a6f0 100644 --- a/deepwell/src/models/user.rs +++ b/deepwell/src/models/user.rs @@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "user")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key)] pub user_id: i64, diff --git a/deepwell/src/models/user_bot_owner.rs b/deepwell/src/models/user_bot_owner.rs index 2b941e93ed..62777ed253 100644 --- a/deepwell/src/models/user_bot_owner.rs +++ b/deepwell/src/models/user_bot_owner.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "user_bot_owner")] -#[serde(rename_all = "camelCase")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub bot_user_id: i64, diff --git a/deepwell/src/services/alias/service.rs b/deepwell/src/services/alias/service.rs index 91f4ed8410..c4fef34a4e 100644 --- a/deepwell/src/services/alias/service.rs +++ b/deepwell/src/services/alias/service.rs @@ -61,7 +61,7 @@ impl AliasService { let txn = ctx.transaction(); let slug = get_regular_slug(slug); - tide::log::info!("Creating {alias_type:?} alias with slug '{slug}'"); + info!("Creating {alias_type:?} alias with slug '{slug}'"); // Perform filter validation if !bypass_filter { @@ -82,48 +82,48 @@ impl AliasService { match alias_type { AliasType::Site => { if !SiteService::exists(ctx, Reference::Id(target_id)).await? { - tide::log::error!( + error!( "No target site with ID {target_id} exists, cannot create alias", ); - return Err(Error::NotFound); + return Err(Error::SiteNotFound); } if verify && SiteService::exists(ctx, Reference::Slug(cow!(slug))).await? { - tide::log::error!( + error!( "Site with conflicting slug '{slug}' already exists, cannot create alias", ); - return Err(Error::Conflict); + return Err(Error::SiteExists); } } AliasType::User => { if !UserService::exists(ctx, Reference::Id(target_id)).await? { - tide::log::error!( + error!( "No target user with ID {target_id} exists, cannot create alias", ); - return Err(Error::NotFound); + return Err(Error::UserNotFound); } if verify && UserService::exists(ctx, Reference::Slug(cow!(slug))).await? 
{ - tide::log::error!( + error!( "User with conflicting slug '{slug}' already exists, cannot create alias", ); - return Err(Error::Conflict); + return Err(Error::UserExists); } if slug.len() < ctx.config().minimum_name_bytes { - tide::log::error!( + error!( "User's name is not long enough ({} < {})", slug.len(), ctx.config().minimum_name_bytes, ); - return Err(Error::BadRequest); + return Err(Error::UserNameTooShort); } } } @@ -174,7 +174,7 @@ impl AliasService { alias_type: AliasType, slug: &str, ) -> Result { - find_or_error(Self::get_optional(ctx, alias_type, slug)).await + find_or_error!(Self::get_optional(ctx, alias_type, slug), Alias) } #[inline] @@ -193,7 +193,7 @@ impl AliasService { alias_type: AliasType, target_id: i64, ) -> Result> { - tide::log::info!("Finding all {alias_type:?} aliases for ID {target_id}"); + info!("Finding all {alias_type:?} aliases for ID {target_id}"); let txn = ctx.transaction(); let aliases = Alias::find() @@ -224,10 +224,9 @@ impl AliasService { ) -> Result<()> { let txn = ctx.transaction(); - tide::log::info!( + info!( "Swapping user alias ID {} to use slug '{}'", - alias_id, - new_slug, + alias_id, new_slug, ); let model = alias::ActiveModel { @@ -241,18 +240,18 @@ impl AliasService { Ok(()) } - /// Deletes all aliases for this target. + /// Removes all aliases for this target. /// /// # Returns /// The number of deleted aliases. - pub async fn delete_all( + pub async fn remove_all( ctx: &ServiceContext<'_>, alias_type: AliasType, target_id: i64, ) -> Result { let txn = ctx.transaction(); - tide::log::info!("Deleting all {alias_type:?} aliases for target ID {target_id}"); + info!("Removing all {alias_type:?} aliases for target ID {target_id}"); let DeleteResult { rows_affected } = Alias::delete_many() .filter( @@ -263,8 +262,8 @@ impl AliasService { .exec(txn) .await?; - tide::log::debug!( - "{rows_affected} {alias_type:?} aliases for target ID {target_id} were deleted", + debug!( + "{rows_affected} {alias_type:?} aliases for target ID {target_id} were removed", ); Ok(rows_affected) @@ -279,9 +278,7 @@ impl AliasService { alias_type: AliasType, slug: &str, ) -> Result<()> { - tide::log::info!( - "Verifying target and alias table consistency for slug '{slug}'", - ); + info!("Verifying target and alias table consistency for slug '{slug}'",); let txn = ctx.transaction(); let alias_fut = Alias::find() @@ -310,7 +307,7 @@ impl AliasService { )?; if let (Some(site), Some(alias)) = (site_result, alias_result) { - tide::log::error!( + error!( "Consistency error! Both site and alias tables have the slug '{}'", slug, ); @@ -333,7 +330,7 @@ impl AliasService { )?; if let (Some(user), Some(alias)) = (user_result, alias_result) { - tide::log::error!( + error!( "Consistency error! 
Both user and alias tables have the slug '{}'", slug, ); @@ -353,13 +350,13 @@ impl AliasService { alias_type: AliasType, slug: &str, ) -> Result<()> { - tide::log::info!("Checking user alias data against filters..."); + info!("Checking user alias data against filters..."); let filter_type = match alias_type { AliasType::User => FilterType::User, AliasType::Site => { // No filter with this type, skip verification - tide::log::debug!("No need to run filter verification for site alias"); + debug!("No need to run filter verification for site alias"); return Ok(()); } }; diff --git a/deepwell/src/services/alias/structs.rs b/deepwell/src/services/alias/structs.rs index 704d9f22d1..109f6fb5a4 100644 --- a/deepwell/src/services/alias/structs.rs +++ b/deepwell/src/services/alias/structs.rs @@ -21,7 +21,6 @@ use crate::models::sea_orm_active_enums::AliasType; #[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct CreateAlias { pub slug: String, pub alias_type: AliasType, @@ -33,7 +32,6 @@ pub struct CreateAlias { } #[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct CreateAliasOutput { pub alias_id: i64, pub slug: String, diff --git a/deepwell/src/services/authentication/service.rs b/deepwell/src/services/authentication/service.rs index 1c3f1640c9..8c0309e924 100644 --- a/deepwell/src/services/authentication/service.rs +++ b/deepwell/src/services/authentication/service.rs @@ -96,7 +96,7 @@ impl AuthenticationService { ctx: &ServiceContext<'_>, name_or_email: &str, ) -> Result { - tide::log::info!("Looking for user matching name or email '{name_or_email}'"); + info!("Looking for user matching name or email '{name_or_email}'"); let txn = ctx.transaction(); let result = User::find() diff --git a/deepwell/src/services/authentication/structs.rs b/deepwell/src/services/authentication/structs.rs index c6596e9572..1e7b5ff59c 100644 --- a/deepwell/src/services/authentication/structs.rs +++ b/deepwell/src/services/authentication/structs.rs @@ -21,22 +21,19 @@ use crate::models::user::Model as UserModel; use std::net::IpAddr; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct AuthenticateUser { pub name_or_email: String, pub password: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct AuthenticateUserOutput { pub needs_mfa: bool, pub user_id: i64, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct LoginUser { pub ip_address: IpAddr, pub user_agent: String, @@ -45,21 +42,19 @@ pub struct LoginUser { pub authenticate: AuthenticateUser, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct LoginUserOutput { pub session_token: String, pub needs_mfa: bool, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct MultiFactorAuthenticateUser<'a> { pub session_token: &'a str, pub totp_or_code: &'a str, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct LoginUserMfa { pub session_token: String, pub totp_or_code: String, diff --git a/deepwell/src/services/blob/mime.rs b/deepwell/src/services/blob/mime.rs index df4b9c4703..49cc3627a6 100644 --- a/deepwell/src/services/blob/mime.rs +++ b/deepwell/src/services/blob/mime.rs @@ -28,124 +28,116 @@ //! Instead we have it in a thread and ferry requests and responses back and forth. 
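// Illustrative aside, not part of the patch: the "ferry requests and responses
// back and forth" pattern described above, reduced to its essentials with the
// Tokio channels this rewritten module switches to. Each request carries a
// oneshot sender so the worker thread can reply directly to its caller. The
// worker below returns a placeholder string instead of calling libmagic, and
// all names here are hypothetical.

use tokio::sync::{mpsc, oneshot};

type SketchRequest = (Vec<u8>, oneshot::Sender<String>);

fn spawn_sketch_worker() -> mpsc::Sender<SketchRequest> {
    let (tx, mut rx) = mpsc::channel::<SketchRequest>(64);

    // The non-Send resource stays on this thread; only the request bytes and
    // the reply ever cross the channel boundary.
    std::thread::spawn(move || {
        while let Some((bytes, reply)) = rx.blocking_recv() {
            let _ = reply.send(format!("{} bytes received", bytes.len()));
        }
    });

    tx
}

async fn sketch_request(tx: &mpsc::Sender<SketchRequest>, bytes: Vec<u8>) -> String {
    let (reply_tx, reply_rx) = oneshot::channel();
    tx.send((bytes, reply_tx)).await.expect("worker has stopped");
    reply_rx.await.expect("worker dropped the reply")
}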
use super::prelude::*; -use crossfire::mpsc; use filemagic::{FileMagicError, Flags as MagicFlags, Magic}; -use once_cell::sync::Lazy; -use std::convert::Infallible; -use std::sync::Once; -use std::{process, thread}; +use std::thread; +use tokio::sync::{mpsc, oneshot}; +type RequestPayload = (Vec, ResponseSender); type ResponsePayload = StdResult; -type ResponseSender = mpsc::TxBlocking; -type RequestPayload = (Vec, ResponseSender); -type RequestSender = mpsc::TxFuture; -type RequestReceiver = mpsc::RxBlocking; +type RequestSender = mpsc::Sender; +type RequestReceiver = mpsc::Receiver; -static QUEUE: Lazy<(RequestSender, RequestReceiver)> = - Lazy::new(|| mpsc::bounded_tx_future_rx_blocking(64)); +type ResponseSender = oneshot::Sender; +type ResponseReceiver = oneshot::Receiver; -macro_rules! sink { - () => { - QUEUE.0 - }; +#[derive(Debug, Clone)] +pub struct MimeAnalyzer { + sink: RequestSender, } -macro_rules! source { - () => { - QUEUE.1 - }; -} +impl MimeAnalyzer { + /// Starts the MIME analyzer and returns an instance of this struct. + /// + /// This launches a new thread to take MIME requests and then returns + /// a means of communicating with this thread to the caller so calls can be made. + /// + /// While technically multiple `MimeAnalyzer` instances could be made, this + /// is very wasteful; you should only create and use one. + /// + /// This object is cheaply cloneable and should be reused instead of + /// making new instances and starting new threads. + pub fn spawn() -> Self { + info!("Starting MIME analyzer worker"); + let (sink, source) = mpsc::channel(64); -fn main_loop() -> Result { - const MAGIC_FLAGS: MagicFlags = MagicFlags::MIME; - const MAGIC_PATHS: &[&str] = &[]; // Empty indicates using the default magic database + thread::spawn(|| { + let magic = Self::load_magic().expect("Unable to load magic database"); + Self::main_loop(magic, source); + }); - tide::log::info!("Loading magic database data"); - let magic = Magic::open(MAGIC_FLAGS)?; - magic.load(MAGIC_PATHS)?; + MimeAnalyzer { sink } + } - loop { - tide::log::debug!("Waiting for next MIME request"); + /// Loads the libmagic database from file, failing if it was invalid or missing. + fn load_magic() -> Result { + const MAGIC_FLAGS: MagicFlags = MagicFlags::MIME; + const MAGIC_PATHS: &[&str] = &[]; // Empty indicates using the default magic database - let (bytes, sender) = source!().recv().expect("MIME channel has disconnected"); - let result = magic.buffer(&bytes); - sender - .send(result) - .expect("MIME response channel has disconnected"); + info!("Loading magic database data"); + let magic = Magic::open(MAGIC_FLAGS)?; + magic.load(MAGIC_PATHS)?; + Ok(magic) } -} -/// Starts the thread containing the `Magic` instance. -/// -/// If the thread is already started, then this does nothing. -pub fn spawn_magic_thread() { - static START: Once = Once::new(); - - macro_rules! unwrap_err { - ($result:expr) => { - match $result { - Ok(_) => unreachable!(), - Err(error) => error, - } - }; + /// Main loop for the MIME analyzer. + /// + /// Runs in a dedicated thread due to borrow checker issues, taking in + /// requests via a mpsc channel. 
+ fn main_loop(magic: Magic, mut source: RequestReceiver) { + while let Some((bytes, sender)) = source.blocking_recv() { + debug!("Received MIME request ({} bytes)", bytes.len()); + let result = magic.buffer(&bytes); + sender.send(result).expect("Response channel is closed"); + } + + warn!("MIME magic channel closed"); } - START.call_once(|| { - thread::spawn(|| { - // Since this is an infinite loop, no success case can return. - // Only the initialization can fail, individual requests just pass back the result. - let error = unwrap_err!(main_loop()); - tide::log::error!("Failed to spawn magic thread: {error}"); - process::exit(1); - }); - }); -} - -/// Requests that libmagic analyze the buffer to determine its MIME type. -/// -/// Because all requests involve sending an item over the channel, -/// and then waiting for the response, we need to send both the input -/// and a oneshot channel to get the response. -pub async fn mime_type(buffer: Vec) -> Result { - // One-shot equivalent channel - let (resp_send, resp_recv) = mpsc::bounded_tx_blocking_rx_future(1); - - // Send request - sink!() - .send((buffer, resp_send)) - .await - .expect("MIME channel has disconnected"); - - // Wait for response - let result = resp_recv - .recv() - .await - .expect("MIME response channel has disconnected"); - - let mime = result?; - Ok(mime) + /// Requests that libmagic analyze the buffer to determine its MIME type. + /// + /// Because all requests involve sending an item over the channel, + /// and then waiting for the response, we need to send both the input + /// and a oneshot channel to get the response. + pub async fn get_mime_type(&self, buffer: Vec) -> Result { + info!("Sending MIME request ({} bytes)", buffer.len()); + + // Channel for getting the result + let (resp_send, resp_recv): (ResponseSender, ResponseReceiver) = + oneshot::channel(); + + // Send the request + self.sink + .send((buffer, resp_send)) + .await + .expect("MIME channel is closed"); + + // Wait for the response + // + // Two layers of result for channel failure and MIME request failure + let resp = resp_recv.await.expect("Response channel is closed"); + let mime = resp?; + Ok(mime) + } } -#[test] -fn mime_request() { - use async_std::task; - +#[tokio::test] +async fn mime_request() { const PNG: &[u8] = b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\x00\x00\x00\x0b\x49\x44\x41\x54\x08\x99\x63\xf8\x0f\x04\x00\x09\xfb\x03\xfd\xe3\x55\xf2\x9c\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82"; const TAR_GZIP: &[u8] = b"\x1f\x8b\x08\x08\xb1\xb7\x8f\x62\x00\x03\x78\x00\x03\x00\x00\x00\x00"; + let mime = MimeAnalyzer::spawn(); + macro_rules! check { ($bytes:expr, $expected:expr $(,)?) 
=> {{ - let future = mime_type($bytes.to_vec()); - let actual = task::block_on(future).expect("Unable to get MIME type"); + let future = mime.get_mime_type($bytes.to_vec()); + let actual = future.await.expect("Unable to get MIME type"); assert_eq!(actual, $expected, "Actual MIME type doesn't match expected"); }}; } - spawn_magic_thread(); - check!(b"", "application/x-empty; charset=binary"); check!(b"Apple banana", "text/plain; charset=us-ascii"); check!(PNG, "image/png; charset=binary"); diff --git a/deepwell/src/services/blob/mod.rs b/deepwell/src/services/blob/mod.rs index 9ef8eb26bc..d4a777e353 100644 --- a/deepwell/src/services/blob/mod.rs +++ b/deepwell/src/services/blob/mod.rs @@ -26,7 +26,6 @@ mod prelude { pub use super::super::prelude::*; - pub use super::mime_type; pub use super::structs::*; pub use crate::hash::{blob_hash_to_hex, sha512_hash, BlobHash}; } @@ -35,6 +34,6 @@ mod mime; mod service; mod structs; -pub use self::mime::{mime_type, spawn_magic_thread}; +pub use self::mime::MimeAnalyzer; pub use self::service::BlobService; pub use self::structs::*; diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index e827324560..f96b7a72ea 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -28,17 +28,50 @@ use std::str; use time::format_description::well_known::Rfc2822; use time::OffsetDateTime; +/// Hash for empty blobs. +/// +/// Even though it is not the SHA-512 hash, for simplicity we treat the hash +/// value with all zeroes to be the blob address for the empty blob. +/// This empty file is not actually stored in S3 but instead is a "virtual file", +/// considered to have always been present in `BlobService`. +pub const EMPTY_BLOB_HASH: BlobHash = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, +]; + +/// MIME type for empty blobs. +pub const EMPTY_BLOB_MIME: &str = "inode/x-empty; charset=binary"; + +/// Created UNIX timestamp for empty blobs. +/// +/// Timestamp is 2019/01/18 at midnight, the date of the first Wikijump commit. +pub const EMPTY_BLOB_TIMESTAMP: i64 = 1547769600; + #[derive(Debug)] pub struct BlobService; impl BlobService { /// Creates a blob with this data, if it does not already exist. - pub async fn create( + pub async fn create>( ctx: &ServiceContext<'_>, - data: &[u8], + data: B, ) -> Result { - tide::log::info!("Creating blob (length {})", data.len()); + let data = data.as_ref(); + info!("Creating blob (length {})", data.len()); + // Special handling for empty blobs + if data.is_empty() { + debug!("File being created is empty, special case"); + return Ok(CreateBlobOutput { + hash: EMPTY_BLOB_HASH, + mime: str!(EMPTY_BLOB_MIME), + size: 0, + created: false, + }); + } + + // Upload blob let bucket = ctx.s3_bucket(); let hash = sha512_hash(data); let hex_hash = blob_hash_to_hex(&hash); @@ -49,10 +82,10 @@ impl BlobService { match Self::head(ctx, &hex_hash).await? 
{ // Blob exists, copy metadata and return that Some(result) => { - tide::log::debug!("Blob with hash {hex_hash} already exists"); + debug!("Blob with hash {hex_hash} already exists"); // Content-Type header should be passed in - let mime = result.content_type.ok_or(Error::RemoteOperationFailed)?; + let mime = result.content_type.ok_or(Error::S3Response)?; Ok(CreateBlobOutput { hash, @@ -64,10 +97,10 @@ impl BlobService { // Blob doesn't exist, insert it None => { - tide::log::debug!("Blob with hash {hex_hash} to be created"); + debug!("Blob with hash {hex_hash} to be created"); // Determine MIME type for the new file - let mime = mime_type(data.to_vec()).await?; + let mime = ctx.mime().get_mime_type(data.to_vec()).await?; // Put into S3 let response = bucket @@ -92,6 +125,13 @@ impl BlobService { ctx: &ServiceContext<'_>, hash: &[u8], ) -> Result>> { + // Special handling for empty blobs + if hash == EMPTY_BLOB_HASH { + debug!("Returning the empty blob"); + return Ok(Some(Vec::new())); + } + + // Retrieve blob from S3 let bucket = ctx.s3_bucket(); let hex_hash = blob_hash_to_hex(hash); let response = bucket.get_object(&hex_hash).await?; @@ -105,27 +145,36 @@ impl BlobService { #[inline] pub async fn get(ctx: &ServiceContext<'_>, hash: &[u8]) -> Result> { - find_or_error(Self::get_optional(ctx, hash)).await + find_or_error!(Self::get_optional(ctx, hash), Blob) } pub async fn get_metadata_optional( ctx: &ServiceContext<'_>, hash: &[u8], ) -> Result> { - let hex_hash = blob_hash_to_hex(hash); + // Special handling for empty blobs + if hash == EMPTY_BLOB_HASH { + return Ok(Some(BlobMetadata { + mime: str!(EMPTY_BLOB_MIME), + size: 0, + created_at: OffsetDateTime::from_unix_timestamp(EMPTY_BLOB_TIMESTAMP) + .unwrap(), + })); + } + // Retrieve metadata from S3 + let hex_hash = blob_hash_to_hex(hash); match Self::head(ctx, &hex_hash).await? { None => Ok(None), Some(result) => { // Headers should be passed in - let size = result.content_length.ok_or(Error::RemoteOperationFailed)?; - let mime = result.content_type.ok_or(Error::RemoteOperationFailed)?; + let size = result.content_length.ok_or(Error::S3Response)?; + let mime = result.content_type.ok_or(Error::S3Response)?; let created_at = { - let timestamp = - result.last_modified.ok_or(Error::RemoteOperationFailed)?; + let timestamp = result.last_modified.ok_or(Error::S3Response)?; OffsetDateTime::parse(×tamp, &Rfc2822) - .map_err(|_| Error::RemoteOperationFailed)? + .map_err(|_| Error::S3Response)? }; Ok(Some(BlobMetadata { @@ -142,10 +191,17 @@ impl BlobService { ctx: &ServiceContext<'_>, hash: &[u8], ) -> Result { - find_or_error(Self::get_metadata_optional(ctx, hash)).await + find_or_error!(Self::get_metadata_optional(ctx, hash), Blob) } pub async fn exists(ctx: &ServiceContext<'_>, hash: &[u8]) -> Result { + // Special handling for the empty blob + if hash == EMPTY_BLOB_HASH { + debug!("Checking existence of the empty blob"); + return Ok(true); + } + + // Fetch existence from S3 let hex_hash = blob_hash_to_hex(hash); let result = Self::head(ctx, &hex_hash).await?; Ok(result.is_some()) @@ -185,6 +241,16 @@ impl BlobService { } pub async fn hard_delete(ctx: &ServiceContext<'_>, hash: &[u8]) -> Result<()> { + // Special handling for empty blobs + // + // Being virtual, having always existed, they cannot be deleted. + // So this is a no-op. 
+ if hash == EMPTY_BLOB_HASH { + debug!("Ignoring attempt to hard delete the empty blob"); + return Ok(()); + } + + // Delete from S3 let bucket = ctx.s3_bucket(); let hex_hash = blob_hash_to_hex(hash); @@ -204,12 +270,13 @@ fn s3_error(response: &ResponseData, action: &str) -> Result { Err(_) => "(invalid UTF-8)", }; - tide::log::error!( + error!( "Error while {} (HTTP {}): {}", action, response.status_code(), error_message, ); - Err(Error::RemoteOperationFailed) + // TODO replace with S3 backend-specific error + Err(Error::S3Response) } diff --git a/deepwell/src/services/category/service.rs b/deepwell/src/services/category/service.rs index fcab3e13f1..0153ad85d8 100644 --- a/deepwell/src/services/category/service.rs +++ b/deepwell/src/services/category/service.rs @@ -77,7 +77,7 @@ impl CategoryService { site_id: i64, reference: Reference<'_>, ) -> Result { - find_or_error(Self::get_optional(ctx, site_id, reference)).await + find_or_error!(Self::get_optional(ctx, site_id, reference), PageCategory) } pub async fn get_or_create( diff --git a/deepwell/src/services/category/structs.rs b/deepwell/src/services/category/structs.rs index 6c0db37ac1..4c80ce0c23 100644 --- a/deepwell/src/services/category/structs.rs +++ b/deepwell/src/services/category/structs.rs @@ -18,44 +18,10 @@ * along with this program. If not, see . */ -use crate::models::page_category::Model as PageCategoryModel; use crate::web::Reference; -use time::OffsetDateTime; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetCategory<'a> { pub site: Reference<'a>, pub category: Reference<'a>, } - -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CategoryOutput { - category_id: i64, - created_at: OffsetDateTime, - updated_at: Option, - site_id: i64, - slug: String, -} - -impl From for CategoryOutput { - #[inline] - fn from(model: PageCategoryModel) -> CategoryOutput { - let PageCategoryModel { - category_id, - created_at, - updated_at, - site_id, - slug, - } = model; - - CategoryOutput { - category_id, - created_at, - updated_at, - site_id, - slug, - } - } -} diff --git a/deepwell/src/services/context.rs b/deepwell/src/services/context.rs index 3195399bfa..b8ef0b02eb 100644 --- a/deepwell/src/services/context.rs +++ b/deepwell/src/services/context.rs @@ -18,29 +18,27 @@ * along with this program. If not, see . */ -use crate::api::{ApiRequest, ApiServerState}; +use crate::api::ServerState; use crate::config::Config; use crate::locales::Localizations; +use crate::services::blob::MimeAnalyzer; +use crate::services::job::JobQueue; use s3::bucket::Bucket; use sea_orm::DatabaseTransaction; use std::sync::Arc; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct ServiceContext<'txn> { - state: ApiServerState, + state: ServerState, transaction: &'txn DatabaseTransaction, } impl<'txn> ServiceContext<'txn> { - #[inline] - pub fn new(req: &ApiRequest, transaction: &'txn DatabaseTransaction) -> Self { - Self::from_raw(req.state(), transaction) - } - - pub fn from_raw( - state: &ApiServerState, - transaction: &'txn DatabaseTransaction, - ) -> Self { + // NOTE: It is the responsibility of the caller to manage commit / rollback + // for transactions. 
+ // + // For our endpoints, this is managed in the wrapper macro in api.rs + pub fn new(state: &ServerState, transaction: &'txn DatabaseTransaction) -> Self { ServiceContext { state: Arc::clone(state), transaction, @@ -58,6 +56,16 @@ impl<'txn> ServiceContext<'txn> { &self.state.localizations } + #[inline] + pub fn mime(&self) -> &MimeAnalyzer { + &self.state.mime_analyzer + } + + #[inline] + pub fn job_queue(&self) -> &JobQueue { + &self.state.job_queue + } + #[inline] pub fn s3_bucket(&self) -> &Bucket { &self.state.s3_bucket diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index de89ff0d92..573350d6ca 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -40,12 +40,12 @@ impl DomainService { ctx: &ServiceContext<'_>, CreateCustomDomain { domain, site_id }: CreateCustomDomain, ) -> Result<()> { - tide::log::info!("Creating custom domain '{domain}' (site ID {site_id})"); + info!("Creating custom domain '{domain}' (site ID {site_id})"); let txn = ctx.transaction(); if Self::custom_domain_exists(ctx, &domain).await? { - tide::log::error!("Custom domain already exists, cannot create"); - return Err(Error::Conflict); + error!("Custom domain already exists, cannot create"); + return Err(Error::CustomDomainExists); } let model = site_domain::ActiveModel { @@ -59,9 +59,9 @@ impl DomainService { /// Delete the given custom domain. /// - /// Yields `Error::NotFound` if it's missing. - pub async fn delete_custom(ctx: &ServiceContext<'_>, domain: String) -> Result<()> { - tide::log::info!("Deleting custom domain '{domain}'"); + /// Yields `Error::CustomDomainNotFound` if it's missing. + pub async fn remove_custom(ctx: &ServiceContext<'_>, domain: String) -> Result<()> { + info!("Deleting custom domain '{domain}'"); let txn = ctx.transaction(); let DeleteResult { rows_affected, .. } = @@ -70,7 +70,7 @@ impl DomainService { if rows_affected == 1 { Ok(()) } else { - Err(Error::NotFound) + Err(Error::CustomDomainNotFound) } } @@ -78,7 +78,7 @@ impl DomainService { ctx: &ServiceContext<'_>, domain: &str, ) -> Result> { - tide::log::info!("Getting site for custom domain '{domain}'"); + info!("Getting site for custom domain '{domain}'"); // Join with the site table so we can get that data, rather than just the ID. let txn = ctx.transaction(); @@ -97,7 +97,10 @@ impl DomainService { ctx: &ServiceContext<'_>, domain: &str, ) -> Result { - find_or_error(Self::site_from_custom_domain_optional(ctx, domain)).await + find_or_error!( + Self::site_from_custom_domain_optional(ctx, domain), + CustomDomain, + ) } /// Determines if the given custom domain is registered. @@ -111,17 +114,44 @@ impl DomainService { .map(|site| site.is_some()) } + /// Gets the site corresponding with the given domain. + #[inline] + #[allow(dead_code)] // TEMP + pub async fn site_from_domain<'a>( + ctx: &ServiceContext<'_>, + domain: &'a str, + ) -> Result { + find_or_error!(Self::site_from_domain_optional(ctx, domain), CustomDomain) + } + /// Optional version of `site_from_domain()`. pub async fn site_from_domain_optional<'a>( ctx: &ServiceContext<'_>, domain: &'a str, + ) -> Result> { + let result = Self::parse_site_from_domain(ctx, domain).await?; + match result { + SiteDomainResult::Found(site) => Ok(Some(site)), + _ => Ok(None), + } + } + + /// Gets the site corresponding with the given domain. + /// + /// Returns one of three variants: + /// * `Found` — Site retrieved from the domain. + /// * `Slug` — Site does not exist. 
If it did, domain would be a canonical domain. + /// * `CustomDomain` — Site does not exist. If it did, domain would be a custom domain. + pub async fn parse_site_from_domain<'a>( + ctx: &ServiceContext<'_>, + domain: &'a str, ) -> Result> { - tide::log::info!("Getting site for domain '{domain}'"); + info!("Getting site for domain '{domain}'"); match Self::parse_canonical(ctx.config(), domain) { // Normal canonical domain, return from site slug fetch. Some(subdomain) => { - tide::log::debug!("Found canonical domain with slug '{subdomain}'"); + debug!("Found canonical domain with slug '{subdomain}'"); let result = SiteService::get_optional(ctx, Reference::Slug(cow!(subdomain))) @@ -136,7 +166,7 @@ impl DomainService { // Not canonical, try custom domain. None => { - tide::log::debug!("Not found, checking if it's a custom domain"); + debug!("Not found, checking if it's a custom domain"); let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { @@ -148,24 +178,6 @@ impl DomainService { } } - /// Gets the site corresponding with the given domain. - /// - /// # Returns - /// A 2-tuple, the first containing the site for this domain, - /// the second containing the site slug in this domain - /// (or `None` if it was a custom domain). - #[inline] - pub async fn site_from_domain<'a>( - ctx: &ServiceContext<'_>, - domain: &'a str, - ) -> Result { - let result = Self::site_from_domain_optional(ctx, domain).await?; - match result { - SiteDomainResult::Found(site) => Ok(site), - _ => Err(Error::NotFound), - } - } - /// If this domain is canonical domain, extract the site slug. pub fn parse_canonical<'a>(config: &Config, domain: &'a str) -> Option<&'a str> { let main_domain = &config.main_domain; @@ -192,7 +204,7 @@ impl DomainService { // For instance, foo.wikijump.com or bar.wikijump.com are valid, // but foo.bar.wikijump.com is not. Some(subdomain) if subdomain.contains('.') => { - tide::log::error!("Found domain '{domain}' is a sub-subdomain, invalid"); + error!("Found domain '{domain}' is a sub-subdomain, invalid"); None } @@ -209,10 +221,9 @@ impl DomainService { /// Gets the preferred domain for the given site. pub fn domain_for_site<'a>(config: &Config, site: &'a SiteModel) -> Cow<'a, str> { - tide::log::debug!( + debug!( "Getting preferred domain for site '{}' (ID {})", - site.slug, - site.site_id, + site.slug, site.site_id, ); match &site.custom_domain { @@ -239,7 +250,7 @@ impl DomainService { ctx: &ServiceContext<'_>, site_id: i64, ) -> Result> { - tide::log::info!("Getting domains for site ID {site_id}"); + info!("Getting domains for site ID {site_id}"); let txn = ctx.transaction(); let models = SiteDomain::find() diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index aeee7dd3f9..6dbc624fe7 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -28,7 +28,6 @@ pub enum SiteDomainResult<'a> { } #[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct CreateCustomDomain { pub domain: String, pub site_id: i64, diff --git a/deepwell/src/services/email/service.rs b/deepwell/src/services/email/service.rs index 5e684b7a94..3e65d6abff 100644 --- a/deepwell/src/services/email/service.rs +++ b/deepwell/src/services/email/service.rs @@ -25,12 +25,12 @@ pub struct EmailService; impl EmailService { /// Validates an email through the MailCheck API. 
- pub fn validate(email: &str) -> Result { - let url = format!("https://api.mailcheck.ai/email/{email}"); - tide::log::debug!("Requesting mailcheck status: {url}"); - - // Send a GET request to the MailCheck API and deserialize the response. - let mailcheck = ureq::get(&url).call()?.into_json::()?; + pub async fn validate(email: &str) -> Result { + // Sends a GET request to the MailCheck API and deserializes the response. + let mailcheck = reqwest::get(format!("https://api.mailcheck.ai/email/{email}")) + .await? + .json::() + .await?; // Create the output with default parameters. let mut output = EmailValidationOutput::default(); @@ -42,25 +42,24 @@ impl EmailService { // Invalid request. 400 => { - tide::log::error!( + error!( "MailCheck API request failed with bad response: {:?}", mailcheck.error, ); - return Err(Error::BadRequest); + return Err(Error::EmailVerification(mailcheck.error)); } // Exceeded rate limit. 429 => { - tide::log::error!("MailCheck API hit ratelimit: {:?}", mailcheck.error,); + error!("MailCheck API hit ratelimit: {:?}", mailcheck.error); return Err(Error::RateLimited); } // Other statuses. _ => { - tide::log::warn!( + warn!( "MailCheck API returned status {}: {:?}", - mailcheck.status, - mailcheck.error, + mailcheck.status, mailcheck.error, ); } } diff --git a/deepwell/src/services/email/structs.rs b/deepwell/src/services/email/structs.rs index 6afd64c6f7..eab507fdd2 100644 --- a/deepwell/src/services/email/structs.rs +++ b/deepwell/src/services/email/structs.rs @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; /// A deserialized response from the MailCheck API. -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct MailCheckResponse { pub status: u16, pub email: String, @@ -33,8 +33,7 @@ pub struct MailCheckResponse { pub error: Option, } -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct EmailValidationOutput { pub valid: bool, pub classification: EmailClassification, @@ -51,8 +50,8 @@ impl Default for EmailValidationOutput { } } -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] +#[serde(rename_all = "kebab-case")] pub enum EmailClassification { Normal, Disposable, diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index d8f8aba445..dd5bc8ce66 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -18,13 +18,12 @@ * along with this program. If not, see . */ -use crate::locales::LocalizationTranslateError; use filemagic::FileMagicError; +use jsonrpsee::types::error::ErrorObjectOwned; +use reqwest::Error as ReqwestError; use s3::error::S3Error; -use sea_orm::error::DbErr; -use std::io; +use sea_orm::{error::DbErr, TransactionError}; use thiserror::Error as ThisError; -use tide::{Error as TideError, StatusCode}; use unic_langid::LanguageIdentifierError; pub use std::error::Error as StdError; @@ -33,11 +32,12 @@ pub type StdResult = std::result::Result; pub type Result = StdResult; /// Wrapper error for possible failure modes from service methods. -/// -/// This has a method to convert to the corresponding HTTP status, -/// via `into_tide_error()`. 
#[derive(ThisError, Debug)] pub enum Error { + // Error passed straight to ErrorObjectOwned without conversion + #[error("{0}")] + Raw(#[from] ErrorObjectOwned), + #[error("Cryptography error: {0}")] Cryptography(argon2::password_hash::Error), @@ -45,10 +45,19 @@ pub enum Error { Database(DbErr), #[error("Invalid locale: {0}")] - Locale(#[from] LanguageIdentifierError), + LocaleInvalid(#[from] LanguageIdentifierError), + + #[error("No messages are available for this locale")] + LocaleMissing, + + #[error("Message key not found for this locale")] + LocaleMessageMissing, - #[error("Localization error: {0}")] - Localization(#[from] LocalizationTranslateError), + #[error("Message key was found, but has no value")] + LocaleMessageValueMissing, + + #[error("Message key was found, but does not have this attribute")] + LocaleMessageAttributeMissing, #[error("Magic library error: {0}")] Magic(#[from] FileMagicError), @@ -59,28 +68,21 @@ pub enum Error { #[error("Serialization error: {0}")] Serde(#[from] serde_json::Error), - #[error("I/O error: {0}")] - Io(#[from] io::Error), + #[error("S3 service returned error: {0}")] + S3Service(#[from] S3Error), - #[error("S3 error: {0}")] - S3(#[from] S3Error), + #[error("S3 service failed to respond properly")] + S3Response, - #[error("Web server error: HTTP {}", .0.status() as u16)] - Web(TideError), + #[error("Email verification error: {}", .0.as_ref().unwrap_or(&str!("")))] + EmailVerification(Option), - // See also RemoteOperationFailed. #[error("Web request error: {0}")] - WebRequest(Box), + WebRequest(#[from] ReqwestError), #[error("Invalid enum serialization value")] InvalidEnumValue, - #[error("Inconsistency found in checked data")] - Inconsistent, - - #[error("A request to a remote service returned an error")] - RemoteOperationFailed, - #[error("Attempting to perform a wikitext parse and render has timed out")] RenderTimeout, @@ -90,6 +92,21 @@ pub enum Error { #[error("Invalid username, password, or TOTP code")] InvalidAuthentication, + #[error("Backend error while trying to authenticate")] + AuthenticationBackend(Box), + + #[error("Invalid session token, cannot be used for authentication")] + InvalidSessionToken, + + #[error("User ID {session_user_id} associated with session does not match active user ID {active_user_id}")] + SessionUserId { + active_user_id: i64, + session_user_id: i64, + }, + + #[error("A password is required")] + EmptyPassword, + #[error("The user's email is disallowed")] DisallowedEmail, @@ -99,21 +116,144 @@ pub enum Error { #[error("The request is in some way malformed or incorrect")] BadRequest, - #[error("The request conflicts with data already present")] - Conflict, - - #[error("The requested data exists, when it was expected to be missing")] - Exists, - - #[error("The requested data was not found")] - NotFound, - #[error("The request violates a configured content filter")] FilterViolation, #[error("Cannot hide the wikitext for the latest page revision")] CannotHideLatestRevision, + #[error("The regular expression found in the database is invalid")] + FilterRegexInvalid(regex::Error), + + #[error("Cannot restore a non-deleted filter")] + FilterNotDeleted, + + #[error("File name cannot be empty")] + FileNameEmpty, + + #[error("File name too long")] + FileNameTooLong, + + #[error("File MIME type cannot be empty")] + FileMimeEmpty, + + #[error("Cannot restore a non-deleted file")] + FileNotDeleted, + + #[error("Cannot restore a non-deleted page")] + PageNotDeleted, + + #[error("Page slug cannot be empty")] + PageSlugEmpty, + 
+ #[error("Site slug cannot be empty")] + SiteSlugEmpty, + + #[error("User name is too short")] + UserNameTooShort, + + #[error("User slug cannot be empty")] + UserSlugEmpty, + + #[error("Message subject cannot be empty")] + MessageSubjectEmpty, + + #[error("Message subject too long")] + MessageSubjectTooLong, + + #[error("Message body cannot be empty")] + MessageBodyEmpty, + + #[error("Message body too long")] + MessageBodyTooLong, + + #[error("Message cannot have no recipients")] + MessageNoRecipients, + + #[error("Message has too many recipients")] + MessageTooManyRecipients, + + #[error("Unspecified entity not found")] + GeneralNotFound, + + #[error("Alias does not exist")] + AliasNotFound, + + #[error("Interaction value does not exist")] + InteractionNotFound, + + #[error("User does not exist")] + UserNotFound, + + #[error("Site does not exist")] + SiteNotFound, + + #[error("Page does not exist")] + PageNotFound, + + #[error("Page category does not exist")] + PageCategoryNotFound, + + #[error("Page parent does not exist")] + PageParentNotFound, + + #[error("Page revision does not exist")] + PageRevisionNotFound, + + #[error("File does not exist")] + FileNotFound, + + #[error("File revision does not exist")] + FileRevisionNotFound, + + #[error("Vote does not exist")] + VoteNotFound, + + #[error("Filter does not exist")] + FilterNotFound, + + #[error("Custom domain does not exist")] + CustomDomainNotFound, + + #[error("Message does not exist")] + MessageNotFound, + + #[error("Message draft does not exist")] + MessageDraftNotFound, + + #[error("Blob item does not exist")] + BlobNotFound, + + #[error("Text item does not exist")] + TextNotFound, + + #[error("Cannot perform, user already exists")] + UserExists, + + #[error("Cannot set up user MFA, already set up")] + UserMfaExists, + + #[error("Cannot perform, site already exists")] + SiteExists, + + #[error("Cannot perform, page already exists")] + PageExists, + + #[error("Cannot perform, page slug already exists")] + PageSlugExists, + + #[error("Cannot perform, page parent already exists")] + PageParentExists, + + #[error("Cannot perform, file already exists")] + FileExists, + + #[error("Cannot perform, filter already exists")] + FilterExists, + + #[error("Cannot perform, custom domain already exists")] + CustomDomainExists, + #[error("Cannot perform this action because you are blocked by the user")] UserBlockedUser, @@ -125,52 +265,163 @@ pub enum Error { } impl Error { - pub fn into_tide_error(self) -> TideError { + /// Returns the code associated with this error. + /// + /// The JSON-RPC spec has each unique error case return its own integer error code. + /// Some very negative codes are reserved for RPC internals, so we will only output + /// positive values. + /// + /// Sort of similar to HTTP status codes, we are also dividing them into groups based + /// generally on the kind of error it is. + /// + /// When an error case is removed, then its number should generally not be reused, + /// just use the next available value in line. Also be sure to update framerail + /// accordingly when error codes are added or removed. 
+ pub fn code(&self) -> i32 { + match self { + // 1000 - Miscellaneous, general errors + // Avoid putting stuff here, prefer other categories instead + Error::Raw(_) => 1000, + + // 2000 - Database conflicts + // Missing data + Error::GeneralNotFound => 2000, + Error::AliasNotFound => 2001, + Error::InteractionNotFound => 2002, + Error::UserNotFound => 2003, + Error::SiteNotFound => 2004, + Error::PageNotFound => 2005, + Error::PageCategoryNotFound => 2006, + Error::PageParentNotFound => 2007, + Error::PageRevisionNotFound => 2008, + Error::FileNotFound => 2009, + Error::FileRevisionNotFound => 2010, + Error::VoteNotFound => 2011, + Error::FilterNotFound => 2012, + Error::CustomDomainNotFound => 2013, + Error::MessageNotFound => 2014, + Error::MessageDraftNotFound => 2015, + Error::BlobNotFound => 2016, + Error::TextNotFound => 2017, + + // 2100 -- Existing data + Error::UserExists => 2100, + Error::UserMfaExists => 2101, + Error::SiteExists => 2102, + Error::PageExists => 2103, + Error::PageSlugExists => 2104, + Error::PageParentExists => 2105, + Error::FileExists => 2106, + Error::FilterExists => 2107, + Error::CustomDomainExists => 2108, + + // 3000 - Server errors, unexpected + Error::RateLimited => 3000, + Error::WebRequest(_) => 3001, + Error::AuthenticationBackend(_) => 3002, + + // 3100 -- Remote services + Error::RenderTimeout => 3100, + Error::EmailVerification(_) => 3101, + Error::S3Service(_) => 3102, + Error::S3Response => 3103, + + // 3200 -- Backend issues + Error::Serde(_) => 3200, + Error::Database(_) => 3201, + Error::Cryptography(_) => 3202, + Error::Magic(_) => 3204, + Error::Otp(_) => 3205, + + // 4000 - Client, request errors + // BadRequest is pretty general, avoid it except for rare weird cases + Error::BadRequest => 4000, + Error::InvalidEnumValue => 4001, + Error::FilterViolation => 4002, + Error::InsufficientNameChanges => 4003, + Error::CannotHideLatestRevision => 4004, + Error::FilterRegexInvalid(_) => 4005, + Error::FilterNotDeleted => 4006, + Error::FileNameEmpty => 4007, + Error::FileNameTooLong => 4008, + Error::FileMimeEmpty => 4009, + Error::FileNotDeleted => 4010, + Error::PageNotDeleted => 4011, + Error::PageSlugEmpty => 4012, + Error::SiteSlugEmpty => 4013, + Error::UserNameTooShort => 4014, + Error::UserSlugEmpty => 4015, + Error::MessageSubjectEmpty => 4016, + Error::MessageSubjectTooLong => 4017, + Error::MessageBodyEmpty => 4018, + Error::MessageBodyTooLong => 4019, + Error::MessageNoRecipients => 4020, + Error::MessageTooManyRecipients => 4021, + + // 4100 -- Localization + Error::LocaleInvalid(_) => 4100, + Error::LocaleMissing => 4101, + Error::LocaleMessageMissing => 4102, + Error::LocaleMessageValueMissing => 4103, + Error::LocaleMessageAttributeMissing => 4104, + + // 4200 -- Login errors + Error::EmptyPassword => 4200, + Error::InvalidEmail => 4201, + Error::DisallowedEmail => 4202, + + // 4300 -- Relationship conflicts + Error::SiteBlockedUser => 4300, + Error::UserBlockedUser => 4301, + + // 5000 - Authentication, permission, or role errors + Error::InvalidAuthentication => 5000, + Error::InvalidSessionToken => 5001, + Error::SessionUserId { .. } => 5002, + // TODO: permission errors (e.g. locked page, cannot apply bans) + } + } + + /// Emit partial structured error data. + /// + /// Meant to be better than nothing and simply `Debug` but also not + /// as much boilerplate as manually implementing `Serialize` on everything. + /// This unwraps common cases and makes things generally clearer. 
+ fn data(&self) -> serde_json::Value { + use serde_json::json; + match self { - Error::Cryptography(_) => { - // The "invalid password" variant should have already been filtered out, see below. - TideError::from_str(StatusCode::InternalServerError, "") - } - Error::Database(inner) => { - TideError::new(StatusCode::InternalServerError, inner) - } - Error::Magic(inner) => TideError::new(StatusCode::InternalServerError, inner), - Error::Locale(inner) => TideError::new(StatusCode::BadRequest, inner), - Error::Localization(inner) => TideError::new(StatusCode::NotFound, inner), - Error::Otp(inner) => TideError::new(StatusCode::InternalServerError, inner), - Error::Serde(inner) => TideError::new(StatusCode::InternalServerError, inner), - Error::Io(inner) => TideError::new(StatusCode::InternalServerError, inner), - Error::S3(inner) => TideError::new(StatusCode::InternalServerError, inner), - Error::Web(inner) => inner, - Error::WebRequest(inner) => { - TideError::new(StatusCode::InternalServerError, inner) - } - Error::InvalidEnumValue | Error::Inconsistent => { - TideError::from_str(StatusCode::InternalServerError, "") - } - Error::RemoteOperationFailed | Error::RenderTimeout => { - TideError::from_str(StatusCode::InternalServerError, "") - } - Error::InsufficientNameChanges => { - TideError::from_str(StatusCode::PaymentRequired, "") - } - Error::InvalidAuthentication => { - TideError::from_str(StatusCode::Forbidden, "") - } - Error::DisallowedEmail => TideError::from_str(StatusCode::BadRequest, ""), - Error::InvalidEmail => TideError::from_str(StatusCode::BadRequest, ""), - Error::BadRequest => TideError::from_str(StatusCode::BadRequest, ""), - Error::Exists | Error::Conflict => { - TideError::from_str(StatusCode::Conflict, "") - } - Error::NotFound => TideError::from_str(StatusCode::NotFound, ""), - Error::FilterViolation | Error::CannotHideLatestRevision => { - TideError::from_str(StatusCode::BadRequest, "") - } - Error::UserBlockedUser | Error::SiteBlockedUser => { - TideError::from_str(StatusCode::Forbidden, "") - } - Error::RateLimited => TideError::from_str(StatusCode::ServiceUnavailable, ""), + // Message already has all the data + Error::Raw(_) => json!(null), + + // Unwrap self-error + Error::AuthenticationBackend(error) => error.data(), + + // Emit as structure + Error::SessionUserId { + active_user_id, + session_user_id, + } => json!({ + "active_user_id": active_user_id, + "session_user_id": session_user_id, + }), + + // Emit as-is + Error::EmailVerification(value) => json!(value), + + // Emit as a Debug string + Error::Cryptography(value) => json!(format!("{value:?}")), + Error::Database(value) => json!(format!("{value:?}")), + Error::LocaleInvalid(value) => json!(format!("{value:?}")), + Error::Magic(value) => json!(format!("{value:?}")), + Error::Otp(value) => json!(format!("{value:?}")), + Error::Serde(value) => json!(format!("{value:?}")), + Error::S3Service(value) => json!(format!("{value:?}")), + Error::WebRequest(value) => json!(format!("{value:?}")), + Error::FilterRegexInvalid(value) => json!(format!("{value:?}")), + + // Other cases are null enums or the values are ignored + _ => json!(null), } } } @@ -184,7 +435,10 @@ impl From for Error { #[inline] fn from(error: argon2::password_hash::Error) -> Error { match error { + // Password is invalid, expected error argon2::password_hash::Error::Password => Error::InvalidAuthentication, + + // Problem with the password hashing process _ => Error::Cryptography(error), } } @@ -193,22 +447,36 @@ impl From for Error { impl From for 
Error { fn from(error: DbErr) -> Error { match error { - DbErr::RecordNotFound(_) => Error::NotFound, + DbErr::RecordNotFound(_) => Error::GeneralNotFound, _ => Error::Database(error), } } } -impl From for Error { - #[inline] - fn from(error: ureq::Error) -> Error { - Error::WebRequest(Box::new(error)) +// End-conversion for methods +// +// This is used to convert our ServiceError type into the RPC error type. + +impl From for ErrorObjectOwned { + fn from(error: Error) -> ErrorObjectOwned { + // Return a raw error as-is + if let Error::Raw(error) = error { + return error; + } + + // Build error object + let error_code = error.code(); + let message = str!(error); + let data = error.data(); + ErrorObjectOwned::owned(error_code, message, Some(data)) } } -impl From for Error { - #[inline] - fn from(error: TideError) -> Error { - Error::Web(error) +// Helper function for unwrapping two layers of third party crate error wrapper types. + +pub fn into_rpc_error(error: TransactionError) -> ErrorObjectOwned { + match error { + TransactionError::Connection(error) => Error::Database(error).into(), + TransactionError::Transaction(error) => error, } } diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index f9e02d67f7..6ffac148c0 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -36,23 +36,22 @@ impl FileService { /// /// In the background, this stores the blob via content addressing, /// meaning that duplicates are not uploaded twice. - #[allow(dead_code)] // TEMP - pub async fn create( + pub async fn upload( ctx: &ServiceContext<'_>, - page_id: i64, - site_id: i64, - CreateFile { - revision_comments, + UploadFile { + site_id, + page_id, name, + revision_comments, user_id, + data, licensing, bypass_filter, - }: CreateFile, - data: &[u8], - ) -> Result { + }: UploadFile, + ) -> Result { let txn = ctx.transaction(); - tide::log::info!( + info!( "Creating file with name '{}', content length {}", name, data.len(), @@ -72,11 +71,12 @@ impl FileService { mime, size, created: _, - } = BlobService::create(ctx, data).await?; + } = BlobService::create(ctx, &data).await?; // Add new file let model = file::ActiveModel { name: Set(name.clone()), + site_id: Set(site_id), page_id: Set(page_id), ..Default::default() }; @@ -103,29 +103,26 @@ impl FileService { Ok(revision_output) } - /// Updates a file, including the ability to upload a new version. - #[allow(dead_code)] // TEMP - pub async fn update( + /// Edits a file, including the ability to upload a new version. + pub async fn edit( ctx: &ServiceContext<'_>, - site_id: i64, - page_id: i64, - file_id: i64, - UpdateFile { - revision_comments, + EditFile { + site_id, + page_id, + file_id, user_id, - body, + revision_comments, bypass_filter, - }: UpdateFile, - ) -> Result> { + body, + }: EditFile, + ) -> Result> { + info!("Editing file with ID {}", file_id); + let txn = ctx.transaction(); let last_revision = - FileRevisionService::get_latest(ctx, page_id, file_id).await?; + FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; - tide::log::info!("Updating file with ID {}", file_id); - - // Process inputs - - let UpdateFileBody { + let EditFileBody { name, data, licensing, @@ -196,34 +193,29 @@ impl FileService { } /// Moves a file from from one page to another. 
- #[allow(dead_code)] // TEMP pub async fn r#move( ctx: &ServiceContext<'_>, - site_id: i64, - file_id: i64, - input: MoveFile, - ) -> Result> { - let txn = ctx.transaction(); - - let MoveFile { - revision_comments, - user_id, + MoveFile { name, + site_id, current_page_id, destination_page_id, - } = input; - + file_id, + user_id, + revision_comments, + }: MoveFile, + ) -> Result> { + let txn = ctx.transaction(); let last_revision = - FileRevisionService::get_latest(ctx, current_page_id, file_id).await?; + FileRevisionService::get_latest(ctx, site_id, current_page_id, file_id) + .await?; // Get destination filename let name = name.unwrap_or_else(|| last_revision.name.clone()); - tide::log::info!( - "Moving file with ID {} from page ID {} to {} ", - file_id, - current_page_id, - destination_page_id, + info!( + "Moving file with ID {} from page ID {} to {}", + file_id, current_page_id, destination_page_id, ); // Ensure there isn't a file with this name on the destination page @@ -265,26 +257,31 @@ impl FileService { /// Like other deletions throughout Wikijump, this is a soft deletion. /// It marks the files as deleted but retains the contents, permitting it /// to be easily reverted. - #[allow(dead_code)] // TEMP pub async fn delete( ctx: &ServiceContext<'_>, - page_id: i64, - reference: Reference<'_>, - input: DeleteFile, - ) -> Result { - let txn = ctx.transaction(); - - let DeleteFile { + DeleteFile { revision_comments, site_id, + page_id, + file: reference, user_id, - } = input; + }: DeleteFile<'_>, + ) -> Result { + let txn = ctx.transaction(); // Ensure file exists - let FileModel { file_id, .. } = Self::get(ctx, page_id, reference).await?; + let FileModel { file_id, .. } = Self::get( + ctx, + GetFile { + site_id, + page_id, + file: reference, + }, + ) + .await?; let last_revision = - FileRevisionService::get_latest(ctx, page_id, file_id).await?; + FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; // Create tombstone revision // This outdates the page, etc @@ -319,23 +316,19 @@ impl FileService { /// Restores a deleted file. /// /// This undeletes a file, moving it from the deleted sphere to the specified location. 
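The struct-destructuring signatures make call sites self-describing instead of passing several bare `i64` arguments in a fixed order. A call-site sketch for the new delete shape (the wrapper function, IDs, and comment string are invented; `str!` and `Reference::Id` are used as they appear elsewhere in this diff, and the return type is assumed to be `DeleteFileOutput`):

    // Hypothetical call site for the new struct-based DeleteFile signature.
    async fn delete_file_by_id(
        ctx: &ServiceContext<'_>,
        site_id: i64,
        page_id: i64,
        file_id: i64,
        user_id: i64,
    ) -> Result<DeleteFileOutput> {
        FileService::delete(
            ctx,
            DeleteFile {
                revision_comments: str!("remove outdated upload"),
                site_id,
                page_id,
                file: Reference::Id(file_id),
                user_id,
            },
        )
        .await
    }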
- #[allow(dead_code)] // TEMP pub async fn restore( ctx: &ServiceContext<'_>, - page_id: i64, - file_id: i64, - input: RestoreFile, - ) -> Result { - let txn = ctx.transaction(); - - let RestoreFile { - revision_comments, + RestoreFile { new_page_id, new_name, site_id, + page_id, + file_id, user_id, - } = input; - + revision_comments, + }: RestoreFile, + ) -> Result { + let txn = ctx.transaction(); let file = Self::get_direct(ctx, file_id).await?; let new_page_id = new_page_id.unwrap_or(page_id); let new_name = new_name.unwrap_or(file.name); @@ -346,19 +339,19 @@ impl FileService { // - Name doesn't already exist if file.page_id != page_id { - tide::log::warn!("File's page ID and passed page ID do not match"); - return Err(Error::NotFound); + warn!("File's page ID and passed page ID do not match"); + return Err(Error::FileNotFound); } if file.deleted_at.is_none() { - tide::log::warn!("File requested to be restored is not currently deleted"); - return Err(Error::BadRequest); + warn!("File requested to be restored is not currently deleted"); + return Err(Error::FileNotDeleted); } Self::check_conflicts(ctx, page_id, &new_name, "restore").await?; let last_revision = - FileRevisionService::get_latest(ctx, page_id, file_id).await?; + FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; // Create resurrection revision // This outdates the page, etc @@ -396,8 +389,11 @@ impl FileService { pub async fn get_optional( ctx: &ServiceContext<'_>, - page_id: i64, - reference: Reference<'_>, + GetFile { + site_id, + page_id, + file: reference, + }: GetFile<'_>, ) -> Result> { let txn = ctx.transaction(); let file = { @@ -410,6 +406,7 @@ impl FileService { .filter( Condition::all() .add(condition) + .add(file::Column::SiteId.eq(site_id)) .add(file::Column::PageId.eq(page_id)) .add(file::Column::DeletedAt.is_null()), ) @@ -421,12 +418,8 @@ impl FileService { } #[inline] - pub async fn get( - ctx: &ServiceContext<'_>, - page_id: i64, - reference: Reference<'_>, - ) -> Result { - find_or_error(Self::get_optional(ctx, page_id, reference)).await + pub async fn get(ctx: &ServiceContext<'_>, input: GetFile<'_>) -> Result { + find_or_error!(Self::get_optional(ctx, input), File) } /// Gets the file ID from a reference, looking up if necessary. @@ -458,7 +451,7 @@ impl FileService { match result { Some(tuple) => Ok(tuple.0), - None => Err(Error::NotFound), + None => Err(Error::FileNotFound), } } } @@ -479,7 +472,7 @@ impl FileService { #[inline] pub async fn get_direct(ctx: &ServiceContext<'_>, file_id: i64) -> Result { - find_or_error(Self::get_direct_optional(ctx, file_id)).await + find_or_error!(Self::get_direct_optional(ctx, file_id), File) } /// Hard deletes this file and all duplicates. @@ -493,9 +486,9 @@ impl FileService { /// /// This method should only be used very rarely to clear content such /// as severe copyright violations, abuse content, or comply with court orders. - #[allow(dead_code)] // TEMP pub async fn hard_delete_all(_ctx: &ServiceContext<'_>, _file_id: i64) -> Result<()> { // TODO find hash. update all files with the same hash + // TODO if hash == 00000 then error // TODO add to audit log // TODO hard delete BlobService @@ -504,7 +497,7 @@ impl FileService { /// Checks to see if a file already exists at the name specified. /// - /// If so, this method fails with `Error::Conflict`. Otherwise it returns nothing. + /// If so, this method fails with `Error::FileExists`. Otherwise it returns nothing. 
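File lookups are now scoped by site as well as page, so an ID or name that belongs to another site behaves exactly like a missing file. A sketch of a lookup through the new `GetFile` struct (the helper name is invented; `Reference::from` is used as seen elsewhere in this diff, and the return type is assumed to be `FileModel`):

    // Hypothetical helper: fetch a file by name within one site and page.
    async fn fetch_file(
        ctx: &ServiceContext<'_>,
        site_id: i64,
        page_id: i64,
        name: &str,
    ) -> Result<FileModel> {
        FileService::get(
            ctx,
            GetFile {
                site_id,
                page_id,
                file: Reference::from(name),
            },
        )
        .await
    }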
async fn check_conflicts( ctx: &ServiceContext<'_>, page_id: i64, @@ -526,15 +519,12 @@ impl FileService { match result { None => Ok(()), Some(file) => { - tide::log::error!( + error!( "File {} with name {} already exists on page ID {}, cannot {}", - file.file_id, - name, - page_id, - action, + file.file_id, name, page_id, action, ); - Err(Error::Conflict) + Err(Error::FileExists) } } } @@ -548,7 +538,7 @@ impl FileService { site_id: i64, name: Option<&str>, ) -> Result<()> { - tide::log::info!("Checking file data against filters..."); + info!("Checking file data against filters..."); let filter_matcher = FilterService::get_matcher( ctx, diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 530cf22fcc..f2627a38c0 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -22,59 +22,93 @@ use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::file_revision::{ CreateFileRevisionOutput, CreateFirstFileRevisionOutput, }; -use crate::web::{ProvidedValue, Reference}; +use crate::web::{Bytes, FileDetails, ProvidedValue, Reference}; use serde_json::Value as JsonValue; use time::OffsetDateTime; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CreateFile { - pub revision_comments: String, +#[derive(Deserialize, Debug, Clone)] +pub struct UploadFile { + pub site_id: i64, + pub page_id: i64, pub name: String, + pub revision_comments: String, pub user_id: i64, + pub data: Bytes<'static>, pub licensing: JsonValue, // TODO #[serde(default)] pub bypass_filter: bool, } -pub type CreateFileOutput = CreateFirstFileRevisionOutput; +pub type UploadFileOutput = CreateFirstFileRevisionOutput; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, pub page_id: i64, pub file: Reference<'a>, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct UpdateFile { +#[derive(Deserialize, Debug, Clone)] +pub struct GetFileDetails<'a> { + #[serde(flatten)] + pub input: GetFile<'a>, + + #[serde(default)] + pub details: FileDetails, +} + +#[derive(Serialize, Debug, Clone)] +pub struct GetFileOutput { + pub file_id: i64, + pub file_created_at: OffsetDateTime, + pub file_updated_at: Option, + pub file_deleted_at: Option, + pub page_id: i64, + pub revision_id: i64, + pub revision_type: FileRevisionType, + pub revision_created_at: OffsetDateTime, + pub revision_number: i32, + pub revision_user_id: i64, + pub name: String, + pub data: Option>, + pub mime: String, + pub size: i64, + pub licensing: JsonValue, pub revision_comments: String, + pub hidden_fields: Vec, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct EditFile { + pub site_id: i64, + pub page_id: i64, + pub file_id: i64, pub user_id: i64, + pub revision_comments: String, #[serde(flatten)] - pub body: UpdateFileBody, + pub body: EditFileBody, #[serde(default)] pub bypass_filter: bool, } -#[derive(Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] -pub struct UpdateFileBody { +#[derive(Deserialize, Debug, Default, Clone)] +#[serde(default)] +pub struct EditFileBody { pub name: ProvidedValue, - pub data: ProvidedValue>, + pub data: ProvidedValue>, pub licensing: ProvidedValue, } -pub type UpdateFileOutput = CreateFileRevisionOutput; +pub type EditFileOutput = CreateFileRevisionOutput; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub 
struct MoveFile { pub revision_comments: String, + pub site_id: i64, + pub file_id: i64, pub user_id: i64, pub name: Option, pub current_page_id: i64, @@ -83,54 +117,34 @@ pub struct MoveFile { pub type MoveFileOutput = CreateFileRevisionOutput; -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetFileOutput<'a> { - pub file_id: i64, - pub file_created_at: OffsetDateTime, - pub file_updated_at: Option, - pub file_deleted_at: Option, - pub page_id: i64, - pub revision_id: i64, - pub revision_type: FileRevisionType, - pub revision_created_at: OffsetDateTime, - pub revision_number: i32, - pub revision_user_id: i64, - pub name: &'a str, - pub data: Option>, - pub mime: &'a str, - pub size: i64, - pub licensing: &'a JsonValue, - pub revision_comments: &'a str, - pub hidden_fields: &'a [String], -} - -#[derive(Debug)] -pub struct DeleteFile { +#[derive(Deserialize, Debug, Clone)] +pub struct DeleteFile<'a> { pub revision_comments: String, pub site_id: i64, + pub page_id: i64, + pub file: Reference<'a>, pub user_id: i64, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct RestoreFile { pub revision_comments: String, pub new_page_id: Option, pub new_name: Option, pub site_id: i64, + pub page_id: i64, + pub file_id: i64, pub user_id: i64, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct DeleteFileOutput { pub file_id: i64, pub file_revision_id: i64, pub file_revision_number: i32, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct RestoreFileOutput { pub page_id: i64, pub file_id: i64, diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index 41deb9c9c8..76ae798376 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -122,14 +122,19 @@ impl FileRevisionService { } // Validate inputs - if name.is_empty() || name.len() >= 256 { - tide::log::error!("File name of invalid length: {}", name.len()); - return Err(Error::BadRequest); + if name.is_empty() { + error!("File name is empty"); + return Err(Error::FileNameEmpty); + } + + if name.len() >= 256 { + error!("File name of invalid length: {}", name.len()); + return Err(Error::FileNameTooLong); } if mime_hint.is_empty() { - tide::log::error!("MIME type hint is empty"); - return Err(Error::BadRequest); + error!("MIME type hint is empty"); + return Err(Error::FileMimeEmpty); } // TODO validate licensing field @@ -144,6 +149,7 @@ impl FileRevisionService { revision_number: Set(0), file_id: Set(file_id), page_id: Set(page_id), + site_id: Set(site_id), user_id: Set(user_id), name: Set(name), s3_hash: Set(s3_hash.to_vec()), @@ -196,6 +202,7 @@ impl FileRevisionService { revision_number: Set(0), file_id: Set(file_id), page_id: Set(page_id), + site_id: Set(site_id), user_id: Set(user_id), name: Set(name), s3_hash: Set(s3_hash.to_vec()), @@ -257,6 +264,7 @@ impl FileRevisionService { revision_number: Set(revision_number), file_id: Set(file_id), page_id: Set(page_id), + site_id: Set(site_id), user_id: Set(user_id), name: Set(name), s3_hash: Set(s3_hash), @@ -343,6 +351,7 @@ impl FileRevisionService { revision_number: Set(revision_number), file_id: Set(file_id), page_id: Set(new_page_id), + site_id: Set(site_id), user_id: Set(user_id), name: Set(new_name), s3_hash: Set(s3_hash), @@ -370,21 +379,22 @@ impl FileRevisionService { pub async fn update( ctx: &ServiceContext<'_>, 
UpdateFileRevision { + site_id, page_id, file_id, revision_id, user_id, hidden, }: UpdateFileRevision, - ) -> Result<()> { - let txn = ctx.transaction(); - + ) -> Result { // The latest file revision cannot be hidden, because // the file, its name, contents, etc are exposed. // It should be reverted first, and then it can be hidden. - let latest = Self::get_latest(ctx, page_id, file_id).await?; + let txn = ctx.transaction(); + let latest = Self::get_latest(ctx, site_id, page_id, file_id).await?; if revision_id == latest.revision_id { + warn!("Attempting to edit latest revision, denying request"); return Err(Error::CannotHideLatestRevision); } @@ -400,8 +410,8 @@ impl FileRevisionService { }; // Update and return - model.update(txn).await?; - Ok(()) + let revision = model.update(txn).await?; + Ok(revision) } /// Get the latest revision for this file. @@ -409,6 +419,7 @@ impl FileRevisionService { /// See `RevisionService::get_latest()`. pub async fn get_latest( ctx: &ServiceContext<'_>, + site_id: i64, page_id: i64, file_id: i64, ) -> Result { @@ -419,13 +430,14 @@ impl FileRevisionService { let revision = FileRevision::find() .filter( Condition::all() + .add(file_revision::Column::SiteId.eq(site_id)) .add(file_revision::Column::PageId.eq(page_id)) .add(file_revision::Column::FileId.eq(file_id)), ) .order_by_desc(file_revision::Column::RevisionNumber) .one(txn) .await? - .ok_or(Error::NotFound)?; + .ok_or(Error::FileRevisionNotFound)?; Ok(revision) } @@ -435,14 +447,18 @@ impl FileRevisionService { /// See `RevisionService::get_optional()`. pub async fn get_optional( ctx: &ServiceContext<'_>, - page_id: i64, - file_id: i64, - revision_number: i32, + GetFileRevision { + site_id, + page_id, + file_id, + revision_number, + }: GetFileRevision, ) -> Result> { let txn = ctx.transaction(); let revision = FileRevision::find() .filter( Condition::all() + .add(file_revision::Column::SiteId.eq(site_id)) .add(file_revision::Column::PageId.eq(page_id)) .add(file_revision::Column::FileId.eq(file_id)) .add(file_revision::Column::RevisionNumber.eq(revision_number)), @@ -457,13 +473,12 @@ impl FileRevisionService { /// /// See `RevisionService::get()`. #[inline] + #[allow(dead_code)] pub async fn get( ctx: &ServiceContext<'_>, - page_id: i64, - file_id: i64, - revision_number: i32, + input: GetFileRevision, ) -> Result { - find_or_error(Self::get_optional(ctx, page_id, file_id, revision_number)).await + find_or_error!(Self::get_optional(ctx, input), FileRevision) } /// Counts the number of revisions for a file. @@ -494,7 +509,7 @@ impl FileRevisionService { // that means this page does not exist, and we should return an error. 
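The latest revision can never have fields hidden, since the live file still exposes its name and contents; it must be reverted first, after which the now-historical revision can be hidden. A sketch of what a caller should expect (the wrapper function and the hidden-field list are invented for illustration):

    // Hypothetical demonstration of the guard in FileRevisionService::update().
    async fn try_hide_latest(
        ctx: &ServiceContext<'_>,
        site_id: i64,
        page_id: i64,
        file_id: i64,
        user_id: i64,
    ) -> Result<()> {
        let latest =
            FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?;
        let result = FileRevisionService::update(
            ctx,
            UpdateFileRevision {
                site_id,
                page_id,
                file_id,
                revision_id: latest.revision_id,
                user_id,
                hidden: vec![str!("name")],
            },
        )
        .await;
        // Hiding the latest revision is always rejected.
        assert!(matches!(result, Err(Error::CannotHideLatestRevision)));
        Ok(())
    }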
match NonZeroI32::new(row_count) { Some(count) => Ok(count), - None => Err(Error::NotFound), + None => Err(Error::FileNotFound), } } diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index fc1a8d15b2..37531caabb 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -19,10 +19,11 @@ */ use super::prelude::*; +use crate::hash::BlobHash; use crate::services::page_revision::PageRevisionCountOutput; use crate::web::FetchDirection; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct CreateFileRevision { pub site_id: i64, pub page_id: i64, @@ -32,7 +33,7 @@ pub struct CreateFileRevision { pub body: CreateFileRevisionBody, } -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct CreateFileRevisionBody { pub page_id: ProvidedValue, // for changing the page this file is on pub name: ProvidedValue, @@ -40,21 +41,20 @@ pub struct CreateFileRevisionBody { pub licensing: ProvidedValue, } -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct FileBlob { pub s3_hash: BlobHash, pub size_hint: i64, pub mime_hint: String, } -#[derive(Serialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] +#[derive(Serialize, Debug, Clone, Default)] pub struct CreateFileRevisionOutput { pub file_revision_id: i64, pub file_revision_number: i32, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct CreateFirstFileRevision { pub site_id: i64, pub page_id: i64, @@ -68,13 +68,13 @@ pub struct CreateFirstFileRevision { pub comments: String, } -#[derive(Debug, Default)] +#[derive(Serialize, Debug, Clone, Default)] pub struct CreateFirstFileRevisionOutput { pub file_id: i64, pub file_revision_id: i64, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateTombstoneFileRevision { pub site_id: i64, pub page_id: i64, @@ -83,7 +83,7 @@ pub struct CreateTombstoneFileRevision { pub comments: String, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateResurrectionFileRevision { pub site_id: i64, pub page_id: i64, @@ -94,17 +94,17 @@ pub struct CreateResurrectionFileRevision { pub comments: String, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetFileRevision { + pub site_id: i64, pub page_id: i64, pub file_id: i64, pub revision_number: i32, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct UpdateFileRevision { + pub site_id: i64, pub page_id: i64, pub file_id: i64, pub revision_id: i64, @@ -112,8 +112,7 @@ pub struct UpdateFileRevision { pub hidden: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetFileRevisionRange { pub page_id: i64, pub file_id: i64, diff --git a/deepwell/src/services/filter/matcher.rs b/deepwell/src/services/filter/matcher.rs index c1be39820b..3da0bd7992 100644 --- a/deepwell/src/services/filter/matcher.rs +++ b/deepwell/src/services/filter/matcher.rs @@ -53,16 +53,15 @@ impl FilterMatcher { pub async fn verify(&self, ctx: &ServiceContext<'_>, text: &str) -> Result<()> { let matches = self.regex_set.matches(text); if !matches.matched_any() { - tide::log::info!("String passed all filters, is clear"); + info!("String passed all filters, is clear"); return Ok(()); } for index in matches { let description = &self.filter_data[index]; - tide::log::error!( + error!( "String failed filter 
ID {}: {}", - description.filter_id, - description.description, + description.filter_id, description.description, ); // TODO audit log, with contextual data (what it's checking) diff --git a/deepwell/src/services/filter/service.rs b/deepwell/src/services/filter/service.rs index 0bfc9a8f94..218959b6ea 100644 --- a/deepwell/src/services/filter/service.rs +++ b/deepwell/src/services/filter/service.rs @@ -43,12 +43,12 @@ impl FilterService { ) -> Result { let txn = ctx.transaction(); - tide::log::info!("Creating filter with regex '{regex}' because '{description}'"); + info!("Creating filter with regex '{regex}' because '{description}'"); // Ensure the regular expression is valid - if Regex::new(®ex).is_err() { - tide::log::error!("Passed regular expression pattern is invalid: {regex}"); - return Err(Error::BadRequest); + if let Err(error) = Regex::new(®ex) { + error!("Passed regular expression '{regex}' pattern is invalid: {error}",); + return Err(Error::FilterRegexInvalid(error)); } // Ensure there aren't conflicts @@ -92,7 +92,7 @@ impl FilterService { ) -> Result { let txn = ctx.transaction(); - tide::log::info!("Updating filter with ID {filter_id}"); + info!("Updating filter with ID {filter_id}"); let mut model = filter::ActiveModel { filter_id: Set(filter_id), @@ -162,15 +162,14 @@ impl FilterService { #[allow(dead_code)] // TEMP pub async fn delete(ctx: &ServiceContext<'_>, filter_id: i64) -> Result<()> { + info!("Deleting filter with ID {filter_id}"); let txn = ctx.transaction(); - tide::log::info!("Deleting filter with ID {filter_id}"); - // Ensure filter exists let filter = Self::get(ctx, filter_id).await?; if filter.deleted_at.is_some() { - tide::log::error!("Attempting to delete already-deleted filter"); - return Err(Error::BadRequest); + error!("Attempting to remove already-deleted filter"); + return Err(Error::FilterNotFound); } // Delete the filter @@ -191,12 +190,12 @@ impl FilterService { ) -> Result { let txn = ctx.transaction(); - tide::log::info!("Undeleting filter with ID {filter_id}"); + info!("Undeleting filter with ID {filter_id}"); let filter = Self::get(ctx, filter_id).await?; if filter.deleted_at.is_none() { - tide::log::error!("Attempting to un-delete extant filter"); - return Err(Error::BadRequest); + error!("Attempting to un-delete extant filter"); + return Err(Error::FilterNotDeleted); } // Ensure it doesn't conflict with a since-added filter @@ -214,14 +213,14 @@ impl FilterService { #[inline] pub async fn get(ctx: &ServiceContext<'_>, filter_id: i64) -> Result { - find_or_error(Self::get_optional(ctx, filter_id)).await + find_or_error!(Self::get_optional(ctx, filter_id), Filter) } pub async fn get_optional( ctx: &ServiceContext<'_>, filter_id: i64, ) -> Result> { - tide::log::info!("Getting filter with ID {filter_id}"); + info!("Getting filter with ID {filter_id}"); let txn = ctx.transaction(); let filter = Filter::find_by_id(filter_id).one(txn).await?; @@ -248,7 +247,7 @@ impl FilterService { ) -> Result> { let txn = ctx.transaction(); - tide::log::info!("Getting all {} filters", filter_class.name()); + info!("Getting all {} filters", filter_class.name()); let filter_condition = filter_type.map(|filter_type| filter_type.into_column().eq(true)); @@ -283,7 +282,7 @@ impl FilterService { filter_class: FilterClass, filter_type: FilterType, ) -> Result { - tide::log::info!( + info!( "Compiling regex set for {} filters for {filter_type:?}", filter_class.name(), ); @@ -309,11 +308,8 @@ impl FilterService { } let regex_set = RegexSet::new(regexes).map_err(|error| { - 
tide::log::error!( - "Invalid regular expression found in the database: {error}", - ); - - Error::Inconsistent + error!("Invalid regular expression found in the database: {error}",); + Error::FilterRegexInvalid(error) })?; Ok(FilterMatcher::new(regex_set, filter_data)) @@ -346,10 +342,10 @@ impl FilterService { match result { None => Ok(()), Some(_) => { - tide::log::error!( + error!( " filter '{regex}' for {site_id:?} already exists, cannot {action}" ); - Err(Error::Conflict) + Err(Error::FilterExists) } } } diff --git a/deepwell/src/services/import/service.rs b/deepwell/src/services/import/service.rs index d6827d51bd..fb43cdd5f2 100644 --- a/deepwell/src/services/import/service.rs +++ b/deepwell/src/services/import/service.rs @@ -60,7 +60,7 @@ impl ImportService { user_page, }: ImportUser, ) -> Result<()> { - tide::log::info!("Importing user (name '{}', slug '{}')", name, slug); + info!("Importing user (name '{}', slug '{}')", name, slug); let txn = ctx.transaction(); @@ -107,11 +107,9 @@ impl ImportService { locale, }: ImportSite, ) -> Result<()> { - tide::log::info!( + info!( "Importing site (name '{}', slug '{}', locale '{}')", - name, - slug, - locale, + name, slug, locale, ); let txn = ctx.transaction(); @@ -140,7 +138,7 @@ impl ImportService { discussion_thread_id, }: ImportPage, ) -> Result<()> { - tide::log::info!("Creating page '{}' in site ID {}", slug, site_id); + info!("Creating page '{}' in site ID {}", slug, site_id); let txn = ctx.transaction(); diff --git a/deepwell/src/services/import/structs.rs b/deepwell/src/services/import/structs.rs index 5cbc7b5795..fb37cb7419 100644 --- a/deepwell/src/services/import/structs.rs +++ b/deepwell/src/services/import/structs.rs @@ -21,7 +21,6 @@ use time::{Date, OffsetDateTime}; #[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct ImportUser { pub user_id: i64, pub created_at: OffsetDateTime, @@ -39,7 +38,6 @@ pub struct ImportUser { } #[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct ImportSite { pub site_id: i64, pub created_at: OffsetDateTime, @@ -49,7 +47,6 @@ pub struct ImportSite { } #[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct ImportPage { pub page_id: i64, pub site_id: i64, diff --git a/deepwell/src/services/interaction/macros.rs b/deepwell/src/services/interaction/macros.rs index 8f9a434ddf..184ee0d26a 100644 --- a/deepwell/src/services/interaction/macros.rs +++ b/deepwell/src/services/interaction/macros.rs @@ -40,6 +40,44 @@ macro_rules! impl_interaction { $dest_name, $from_name, }: [], + ) -> Result { + Self::get( + ctx, + InteractionReference::Relationship { + interaction_type: InteractionType::$interaction_type, + dest: InteractionObject::$dest_type($dest_name), + from: InteractionObject::$from_type($from_name), + }, + ) + .await + } + + #[allow(dead_code)] // TEMP + pub async fn []( + ctx: &ServiceContext<'_>, + [] { + $dest_name, + $from_name, + }: [], + ) -> Result> { + Self::get_optional( + ctx, + InteractionReference::Relationship { + interaction_type: InteractionType::$interaction_type, + dest: InteractionObject::$dest_type($dest_name), + from: InteractionObject::$from_type($from_name), + }, + ) + .await + } + + #[allow(dead_code)] // TEMP + pub async fn [<$interaction_type:snake _exists>]( + ctx: &ServiceContext<'_>, + [] { + $dest_name, + $from_name, + }: [], ) -> Result { Self::exists( ctx, @@ -115,7 +153,6 @@ macro_rules! impl_interaction { // // Properly fixing this will likely require a proc-macro. Which is annoying. 
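The `paste!` brackets in the macro render poorly in diff form, so it is worth showing what callers get. For the site-ban interaction, the macro now yields a plain boolean `site_ban_exists` query alongside `get_site_ban` and `get_site_ban_optional`. A caller sketch, mirroring the ban check that appears later in this patch (the `ensure_not_banned` helper is invented; `GetSiteBan` and `Error::SiteBlockedUser` are from the patch):

    // Hypothetical caller: the ban check reads as a boolean query rather than
    // an overloaded "get".
    async fn ensure_not_banned(
        ctx: &ServiceContext<'_>,
        site_id: i64,
        user_id: i64,
    ) -> Result<()> {
        if InteractionService::site_ban_exists(ctx, GetSiteBan { site_id, user_id }).await? {
            return Err(Error::SiteBlockedUser);
        }
        Ok(())
    }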
#[derive(Deserialize, Debug, Clone)] - #[serde(rename_all = "camelCase")] pub struct [] { pub $dest_name: i64, pub $from_name: i64, @@ -124,14 +161,12 @@ macro_rules! impl_interaction { } #[derive(Deserialize, Debug, Copy, Clone)] - #[serde(rename_all = "camelCase")] pub struct [] { pub $dest_name: i64, pub $from_name: i64, } #[derive(Deserialize, Debug, Copy, Clone)] - #[serde(rename_all = "camelCase")] pub struct [] { pub $dest_name: i64, pub $from_name: i64, @@ -187,6 +222,7 @@ macro_rules! impl_interaction { }; } +// TODO: change to create-or-edit kind of thing? /// Macro which runs the actual `create()` call for the interaction. macro_rules! create_operation { ( diff --git a/deepwell/src/services/interaction/mod.rs b/deepwell/src/services/interaction/mod.rs index 7b3be543d2..a623468088 100644 --- a/deepwell/src/services/interaction/mod.rs +++ b/deepwell/src/services/interaction/mod.rs @@ -81,9 +81,7 @@ impl InteractionService { created_by: i64, metadata: &M, ) -> Result { - tide::log::debug!( - "Create interaction for {dest:?} ← {interaction_type:?} ← {from:?}", - ); + debug!("Create interaction for {dest:?} ← {interaction_type:?} ← {from:?}",); // Get previous interaction, if present let txn = ctx.transaction(); @@ -97,7 +95,7 @@ impl InteractionService { ) .await? { - tide::log::debug!("Interaction already exists, marking old item overwritten"); + debug!("Interaction already exists, marking old item overwritten"); let model = interaction::ActiveModel { interaction_id: Set(interaction.interaction_id), overwritten_at: Set(Some(now())), @@ -134,7 +132,7 @@ impl InteractionService { reference: InteractionReference, deleted_by: i64, ) -> Result { - tide::log::debug!("Removing interaction for {reference:?}"); + debug!("Removing interaction for {reference:?}"); let txn = ctx.transaction(); let interaction_id = Self::get_id(ctx, reference).await?; @@ -153,7 +151,7 @@ impl InteractionService { ctx: &ServiceContext<'_>, reference: InteractionReference, ) -> Result> { - tide::log::debug!("Getting interaction for {reference:?}"); + debug!("Getting interaction for {reference:?}"); let txn = ctx.transaction(); let interaction = Interaction::find() @@ -188,7 +186,7 @@ impl InteractionService { ctx: &ServiceContext<'_>, reference: InteractionReference, ) -> Result { - find_or_error(Self::get_optional(ctx, reference)).await + find_or_error!(Self::get_optional(ctx, reference), Interaction) } pub async fn exists( @@ -207,7 +205,7 @@ impl InteractionService { dest: InteractionObject, from: InteractionObject, ) -> Result> { - tide::log::info!( + info!( "Getting history of interactions for {dest:?} / {interaction_type:?} / {from:?}", ); @@ -228,9 +226,7 @@ impl InteractionService { object: InteractionObject, direction: InteractionDirection, ) -> Result> { - tide::log::info!( - "Getting {direction:?} interactions for {object:?} / {interaction_type:?}", - ); + info!("Getting {direction:?} interactions for {object:?} / {interaction_type:?}",); let (object_type, object_id) = object.into(); let (object_type_column, object_id_column) = match direction { diff --git a/deepwell/src/services/interaction/site_ban.rs b/deepwell/src/services/interaction/site_ban.rs index 4d89f3f13c..b6732f053b 100644 --- a/deepwell/src/services/interaction/site_ban.rs +++ b/deepwell/src/services/interaction/site_ban.rs @@ -58,6 +58,7 @@ impl InteractionService { }, ) .await?; + // TODO: remove site member applications // TODO: remove site roles create_operation!( @@ -71,12 +72,10 @@ impl InteractionService { body: GetSiteBan, action: 
&str, ) -> Result<()> { - if Self::get_site_ban(ctx, body).await? { - tide::log::error!( + if Self::site_ban_exists(ctx, body).await? { + error!( "User ID {} cannot {} site ID {} because they are banned", - body.user_id, - action, - body.site_id, + body.user_id, action, body.site_id, ); return Err(Error::SiteBlockedUser); diff --git a/deepwell/src/services/interaction/user_block.rs b/deepwell/src/services/interaction/user_block.rs index 27560d7e44..5a309a8b50 100644 --- a/deepwell/src/services/interaction/user_block.rs +++ b/deepwell/src/services/interaction/user_block.rs @@ -98,10 +98,10 @@ impl InteractionService { }; } - if Self::get_user_block(ctx, obj!(user_id_1, user_id_2)).await? - || Self::get_user_block(ctx, obj!(user_id_2, user_id_1)).await? + if Self::user_block_exists(ctx, obj!(user_id_1, user_id_2)).await? + || Self::user_block_exists(ctx, obj!(user_id_2, user_id_1)).await? { - tide::log::error!("User ID {user_id_1} cannot {action} user ID {user_id_2} because there is a block"); + error!("User ID {user_id_1} cannot {action} user ID {user_id_2} because there is a block"); return Err(Error::UserBlockedUser); } diff --git a/deepwell/src/services/job/mod.rs b/deepwell/src/services/job/mod.rs index 351cb0aabd..69652da8ec 100644 --- a/deepwell/src/services/job/mod.rs +++ b/deepwell/src/services/job/mod.rs @@ -22,9 +22,6 @@ //! //! At present we do not use a separate service which stores jobs durably. This //! can mean that if this DEEPWELL node fails, the queued jobs will not be run. -//! -//! The following kinds of jobs are available: -//! * Rerendering a page mod prelude { pub use super::super::prelude::*; @@ -34,5 +31,5 @@ mod prelude { mod service; mod structs; -pub use self::service::{JobRunner, JobService}; +pub use self::service::{JobQueue, JobService}; pub use self::structs::*; diff --git a/deepwell/src/services/job/service.rs b/deepwell/src/services/job/service.rs index a62c56c588..43ef05522a 100644 --- a/deepwell/src/services/job/service.rs +++ b/deepwell/src/services/job/service.rs @@ -19,121 +19,112 @@ */ use super::prelude::*; -use crate::api::ApiServerState; +use crate::api::ServerState; use crate::services::{PageRevisionService, SessionService, TextService}; -use async_std::task; -use crossfire::mpsc; -use once_cell::sync::Lazy; use sea_orm::TransactionTrait; -use std::convert::Infallible; -use std::sync::Arc; +use tokio::sync::{mpsc, oneshot}; +use tokio::{task, time}; -static QUEUE: Lazy<(mpsc::TxUnbounded, mpsc::RxUnbounded)> = - Lazy::new(mpsc::unbounded_future); +type RequestSender = mpsc::UnboundedSender; +type RequestReceiver = mpsc::UnboundedReceiver; -macro_rules! sink { - () => { - QUEUE.0 - }; -} - -macro_rules! 
source { - () => { - QUEUE.1 - }; -} +type StateSender = oneshot::Sender; +type StateReceiver = oneshot::Receiver; #[derive(Debug)] pub struct JobService; impl JobService { - #[inline] - fn queue_job(job: Job) { - sink!().send(job).expect("Job channel has disconnected"); - } - - pub fn queue_rerender_page(site_id: i64, page_id: i64) { - tide::log::debug!( - "Queueing page ID {page_id} in site ID {site_id} for rerendering", - ); - - Self::queue_job(Job::RerenderPageId { site_id, page_id }); + pub fn queue_rerender_page(queue: &JobQueue, site_id: i64, page_id: i64) { + debug!("Queueing page ID {page_id} in site ID {site_id} for rerendering",); + queue + .sink + .send(Job::RerenderPageId { site_id, page_id }) + .expect("Job channel is closed"); } - pub fn queue_prune_sessions() { - tide::log::debug!("Queueing sessions list for pruning"); - Self::queue_job(Job::PruneSessions); + pub fn queue_prune_sessions(queue: &JobQueue) { + debug!("Queueing sessions list for pruning"); + queue + .sink + .send(Job::PruneSessions) + .expect("Job channel is closed"); } - pub fn queue_prune_text() { - tide::log::debug!("Queueing unused text for pruning"); - Self::queue_job(Job::PruneText); + pub fn queue_prune_text(queue: &JobQueue) { + debug!("Queueing unused text for pruning"); + queue + .sink + .send(Job::PruneText) + .expect("Job channel is closed"); } } -#[derive(Debug)] -pub struct JobRunner { - state: ApiServerState, +#[derive(Debug, Clone)] +pub struct JobQueue { + sink: RequestSender, } -impl JobRunner { - pub fn spawn(state: &ApiServerState) { - // Copy configuration fields - let session_prune_delay = state.config.job_prune_session_period; - let text_prune_delay = state.config.job_prune_text_period; +impl JobQueue { + pub fn spawn(config: &Config) -> (Self, StateSender) { + // Create channels + let (sink, source) = mpsc::unbounded_channel(); + let (state_sender, state_getter) = oneshot::channel(); + let job_queue = JobQueue { sink }; + + // Copy fields for ancillary tasks + let session_prune_delay = config.job_prune_session_period; + let text_prune_delay = config.job_prune_text_period; + let job_queue_1 = job_queue.clone(); + let job_queue_2 = job_queue.clone(); // Main runner - let state = Arc::clone(state); - let runner = JobRunner { state }; - task::spawn(runner.main_loop()); + task::spawn(Self::main_loop(state_getter, source)); // Ancillary tasks task::spawn(async move { loop { - tide::log::trace!("Running repeat job: prune expired sessions"); - JobService::queue_prune_sessions(); - task::sleep(session_prune_delay).await; + trace!("Running repeat job: prune expired sessions"); + JobService::queue_prune_sessions(&job_queue_1); + time::sleep(session_prune_delay).await; } }); task::spawn(async move { loop { - tide::log::trace!("Running repeat job: prune unused text rows"); - JobService::queue_prune_text(); - task::sleep(text_prune_delay).await; + trace!("Running repeat job: prune unused text rows"); + JobService::queue_prune_text(&job_queue_2); + time::sleep(text_prune_delay).await; } }); // TODO job that checks hourly for users who can get a name change token refill // see config.refill_name_change - } - async fn main_loop(mut self) -> Infallible { - tide::log::info!("Starting job runner"); - - let delay = self.state.config.job_delay; - loop { - tide::log::trace!("Waiting for next job on queue..."); - let job = source!() - .recv() - .await - .expect("Job channel has disconnected"); - - tide::log::debug!("Received new job item: {:?}", job); + (job_queue, state_sender) + } - match 
self.process_job(job).await { - Ok(()) => tide::log::debug!("Finished processing job"), - Err(error) => tide::log::warn!("Error processing job: {error}"), + async fn main_loop(state_getter: StateReceiver, mut source: RequestReceiver) { + info!("Waiting for server state (to start job runner)"); + let state = state_getter.await.expect("Unable to get server state"); + let delay = state.config.job_delay; + + info!("Starting job runner"); + while let Some(job) = source.recv().await { + debug!("Received job from queue: {job:?}"); + match Self::process_job(&state, job).await { + Ok(()) => debug!("Finished processing job"), + Err(error) => warn!("Error processing job: {error}"), } - tide::log::debug!("Estimated queue backlog: {} items", source!().len()); - task::sleep(delay).await; // Sleep a bit to avoid overloading the database + trace!("Sleeping a bit to avoid overloading the database"); + time::sleep(delay).await; } } - async fn process_job(&mut self, job: Job) -> Result<()> { - let txn = self.state.database.begin().await?; - let ctx = &ServiceContext::from_raw(&self.state, &txn); + async fn process_job(state: &ServerState, job: Job) -> Result<()> { + let txn = state.database.begin().await?; + let ctx = &ServiceContext::new(state, &txn); match job { Job::RerenderPageId { site_id, page_id } => { diff --git a/deepwell/src/services/link/service.rs b/deepwell/src/services/link/service.rs index 6301476f47..3abca92f23 100644 --- a/deepwell/src/services/link/service.rs +++ b/deepwell/src/services/link/service.rs @@ -144,13 +144,13 @@ impl LinkService { if let Some(page) = PageService::get_optional(ctx, site_id, Reference::from(page_slug)).await? { - tide::log::warn!( + warn!( "Requesting missing page connections for page that exists (site id {}, page id {})", site_id, page.page_id, ); - return Err(Error::Exists); + return Err(Error::PageExists); } // Retrieve connections for this slot diff --git a/deepwell/src/services/link/structs.rs b/deepwell/src/services/link/structs.rs index 85014f1d63..5bf39c5931 100644 --- a/deepwell/src/services/link/structs.rs +++ b/deepwell/src/services/link/structs.rs @@ -24,82 +24,70 @@ use crate::models::page_link::Model as PageLinkModel; use crate::web::Reference; use time::OffsetDateTime; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetLinksFrom<'a> { pub site_id: i64, pub page: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetLinksFromOutput { pub present: Vec, pub absent: Vec, pub external: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetLinksTo<'a> { pub site_id: i64, pub page: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetLinksToOutput { pub connections: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetLinksToMissing { pub site_id: i64, pub page_slug: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetLinksToMissingOutput { pub connections: Vec, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetConnectionsFromOutput { pub present: Vec, pub absent: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] 
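The `JobQueue::spawn` API above splits construction from startup: the queue and its recurring prune tasks can exist before the server state does, and the runner only begins processing once the state arrives over the oneshot channel. A wiring sketch under stated assumptions (the `build_server_state` constructor is hypothetical, and `ServerState` is assumed to be cheaply cloneable, e.g. an `Arc`):

    // Hypothetical startup wiring for the new JobQueue.
    fn start_jobs(config: &Config) -> JobQueue {
        let (job_queue, state_sender) = JobQueue::spawn(config);

        // Assumed constructor: the state needs the queue so services can enqueue jobs.
        let state = build_server_state(config, job_queue.clone());

        // Hand the state to the runner so its main loop can start.
        state_sender
            .send(state)
            .expect("Job runner dropped its state receiver");

        job_queue
    }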
+#[derive(Deserialize, Debug, Clone)] pub struct GetLinksExternalFrom<'a> { pub site_id: i64, pub page: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetLinksExternalFromOutput { pub links: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetLinksExternalTo { pub site_id: i64, pub url: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetLinksExternalToOutput { pub links: Vec, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct ToExternalLink { pub created_at: OffsetDateTime, pub updated_at: Option, diff --git a/deepwell/src/utils/tide.rs b/deepwell/src/services/macros.rs similarity index 78% rename from deepwell/src/utils/tide.rs rename to deepwell/src/services/macros.rs index 338c036ced..6f047cf4f0 100644 --- a/deepwell/src/utils/tide.rs +++ b/deepwell/src/services/macros.rs @@ -1,5 +1,5 @@ /* - * utils/tide.rs + * services/macros.rs * * DEEPWELL - Wikijump API provider and database manager * Copyright (C) 2019-2023 Wikijump Team @@ -18,12 +18,10 @@ * along with this program. If not, see . */ -use tide::{Error, Response, StatusCode}; - -#[inline] -pub fn error_response( - status: StatusCode, - message: &'static str, -) -> Result { - Err(Error::from_str(status, message)) +macro_rules! find_or_error { + ($future:expr, $error:ident $(,)?) => { + paste! { + $future.await?.ok_or(Error::[<$error NotFound>]) + } + }; } diff --git a/deepwell/src/services/message/service.rs b/deepwell/src/services/message/service.rs index eac7ad80ec..9dc3c656dc 100644 --- a/deepwell/src/services/message/service.rs +++ b/deepwell/src/services/message/service.rs @@ -54,7 +54,7 @@ impl MessageService { forwarded_from, }: CreateMessageDraft, ) -> Result { - tide::log::info!("Creating message draft for user ID {user_id}"); + info!("Creating message draft for user ID {user_id}"); // Check foreign keys if let Some(record_id) = &reply_to { @@ -100,7 +100,7 @@ impl MessageService { wikitext, }: UpdateMessageDraft, ) -> Result { - tide::log::info!("Updating message draft {draft_id}"); + info!("Updating message draft {draft_id}"); // Get current draft let current_draft = Self::get_draft(ctx, &draft_id).await?; @@ -154,8 +154,8 @@ impl MessageService { for recipient_id in recipients.iter() { if !UserService::exists(ctx, Reference::Id(recipient_id)).await? 
{ - tide::log::error!("Recipient user ID {recipient_id} does not exist"); - return Err(Error::NotFound); + error!("Recipient user ID {recipient_id} does not exist"); + return Err(Error::UserNotFound); } } @@ -202,7 +202,7 @@ impl MessageService { ctx: &ServiceContext<'_>, draft_id: &str, ) -> Result { - tide::log::info!("Sending draft ID {draft_id} as message"); + info!("Sending draft ID {draft_id} as message"); // Gather resources let config = ctx.config(); @@ -212,45 +212,45 @@ impl MessageService { // Message validation checks if draft.subject.is_empty() { - tide::log::error!("Subject line cannot be empty"); - return Err(Error::BadRequest); + error!("Subject line cannot be empty"); + return Err(Error::MessageSubjectEmpty); } if draft.subject.len() > config.maximum_message_subject_bytes { - tide::log::error!( + error!( "Subject line is too long (is {}, max {})", draft.subject.len(), config.maximum_message_subject_bytes, ); - return Err(Error::BadRequest); + return Err(Error::MessageSubjectTooLong); } if wikitext.is_empty() { - tide::log::error!("Wikitext body cannot be empty"); - return Err(Error::BadRequest); - } - - if recipients.is_empty() { - tide::log::error!("Must have at least one message recipient"); - return Err(Error::BadRequest); + error!("Wikitext body cannot be empty"); + return Err(Error::MessageBodyEmpty); } if wikitext.len() > config.maximum_message_body_bytes { - tide::log::error!( + error!( "Wikitext body is too long (is {}, max {})", wikitext.len(), config.maximum_message_body_bytes, ); - return Err(Error::BadRequest); + return Err(Error::MessageBodyTooLong); + } + + if recipients.is_empty() { + error!("Must have at least one message recipient"); + return Err(Error::MessageNoRecipients); } if recipients.len() > config.maximum_message_recipients { - tide::log::error!( + error!( "Too many message recipients (is {}, max {})", recipients.len(), config.maximum_message_recipients, ); - return Err(Error::BadRequest); + return Err(Error::MessageTooManyRecipients); } for recipient_user_id in recipients.iter() { @@ -350,11 +350,11 @@ impl MessageService { // For self-messages, we have two kinds of behavior. // If it was sent *only* to oneself, then there is not outbox message. // If it was sent to others in addition to oneself, then there *is* an outbox message. - tide::log::debug!("Self message, checking recipients list"); + debug!("Self message, checking recipients list"); (recipients.only_has(sender_id), true) } else { // For regular messages, then just mark the outbox. 
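Each rejection now names the exact limit that was violated instead of collapsing into a generic `BadRequest`. In effect, the subject-line checks behave like the following distillation (the standalone helper is invented for illustration; the real checks live inline in `send()` and read their limits from the server config):

    // Hypothetical distillation of the subject checks in MessageService::send().
    fn validate_subject(subject: &str, maximum_message_subject_bytes: usize) -> Result<()> {
        if subject.is_empty() {
            return Err(Error::MessageSubjectEmpty);
        }

        if subject.len() > maximum_message_subject_bytes {
            return Err(Error::MessageSubjectTooLong);
        }

        Ok(())
    }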
- tide::log::debug!("Regular message, marking outbox only"); + debug!("Regular message, marking outbox only"); (true, false) }; @@ -379,9 +379,7 @@ impl MessageService { user_id: i64, value: bool, ) -> Result<()> { - tide::log::info!( - "Setting message read status for {record_id} / {user_id}: {value}", - ); + info!("Setting message read status for {record_id} / {user_id}: {value}",); let txn = ctx.transaction(); let message = Self::get_message(ctx, record_id, user_id).await?; @@ -420,7 +418,7 @@ impl MessageService { record_id: &str, user_id: i64, ) -> Result { - find_or_error(Self::get_message_optional(ctx, record_id, user_id)).await + find_or_error!(Self::get_message_optional(ctx, record_id, user_id), Message) } pub async fn get_record_optional( @@ -453,7 +451,7 @@ impl MessageService { ctx: &ServiceContext<'_>, draft_id: &str, ) -> Result { - find_or_error(Self::get_draft_optional(ctx, draft_id)).await + find_or_error!(Self::get_draft_optional(ctx, draft_id), MessageDraft) } // Helper methods @@ -470,13 +468,11 @@ impl MessageService { // NOTE: Because recipient lists are generally short, well under 100, // there are no practical issues with using Vec over HashSet. if added_user_ids.contains(&user_id) { - tide::log::debug!("Skipping message recipient (already added)"); + debug!("Skipping message recipient (already added)"); continue; } - tide::log::debug!( - "Adding message recipient {recipient_type:?} with ID {user_id}", - ); + debug!("Adding message recipient {recipient_type:?} with ID {user_id}",); let model = message_recipient::ActiveModel { record_id: Set(str!(record_id)), @@ -509,11 +505,9 @@ impl MessageService { let record = match Self::get_record_optional(ctx, record_id).await? { Some(record) => record, None => { - tide::log::error!( - "The {purpose} message record does not exist: {record_id}", - ); + error!("The {purpose} message record does not exist: {record_id}",); - return Err(Error::BadRequest); + return Err(Error::MessageNotFound); } }; @@ -522,11 +516,11 @@ impl MessageService { if record.sender_id != user_id && Self::any_recipient_exists(ctx, record_id, user_id).await? { - tide::log::error!( - "User ID {user_id} is not a sender or recipient of the {purpose}", - ); + error!("User ID {user_id} is not a sender or recipient of the {purpose}",); - return Err(Error::BadRequest); + // To protect privacy, if the user doesn't have access to a message with a + // given ID, we pretend it does not exist for the purposes of returning errors. 
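Because inaccessible messages are reported with the same error as missing ones, a client cannot use this endpoint to probe whether a record ID exists. Tying this back to the error-code table earlier in the patch, both paths surface as code 2014 (the client-side helper below is illustrative only):

    // Client-side sketch: "never existed" and "exists, but the caller is neither
    // sender nor recipient" are deliberately indistinguishable.
    fn message_is_unavailable(code: i32) -> bool {
        code == 2014 // Error::MessageNotFound, per the error code table
    }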
+ return Err(Error::MessageNotFound); } Ok(()) @@ -538,9 +532,7 @@ impl MessageService { record_id: &str, user_id: i64, ) -> Result { - tide::log::info!( - "Checking if user ID {user_id} is a recipient of record ID {record_id}", - ); + info!("Checking if user ID {user_id} is a recipient of record ID {record_id}",); let txn = ctx.transaction(); let model = MessageRecipient::find() @@ -561,7 +553,7 @@ impl MessageService { wikitext: String, user_locale: &str, ) -> Result { - tide::log::info!("Rendering message wikitext ({} bytes)", wikitext.len()); + info!("Rendering message wikitext ({} bytes)", wikitext.len()); let settings = WikitextSettings::from_mode(WikitextMode::DirectMessage); let page_info = PageInfo { diff --git a/deepwell/src/services/message/structs.rs b/deepwell/src/services/message/structs.rs index e72d8d1e33..07949c1036 100644 --- a/deepwell/src/services/message/structs.rs +++ b/deepwell/src/services/message/structs.rs @@ -19,7 +19,6 @@ */ #[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct CreateMessageDraft { pub user_id: i64, pub recipients: Vec, @@ -32,7 +31,6 @@ pub struct CreateMessageDraft { } #[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct UpdateMessageDraft { pub message_draft_id: String, pub recipients: Vec, @@ -43,7 +41,6 @@ pub struct UpdateMessageDraft { } #[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct SendMessageDraft { pub message_draft_id: String, } diff --git a/deepwell/src/services/mfa/service.rs b/deepwell/src/services/mfa/service.rs index c6f1828708..f6ec9ea7ee 100644 --- a/deepwell/src/services/mfa/service.rs +++ b/deepwell/src/services/mfa/service.rs @@ -36,11 +36,11 @@ impl MfaService { ctx: &ServiceContext<'_>, user: &UserModel, ) -> Result { - tide::log::info!("Setting up MFA for user ID {}", user.user_id); + info!("Setting up MFA for user ID {}", user.user_id); // Only regular accounts can have MFA if user.user_type != UserType::Regular { - tide::log::error!("Only regular users may have MFA"); + error!("Only regular users may have MFA"); return Err(Error::BadRequest); } @@ -48,16 +48,16 @@ impl MfaService { if user.multi_factor_secret.is_some() || user.multi_factor_recovery_codes.is_some() { - tide::log::error!("User already has MFA set up"); - return Err(Error::Conflict); + error!("User already has MFA set up"); + return Err(Error::UserMfaExists); } // Securely generate and store secrets - tide::log::debug!("Generating MFA secrets for user ID {}", user.user_id); + debug!("Generating MFA secrets for user ID {}", user.user_id); let totp_secret = generate_totp_secret(); let recovery = RecoveryCodes::generate(ctx.config())?; - tide::log::debug!("Committing MFA secrets for user ID {}", user.user_id); + debug!("Committing MFA secrets for user ID {}", user.user_id); UserService::set_mfa_secrets( ctx, user.user_id, @@ -80,21 +80,21 @@ impl MfaService { ctx: &ServiceContext<'_>, user: &UserModel, ) -> Result { - tide::log::info!("Resetting MFA recovery codes for user ID {}", user.user_id); + info!("Resetting MFA recovery codes for user ID {}", user.user_id); // Ensure MFA is set up if user.multi_factor_secret.is_none() || user.multi_factor_recovery_codes.is_none() { - tide::log::error!("User does not have MFA set up"); - return Err(Error::Conflict); + error!("User does not have MFA set up"); + return Err(Error::UserMfaExists); } // Securely generate and store secrets - tide::log::debug!("Generating recovery codes for user 
ID {}", user.user_id); + debug!("Generating recovery codes for user ID {}", user.user_id); let recovery = RecoveryCodes::generate(ctx.config())?; - tide::log::debug!("Committing recovery codes for user ID {}", user.user_id); + debug!("Committing recovery codes for user ID {}", user.user_id); UserService::set_mfa_secrets( ctx, user.user_id, @@ -114,7 +114,7 @@ impl MfaService { /// After this is run, the user does not need MFA to sign in, /// and has no recovery codes or TOTP secret. pub async fn disable(ctx: &ServiceContext<'_>, user_id: i64) -> Result<()> { - tide::log::info!("Tearing down MFA for user ID {}", user_id); + info!("Tearing down MFA for user ID {}", user_id); UserService::set_mfa_secrets( ctx, @@ -134,12 +134,12 @@ impl MfaService { user: &UserModel, entered_totp: u32, ) -> Result<()> { - tide::log::info!("Verifying TOTP code for user ID {}", user.user_id); + info!("Verifying TOTP code for user ID {}", user.user_id); let secret = match &user.multi_factor_secret { Some(secret) => secret, None => { - tide::log::warn!("User has no MFA secret, cannot verify TOTP"); + warn!("User has no MFA secret, cannot verify TOTP"); return Err(Error::InvalidAuthentication); } }; @@ -170,14 +170,12 @@ impl MfaService { user: &UserModel, recovery_code: &str, ) -> Result<()> { - tide::log::info!("Verifying recovery code for user ID {}", user.user_id); + info!("Verifying recovery code for user ID {}", user.user_id); let recovery_code_hashes = match &user.multi_factor_recovery_codes { Some(codes) => codes, None => { - tide::log::warn!( - "User has no MFA recovery codes, but wants to verify recovery", - ); + warn!("User has no MFA recovery codes, but wants to verify recovery",); return Err(Error::InvalidAuthentication); } diff --git a/deepwell/src/services/mfa/structs.rs b/deepwell/src/services/mfa/structs.rs index b83ff66772..b3ffee4540 100644 --- a/deepwell/src/services/mfa/structs.rs +++ b/deepwell/src/services/mfa/structs.rs @@ -38,7 +38,7 @@ pub fn generate_totp_secret() -> String { } } -#[derive(Debug)] +#[derive(Serialize, Debug, Clone)] pub struct RecoveryCodes { pub recovery_codes: Vec, pub recovery_codes_hashed: Vec, @@ -69,7 +69,7 @@ impl RecoveryCodes { let mut hashes = Vec::new(); for code in &recovery_codes { - tide::log::debug!("Hashing recovery code"); + debug!("Hashing recovery code"); let hash = PasswordService::new_hash(code)?; hashes.push(hash); } @@ -84,22 +84,19 @@ impl RecoveryCodes { } } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct MultiFactorConfigure { pub user_id: i64, pub session_token: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct MultiFactorSetupOutput { pub totp_secret: String, pub recovery_codes: Vec, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct MultiFactorResetOutput { pub recovery_codes: Vec, } diff --git a/deepwell/src/services/mod.rs b/deepwell/src/services/mod.rs index a680ffb487..bf6e9bf201 100644 --- a/deepwell/src/services/mod.rs +++ b/deepwell/src/services/mod.rs @@ -38,8 +38,9 @@ mod prelude { pub use super::context::ServiceContext; pub use super::error::*; pub use crate::config::Config; - pub use crate::utils::{find_or_error, now}; + pub use crate::utils::now; pub use crate::web::{ProvidedValue, Reference}; + pub use paste::paste; pub use sea_orm::{ ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DeleteResult, EntityTrait, 
IntoActiveModel, JoinType, ModelTrait, PaginatorTrait, QueryFilter, @@ -47,6 +48,9 @@ mod prelude { }; } +#[macro_use] +mod macros; + mod context; mod error; @@ -83,9 +87,6 @@ pub mod user_bot_owner; pub mod view; pub mod vote; -use crate::api::ApiRequest; -use sea_orm::DatabaseConnection; - pub use self::alias::AliasService; pub use self::authentication::AuthenticationService; pub use self::blob::BlobService; @@ -118,15 +119,3 @@ pub use self::user::UserService; pub use self::user_bot_owner::UserBotOwnerService; pub use self::view::ViewService; pub use self::vote::VoteService; - -/// Extension trait to retrieve service objects from an `ApiRequest`. -pub trait RequestFetchService { - fn database(&self) -> &DatabaseConnection; -} - -impl RequestFetchService for ApiRequest { - #[inline] - fn database(&self) -> &DatabaseConnection { - &self.state().database - } -} diff --git a/deepwell/src/services/outdate.rs b/deepwell/src/services/outdate.rs index 5cfb066158..1f69de5715 100644 --- a/deepwell/src/services/outdate.rs +++ b/deepwell/src/services/outdate.rs @@ -80,9 +80,9 @@ impl OutdateService { /// /// Finds the most recent revision for each of the given `(site_id, page_id)` /// pairs passed in. - pub fn outdate>(ids: I) { + pub fn outdate>(ctx: &ServiceContext<'_>, ids: I) { for (site_id, page_id) in ids { - JobService::queue_rerender_page(site_id, page_id); + JobService::queue_rerender_page(ctx.job_queue(), site_id, page_id); } } @@ -101,7 +101,7 @@ impl OutdateService { .filter(|&(_, to_page_id)| to_page_id != page_id) .collect::>(); - Self::outdate(ids); + Self::outdate(ctx, ids); Ok(()) } @@ -124,7 +124,7 @@ impl OutdateService { .filter(|&(_, to_page_id)| to_page_id != page_id) .collect::>(); - Self::outdate(ids); + Self::outdate(ctx, ids); Ok(()) } @@ -160,7 +160,7 @@ impl OutdateService { .map(|model| (model.site_id, model.page_id)) .collect::>(); - Self::outdate(ids); + Self::outdate(ctx, ids); } Ok(()) diff --git a/deepwell/src/services/page/service.rs b/deepwell/src/services/page/service.rs index 18b6a47a99..6dedcb4fc8 100644 --- a/deepwell/src/services/page/service.rs +++ b/deepwell/src/services/page/service.rs @@ -224,8 +224,8 @@ impl PageService { // and that a page with that slug doesn't already exist. 
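The prelude change above pulls in `paste` and a new `#[macro_use] mod macros;`, and the lookup helpers below switch from the old `find_or_error()` function to a `find_or_error!(...)` macro that names the entity kind (`Page`, `Site`, `User`, `Text`, ...). The macro definition lives in src/services/macros.rs, which this diff does not include; a minimal sketch of what it plausibly looks like, assuming it uses `paste` to build the matching `*NotFound` error variant:

// Hypothetical reconstruction -- not the actual contents of services/macros.rs.
macro_rules! find_or_error {
    ($future:expr, $item:ident $(,)?) => {
        paste! {
            match $future.await? {
                Some(model) => Ok(model),
                // e.g. Error::PageNotFound, Error::SiteNotFound, Error::UserNotFound
                None => Err(Error::[<$item NotFound>]),
            }
        }
    };
}

This shape is consistent with the call sites introduced below, such as find_or_error!(Self::get_optional(ctx, site_id, reference), Page), which evaluate directly to a Result inside the async service methods.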
normalize(&mut new_slug); if old_slug == new_slug { - tide::log::error!("Source and destination slugs are the same: {}", old_slug); - return Err(Error::BadRequest); + error!("Source and destination slugs are the same: {}", old_slug); + return Err(Error::PageSlugExists); } Self::check_conflicts(ctx, site_id, &new_slug, "move").await?; @@ -294,7 +294,7 @@ impl PageService { parser_errors, }), None => { - tide::log::error!("Page move did not create new revision"); + error!("Page move did not create new revision"); Err(Error::BadRequest) } } @@ -355,7 +355,7 @@ impl PageService { }: RestorePage, ) -> Result { let txn = ctx.transaction(); - let page = Self::get_direct(ctx, page_id).await?; + let page = Self::get_direct(ctx, site_id, page_id).await?; let slug = slug.unwrap_or(page.slug); // Do page checks: @@ -364,13 +364,13 @@ impl PageService { // - Slug doesn't already exist if page.site_id != site_id { - tide::log::warn!("Page's site ID and passed site ID do not match"); - return Err(Error::NotFound); + warn!("Page's site ID and passed site ID do not match"); + return Err(Error::PageNotFound); } if page.deleted_at.is_none() { - tide::log::warn!("Page requested to be restored is not currently deleted"); - return Err(Error::BadRequest); + warn!("Page requested to be restored is not currently deleted"); + return Err(Error::PageNotDeleted); } Self::check_conflicts(ctx, site_id, &slug, "restore").await?; @@ -508,7 +508,7 @@ impl PageService { site_id: i64, reference: Reference<'_>, ) -> Result { - find_or_error(Self::get_optional(ctx, site_id, reference)).await + find_or_error!(Self::get_optional(ctx, site_id, reference), Page) } pub async fn get_optional( @@ -563,16 +563,28 @@ impl PageService { } #[inline] - pub async fn get_direct(ctx: &ServiceContext<'_>, page_id: i64) -> Result { - find_or_error(Self::get_direct_optional(ctx, page_id)).await + pub async fn get_direct( + ctx: &ServiceContext<'_>, + site_id: i64, + page_id: i64, + ) -> Result { + find_or_error!(Self::get_direct_optional(ctx, site_id, page_id), Page) } pub async fn get_direct_optional( ctx: &ServiceContext<'_>, + site_id: i64, page_id: i64, ) -> Result> { let txn = ctx.transaction(); let page = Page::find_by_id(page_id).one(txn).await?; + if let Some(ref page) = page { + // Deny page access if for the wrong site + if page.site_id != site_id { + return Ok(None); + } + } + Ok(page) } @@ -585,7 +597,7 @@ impl PageService { site_id: i64, references: &[Reference<'_>], ) -> Result> { - tide::log::info!( + info!( "Getting {} pages from references in site ID {}", references.len(), site_id, @@ -674,7 +686,7 @@ impl PageService { /// Checks to see if a page already exists at the slug specified. /// - /// If so, this method fails with `Error::Conflict`. Otherwise it returns nothing. + /// If so, this method fails with `Error::PageExists`. Otherwise it returns nothing. 
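As the doc comment above notes, conflict and lookup failures now surface as purpose-specific variants (Error::PageExists, Error::PageNotFound, Error::PageNotDeleted, Error::PageSlugEmpty, and, in later hunks, Error::UserExists, Error::SessionUserId { .. }, and others) instead of the generic Error::BadRequest / Error::Conflict / Error::NotFound. The enum itself is defined in services/error.rs, which is outside this diff; the construction sites imply shapes roughly like the following sketch:

// Hypothetical excerpt; the real enum in services/error.rs is not shown in this diff.
pub enum Error {
    // ...
    PageExists,
    PageNotFound,
    PageNotDeleted,
    PageSlugEmpty,
    UserNotFound,
    InvalidSessionToken,
    // Structured variant carrying both IDs, as constructed in the session hunks:
    SessionUserId {
        active_user_id: i64,
        session_user_id: i64,
    },
    // ...
}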
async fn check_conflicts( ctx: &ServiceContext<'_>, site_id: i64, @@ -684,8 +696,8 @@ impl PageService { let txn = ctx.transaction(); if slug.is_empty() { - tide::log::error!("Cannot create page with empty slug"); - return Err(Error::BadRequest); + error!("Cannot create page with empty slug"); + return Err(Error::PageSlugEmpty); } let result = Page::find() @@ -701,15 +713,12 @@ impl PageService { match result { None => Ok(()), Some(page) => { - tide::log::error!( + error!( "Page {} with slug '{}' already exists on site ID {}, cannot {}", - page.page_id, - slug, - site_id, - action, + page.page_id, slug, site_id, action, ); - Err(Error::Conflict) + Err(Error::PageExists) } } } @@ -721,7 +730,7 @@ impl PageService { title: Option, alt_title: Option, ) -> Result<()> { - tide::log::info!("Checking page data against filters..."); + info!("Checking page data against filters..."); let filter_matcher = FilterService::get_matcher( ctx, diff --git a/deepwell/src/services/page/structs.rs b/deepwell/src/services/page/structs.rs index 08f9ae5bf2..23b791f8ba 100644 --- a/deepwell/src/services/page/structs.rs +++ b/deepwell/src/services/page/structs.rs @@ -22,11 +22,11 @@ use super::prelude::*; use crate::models::sea_orm_active_enums::PageRevisionType; use crate::services::page_revision::CreatePageRevisionOutput; use crate::services::score::ScoreValue; +use crate::web::PageDetails; use ftml::parsing::ParseError; use time::OffsetDateTime; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct CreatePage { pub site_id: i64, pub wikitext: String, @@ -40,8 +40,7 @@ pub struct CreatePage { pub bypass_filter: bool, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct CreatePageOutput { pub page_id: i64, pub slug: String, @@ -49,16 +48,38 @@ pub struct CreatePageOutput { pub parser_errors: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetPage<'a> { +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageReference<'a> { pub site_id: i64, pub page: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetPageOutput<'a> { +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageReferenceDetails<'a> { + pub site_id: i64, + pub page: Reference<'a>, + + #[serde(default)] + pub details: PageDetails, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageDirect { + pub site_id: i64, + pub page_id: i64, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageDirectDetails { + pub site_id: i64, + pub page_id: i64, + + #[serde(default)] + pub details: PageDetails, +} + +#[derive(Serialize, Debug, Clone)] +pub struct GetPageOutput { pub page_id: i64, pub page_created_at: OffsetDateTime, pub page_updated_at: Option, @@ -66,7 +87,7 @@ pub struct GetPageOutput<'a> { pub page_revision_count: i32, pub site_id: i64, pub page_category_id: i64, - pub page_category_slug: &'a str, + pub page_category_slug: String, pub discussion_thread_id: Option, pub revision_id: i64, pub revision_type: PageRevisionType, @@ -76,18 +97,17 @@ pub struct GetPageOutput<'a> { pub wikitext: Option, pub compiled_html: Option, pub compiled_at: OffsetDateTime, - pub compiled_generator: &'a str, - pub revision_comments: &'a str, - pub hidden_fields: &'a [String], - pub title: &'a str, - pub alt_title: Option<&'a str>, - pub slug: &'a str, - pub tags: &'a [String], + pub compiled_generator: String, + pub revision_comments: String, + pub 
hidden_fields: Vec, + pub title: String, + pub alt_title: Option, + pub slug: String, + pub tags: Vec, pub rating: ScoreValue, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct EditPage<'a> { pub site_id: i64, pub page: Reference<'a>, @@ -98,8 +118,8 @@ pub struct EditPage<'a> { pub body: EditPageBody, } -#[derive(Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] +#[derive(Deserialize, Debug, Default, Clone)] +#[serde(default)] pub struct EditPageBody { pub wikitext: ProvidedValue, pub title: ProvidedValue, @@ -107,8 +127,7 @@ pub struct EditPageBody { pub tags: ProvidedValue>, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct MovePage<'a> { pub site_id: i64, pub page: Reference<'a>, @@ -118,8 +137,7 @@ pub struct MovePage<'a> { // NOTE: slug field is a parameter, not in the body } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct MovePageOutput { pub old_slug: String, pub new_slug: String, @@ -128,8 +146,7 @@ pub struct MovePageOutput { pub parser_errors: Option>, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct DeletePage<'a> { pub site_id: i64, pub page: Reference<'a>, @@ -137,8 +154,14 @@ pub struct DeletePage<'a> { pub user_id: i64, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] +pub struct DeletePageOutput { + page_id: i64, + revision_id: i64, + revision_number: i32, +} + +#[derive(Deserialize, Debug, Clone)] pub struct RestorePage { pub site_id: i64, pub page_id: i64, @@ -147,16 +170,7 @@ pub struct RestorePage { pub slug: Option, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct DeletePageOutput { - page_id: i64, - revision_id: i64, - revision_number: i32, -} - -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct RestorePageOutput { slug: String, revision_id: i64, @@ -164,8 +178,7 @@ pub struct RestorePageOutput { parser_errors: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct RollbackPage<'a> { pub site_id: i64, pub page: Reference<'a>, diff --git a/deepwell/src/services/page_query/service.rs b/deepwell/src/services/page_query/service.rs index d7fdfb0fdd..7b816bc9c0 100644 --- a/deepwell/src/services/page_query/service.rs +++ b/deepwell/src/services/page_query/service.rs @@ -69,7 +69,7 @@ impl PageQueryService { variables, }: PageQuery<'_>, ) -> Result { - tide::log::info!("Building ListPages query from specification"); + info!("Building ListPages query from specification"); let txn = ctx.transaction(); let mut condition = Condition::all(); @@ -79,7 +79,7 @@ impl PageQueryService { // The site to query from. If not specified, then this is the current site. let queried_site_id = queried_site_id.unwrap_or(current_site_id); condition = condition.add(page::Column::SiteId.eq(queried_site_id)); - tide::log::debug!("Selecting pages from site ID: {queried_site_id}"); + debug!("Selecting pages from site ID: {queried_site_id}"); // Page Type // TODO track https://github.com/SeaQL/sea-orm/issues/1746 @@ -87,17 +87,17 @@ impl PageQueryService { match page_type { PageTypeSelector::Hidden => { // Hidden pages are any which have slugs that start with '_'. 
- tide::log::debug!("Selecting page slugs starting with '_'"); + debug!("Selecting page slugs starting with '_'"); condition = condition.add(hidden_condition); } PageTypeSelector::Normal => { // Normal pages are anything not in the above category. - tide::log::debug!("Selecting page slugs not starting with '_'"); + debug!("Selecting page slugs not starting with '_'"); condition = condition.add(hidden_condition.not()); } PageTypeSelector::All => { // If we're getting everything, then do nothing. - tide::log::debug!("Selecting all page slugs, normal or hidden"); + debug!("Selecting all page slugs, normal or hidden"); } } @@ -112,7 +112,7 @@ impl PageQueryService { // If all categories are selected (using an asterisk or by only specifying excluded categories), // then filter only by site_id and exclude the specified excluded categories. IncludedCategories::All => { - tide::log::debug!("Selecting all categories with exclusions"); + debug!("Selecting all categories with exclusions"); page::Column::PageCategoryId.in_subquery( Query::select() @@ -135,7 +135,7 @@ impl PageQueryService { // included categories to begin with, it is still accounted for to preserve // backwards-compatibility with poorly-constructed ListPages modules. IncludedCategories::List(included_categories) => { - tide::log::debug!("Selecting included categories only"); + debug!("Selecting included categories only"); page::Column::PageCategoryId.in_subquery( Query::select() @@ -193,7 +193,7 @@ impl PageQueryService { // This means that there should be no rows in `page_parent` // where they are the child page. PageParentSelector::NoParent => { - tide::log::debug!("Selecting pages with no parents"); + debug!("Selecting pages with no parents"); page::Column::PageId.not_in_subquery( Query::select() @@ -206,7 +206,7 @@ impl PageQueryService { // Pages which are siblings of the current page, // i.e., they share parents in common with the current page. PageParentSelector::SameParents => { - tide::log::debug!("Selecting pages are siblings under the given parents"); + debug!("Selecting pages are siblings under the given parents"); page::Column::PageId.in_subquery( Query::select() @@ -222,9 +222,7 @@ impl PageQueryService { // Pages which are not siblings of the current page, // i.e., they do not share any parents with the current page. PageParentSelector::DifferentParents => { - tide::log::debug!( - "Selecting pages which are not siblings under the given parents", - ); + debug!("Selecting pages which are not siblings under the given parents",); let parents = ParentService::get_parents( ctx, @@ -248,9 +246,7 @@ impl PageQueryService { // Pages which are children of the current page. PageParentSelector::ChildOf => { - tide::log::debug!( - "Selecting pages which are children of the current page", - ); + debug!("Selecting pages which are children of the current page",); page::Column::PageId.in_subquery( Query::select() @@ -265,9 +261,7 @@ impl PageQueryService { // TODO: Possibly allow either *any* or *all* of specified parents // rather than only any, in the future. PageParentSelector::HasParents(parents) => { - tide::log::debug!( - "Selecting on pages which have one of the given as parents", - ); + debug!("Selecting on pages which have one of the given as parents",); let parent_ids = PageService::get_pages(ctx, queried_site_id, parents) .await? 
@@ -288,7 +282,7 @@ impl PageQueryService { // Slug if let Some(slug) = slug { let slug = slug.as_ref(); - tide::log::debug!("Filtering based on slug {slug}"); + debug!("Filtering based on slug {slug}"); condition = condition.add(page::Column::Slug.eq(slug)); } @@ -346,10 +340,9 @@ impl PageQueryService { ascending, } = order.unwrap_or_default(); - tide::log::debug!( + debug!( "Ordering ListPages using {:?} (ascending: {})", - property, - ascending, + property, ascending, ); let order = if ascending { Order::Asc } else { Order::Desc }; @@ -359,39 +352,37 @@ impl PageQueryService { OrderProperty::PageSlug => { // idk how to do this, we need to strip off the category part somehow // PgExpr::matches? - tide::log::error!( - "Ordering by page slug (no category), not yet implemented", - ); + error!("Ordering by page slug (no category), not yet implemented",); todo!() // TODO } OrderProperty::FullSlug => { - tide::log::debug!("Ordering by page slug (with category"); + debug!("Ordering by page slug (with category"); query = query.order_by(page::Column::Slug, order); } OrderProperty::Title => { - tide::log::error!("Ordering by title, not yet implemented"); + error!("Ordering by title, not yet implemented"); join_revision!(); query = query.order_by(page_revision::Column::Title, order); } OrderProperty::AltTitle => { - tide::log::error!("Ordering by alt title, not yet implemented"); + error!("Ordering by alt title, not yet implemented"); join_revision!(); query = query.order_by(page_revision::Column::AltTitle, order); } OrderProperty::CreatedBy => { - tide::log::error!("Ordering by author, not yet implemented"); + error!("Ordering by author, not yet implemented"); todo!() // TODO } OrderProperty::CreatedAt => { - tide::log::debug!("Ordering by page creation timestamp"); + debug!("Ordering by page creation timestamp"); query = query.order_by(page::Column::CreatedAt, order); } OrderProperty::UpdatedAt => { - tide::log::debug!("Ordering by page last update timestamp"); + debug!("Ordering by page last update timestamp"); query = query.order_by(page::Column::UpdatedAt, order); } OrderProperty::Size => { - tide::log::error!("Ordering by page size, not yet implemented"); + error!("Ordering by page size, not yet implemented"); join_revision!(); join_text!(); let col = Expr::col(text::Column::Contents); @@ -399,35 +390,35 @@ impl PageQueryService { query = query.order_by(expr, order); } OrderProperty::Score => { - tide::log::error!("Ordering by score, not yet implemented"); + error!("Ordering by score, not yet implemented"); todo!() // TODO } OrderProperty::Votes => { - tide::log::error!("Ordering by vote count, not yet implemented"); + error!("Ordering by vote count, not yet implemented"); todo!() // TODO } OrderProperty::Revisions => { - tide::log::error!("Ordering by revision count, not yet implemented"); + error!("Ordering by revision count, not yet implemented"); todo!() // TODO } OrderProperty::Comments => { - tide::log::error!("Ordering by comment count, not yet implemented"); + error!("Ordering by comment count, not yet implemented"); todo!() // TODO } OrderProperty::Random => { - tide::log::debug!("Ordering by random value"); + debug!("Ordering by random value"); let expr = SimpleExpr::FunctionCall(Func::random()); query = query.order_by(expr, order); } OrderProperty::DataFormFieldName => { - tide::log::error!("Ordering by data form field, not yet implemented"); + error!("Ordering by data form field, not yet implemented"); todo!() // TODO } }; } if let Some(limit) = pagination.limit { - 
tide::log::debug!("Limiting ListPages to a maximum of {limit} pages total"); + debug!("Limiting ListPages to a maximum of {limit} pages total"); query = query.limit(limit); } diff --git a/deepwell/src/services/page_revision/service.rs b/deepwell/src/services/page_revision/service.rs index 47e5a69194..3dd2e5d36a 100644 --- a/deepwell/src/services/page_revision/service.rs +++ b/deepwell/src/services/page_revision/service.rs @@ -715,7 +715,7 @@ impl PageRevisionService { .order_by_desc(page_revision::Column::RevisionNumber) .one(txn) .await? - .ok_or(Error::NotFound)?; + .ok_or(Error::PageRevisionNotFound)?; Ok(revision) } @@ -747,14 +747,17 @@ impl PageRevisionService { page_id: i64, revision_number: i32, ) -> Result { - find_or_error(Self::get_optional(ctx, site_id, page_id, revision_number)).await + find_or_error!( + Self::get_optional(ctx, site_id, page_id, revision_number), + PageRevision, + ) } pub async fn get_direct( ctx: &ServiceContext<'_>, revision_id: i64, ) -> Result { - find_or_error(Self::get_direct_optional(ctx, revision_id)).await + find_or_error!(Self::get_direct_optional(ctx, revision_id), PageRevision) } pub async fn get_direct_optional( @@ -791,7 +794,7 @@ impl PageRevisionService { // that means this page does not exist, and we should return an error. match NonZeroI32::new(row_count) { Some(count) => Ok(count), - None => Err(Error::NotFound), + None => Err(Error::PageNotFound), } } diff --git a/deepwell/src/services/page_revision/structs.rs b/deepwell/src/services/page_revision/structs.rs index dbe7d682fb..7130a4175a 100644 --- a/deepwell/src/services/page_revision/structs.rs +++ b/deepwell/src/services/page_revision/structs.rs @@ -20,13 +20,12 @@ use super::prelude::*; use crate::models::sea_orm_active_enums::PageRevisionType; -use crate::web::FetchDirection; +use crate::web::{FetchDirection, PageDetails}; use ftml::parsing::ParseError; use std::num::NonZeroI32; use time::OffsetDateTime; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct CreatePageRevision { pub user_id: i64, pub comments: String, @@ -35,8 +34,8 @@ pub struct CreatePageRevision { pub body: CreatePageRevisionBody, } -#[derive(Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] +#[derive(Deserialize, Debug, Default, Clone)] +#[serde(default)] pub struct CreatePageRevisionBody { pub wikitext: ProvidedValue, pub title: ProvidedValue, @@ -45,7 +44,7 @@ pub struct CreatePageRevisionBody { pub tags: ProvidedValue>, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateFirstPageRevision { pub user_id: i64, pub comments: String, @@ -55,7 +54,7 @@ pub struct CreateFirstPageRevision { pub slug: String, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateTombstonePageRevision { pub site_id: i64, pub page_id: i64, @@ -63,7 +62,7 @@ pub struct CreateTombstonePageRevision { pub comments: String, } -#[derive(Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateResurrectionPageRevision { pub site_id: i64, pub page_id: i64, @@ -72,31 +71,36 @@ pub struct CreateResurrectionPageRevision { pub new_slug: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct CreatePageRevisionOutput { pub revision_id: i64, pub revision_number: i32, pub parser_errors: Option>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct CreateFirstPageRevisionOutput { pub revision_id: 
i64, pub parser_errors: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetPageRevision { pub site_id: i64, pub page_id: i64, pub revision_number: i32, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageRevisionDetails { + #[serde(flatten)] + pub input: GetPageRevision, + + #[serde(default)] + pub details: PageDetails, +} + +#[derive(Deserialize, Debug, Clone)] pub struct UpdatePageRevision { pub site_id: i64, pub page_id: i64, @@ -105,8 +109,16 @@ pub struct UpdatePageRevision { pub hidden: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] +pub struct UpdatePageRevisionDetails { + #[serde(flatten)] + pub input: UpdatePageRevision, + + #[serde(default)] + pub details: PageDetails, +} + +#[derive(Deserialize, Debug, Clone)] pub struct GetPageRevisionRange { pub site_id: i64, pub page_id: i64, @@ -115,6 +127,15 @@ pub struct GetPageRevisionRange { pub limit: u64, } +#[derive(Deserialize, Debug, Clone)] +pub struct GetPageRevisionRangeDetails { + #[serde(flatten)] + pub input: GetPageRevisionRange, + + #[serde(default)] + pub details: PageDetails, +} + /// Information about the revisions currently associated with a page. /// /// A lot of this information is not strictly necessary: @@ -124,15 +145,14 @@ pub struct GetPageRevisionRange { /// /// However it's convenient to avoid having to do these calculations inline /// in other places, and also so that API consumers have the relevant information. -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct PageRevisionCountOutput { pub revision_count: NonZeroI32, pub first_revision: i32, pub last_revision: i32, } -#[derive(Serialize, Debug)] +#[derive(Serialize, Debug, Clone)] pub struct PageRevisionModelFiltered { pub revision_id: i64, pub revision_type: PageRevisionType, diff --git a/deepwell/src/services/parent/service.rs b/deepwell/src/services/parent/service.rs index 4b5d312176..c5b0cd3ed2 100644 --- a/deepwell/src/services/parent/service.rs +++ b/deepwell/src/services/parent/service.rs @@ -18,6 +18,8 @@ * along with this program. If not, see . */ +// TODO replace ParentService with a new interaction type + use super::prelude::*; use crate::models::page_parent::{self, Entity as PageParent, Model as PageParentModel}; use crate::services::PageService; @@ -50,11 +52,11 @@ impl ParentService { // Check if the two pages are the same if parent_page.page_id == child_page.page_id { - tide::log::error!( + error!( "Cannot parent a page to itself (ID {})", parent_page.page_id, ); - return Err(Error::Conflict); + return Err(Error::PageParentExists); } // Check if this relationship already exists @@ -84,7 +86,7 @@ impl ParentService { /// Removes the parental relationship with the two given pages. /// /// # Returns - /// Returns `true` if the relationship was deleted, and + /// The struct contains `true` if the relationship was deleted, and /// `false` if it was already absent. 
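The new *Details request wrappers above (GetPageRevisionDetails, UpdatePageRevisionDetails, GetPageRevisionRangeDetails, and their page-side counterparts) share one serde pattern: #[serde(flatten)] folds the base request's fields into the same JSON object, while #[serde(default)] makes the details block optional. PageDetails comes from crate::web and is not part of this diff; assuming it implements Deserialize and Default, a payload like the sketch below deserializes cleanly:

// Illustrative payload only; the field values are made up.
let payload = serde_json::json!({
    "site_id": 1,
    "page_id": 42,
    "revision_number": 7
    // "details" is omitted entirely; #[serde(default)] fills it in
});
let request: GetPageRevisionDetails =
    serde_json::from_value(payload).expect("valid request payload");
assert_eq!(request.input.page_id, 42);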
pub async fn remove( ctx: &ServiceContext<'_>, @@ -93,7 +95,7 @@ impl ParentService { parent: parent_reference, child: child_reference, }: ParentDescription<'_>, - ) -> Result { + ) -> Result { let txn = ctx.transaction(); let (parent_page, child_page) = try_join!( @@ -101,13 +103,18 @@ impl ParentService { PageService::get(ctx, site_id, child_reference), )?; - let rows_deleted = + let DeleteResult { rows_affected } = PageParent::delete_by_id((parent_page.page_id, child_page.page_id)) .exec(txn) - .await? - .rows_affected; + .await?; - Ok(rows_deleted == 1) + debug_assert!( + rows_affected <= 1, + "Rows deleted using ID was more than 1: {rows_affected}", + ); + + let was_deleted = rows_affected == 1; + Ok(RemoveParentOutput { was_deleted }) } pub async fn get_optional( @@ -133,15 +140,15 @@ impl ParentService { } #[inline] + #[allow(dead_code)] // TODO pub async fn get( ctx: &ServiceContext<'_>, description: ParentDescription<'_>, ) -> Result { - find_or_error(Self::get_optional(ctx, description)).await + find_or_error!(Self::get_optional(ctx, description), PageParent) } /// Gets all relationships of the given type. - #[allow(dead_code)] // TODO pub async fn get_relationships( ctx: &ServiceContext<'_>, site_id: i64, @@ -164,7 +171,7 @@ impl ParentService { } /// Gets all children of the given page. - #[allow(dead_code)] // TODO + #[allow(dead_code)] // TEMP pub async fn get_children( ctx: &ServiceContext<'_>, site_id: i64, @@ -175,7 +182,6 @@ impl ParentService { } /// Gets all parents of the given page. - #[allow(dead_code)] // TODO pub async fn get_parents( ctx: &ServiceContext<'_>, site_id: i64, diff --git a/deepwell/src/services/parent/structs.rs b/deepwell/src/services/parent/structs.rs index 84c3eeba44..2189798ae3 100644 --- a/deepwell/src/services/parent/structs.rs +++ b/deepwell/src/services/parent/structs.rs @@ -22,17 +22,19 @@ use crate::services::Error; use crate::web::Reference; use std::str::FromStr; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct ParentDescription<'a> { pub site_id: i64, pub parent: Reference<'a>, pub child: Reference<'a>, } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Serialize, Deserialize, Debug, Copy, Clone, Hash, PartialEq, Eq)] pub enum ParentalRelationshipType { + #[serde(rename = "parents")] Parent, + + #[serde(rename = "children")] Child, } @@ -56,3 +58,15 @@ impl FromStr for ParentalRelationshipType { } } } + +#[derive(Deserialize, Debug, Clone)] +pub struct GetParentRelationships<'a> { + pub site_id: i64, + pub page: Reference<'a>, + pub relationship_type: ParentalRelationshipType, +} + +#[derive(Serialize, Debug, Copy, Clone)] +pub struct RemoveParentOutput { + pub was_deleted: bool, +} diff --git a/deepwell/src/services/password.rs b/deepwell/src/services/password.rs index 7851a70c2e..dcfdd9bee3 100644 --- a/deepwell/src/services/password.rs +++ b/deepwell/src/services/password.rs @@ -74,22 +74,21 @@ impl PasswordService { hash: &str, sleep: bool, ) -> Result<()> { - tide::log::info!("Attempting to verify password"); + info!("Attempting to verify password"); let result = Self::verify_internal(password, hash); match result { Ok(()) => Ok(()), Err(error) => { match error { // Simply the wrong password + // This is converted in services/error.rs Error::InvalidAuthentication => { - tide::log::warn!("Invalid password entered, verification failed"); + warn!("Invalid password entered, verification failed"); } // Some kind of server error _ => { - tide::log::error!( - 
"Unexpected error while verifying password: {error}", - ); + error!("Unexpected error while verifying password: {error}",); } } diff --git a/deepwell/src/services/render/structs.rs b/deepwell/src/services/render/structs.rs index 359097b84e..957209aa1f 100644 --- a/deepwell/src/services/render/structs.rs +++ b/deepwell/src/services/render/structs.rs @@ -23,7 +23,6 @@ use crate::hash::TextHash; use time::OffsetDateTime; #[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct RenderOutput { pub html_output: HtmlOutput, pub errors: Vec, diff --git a/deepwell/src/services/score/mod.rs b/deepwell/src/services/score/mod.rs index c8711a2834..4fc4d0ad7d 100644 --- a/deepwell/src/services/score/mod.rs +++ b/deepwell/src/services/score/mod.rs @@ -23,9 +23,9 @@ mod prelude { pub use super::structs::*; pub use super::Scorer; pub use crate::models::page_vote::{self, Entity as PageVote}; + pub use async_trait::async_trait; pub use ftml::data::ScoreValue; pub use sea_orm::{DatabaseTransaction, FromQueryResult}; - pub use tide::utils::async_trait; } mod impls; diff --git a/deepwell/src/services/score/structs.rs b/deepwell/src/services/score/structs.rs index c20509f61b..5d04eb9b76 100644 --- a/deepwell/src/services/score/structs.rs +++ b/deepwell/src/services/score/structs.rs @@ -23,7 +23,6 @@ use std::collections::BTreeMap; pub use crate::services::vote::VoteValue; #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] #[non_exhaustive] pub enum VoteType { UpsDowns, @@ -31,7 +30,6 @@ pub enum VoteType { } #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] pub enum ScoreType { Null, Sum, diff --git a/deepwell/src/services/session/service.rs b/deepwell/src/services/session/service.rs index acfb109217..f1d605d792 100644 --- a/deepwell/src/services/session/service.rs +++ b/deepwell/src/services/session/service.rs @@ -54,9 +54,7 @@ impl SessionService { restricted, }: CreateSession, ) -> Result { - tide::log::info!( - "Creating new session for user ID {user_id} (restricted: {restricted})", - ); + info!("Creating new session for user ID {user_id} (restricted: {restricted})",); let txn = ctx.transaction(); let config = ctx.config(); @@ -79,7 +77,7 @@ impl SessionService { }; let SessionModel { session_token, .. } = model.insert(txn).await?; - tide::log::info!("Created new session token"); + info!("Created new session token"); Ok(session_token) } @@ -87,7 +85,7 @@ impl SessionService { /// /// Example generated token: `wj:T9iF6vfjoYYE20QzrybV2C1V4K0LchHXsNVipX8G1GZ9vSJf0rvQpJ4YC8c8MAQ3`. fn new_token(config: &Config) -> String { - tide::log::debug!("Generating a new session token"); + debug!("Generating a new session token"); let mut rng = thread_rng(); assert_is_csprng(&rng); @@ -103,13 +101,13 @@ impl SessionService { ctx: &ServiceContext<'_>, session_token: &str, ) -> Result { - tide::log::info!("Looking up session with token {session_token}"); + info!("Looking up session with token {session_token}"); Self::get_optional(ctx, session_token) .await? 
-            .ok_or(Error::NotFound)
+            .ok_or(Error::InvalidSessionToken)
     }

-    async fn get_optional(
+    pub async fn get_optional(
         ctx: &ServiceContext<'_>,
         session_token: &str,
     ) -> Result> {
@@ -137,7 +135,7 @@
         session_token: &str,
         restricted: bool,
     ) -> Result {
-        tide::log::info!("Looking up user for session token");
+        info!("Looking up user for session token");

         let txn = ctx.transaction();
         let user = User::find()
@@ -150,7 +148,7 @@
             )
             .one(txn)
             .await?
-            .ok_or(Error::NotFound)?;
+            .ok_or(Error::UserNotFound)?;

         Ok(user)
     }
@@ -161,7 +159,7 @@
         ctx: &ServiceContext<'_>,
         user_id: i64,
     ) -> Result> {
-        tide::log::info!("Getting all sessions for user ID {user_id}");
+        info!("Getting all sessions for user ID {user_id}");

         let txn = ctx.transaction();
         let sessions = Session::find()
@@ -190,18 +188,21 @@
             user_agent,
         }: RenewSession,
     ) -> Result {
-        tide::log::info!("Renewing session ID {old_session_token}");
+        info!("Renewing session ID {old_session_token}");

         // Get existing session to ensure the token matches the passed user ID.
         let old_session = Self::get(ctx, &old_session_token).await?;
         if old_session.user_id != user_id {
-            tide::log::error!(
+            error!(
                 "Requested session renewal, user IDs do not match! (current: {}, request: {})",
                 old_session.user_id,
                 user_id,
             );
-            return Err(Error::BadRequest);
+            return Err(Error::SessionUserId {
+                active_user_id: user_id,
+                session_user_id: old_session.user_id,
+            });
         }

         // Invalid and recreate
@@ -226,15 +227,15 @@
         ctx: &ServiceContext<'_>,
         session_token: String,
     ) -> Result<()> {
-        tide::log::info!("Invalidating session ID {session_token}");
+        info!("Invalidating session ID {session_token}");

         let txn = ctx.transaction();
         let DeleteResult { rows_affected } =
             Session::delete_by_id(session_token).exec(txn).await?;

         if rows_affected != 1 {
-            tide::log::error!("This session was already deleted or does not exist");
-            return Err(Error::NotFound);
+            error!("This session was already deleted or does not exist");
+            return Err(Error::InvalidSessionToken);
         }

         Ok(())
@@ -253,18 +254,21 @@
         session_token: &str,
         user_id: i64,
     ) -> Result {
-        tide::log::info!("Invalidation all other session IDs for user ID {user_id}");
+        info!("Invalidating all other session IDs for user ID {user_id}");

         let txn = ctx.transaction();
         let session = Self::get(ctx, session_token).await?;
         if session.user_id != user_id {
-            tide::log::error!(
+            error!(
                 "Requested invalidation of other sessions, user IDs do not match! (current: {}, request: {})",
                 session.user_id,
                 user_id,
             );
-            return Err(Error::BadRequest);
+            return Err(Error::SessionUserId {
+                active_user_id: user_id,
+                session_user_id: session.user_id,
+            });
         }

         // Delete all sessions from user_id, except if it's this session_token
@@ -277,9 +281,7 @@
             .exec(txn)
             .await?;

-        tide::log::debug!(
-            "User ID {user_id}: {rows_affected} other sessions were invalidated",
-        );
+        debug!("User ID {user_id}: {rows_affected} other sessions were invalidated",);

         Ok(rows_affected)
     }
@@ -288,7 +290,7 @@
     /// # Returns
     /// The number of pruned sessions.
pub async fn prune(ctx: &ServiceContext<'_>) -> Result { - tide::log::info!("Pruning all expired sessions"); + info!("Pruning all expired sessions"); let txn = ctx.transaction(); let DeleteResult { rows_affected } = Session::delete_many() @@ -296,7 +298,7 @@ impl SessionService { .exec(txn) .await?; - tide::log::debug!("{rows_affected} expired sessions were pruned"); + debug!("{rows_affected} expired sessions were pruned"); Ok(rows_affected) } } diff --git a/deepwell/src/services/session/structs.rs b/deepwell/src/services/session/structs.rs index e90d52fa14..3fe8c8634e 100644 --- a/deepwell/src/services/session/structs.rs +++ b/deepwell/src/services/session/structs.rs @@ -22,7 +22,6 @@ use crate::models::session::Model as SessionModel; use std::net::IpAddr; #[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct CreateSession { pub user_id: i64, pub ip_address: IpAddr, @@ -31,7 +30,6 @@ pub struct CreateSession { } #[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct RenewSession { pub old_session_token: String, pub user_id: i64, @@ -42,14 +40,12 @@ pub struct RenewSession { pub type GetOtherSessions = InvalidateOtherSessions; #[derive(Serialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct GetOtherSessionsOutput { pub current: SessionModel, pub others: Vec, } #[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] pub struct InvalidateOtherSessions { pub session_token: String, pub user_id: i64, diff --git a/deepwell/src/services/site/service.rs b/deepwell/src/services/site/service.rs index 27f9f67767..7796da668a 100644 --- a/deepwell/src/services/site/service.rs +++ b/deepwell/src/services/site/service.rs @@ -131,7 +131,7 @@ impl SiteService { new_slug: &str, user_id: i64, ) -> Result<()> { - tide::log::info!("Updating slug for site {}, adding alias", site.site_id); + info!("Updating slug for site {}, adding alias", site.site_id); let old_slug = &site.slug; match AliasService::get_optional(ctx, AliasType::Site, new_slug).await? { @@ -139,13 +139,13 @@ impl SiteService { // // Don't return a future, nothing to do after Some(alias) => { - tide::log::debug!("Swapping slug between site and alias"); + debug!("Swapping slug between site and alias"); AliasService::swap(ctx, alias.alias_id, old_slug).await?; } // Return future that creates new alias at the old location None => { - tide::log::debug!("Creating site alias for {old_slug}"); + debug!("Creating site alias for {old_slug}"); // Add site alias for old slug. // @@ -223,7 +223,7 @@ impl SiteService { ctx: &ServiceContext<'_>, reference: Reference<'_>, ) -> Result { - find_or_error(Self::get_optional(ctx, reference)).await + find_or_error!(Self::get_optional(ctx, reference), Site) } /// Gets the site ID from a reference, looking up if necessary. @@ -249,7 +249,7 @@ impl SiteService { /// Checks to see if a site already exists at the slug specified. /// - /// If so, this method fails with `Error::Conflict`. Otherwise it returns nothing. + /// If so, this method fails with `Error::SiteExists`. Otherwise it returns nothing. 
async fn check_conflicts( ctx: &ServiceContext<'_>, slug: &str, @@ -258,8 +258,8 @@ impl SiteService { let txn = ctx.transaction(); if slug.is_empty() { - tide::log::error!("Cannot create site with empty slug"); - return Err(Error::BadRequest); + error!("Cannot create site with empty slug"); + return Err(Error::SiteSlugEmpty); } let result = Site::find() @@ -274,13 +274,12 @@ impl SiteService { match result { None => Ok(()), Some(_) => { - tide::log::error!( + error!( "Site with slug '{}' already exists, cannot {}", - slug, - action, + slug, action, ); - Err(Error::Conflict) + Err(Error::SiteExists) } } } diff --git a/deepwell/src/services/site/structs.rs b/deepwell/src/services/site/structs.rs index 09ee24a06b..6d83ebbe9d 100644 --- a/deepwell/src/services/site/structs.rs +++ b/deepwell/src/services/site/structs.rs @@ -23,7 +23,7 @@ use crate::models::site::Model as SiteModel; use crate::models::site_domain::Model as SiteDomainModel; use crate::web::{ProvidedValue, Reference}; -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CreateSite { pub slug: String, pub name: String, @@ -32,21 +32,18 @@ pub struct CreateSite { pub locale: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct CreateSiteOutput { pub site_id: i64, pub slug: String, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetSite<'a> { pub site: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetSiteOutput { #[serde(flatten)] pub site: SiteModel, @@ -54,8 +51,7 @@ pub struct GetSiteOutput { pub domains: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct UpdateSite<'a> { pub site: Reference<'a>, pub user_id: i64, @@ -64,8 +60,8 @@ pub struct UpdateSite<'a> { pub body: UpdateSiteBody, } -#[derive(Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] +#[derive(Deserialize, Debug, Clone, Default)] +#[serde(default)] pub struct UpdateSiteBody { pub name: ProvidedValue, pub slug: ProvidedValue, diff --git a/deepwell/src/services/special_page/service.rs b/deepwell/src/services/special_page/service.rs index 8cdf881a81..21dbdba1de 100644 --- a/deepwell/src/services/special_page/service.rs +++ b/deepwell/src/services/special_page/service.rs @@ -41,7 +41,7 @@ impl SpecialPageService { locale: &LanguageIdentifier, page_info: PageInfo<'_>, ) -> Result { - tide::log::info!( + info!( "Getting special page {sp_page_type:?} for site ID {}", site.site_id, ); @@ -127,7 +127,7 @@ impl SpecialPageService { locale: &LanguageIdentifier, page_info: &PageInfo<'_>, ) -> Result { - tide::log::debug!("Getting wikitext for special page, {} slugs", slugs.len()); + debug!("Getting wikitext for special page, {} slugs", slugs.len()); debug_assert!( !slugs.is_empty(), "No slugs to check for special page existence", diff --git a/deepwell/src/services/special_page/structs.rs b/deepwell/src/services/special_page/structs.rs index f0962a2287..70b933a783 100644 --- a/deepwell/src/services/special_page/structs.rs +++ b/deepwell/src/services/special_page/structs.rs @@ -30,7 +30,6 @@ pub enum SpecialPageType { } #[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] pub struct GetSpecialPageOutput { pub wikitext: String, pub render_output: RenderOutput, diff --git a/deepwell/src/services/text.rs 
b/deepwell/src/services/text.rs index 0efa347f7f..8c39306f16 100644 --- a/deepwell/src/services/text.rs +++ b/deepwell/src/services/text.rs @@ -40,7 +40,14 @@ impl TextService { ctx: &ServiceContext<'_>, hash: &[u8], ) -> Result> { - assert_eq!(hash.len(), TEXT_HASH_LENGTH); + if hash.len() != TEXT_HASH_LENGTH { + error!( + "Text hash length does not match, should be {}, is {}", + TEXT_HASH_LENGTH, + hash.len(), + ); + return Err(Error::BadRequest); + } let txn = ctx.transaction(); let contents = Text::find() @@ -54,7 +61,7 @@ impl TextService { #[inline] pub async fn get(ctx: &ServiceContext<'_>, hash: &[u8]) -> Result { - find_or_error(Self::get_optional(ctx, hash)).await + find_or_error!(Self::get_optional(ctx, hash), Text) } #[inline] @@ -149,7 +156,7 @@ impl TextService { .exec(txn) .await?; - tide::log::debug!("Pruned {rows_affected} unused text rows"); + debug!("Pruned {rows_affected} unused text rows"); Ok(()) } } diff --git a/deepwell/src/services/user/service.rs b/deepwell/src/services/user/service.rs index 97bec4c33d..33c538d69c 100644 --- a/deepwell/src/services/user/service.rs +++ b/deepwell/src/services/user/service.rs @@ -54,26 +54,26 @@ impl UserService { let txn = ctx.transaction(); let slug = get_regular_slug(&name); - tide::log::debug!("Normalizing user data (name '{}', slug '{}')", name, slug,); + debug!("Normalizing user data (name '{}', slug '{}')", name, slug,); regex_replace_in_place(&mut name, &LEADING_TRAILING_CHARS, ""); - tide::log::info!("Attempting to create user '{}' ('{}')", name, slug); + info!("Attempting to create user '{}' ('{}')", name, slug); // Empty slug check if slug.is_empty() { - tide::log::error!("Cannot create user with empty slug"); - return Err(Error::BadRequest); + error!("Cannot create user with empty slug"); + return Err(Error::UserSlugEmpty); } // Check if username contains the minimum amount of required bytes. if name.len() < ctx.config().minimum_name_bytes { - tide::log::error!( + error!( "User's name is not long enough ({} < {})", slug.len(), ctx.config().minimum_name_bytes, ); - return Err(Error::BadRequest); + return Err(Error::UserNameTooShort); } // Perform filter validation @@ -100,11 +100,9 @@ impl UserService { .await?; if result.is_some() { - tide::log::error!( - "User with conflicting name or slug already exists, cannot create", - ); + error!("User with conflicting name or slug already exists, cannot create",); - return Err(Error::Conflict); + return Err(Error::UserExists); } // Check for email conflicts @@ -125,34 +123,30 @@ impl UserService { .await?; if result.is_some() { - tide::log::error!( - "User with conflicting email already exists, cannot create", - ); + error!("User with conflicting email already exists, cannot create",); - return Err(Error::Conflict); + return Err(Error::UserExists); } } // Check for alias conflicts if AliasService::exists(ctx, AliasType::User, &slug).await? { - tide::log::error!( - "User alias with conflicting slug already exists, cannot create", - ); + error!("User alias with conflicting slug already exists, cannot create",); - return Err(Error::Conflict); + return Err(Error::UserExists); } // Set up password field depending on type let password = match user_type { UserType::Regular => { - tide::log::info!("Creating regular user '{slug}' with password"); + info!("Creating regular user '{slug}' with password"); PasswordService::new_hash(&password)? 
} UserType::System => { - tide::log::info!("Creating system user '{slug}'"); + info!("Creating system user '{slug}'"); if !password.is_empty() { - tide::log::warn!("Password was specified for system user"); + warn!("Password was specified for system user"); return Err(Error::BadRequest); } @@ -160,7 +154,7 @@ impl UserService { str!("!") } UserType::Bot => { - tide::log::info!("Creating bot user '{slug}'"); + info!("Creating bot user '{slug}'"); // TODO assign bot token format!("TODO bot token: {}", password) } @@ -175,29 +169,28 @@ impl UserService { // The assigned variable is also used to check whether email validation occurred, as it // will always be `Some` if validation occurred and `None` otherwise. let email_is_alias = if !bypass_email_verification { - let email_validation_output = EmailService::validate(&email)?; + let email_validation_output = EmailService::validate(&email).await?; + match email_validation_output.classification { EmailClassification::Normal => { - tide::log::info!("User {slug}'s email was verified successfully"); + info!("User {slug}'s email was verified successfully"); Some(false) } EmailClassification::Alias => { - tide::log::info!( - "User {slug}'s email was verified successfully (as an alias)", - ); + info!("User {slug}'s email was verified successfully (as an alias)",); Some(true) } EmailClassification::Disposable => { - tide::log::error!( + error!( "User {slug}'s email is disposable and did not pass verification", ); return Err(Error::DisallowedEmail); } EmailClassification::Invalid => { - tide::log::error!( + error!( "User {slug}'s email is invalid and did not pass verification", ); return Err(Error::InvalidEmail); @@ -305,7 +298,7 @@ impl UserService { ctx: &ServiceContext<'_>, reference: Reference<'_>, ) -> Result { - find_or_error(Self::get_optional(ctx, reference)).await + find_or_error!(Self::get_optional(ctx, reference), User) } /// Gets the user ID from a reference, looking up if necessary. @@ -340,7 +333,7 @@ impl UserService { if user.user_type == user_type { Ok(user) } else { - Err(Error::BadRequest) + Err(Error::UserNotFound) } } @@ -369,7 +362,8 @@ impl UserService { } // Validate email - let email_validation_output = EmailService::validate(&email)?; + let email_validation_output = EmailService::validate(&email).await?; + let is_alias = match email_validation_output.classification { EmailClassification::Normal => false, EmailClassification::Alias => true, @@ -474,8 +468,8 @@ impl UserService { // Empty slug check if new_slug.is_empty() { - tide::log::error!("Cannot create user with empty slug"); - return Err(Error::BadRequest); + error!("Cannot create user with empty slug"); + return Err(Error::UserSlugEmpty); } // Perform filter validation @@ -484,7 +478,7 @@ impl UserService { } if new_slug == user.slug { - tide::log::debug!("User slug is the same, rename is free"); + debug!("User slug is the same, rename is free"); // Set model, but return early, we don't deduct a // name change token or create a new user alias. @@ -495,7 +489,7 @@ impl UserService { if let Some(alias) = AliasService::get_optional(ctx, AliasType::User, &new_slug).await? { - tide::log::debug!("User slug is a past alias, rename is free"); + debug!("User slug is a past alias, rename is free"); // Swap user alias for old slug AliasService::swap(ctx, alias.alias_id, old_slug).await?; @@ -512,20 +506,20 @@ impl UserService { // a name change token must be consumed. Check if there are any remaining tokens. 
if user.name_changes_left == 0 { - tide::log::error!("User ID {} has no remaining name changes", user.user_id); + error!("User ID {} has no remaining name changes", user.user_id); return Err(Error::InsufficientNameChanges); } // Check if the new name has the minimum required amount of bytes. if new_name.len() < ctx.config().minimum_name_bytes { - tide::log::error!( + error!( "User's name is not long enough ({} < {})", new_name.len(), ctx.config().minimum_name_bytes, ); - return Err(Error::BadRequest); + return Err(Error::UserNameTooShort); } // Deduct name change token and add user alias for old slug. @@ -537,10 +531,9 @@ impl UserService { // updated yet, so we instead run AliasService::verify() // ourselves at the end of user updating. - tide::log::debug!( + debug!( "Creating user alias for '{}' -> '{}', deducting name change", - old_slug, - new_slug, + old_slug, new_slug, ); model.name_changes_left = Set(user.name_changes_left - 1); @@ -583,12 +576,9 @@ impl UserService { ..Default::default() }; - tide::log::info!( + info!( "Adding name change token to user ID {} (was {}, now {}, max {})", - user.user_id, - user.name_changes_left, - name_changes, - max_name_changes, + user.user_id, user.name_changes_left, name_changes, max_name_changes, ); model.update(txn).await?; @@ -602,7 +592,7 @@ impl UserService { multi_factor_secret: ActiveValue>, multi_factor_recovery_codes: ActiveValue>>, ) -> Result<()> { - tide::log::info!("Setting MFA secret fields for user ID {user_id}"); + info!("Setting MFA secret fields for user ID {user_id}"); let txn = ctx.transaction(); let model = user::ActiveModel { @@ -624,7 +614,7 @@ impl UserService { recovery_code: &str, ) -> Result<()> { let txn = ctx.transaction(); - tide::log::info!("Removing recovery code from user ID {}", user.user_id); + info!("Removing recovery code from user ID {}", user.user_id); // Only update if there are recovery codes set for the user if let Some(current_codes) = &user.multi_factor_recovery_codes { @@ -654,10 +644,10 @@ impl UserService { ) -> Result { let txn = ctx.transaction(); let user = Self::get(ctx, reference).await?; - tide::log::info!("Deleting user with ID {}", user.user_id); + info!("Deleting user with ID {}", user.user_id); - // Delete all user aliases - AliasService::delete_all(ctx, AliasType::User, user.user_id).await?; + // Remove all user aliases + AliasService::remove_all(ctx, AliasType::User, user.user_id).await?; // Set deletion flag let model = user::ActiveModel { @@ -676,7 +666,7 @@ impl UserService { name: &str, slug: &str, ) -> Result<()> { - tide::log::info!("Checking user name data against filters..."); + info!("Checking user name data against filters..."); let filter_matcher = FilterService::get_matcher(ctx, FilterClass::Platform, FilterType::User) @@ -691,7 +681,7 @@ impl UserService { } async fn run_email_filter(ctx: &ServiceContext<'_>, email: &str) -> Result<()> { - tide::log::info!("Checking user email data against filters..."); + info!("Checking user email data against filters..."); let filter_matcher = FilterService::get_matcher(ctx, FilterClass::Platform, FilterType::Email) diff --git a/deepwell/src/services/user/structs.rs b/deepwell/src/services/user/structs.rs index b0cbf745e6..e67d58c65b 100644 --- a/deepwell/src/services/user/structs.rs +++ b/deepwell/src/services/user/structs.rs @@ -22,10 +22,10 @@ use super::prelude::*; use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::UserType; use crate::models::user::Model as UserModel; +use crate::web::Bytes; use 
time::Date; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct CreateUser { pub user_type: UserType, pub name: String, @@ -39,29 +39,25 @@ pub struct CreateUser { pub bypass_email_verification: bool, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct CreateUserOutput { pub user_id: i64, pub slug: String, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetUser<'a> { pub user: Reference<'a>, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct GetUserOutput { #[serde(flatten)] pub user: UserModel, pub aliases: Vec, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct UpdateUser<'a> { pub user: Reference<'a>, @@ -69,15 +65,15 @@ pub struct UpdateUser<'a> { pub body: UpdateUserBody, } -#[derive(Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase", default)] +#[derive(Deserialize, Debug, Default, Clone)] +#[serde(default)] pub struct UpdateUserBody { pub name: ProvidedValue, pub email: ProvidedValue, pub email_verified: ProvidedValue, pub password: ProvidedValue, pub locale: ProvidedValue, - pub avatar: ProvidedValue>>, + pub avatar: ProvidedValue>>, pub real_name: ProvidedValue>, pub gender: ProvidedValue>, pub birthday: ProvidedValue>, diff --git a/deepwell/src/services/user_bot_owner/service.rs b/deepwell/src/services/user_bot_owner/service.rs index 986569c733..4ee62e730d 100644 --- a/deepwell/src/services/user_bot_owner/service.rs +++ b/deepwell/src/services/user_bot_owner/service.rs @@ -18,6 +18,9 @@ * along with this program. If not, see . */ +// TODO replace bot owners with a user / user interaction +// add checks like we have here, one is human one is bot, etc + use super::prelude::*; use crate::models::sea_orm_active_enums::UserType; use crate::models::user_bot_owner::{ @@ -33,7 +36,7 @@ impl UserBotOwnerService { ctx: &ServiceContext<'_>, bot_user_id: i64, ) -> Result> { - tide::log::info!("Looking up owners for bot ID {bot_user_id}"); + info!("Looking up owners for bot ID {bot_user_id}"); let txn = ctx.transaction(); let owners = UserBotOwner::find() @@ -49,10 +52,9 @@ impl UserBotOwnerService { bot_user_id: i64, human_user_id: i64, ) -> Result> { - tide::log::debug!( + debug!( "Retrieving user_bot_owner record for human ID {} and bot ID {}", - human_user_id, - bot_user_id, + human_user_id, bot_user_id, ); let txn = ctx.transaction(); @@ -81,11 +83,9 @@ impl UserBotOwnerService { UserService::get_with_user_type(ctx, human_reference, UserType::Regular), )?; - tide::log::info!( + info!( "Adding user ID {} as owner for bot ID {}: {}", - human.user_id, - bot.user_id, - description, + human.user_id, bot.user_id, description, ); // NOTE: Not using upsert (INSERT .. ON CONFLICT) because @@ -95,7 +95,7 @@ impl UserBotOwnerService { let model = match Self::get_optional(ctx, bot.user_id, human.user_id).await? 
         {
             // Update
             Some(owner) => {
-                tide::log::debug!("Bot owner record exists, updating");
+                debug!("Bot owner record exists, updating");

                 let mut model = owner.into_active_model();
                 model.description = Set(description);
@@ -105,7 +105,7 @@

             // Insert
             None => {
-                tide::log::debug!("Bot owner record is missing, inserting");
+                debug!("Bot owner record is missing, inserting");

                 let model = user_bot_owner::ActiveModel {
                     bot_user_id: Set(bot.user_id),
@@ -121,17 +121,18 @@
         Ok(model)
     }

-    /// Idempotently deletes the give user / bot ownership record, if it exists.
+    /// Idempotently removes the given user / bot ownership record, if it exists.
     ///
-    /// Returns `true` if the deletion was carried out (i.e. it used to exist),
+    /// # Returns
+    /// The struct contains `true` if the deletion was carried out (i.e. it used to exist),
     /// and `false` if not.
-    pub async fn delete(
+    pub async fn remove(
         ctx: &ServiceContext<'_>,
-        DeleteBotOwner {
+        RemoveBotOwner {
             bot: bot_reference,
             human: human_reference,
-        }: DeleteBotOwner<'_>,
-    ) -> Result {
+        }: RemoveBotOwner<'_>,
+    ) -> Result {
         let txn = ctx.transaction();

         // We don't check user type here because we already checked it prior to insertion.
@@ -144,10 +145,9 @@
             UserService::get_id(ctx, human_reference),
         )?;

-        tide::log::info!(
+        info!(
             "Deleting user ID {} as owner for bot ID {}",
-            human_user_id,
-            bot_user_id,
+            human_user_id, bot_user_id,
         );

         let DeleteResult { rows_affected } =
@@ -160,6 +160,7 @@
             "Rows deleted using ID was more than 1: {rows_affected}",
         );

-        Ok(rows_affected == 1)
+        let was_deleted = rows_affected == 1;
+        Ok(RemoveBotOwnerOutput { was_deleted })
     }
 }
diff --git a/deepwell/src/services/user_bot_owner/structs.rs b/deepwell/src/services/user_bot_owner/structs.rs
index aba0310656..1cf3e27109 100644
--- a/deepwell/src/services/user_bot_owner/structs.rs
+++ b/deepwell/src/services/user_bot_owner/structs.rs
@@ -21,8 +21,7 @@
 use crate::models::user::Model as UserModel;
 use crate::web::Reference;

-#[derive(Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
+#[derive(Deserialize, Debug, Clone)]
 pub struct CreateBotUser {
     pub name: String,
     pub email: String,
@@ -39,36 +38,36 @@
     // S: create a site
 }

-#[derive(Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
+#[derive(Deserialize, Debug, Clone)]
 pub struct CreateBotOwner<'a> {
     pub bot: Reference<'a>,
     pub human: Reference<'a>,
     pub description: String,
 }

-#[derive(Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
-pub struct DeleteBotOwner<'a> {
+#[derive(Deserialize, Debug, Clone)]
+pub struct RemoveBotOwner<'a> {
     pub bot: Reference<'a>,
     pub human: Reference<'a>,
 }

-#[derive(Serialize, Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
+#[derive(Serialize, Debug, Clone)]
+pub struct RemoveBotOwnerOutput {
+    pub was_deleted: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct BotOwner {
     pub user_id: i64,
     pub description: String,
 }

-#[derive(Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
+#[derive(Deserialize, Debug, Clone)]
 pub struct CreateBotOwnerBody {
     pub description: String,
 }

-#[derive(Serialize, Debug)]
-#[serde(rename_all = "camelCase")]
+#[derive(Serialize, Debug, Clone)]
 pub struct BotUserOutput {
     pub user: UserModel,
     pub owners: Vec,
 }
diff --git a/deepwell/src/services/view/options.rs b/deepwell/src/services/view/options.rs
index 58c75788a1..a4a8106284 100644
--- 
a/deepwell/src/services/view/options.rs +++ b/deepwell/src/services/view/options.rs @@ -52,8 +52,7 @@ const PAGE_ARGUMENTS_SCHEMA: ArgumentSchema = ArgumentSchema { /// /// `RawPageOptions` (from `wikidot_path`) contains the direct key / value /// pairs, this struct parses them into logical flags to be processed. -#[derive(Serialize, Deserialize, Debug, Default)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Deserialize, Debug, Clone, Default)] pub struct PageOptions { edit: bool, title: Option, @@ -70,7 +69,7 @@ pub struct PageOptions { impl PageOptions { pub fn parse(extra: &str) -> Self { - tide::log::info!("Parsing page options: '{extra}'"); + info!("Parsing page options: '{extra}'"); let mut arguments = PageArguments::parse(extra, PAGE_ARGUMENTS_SCHEMA).0; let mut options = PageOptions::default(); @@ -129,7 +128,7 @@ impl PageOptions { if let Some((value, orig)) = arguments.remove(unicase!("offset")) { match value { ArgumentValue::Integer(offset) => options.offset = Some(offset), - _ => tide::log::error!("Invalid value for offset argument: {orig}"), + _ => error!("Invalid value for offset argument: {orig}"), } } @@ -139,9 +138,7 @@ impl PageOptions { // Now go through anything remaining and emitting warnings for them for (key, (value, raw)) in arguments { - tide::log::warn!( - "Unused argument in page path: {key} -> {value:?} ('{raw}')", - ); + warn!("Unused argument in page path: {key} -> {value:?} ('{raw}')",); } options @@ -149,7 +146,7 @@ impl PageOptions { } fn to_bool(value: ArgumentValue) -> bool { - tide::log::debug!("Converting argument value to plain boolean: {value:?}"); + debug!("Converting argument value to plain boolean: {value:?}"); match value { // Simply unwrap bool diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 229ea33390..e7b1d2301c 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -61,11 +61,9 @@ impl ViewService { session_token, }: GetPageView, ) -> Result { - tide::log::info!( + info!( "Getting page view data for domain '{}', route '{:?}', locale '{}'", - domain, - route, - locale_str, + domain, route, locale_str, ); let locale = LanguageIdentifier::from_bytes(locale_str.as_bytes())?; @@ -145,9 +143,7 @@ impl ViewService { let user_permissions = match user_session { Some(ref session) => session.user_permissions, None => { - tide::log::debug!( - "No user for session, getting guest permission scheme", - ); + debug!("No user for session, getting guest permission scheme",); // TODO get permissions from service UserPermissions @@ -160,7 +156,7 @@ impl ViewService { // This returns false if the user is banned *and* the site // disallows banned viewing. if Self::can_access_page(ctx, user_permissions).await? 
{ - tide::log::debug!("User has page access, return text data"); + debug!("User has page access, return text data"); let (wikitext, compiled_html) = try_join!( TextService::get(ctx, &page_revision.wikitext_hash), @@ -176,9 +172,7 @@ impl ViewService { compiled_html, ) } else { - tide::log::warn!( - "User doesn't have page access, returning permission page", - ); + warn!("User doesn't have page access, returning permission page",); let (page_status, page_type) = if user_permissions.is_banned() { (PageStatus::Banned, SpecialPageType::Banned) @@ -296,11 +290,11 @@ impl ViewService { domain: &str, session_token: Option<&str>, ) -> Result { - tide::log::info!("Getting viewer data from domain '{domain}' and session token"); + info!("Getting viewer data from domain '{domain}' and session token"); // Get site data let (site, redirect_site) = - match DomainService::site_from_domain_optional(ctx, domain).await? { + match DomainService::parse_site_from_domain(ctx, domain).await? { SiteDomainResult::Found(site) => { let redirect_site = Self::should_redirect_site(ctx, &site, domain); (site, redirect_site) @@ -385,8 +379,8 @@ impl ViewService { _ctx: &ServiceContext<'_>, permissions: UserPermissions, ) -> Result { - tide::log::info!("Checking page access: {permissions:?}"); - tide::log::debug!("TODO: stub"); + info!("Checking page access: {permissions:?}"); + debug!("TODO: stub"); // TODO perform permission checks Ok(true) } diff --git a/deepwell/src/services/view/structs.rs b/deepwell/src/services/view/structs.rs index f60af18183..203321a673 100644 --- a/deepwell/src/services/view/structs.rs +++ b/deepwell/src/services/view/structs.rs @@ -36,8 +36,7 @@ impl UserPermissions { } } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct GetPageView { pub domain: String, pub session_token: Option, @@ -45,17 +44,15 @@ pub struct GetPageView { pub locale: String, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Deserialize, Debug, Clone)] pub struct PageRoute { pub slug: String, pub extra: String, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase", tag = "type", content = "data")] +#[derive(Serialize, Debug, Clone)] +#[serde(rename_all = "snake_case", tag = "type", content = "data")] pub enum GetPageViewOutput { - #[serde(rename_all = "camelCase")] PageFound { #[serde(flatten)] viewer: Viewer, @@ -67,7 +64,6 @@ pub enum GetPageViewOutput { compiled_html: String, }, - #[serde(rename_all = "camelCase")] PageMissing { #[serde(flatten)] viewer: Viewer, @@ -77,7 +73,6 @@ pub enum GetPageViewOutput { compiled_html: String, }, - #[serde(rename_all = "camelCase")] PagePermissions { #[serde(flatten)] viewer: Viewer, @@ -87,26 +82,25 @@ pub enum GetPageViewOutput { banned: bool, }, - #[serde(rename_all = "camelCase")] - SiteMissing { html: String }, + SiteMissing { + html: String, + }, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum ViewerResult { FoundSite(Viewer), MissingSite(String), } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct Viewer { pub site: SiteModel, pub redirect_site: Option, pub user_session: Option, } -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Debug, Clone)] pub struct UserSession { pub session: SessionModel, pub user: UserModel, diff --git a/deepwell/src/services/vote/service.rs b/deepwell/src/services/vote/service.rs index 9a0606705d..8cd54af4a1 100644 --- 
a/deepwell/src/services/vote/service.rs +++ b/deepwell/src/services/vote/service.rs @@ -41,11 +41,9 @@ impl VoteService { }: CreateVote, ) -> Result> { let txn = ctx.transaction(); - tide::log::info!( + info!( "Casting new vote by user ID {} on page ID {} (value {})", - user_id, - page_id, - value, + user_id, page_id, value, ); // Get previous vote, if any @@ -76,7 +74,7 @@ impl VoteService { #[inline] pub async fn get(ctx: &ServiceContext<'_>, key: GetVote) -> Result { - find_or_error(Self::get_optional(ctx, key)).await + find_or_error!(Self::get_optional(ctx, key), Vote) } /// Gets any current vote for the current page and user. @@ -104,7 +102,7 @@ impl VoteService { enable: bool, acting_user_id: i64, ) -> Result { - tide::log::info!( + info!( "{} vote on {:?} (being done by {})", if enable { "Enabling" } else { "Disabling" }, key, @@ -130,7 +128,7 @@ impl VoteService { /// Removes the vote specified. pub async fn remove(ctx: &ServiceContext<'_>, key: GetVote) -> Result { - tide::log::info!("Removing vote {key:?}"); + info!("Removing vote {key:?}"); let txn = ctx.transaction(); let mut vote = Self::get(ctx, key).await?.into_active_model(); diff --git a/deepwell/src/services/vote/structs.rs b/deepwell/src/services/vote/structs.rs index 30ab515e50..f2f20e3f1e 100644 --- a/deepwell/src/services/vote/structs.rs +++ b/deepwell/src/services/vote/structs.rs @@ -21,7 +21,6 @@ pub type VoteValue = i16; #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase")] pub struct CreateVote { pub page_id: i64, pub user_id: i64, @@ -29,21 +28,19 @@ pub struct CreateVote { } #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase")] pub struct GetVote { pub page_id: i64, pub user_id: i64, } #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase", tag = "type", content = "id")] +#[serde(tag = "type", content = "id")] pub enum VoteHistoryKind { Page(i64), User(i64), } #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase")] pub struct GetVoteHistory { #[serde(flatten)] pub kind: VoteHistoryKind, @@ -54,7 +51,6 @@ pub struct GetVoteHistory { } #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase")] pub struct CountVoteHistory { #[serde(flatten)] pub kind: VoteHistoryKind, @@ -64,7 +60,6 @@ pub struct CountVoteHistory { } #[derive(Deserialize, Debug, Copy, Clone)] -#[serde(rename_all = "camelCase")] pub struct VoteAction { pub page_id: i64, pub user_id: i64, diff --git a/deepwell/src/utils/error.rs b/deepwell/src/utils/error.rs deleted file mode 100644 index 0a4a75cc39..0000000000 --- a/deepwell/src/utils/error.rs +++ /dev/null @@ -1,29 +0,0 @@ -/* - * utils/error.rs - * - * DEEPWELL - Wikijump API provider and database manager - * Copyright (C) 2019-2023 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . 
- */ - -use crate::services::{Error, Result}; -use std::future::Future; - -pub async fn find_or_error(future: F) -> Result -where - F: Future>>, -{ - future.await?.ok_or(Error::NotFound) -} diff --git a/deepwell/src/utils/locale.rs b/deepwell/src/utils/locale.rs index fa80e4d9dc..44765ee9c9 100644 --- a/deepwell/src/utils/locale.rs +++ b/deepwell/src/utils/locale.rs @@ -23,7 +23,7 @@ use unic_langid::LanguageIdentifier; pub fn validate_locale(locale_str: &str) -> Result { LanguageIdentifier::from_bytes(locale_str.as_bytes()).map_err(|error| { - tide::log::warn!("Invalid locale '{}' passed: {:?}", locale_str, error); - Error::BadRequest + warn!("Invalid locale '{}' passed: {:?}", locale_str, error); + Error::LocaleInvalid(error) }) } diff --git a/deepwell/src/utils/mod.rs b/deepwell/src/utils/mod.rs index 167081b05a..2014201122 100644 --- a/deepwell/src/utils/mod.rs +++ b/deepwell/src/utils/mod.rs @@ -22,18 +22,14 @@ mod category; mod crypto; -mod error; mod locale; mod slug; mod string; -mod tide; mod time; pub use self::category::*; pub use self::crypto::*; -pub use self::error::*; pub use self::locale::*; pub use self::slug::*; pub use self::string::*; -pub use self::tide::*; pub use self::time::*; diff --git a/deepwell/src/watch.rs b/deepwell/src/watch.rs index fe7da4abf5..1aca8b4b5f 100644 --- a/deepwell/src/watch.rs +++ b/deepwell/src/watch.rs @@ -55,7 +55,7 @@ struct WatchedPaths { } pub fn setup_autorestart(config: &Config) -> Result { - tide::log::info!("Starting watcher for auto-restart on file change"); + info!("Starting watcher for auto-restart on file change"); let watched_paths = WatchedPaths { config_path: fs::canonicalize(&config.raw_toml_path)?, localization_path: fs::canonicalize(&config.localization_path)?, @@ -64,13 +64,10 @@ pub fn setup_autorestart(config: &Config) -> Result { let mut watcher = RecommendedWatcher::new( move |result: WatcherResult| match result { Err(error) => { - tide::log::error!("Unable to receive filesystem events: {error}"); + error!("Unable to receive filesystem events: {error}"); } Ok(event) => { - tide::log::debug!( - "Received filesystem event ({} paths)", - event.paths.len(), - ); + debug!("Received filesystem event ({} paths)", event.paths.len(),); if event_is_applicable(&watched_paths, event) { restart_self(); @@ -81,12 +78,12 @@ pub fn setup_autorestart(config: &Config) -> Result { )?; // Add autowatch to configuration file. - tide::log::debug!("Adding regular watch to {}", config.raw_toml_path.display()); + debug!("Adding regular watch to {}", config.raw_toml_path.display()); watcher.watch(&config.raw_toml_path, RecursiveMode::NonRecursive)?; // Add autowatch to localization directory. // Recursive because it is nested. 
- tide::log::debug!( + debug!( "Adding recursive watch to {}", config.localization_path.display(), ); @@ -101,7 +98,7 @@ fn event_is_applicable(watched_paths: &WatchedPaths, event: Event) -> bool { event.kind, EventKind::Access(_) | EventKind::Any | EventKind::Other, ) { - tide::log::debug!("Ignoring access or unknown event"); + debug!("Ignoring access or unknown event"); return false; } @@ -115,25 +112,23 @@ fn event_is_applicable(watched_paths: &WatchedPaths, event: Event) -> bool { } fn path_is_applicable(watched_paths: &WatchedPaths, path: &Path) -> bool { - tide::log::debug!("Checking filesystem event for {}", path.display()); + debug!("Checking filesystem event for {}", path.display()); let path = match fs::canonicalize(path) { Ok(path) => path, Err(error) => { - tide::log::error!( - "Error finding canonical path for event processing: {error}", - ); + error!("Error finding canonical path for event processing: {error}",); return false; } }; if path.starts_with(&watched_paths.config_path) { - tide::log::info!("DEEPWELL configuration path modified: {}", path.display()); + info!("DEEPWELL configuration path modified: {}", path.display()); return true; } if path.starts_with(&watched_paths.localization_path) { - tide::log::info!("Localization subpath modified: {}", path.display()); + info!("Localization subpath modified: {}", path.display()); return true; } @@ -141,12 +136,12 @@ fn path_is_applicable(watched_paths: &WatchedPaths, path: &Path) -> bool { } fn restart_self() -> Infallible { - tide::log::info!("Restarting server"); + info!("Restarting server"); let executable = env::current_exe().expect("Unable to get current executable"); let arguments = env::args_os().skip(1).collect::>(); - tide::log::info!( + info!( "Replacing process with exec: {} {:?}", Path::new(&executable).display(), arguments, diff --git a/deepwell/src/web/bytes.rs b/deepwell/src/web/bytes.rs new file mode 100644 index 0000000000..a580e95832 --- /dev/null +++ b/deepwell/src/web/bytes.rs @@ -0,0 +1,127 @@ +/* + * web/bytes.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2023 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +//! Wrapper type for bytes in JSON-RPC. +//! +//! This way, instead of representing bytes as an array of integers, which takes +//! up a lot more space in the JSON textual represe, we instead produce a long +//! hexadecimal string which is more compact, readable as binary, and takes up +//! a fixed amount of string space relative to blob size. 
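A quick client-side illustration (not part of this file, assuming only standard TypeScript and no extra library): the hex string produced by this wrapper, lowercase in the hex crate's default encoding, can be round-tripped with a couple of small helpers.

// Decode the hex form produced by Bytes' Serialize impl back into raw bytes.
function hexToBytes(hex: string): Uint8Array {
  const out = new Uint8Array(hex.length / 2)
  for (let i = 0; i < out.length; i++) {
    out[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16)
  }
  return out
}

// Encode raw bytes into the same lowercase hex form for request payloads.
function bytesToHex(bytes: Uint8Array): string {
  return Array.from(bytes, (b) => b.toString(16).padStart(2, "0")).join("")
}

// Example: bytesToHex(new Uint8Array([0xde, 0xad, 0xbe, 0xef])) === "deadbeef"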
+ +use crate::hash::{BlobHash, TextHash}; +use hex::serde::{deserialize, serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::borrow::Cow; + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct Bytes<'a> { + inner: Cow<'a, [u8]>, +} + +impl<'a> Bytes<'a> { + #[inline] + pub fn len(&self) -> usize { + self.inner.as_ref().len() + } +} + +// Borrowing + +impl AsRef<[u8]> for Bytes<'_> { + #[inline] + fn as_ref(&self) -> &[u8] { + &self.inner + } +} + +// Extraction + +impl From> for Vec { + #[inline] + fn from(bytes: Bytes) -> Vec { + bytes.inner.into_owned() + } +} + +// Construction + +impl From> for Bytes<'static> { + #[inline] + fn from(bytes: Vec) -> Bytes<'static> { + Bytes { + inner: Cow::Owned(bytes), + } + } +} + +impl From for Bytes<'static> { + #[inline] + fn from(hash: BlobHash) -> Bytes<'static> { + Bytes::from(hash.to_vec()) + } +} + +impl From for Bytes<'static> { + #[inline] + fn from(hash: TextHash) -> Bytes<'static> { + Bytes::from(hash.to_vec()) + } +} + +impl<'a> From<&'a [u8]> for Bytes<'a> { + #[inline] + fn from(bytes: &'a [u8]) -> Bytes<'a> { + Bytes { + inner: Cow::Borrowed(bytes), + } + } +} + +impl Default for Bytes<'static> { + #[inline] + fn default() -> Bytes<'static> { + Bytes { + inner: Cow::Owned(Vec::new()), + } + } +} + +// Serialization + +impl<'a> Serialize for Bytes<'a> { + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serialize(self, serializer) + } +} + +impl<'de> Deserialize<'de> for Bytes<'static> { + #[inline] + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let bytes: Vec = deserialize(deserializer)?; + Ok(Self::from(bytes)) + } +} diff --git a/deepwell/src/web/fetch_direction.rs b/deepwell/src/web/fetch_direction.rs index 4bed449a23..ea8c904db8 100644 --- a/deepwell/src/web/fetch_direction.rs +++ b/deepwell/src/web/fetch_direction.rs @@ -35,7 +35,7 @@ use strum_macros::EnumIter; PartialEq, Eq, )] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "kebab-case")] pub enum FetchDirection { /// Retrieves items prior (earlier) to this one. Before, diff --git a/deepwell/src/web/file_details.rs b/deepwell/src/web/file_details.rs index 2b91527ecb..cedd0ae572 100644 --- a/deepwell/src/web/file_details.rs +++ b/deepwell/src/web/file_details.rs @@ -19,8 +19,8 @@ */ #[derive(Serialize, Deserialize, Debug, Default, Copy, Clone, PartialEq, Eq)] -#[serde(default, rename_all = "camelCase")] -pub struct FileDetailsQuery { +#[serde(default)] +pub struct FileDetails { /// Include the contents of the file. pub data: bool, } diff --git a/deepwell/src/web/mod.rs b/deepwell/src/web/mod.rs index a3e4502f55..e3a661b270 100644 --- a/deepwell/src/web/mod.rs +++ b/deepwell/src/web/mod.rs @@ -18,6 +18,7 @@ * along with this program. If not, see . 
*/ +mod bytes; mod connection_type; mod fetch_direction; mod file_details; @@ -25,13 +26,12 @@ mod page_details; mod page_order; mod provided_value; mod reference; -mod unwrap; +pub use self::bytes::Bytes; pub use self::connection_type::ConnectionType; pub use self::fetch_direction::FetchDirection; -pub use self::file_details::FileDetailsQuery; -pub use self::page_details::PageDetailsQuery; +pub use self::file_details::FileDetails; +pub use self::page_details::PageDetails; pub use self::page_order::{PageOrder, PageOrderColumn}; pub use self::provided_value::ProvidedValue; pub use self::reference::Reference; -pub use self::unwrap::HttpUnwrap; diff --git a/deepwell/src/web/page_details.rs b/deepwell/src/web/page_details.rs index a3d1517ddd..d1742709e1 100644 --- a/deepwell/src/web/page_details.rs +++ b/deepwell/src/web/page_details.rs @@ -19,8 +19,8 @@ */ #[derive(Serialize, Deserialize, Debug, Default, Copy, Clone, PartialEq, Eq)] -#[serde(default, rename_all = "camelCase")] -pub struct PageDetailsQuery { +#[serde(default)] +pub struct PageDetails { /// Include the wikitext in the page output. pub wikitext: bool, diff --git a/deepwell/src/web/page_order.rs b/deepwell/src/web/page_order.rs index 5e07ec88b4..84db4717b1 100644 --- a/deepwell/src/web/page_order.rs +++ b/deepwell/src/web/page_order.rs @@ -46,7 +46,6 @@ impl Default for PageOrder { /// Describes what column that pages should be ordered by. #[derive(Serialize, Deserialize, Debug, Default, Copy, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] pub enum PageOrderColumn { /// Requests pages in page ID order. /// This is the default. diff --git a/deepwell/src/web/unwrap.rs b/deepwell/src/web/unwrap.rs deleted file mode 100644 index 0e8db7de68..0000000000 --- a/deepwell/src/web/unwrap.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* - * web/unwrap.rs - * - * DEEPWELL - Wikijump API provider and database manager - * Copyright (C) 2019-2023 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . 
- */ - -use tide::{Error, StatusCode}; - -pub trait HttpUnwrap { - fn ok_or_404(self) -> Result; -} - -impl HttpUnwrap for Option { - fn ok_or_404(self) -> Result { - match self { - Some(object) => Ok(object), - None => Err(Error::from_str(StatusCode::NotFound, "")), - } - } -} diff --git a/framerail/package.json b/framerail/package.json index 7166eb47b4..48b756ef23 100644 --- a/framerail/package.json +++ b/framerail/package.json @@ -15,8 +15,6 @@ }, "devDependencies": { "@playwright/test": "^1.39.0", - "@sveltejs/adapter-auto": "next", - "@sveltejs/kit": "next", "@typescript-eslint/eslint-plugin": "^5.62.0", "@typescript-eslint/parser": "^5.62.0", "eslint": "^8.51.0", @@ -35,13 +33,17 @@ "stylelint-config-recess-order": "^4.3.0", "stylelint-order": "^6.0.1", "stylelint-scss": "^5.2.1", + "typescript": "^5.2.2" + }, + "type": "module", + "dependencies": { + "@sveltejs/adapter-auto": "next", + "@sveltejs/kit": "next", + "json-rpc-2.0": "^1.6.0", "svelte": "^3.59.2", "svelte-check": "^3.5.2", "svelte-preprocess": "^5.0.4", "tslib": "^2.6.2", - "typescript": "^5.2.2", "vite": "^4.4.11" - }, - "type": "module", - "dependencies": {} + } } diff --git a/framerail/pnpm-lock.yaml b/framerail/pnpm-lock.yaml index b1a90c97b2..987a44257f 100644 --- a/framerail/pnpm-lock.yaml +++ b/framerail/pnpm-lock.yaml @@ -4,16 +4,36 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -devDependencies: - '@playwright/test': - specifier: ^1.39.0 - version: 1.39.0 +dependencies: '@sveltejs/adapter-auto': specifier: next version: 1.0.0-next.91(@sveltejs/kit@1.0.0-next.589) '@sveltejs/kit': specifier: next version: 1.0.0-next.589(svelte@3.59.2)(vite@4.4.11) + json-rpc-2.0: + specifier: ^1.6.0 + version: 1.6.0 + svelte: + specifier: ^3.59.2 + version: 3.59.2 + svelte-check: + specifier: ^3.5.2 + version: 3.5.2(postcss@8.4.31)(sass@1.69.3)(svelte@3.59.2) + svelte-preprocess: + specifier: ^5.0.4 + version: 5.0.4(postcss@8.4.31)(sass@1.69.3)(svelte@3.59.2)(typescript@5.2.2) + tslib: + specifier: ^2.6.2 + version: 2.6.2 + vite: + specifier: ^4.4.11 + version: 4.4.11(sass@1.69.3) + +devDependencies: + '@playwright/test': + specifier: ^1.39.0 + version: 1.39.0 '@typescript-eslint/eslint-plugin': specifier: ^5.62.0 version: 5.62.0(@typescript-eslint/parser@5.62.0)(eslint@8.51.0)(typescript@5.2.2) @@ -68,24 +88,9 @@ devDependencies: stylelint-scss: specifier: ^5.2.1 version: 5.2.1(stylelint@15.10.3) - svelte: - specifier: ^3.59.2 - version: 3.59.2 - svelte-check: - specifier: ^3.5.2 - version: 3.5.2(postcss@8.4.31)(sass@1.69.3)(svelte@3.59.2) - svelte-preprocess: - specifier: ^5.0.4 - version: 5.0.4(postcss@8.4.31)(sass@1.69.3)(svelte@3.59.2)(typescript@5.2.2) - tslib: - specifier: ^2.6.2 - version: 2.6.2 typescript: specifier: ^5.2.2 version: 5.2.2 - vite: - specifier: ^4.4.11 - version: 4.4.11(sass@1.69.3) packages: @@ -155,7 +160,7 @@ packages: cpu: [arm64] os: [android] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/android-arm@0.18.11: @@ -164,7 +169,7 @@ packages: cpu: [arm] os: [android] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/android-x64@0.18.11: @@ -173,7 +178,7 @@ packages: cpu: [x64] os: [android] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/darwin-arm64@0.18.11: @@ -182,7 +187,7 @@ packages: cpu: [arm64] os: [darwin] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/darwin-x64@0.18.11: @@ -191,7 +196,7 @@ packages: cpu: [x64] os: [darwin] requiresBuild: true - dev: true + dev: false optional: true 
/@esbuild/freebsd-arm64@0.18.11: @@ -200,7 +205,7 @@ packages: cpu: [arm64] os: [freebsd] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/freebsd-x64@0.18.11: @@ -209,7 +214,7 @@ packages: cpu: [x64] os: [freebsd] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-arm64@0.18.11: @@ -218,7 +223,7 @@ packages: cpu: [arm64] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-arm@0.18.11: @@ -227,7 +232,7 @@ packages: cpu: [arm] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-ia32@0.18.11: @@ -236,7 +241,7 @@ packages: cpu: [ia32] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-loong64@0.18.11: @@ -245,7 +250,7 @@ packages: cpu: [loong64] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-mips64el@0.18.11: @@ -254,7 +259,7 @@ packages: cpu: [mips64el] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-ppc64@0.18.11: @@ -263,7 +268,7 @@ packages: cpu: [ppc64] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-riscv64@0.18.11: @@ -272,7 +277,7 @@ packages: cpu: [riscv64] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-s390x@0.18.11: @@ -281,7 +286,7 @@ packages: cpu: [s390x] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/linux-x64@0.18.11: @@ -290,7 +295,7 @@ packages: cpu: [x64] os: [linux] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/netbsd-x64@0.18.11: @@ -299,7 +304,7 @@ packages: cpu: [x64] os: [netbsd] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/openbsd-x64@0.18.11: @@ -308,7 +313,7 @@ packages: cpu: [x64] os: [openbsd] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/sunos-x64@0.18.11: @@ -317,7 +322,7 @@ packages: cpu: [x64] os: [sunos] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/win32-arm64@0.18.11: @@ -326,7 +331,7 @@ packages: cpu: [arm64] os: [win32] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/win32-ia32@0.18.11: @@ -335,7 +340,7 @@ packages: cpu: [ia32] os: [win32] requiresBuild: true - dev: true + dev: false optional: true /@esbuild/win32-x64@0.18.11: @@ -344,7 +349,7 @@ packages: cpu: [x64] os: [win32] requiresBuild: true - dev: true + dev: false optional: true /@eslint-community/eslint-utils@4.4.0(eslint@8.51.0): @@ -412,18 +417,17 @@ packages: /@jridgewell/resolve-uri@3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} engines: {node: '>=6.0.0'} - dev: true + dev: false /@jridgewell/sourcemap-codec@1.4.14: resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} - dev: true /@jridgewell/trace-mapping@0.3.17: resolution: {integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} dependencies: '@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 - dev: true + dev: false /@microsoft/tsdoc-config@0.16.2: resolution: {integrity: sha512-OGiIzzoBLgWWR0UdRJX98oYO+XKGf7tiK4Zk6tQ/E4IJqGCe7dvkTvgDZV5cFJUzLGDOjeAXrnZoA6QkVySuxw==} @@ -444,12 +448,10 @@ packages: dependencies: '@nodelib/fs.stat': 2.0.5 run-parallel: 1.2.0 - dev: true /@nodelib/fs.stat@2.0.5: resolution: {integrity: 
sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} - dev: true /@nodelib/fs.walk@1.2.8: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} @@ -457,7 +459,6 @@ packages: dependencies: '@nodelib/fs.scandir': 2.1.5 fastq: 1.13.0 - dev: true /@playwright/test@1.39.0: resolution: {integrity: sha512-3u1iFqgzl7zr004bGPYiN/5EZpRUSFddQBra8Rqll5N0/vfpqlP9I9EXqAoGacuAbX6c9Ulg/Cjqglp5VkK6UQ==} @@ -469,7 +470,7 @@ packages: /@polka/url@1.0.0-next.21: resolution: {integrity: sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==} - dev: true + dev: false /@sveltejs/adapter-auto@1.0.0-next.91(@sveltejs/kit@1.0.0-next.589): resolution: {integrity: sha512-U57tQdzTfFINim8tzZSARC9ztWPzwOoHwNOpGdb2o6XrD0mEQwU9DsII7dBblvzg+xCnmd0pw7PDtXz5c5t96w==} @@ -478,7 +479,7 @@ packages: dependencies: '@sveltejs/kit': 1.0.0-next.589(svelte@3.59.2)(vite@4.4.11) import-meta-resolve: 2.2.0 - dev: true + dev: false /@sveltejs/kit@1.0.0-next.589(svelte@3.59.2)(vite@4.4.11): resolution: {integrity: sha512-5ABRw46z9B+cCe/YWhcx+I/azNZg1NCDEkVJifZn8ToFoJ3a1eP0OexNIrvMEWpllMbNMPcJm2TC9tnz9oPfWQ==} @@ -506,7 +507,7 @@ packages: vite: 4.4.11(sass@1.69.3) transitivePeerDependencies: - supports-color - dev: true + dev: false /@sveltejs/vite-plugin-svelte@2.0.4(svelte@3.59.2)(vite@4.4.11): resolution: {integrity: sha512-pjqhW00KwK2uzDGEr+yJBwut+D+4XfJO/+bHHdHzPRXn9+1Jeq5JcFHyrUiYaXgHtyhX0RsllCTm4ssAx4ZY7Q==} @@ -525,11 +526,11 @@ packages: vitefu: 0.2.4(vite@4.4.11) transitivePeerDependencies: - supports-color - dev: true + dev: false /@types/cookie@0.5.1: resolution: {integrity: sha512-COUnqfB2+ckwXXSFInsFdOAWQzCCx+a5hq2ruyj+Vjund94RJQd4LG2u9hnvJrTgunKAaax7ancBYlDrNYxA0g==} - dev: true + dev: false /@types/debug@4.1.7: resolution: {integrity: sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==} @@ -561,7 +562,7 @@ packages: /@types/pug@2.0.6: resolution: {integrity: sha512-SnHmG9wN1UVmagJOnyo/qkk0Z7gejYxOYYmaAwr5u2yFYfsupN3sg10kyzN8Hep/2zbHxCnsumxOoRIRMBwKCg==} - dev: true + dev: false /@types/semver@7.3.13: resolution: {integrity: sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==} @@ -758,7 +759,6 @@ packages: dependencies: normalize-path: 3.0.0 picomatch: 2.3.1 - dev: true /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} @@ -781,7 +781,6 @@ packages: /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - dev: true /balanced-match@2.0.0: resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==} @@ -790,7 +789,6 @@ packages: /binary-extensions@2.2.0: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} engines: {node: '>=8'} - dev: true /binary-searching@2.0.5: resolution: {integrity: sha512-v4N2l3RxL+m4zDxyxz3Ne2aTmiPn8ZUpKFpdPtO+ItW1NcTCXA7JeHG5GMBSvoKSkQZ9ycS+EouDVxYB9ufKWA==} @@ -801,30 +799,27 @@ packages: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 - dev: true /braces@3.0.2: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} dependencies: fill-range: 7.0.1 
- dev: true /buffer-crc32@0.2.13: resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} - dev: true + dev: false /busboy@1.6.0: resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} engines: {node: '>=10.16.0'} dependencies: streamsearch: 1.1.0 - dev: true + dev: false /callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} - dev: true /camelcase-keys@7.0.2: resolution: {integrity: sha512-Rjs1H+A9R+Ig+4E/9oyB66UC5Mj9Xq3N//vcLf2WzgdTi/3gUu3Z9KoqmlrEG4VuuLK8wJHofxzdQXz/knhiYg==} @@ -875,7 +870,6 @@ packages: readdirp: 3.6.0 optionalDependencies: fsevents: 2.3.2 - dev: true /color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} @@ -909,12 +903,11 @@ packages: /concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - dev: true /cookie@0.5.0: resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} engines: {node: '>= 0.6'} - dev: true + dev: false /cosmiconfig@8.2.0: resolution: {integrity: sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==} @@ -964,7 +957,6 @@ packages: optional: true dependencies: ms: 2.1.2 - dev: true /decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} @@ -997,7 +989,7 @@ packages: /deepmerge@4.3.1: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} - dev: true + dev: false /dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} @@ -1007,11 +999,11 @@ packages: /detect-indent@6.1.0: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} - dev: true + dev: false /devalue@4.2.0: resolution: {integrity: sha512-mbjoAaCL2qogBKgeFxFPOXAUsZchircF+B/79LD4sHH0+NHfYm8gZpQrskKDn5gENGt35+5OI1GUF7hLVnkPDw==} - dev: true + dev: false /diff@5.1.0: resolution: {integrity: sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==} @@ -1076,7 +1068,7 @@ packages: /es6-promise@3.3.1: resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==} - dev: true + dev: false /esbuild@0.18.11: resolution: {integrity: sha512-i8u6mQF0JKJUlGR3OdFLKldJQMMs8OqM9Cc3UCi9XXziJ9WERM5bfkHaEAy0YAvPRMgqSW55W7xYn84XtEFTtA==} @@ -1106,7 +1098,7 @@ packages: '@esbuild/win32-arm64': 0.18.11 '@esbuild/win32-ia32': 0.18.11 '@esbuild/win32-x64': 0.18.11 - dev: true + dev: false /escape-string-regexp@1.0.5: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} @@ -1231,7 +1223,7 @@ packages: /esm-env@1.0.0: resolution: {integrity: sha512-Cf6VksWPsTuW01vU9Mk/3vRue91Zevka5SjyNf3nEpokFRuqt/KjUQoGAwq9qMmhpLTHmXzSIrFRw8zxWzmFBA==} - dev: true + dev: false /espree@9.6.1: resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} @@ -1284,7 
+1276,6 @@ packages: glob-parent: 5.1.2 merge2: 1.4.1 micromatch: 4.0.5 - dev: true /fast-json-stable-stringify@2.1.0: resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} @@ -1303,7 +1294,6 @@ packages: resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} dependencies: reusify: 1.0.4 - dev: true /file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} @@ -1317,7 +1307,6 @@ packages: engines: {node: '>=8'} dependencies: to-regex-range: 5.0.1 - dev: true /find-up@5.0.0: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} @@ -1341,14 +1330,12 @@ packages: /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} - dev: true /fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true - dev: true optional: true /function-bind@1.1.1: @@ -1360,7 +1347,6 @@ packages: engines: {node: '>= 6'} dependencies: is-glob: 4.0.3 - dev: true /glob-parent@6.0.2: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} @@ -1378,7 +1364,6 @@ packages: minimatch: 3.1.2 once: 1.4.0 path-is-absolute: 1.0.1 - dev: true /global-modules@2.0.0: resolution: {integrity: sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==} @@ -1405,7 +1390,7 @@ packages: /globalyzer@0.1.0: resolution: {integrity: sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==} - dev: true + dev: false /globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} @@ -1425,11 +1410,11 @@ packages: /globrex@0.1.2: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} - dev: true + dev: false /graceful-fs@4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} - dev: true + dev: false /graphemer@1.4.0: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} @@ -1485,7 +1470,6 @@ packages: /immutable@4.1.0: resolution: {integrity: sha512-oNkuqVTA8jqG1Q6c+UglTOD1xhC1BtjKI7XkCXRkZHrN5m18/XsnUp8Q89GkQO/z+0WjonSvl0FLhDYftp46nQ==} - dev: true /import-fresh@3.3.0: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} @@ -1493,7 +1477,6 @@ packages: dependencies: parent-module: 1.0.1 resolve-from: 4.0.0 - dev: true /import-lazy@4.0.0: resolution: {integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} @@ -1502,7 +1485,7 @@ packages: /import-meta-resolve@2.2.0: resolution: {integrity: sha512-CpPOtiCHxP9HdtDM5F45tNiAe66Cqlv3f5uHoJjt+KlaLrUh9/Wz9vepADZ78SlqEo62aDWZtj9ydMGXV+CPnw==} - dev: true + dev: false /imurmurhash@0.1.4: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} @@ -1519,11 +1502,9 @@ packages: dependencies: once: 
1.4.0 wrappy: 1.0.2 - dev: true /inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true /ini@1.3.8: resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} @@ -1538,7 +1519,6 @@ packages: engines: {node: '>=8'} dependencies: binary-extensions: 2.2.0 - dev: true /is-core-module@2.11.0: resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} @@ -1549,7 +1529,6 @@ packages: /is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - dev: true /is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} @@ -1561,12 +1540,10 @@ packages: engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 - dev: true /is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} - dev: true /is-path-inside@3.0.3: resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} @@ -1610,6 +1587,10 @@ packages: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true + /json-rpc-2.0@1.6.0: + resolution: {integrity: sha512-+pKxaoIqnA5VjXmZiAI1+CkFG7mHLg+dhtliOe/mp1P5Gdn8P5kE/Xxp2CUBwnGL7pfw6gC8zWTWekhSnKzHFA==} + dev: false + /json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} dev: true @@ -1630,7 +1611,6 @@ packages: /kleur@4.1.5: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} - dev: true /known-css-properties@0.28.0: resolution: {integrity: sha512-9pSL5XB4J+ifHP0e0jmmC98OGC1nL8/JjS+fi6mnTlIf//yt/MfVLtKg7S6nCtj/8KTcWX7nRlY0XywoYY1ISQ==} @@ -1680,14 +1660,14 @@ packages: engines: {node: '>=12'} dependencies: '@jridgewell/sourcemap-codec': 1.4.14 - dev: true + dev: false /magic-string@0.30.0: resolution: {integrity: sha512-LA+31JYDJLs82r2ScLrlz1GjSgu66ZV518eyWT+S8VhyQn/JL0u9MeBOvQMGYiPk1DBiSN9DDMOcXvigJZaViQ==} engines: {node: '>=12'} dependencies: '@jridgewell/sourcemap-codec': 1.4.14 - dev: true + dev: false /map-obj@1.0.1: resolution: {integrity: sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==} @@ -1751,7 +1731,6 @@ packages: /merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - dev: true /micromark-core-commonmark@1.0.6: resolution: {integrity: sha512-K+PkJTxqjFfSNkfAhp4GB+cZPfQd6dxtTXnf+RjZOV7T4EEXnvgzOcnp+eSTmpGk9d1S9sL6/lqrgSNn/s0HZA==} @@ -1935,24 +1914,21 @@ packages: dependencies: braces: 3.0.2 picomatch: 2.3.1 - dev: true /mime@3.0.0: resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} engines: {node: '>=10.0.0'} hasBin: true - dev: true + dev: false /min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} - dev: true 
/minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} dependencies: brace-expansion: 1.1.11 - dev: true /minimist-options@4.1.0: resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==} @@ -1965,34 +1941,31 @@ packages: /minimist@1.2.7: resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==} - dev: true + dev: false /mkdirp@0.5.6: resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} hasBin: true dependencies: minimist: 1.2.7 - dev: true + dev: false /mri@1.2.0: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} - dev: true /mrmime@1.0.1: resolution: {integrity: sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==} engines: {node: '>=10'} - dev: true + dev: false /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - dev: true /nanoid@3.3.6: resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - dev: true /natural-compare-lite@1.4.0: resolution: {integrity: sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==} @@ -2015,13 +1988,11 @@ packages: /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} - dev: true /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} dependencies: wrappy: 1.0.2 - dev: true /optionator@0.9.3: resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} @@ -2054,7 +2025,6 @@ packages: engines: {node: '>=6'} dependencies: callsites: 3.1.0 - dev: true /parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} @@ -2074,7 +2044,6 @@ packages: /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} engines: {node: '>=0.10.0'} - dev: true /path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} @@ -2092,12 +2061,10 @@ packages: /picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} - dev: true /playwright-core@1.39.0: resolution: {integrity: sha512-+k4pdZgs1qiM+OUkSjx96YiKsXsmb59evFoqv8SKO067qBA+Z2s/dCzJij/ZhdQcs2zlTAgRKfeiiLm8PQ2qvw==} @@ -2195,7 +2162,6 @@ packages: nanoid: 3.3.6 picocolors: 1.0.0 source-map-js: 1.0.2 - dev: true /prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} @@ -2256,7 +2222,6 @@ packages: /queue-microtask@1.2.3: resolution: {integrity: 
sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true /quick-lru@5.1.1: resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} @@ -2287,7 +2252,6 @@ packages: engines: {node: '>=8.10.0'} dependencies: picomatch: 2.3.1 - dev: true /redent@4.0.0: resolution: {integrity: sha512-tYkDkVVtYkSVhuQ4zBgfvciymHaeuel+zFKXShfDnFP5SyVEP7qo70Rf1jTOTCx3vGNAbnEi/xFkcfQVMIBWag==} @@ -2305,7 +2269,6 @@ packages: /resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} - dev: true /resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} @@ -2322,14 +2285,13 @@ packages: /reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true /rimraf@2.7.1: resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} hasBin: true dependencies: glob: 7.2.3 - dev: true + dev: false /rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} @@ -2344,20 +2306,18 @@ packages: hasBin: true optionalDependencies: fsevents: 2.3.2 - dev: true + dev: false /run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} dependencies: queue-microtask: 1.2.3 - dev: true /sade@1.8.1: resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} engines: {node: '>=6'} dependencies: mri: 1.2.0 - dev: true /sander@0.5.1: resolution: {integrity: sha512-3lVqBir7WuKDHGrKRDn/1Ye3kwpXaDOMsiRP1wd6wpZW56gJhsbp5RqQpA6JG/P+pkXizygnr1dKR8vzWaVsfA==} @@ -2366,7 +2326,7 @@ packages: graceful-fs: 4.2.10 mkdirp: 0.5.6 rimraf: 2.7.1 - dev: true + dev: false /sass@1.69.3: resolution: {integrity: sha512-X99+a2iGdXkdWn1akFPs0ZmelUzyAQfvqYc2P/MPTrJRuIRoTffGzT9W9nFqG00S+c8hXzVmgxhUuHFdrwxkhQ==} @@ -2376,7 +2336,6 @@ packages: chokidar: 3.5.3 immutable: 4.1.0 source-map-js: 1.0.2 - dev: true /semver@7.5.3: resolution: {integrity: sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==} @@ -2388,7 +2347,7 @@ packages: /set-cookie-parser@2.5.1: resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==} - dev: true + dev: false /shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} @@ -2414,7 +2373,7 @@ packages: '@polka/url': 1.0.0-next.21 mrmime: 1.0.1 totalist: 3.0.0 - dev: true + dev: false /slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} @@ -2438,12 +2397,11 @@ packages: buffer-crc32: 0.2.13 minimist: 1.2.7 sander: 0.5.1 - dev: true + dev: false /source-map-js@1.0.2: resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} engines: {node: '>=0.10.0'} - dev: true /spdx-correct@3.1.1: resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} @@ -2470,7 
+2428,7 @@ packages: /streamsearch@1.1.0: resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} engines: {node: '>=10.0.0'} - dev: true + dev: false /string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} @@ -2493,7 +2451,7 @@ packages: engines: {node: '>=8'} dependencies: min-indent: 1.0.1 - dev: true + dev: false /strip-indent@4.0.0: resolution: {integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==} @@ -2639,7 +2597,7 @@ packages: - sass - stylus - sugarss - dev: true + dev: false /svelte-eslint-parser@0.33.0(svelte@3.59.2): resolution: {integrity: sha512-5awZ6Bs+Tb/zQwa41PSdcLynAVQTwW0HGyCBjtbAQ59taLZqDgQSMzRlDmapjZdDtzERm0oXDZNE0E+PKJ6ryg==} @@ -2665,7 +2623,7 @@ packages: svelte: '>=3.19.0' dependencies: svelte: 3.59.2 - dev: true + dev: false /svelte-preprocess@5.0.4(postcss@8.4.31)(sass@1.69.3)(svelte@3.59.2)(typescript@5.2.2): resolution: {integrity: sha512-ABia2QegosxOGsVlsSBJvoWeXy1wUKSfF7SWJdTjLAbx/Y3SrVevvvbFNQqrSJw89+lNSsM58SipmZJ5SRi5iw==} @@ -2714,12 +2672,11 @@ packages: strip-indent: 3.0.0 svelte: 3.59.2 typescript: 5.2.2 - dev: true + dev: false /svelte@3.59.2: resolution: {integrity: sha512-vzSyuGr3eEoAtT/A6bmajosJZIUWySzY2CzB3w2pgPvnkUjGqlDnsNnA0PMO+mMAhuyMul6C2uuZzY6ELSkzyA==} engines: {node: '>= 8'} - dev: true /svg-tags@1.0.0: resolution: {integrity: sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==} @@ -2745,19 +2702,18 @@ packages: dependencies: globalyzer: 0.1.0 globrex: 0.1.2 - dev: true + dev: false /to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} dependencies: is-number: 7.0.0 - dev: true /totalist@3.0.0: resolution: {integrity: sha512-eM+pCBxXO/njtF7vdFsHuqb+ElbxqtI4r5EAvk6grfAFyJ6IvWlSkfZ5T9ozC6xWw3Fj1fGoSmrl0gUs46JVIw==} engines: {node: '>=6'} - dev: true + dev: false /trim-newlines@4.1.1: resolution: {integrity: sha512-jRKj0n0jXWo6kh62nA5TEh3+4igKDXLvzBJcPpiizP7oOolUrYIxmVBG9TOtHYFHoddUk6YvAkGeGoSVTXfQXQ==} @@ -2770,7 +2726,7 @@ packages: /tslib@2.6.2: resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: true + dev: false /tsutils@3.21.0(typescript@5.2.2): resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} @@ -2803,14 +2759,13 @@ packages: resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} engines: {node: '>=14.17'} hasBin: true - dev: true /undici@5.14.0: resolution: {integrity: sha512-yJlHYw6yXPPsuOH0x2Ib1Km61vu4hLiRRQoafs+WUgX1vO64vgnxiCEN9dpIrhZyHFsai3F0AEj4P9zy19enEQ==} engines: {node: '>=12.18'} dependencies: busboy: 1.6.0 - dev: true + dev: false /unist-util-stringify-position@3.0.2: resolution: {integrity: sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==} @@ -2880,7 +2835,7 @@ packages: sass: 1.69.3 optionalDependencies: fsevents: 2.3.2 - dev: true + dev: false /vitefu@0.2.4(vite@4.4.11): resolution: {integrity: sha512-fanAXjSaf9xXtOOeno8wZXIhgia+CZury481LsDaV++lSvcU2R9Ch2bPh3PYFyoHW+w9LqAeYRISVQjUIew14g==} @@ -2891,7 +2846,7 @@ packages: optional: true dependencies: vite: 4.4.11(sass@1.69.3) - dev: true + dev: false 
   /which@1.3.1:
     resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==}
 
@@ -2910,7 +2865,6 @@ packages:
 
   /wrappy@1.0.2:
     resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
-    dev: true
 
   /write-file-atomic@5.0.1:
     resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==}
diff --git a/framerail/src/lib/fetch.ts b/framerail/src/lib/fetch.ts
index 3b3cff0a3f..261d5018d1 100644
--- a/framerail/src/lib/fetch.ts
+++ b/framerail/src/lib/fetch.ts
@@ -1,6 +1,6 @@
 // Wrapper around fetch() to provide timeouts.
 
-export const DEFAULT_TIMEOUT = 2000
+export const DEFAULT_TIMEOUT = 1500
 
 export function wjfetch(url, options = {}) {
   let timeout = DEFAULT_TIMEOUT
diff --git a/framerail/src/lib/server/deepwell/index.ts b/framerail/src/lib/server/deepwell/index.ts
index e7ca0c109b..43d1984c8c 100644
--- a/framerail/src/lib/server/deepwell/index.ts
+++ b/framerail/src/lib/server/deepwell/index.ts
@@ -1,23 +1,23 @@
 // TODO refactor into proper TS service
 
-import { wjfetch } from "$lib/fetch.ts"
+import { JSONRPCClient, JSONRPCRequest } from "json-rpc-2.0"
 
-const DEEPWELL_HOST = process.env.DEEPWELL_HOST || "localhost"
-const DEEPWELL_PORT = 2747
-const DEEPWELL_ROUTE = `http://${DEEPWELL_HOST}:${DEEPWELL_PORT}/api/trusted`
+export const DEEPWELL_HOST = process.env.DEEPWELL_HOST || "localhost"
+export const DEEPWELL_PORT = 2747
+export const DEEPWELL_URL = `http://${DEEPWELL_HOST}:${DEEPWELL_PORT}/jsonrpc`
+export const client = new JSONRPCClient(processRawRequest)
 
-export function wellfetch(path, options = {}) {
-  if (!path.startsWith("/")) {
-    throw new Error(`DEEPWELL path does not start with /: ${path}`)
-  }
+async function processRawRequest(request: JSONRPCRequest): Promise {
+  const response = await fetch(DEEPWELL_URL, {
+    method: "POST",
+    headers: { "content-type": "application/json" },
+    body: JSON.stringify(request)
+  })
 
-  const url = `${DEEPWELL_ROUTE}${path}`
-  return wjfetch(url, options)
+  const data = await response.json()
+  client.receive(data)
 }
 
 export async function ping(): void {
-  const response = await wellfetch("/ping")
-  if (!response.ok) {
-    throw new Error("Cannot ping DEEPWELL!")
-  }
+  await client.request("ping")
 }
diff --git a/framerail/src/lib/server/deepwell/page.ts b/framerail/src/lib/server/deepwell/page.ts
new file mode 100644
index 0000000000..1797b2d415
--- /dev/null
+++ b/framerail/src/lib/server/deepwell/page.ts
@@ -0,0 +1,69 @@
+import { client } from "$lib/server/deepwell/index.ts"
+import type { Optional } from "$lib/types.ts"
+
+export async function pageDelete(
+  siteId: number,
+  pageId: Optional,
+  slug: string,
+  revisionComments: Optional
+): Promise {
+  return client.request("page_delete", {
+    site_id: siteId,
+    page: pageId ?? slug,
+    user_id: 1, // TODO: identify user session and pass the user to the API request
+    revision_comments: revisionComments
+  })
+}
+
+export async function pageEdit(
+  siteId: number,
+  pageId: Optional,
+  slug: string,
+  revisionComments: Optional,
+  wikitext: string,
+  title: string,
+  altTitle: string,
+  tags: string[]
+): Promise {
+  return client.request(pageId ? "page_edit" : "page_create", {
+    site_id: siteId,
+    page: pageId ?? slug,
+    user_id: 1, // TODO: identify user session and pass the user to the API request
+    revision_comments: revisionComments,
+    wikitext,
+    title,
+    alt_title: altTitle,
+    tags
+  })
+}
+
+export async function pageHistory(
+  siteId: number,
+  pageId: Optional,
+  revisionNumber: Optional,
+  limit: Optional
+): Promise {
+  return client.request("page_revision_range", {
+    site_id: siteId,
+    page_id: pageId,
+    revision_number: revisionNumber,
+    revision_direction: "before",
+    limit
+  })
+}
+
+export async function pageMove(
+  siteId: number,
+  pageId: Optional,
+  slug: string,
+  newSlug: string,
+  revisionComments: Optional
+): Promise {
+  return client.request("page_move", {
+    site_id: siteId,
+    page: pageId ?? slug,
+    new_slug: newSlug,
+    user_id: 1, // TODO: identify user session and pass the user to the API request
+    revision_comments: revisionComments
+  })
+}
diff --git a/framerail/src/lib/server/deepwell/views.ts b/framerail/src/lib/server/deepwell/views.ts
index 1fd1007f8c..a4d4449b24 100644
--- a/framerail/src/lib/server/deepwell/views.ts
+++ b/framerail/src/lib/server/deepwell/views.ts
@@ -1,4 +1,4 @@
-import { wellfetch } from "$lib/server/deepwell/index.ts"
+import { client } from "$lib/server/deepwell/index.ts"
 import type { Optional } from "$lib/types.ts"
 
 export interface PageRoute {
@@ -11,23 +11,11 @@ export async function pageView(
   domain: string,
   locale: string,
   route: Optional,
   sessionToken: Optional
-): object {
-  const response = await wellfetch("/view/page", {
-    method: "PUT",
-    headers: {
-      "Content-Type": "application/json"
-    },
-    body: JSON.stringify({
-      domain,
-      locale,
-      sessionToken,
-      route
-    })
+): Promise {
+  return client.request("page_view", {
+    domain,
+    locale,
+    session_token: sessionToken,
+    route
   })
-
-  if (!response.ok) {
-    throw new Error("Unable to get view data from server")
-  }
-
-  return response.json()
 }
diff --git a/framerail/src/lib/server/page/delete.ts b/framerail/src/lib/server/page/delete.ts
deleted file mode 100644
index 7a3b5c6ea4..0000000000
--- a/framerail/src/lib/server/page/delete.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { wellfetch } from "$lib/server/deepwell/index.ts"
-import type { Optional } from "$lib/types.ts"
-
-export async function pageDelete(
-  siteId: number,
-  pageId: Optional,
-  slug: string,
-  revisionComments: Optional
-): Promise {
-  const response = await wellfetch("/page", {
-    method: "DELETE",
-    headers: {
-      "Content-Type": "application/json"
-    },
-    body: JSON.stringify({
-      siteId,
-      page: pageId ?? slug,
-      userId: 1, // TODO: identify user session and pass the user to the API request
-      revisionComments
-    })
-  })
-
-  if (!response.ok) {
-    throw new Error("Unable to delete page")
-  }
-
-  return response.json()
-}
diff --git a/framerail/src/lib/server/page/edit.ts b/framerail/src/lib/server/page/edit.ts
deleted file mode 100644
index e321c2acd1..0000000000
--- a/framerail/src/lib/server/page/edit.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import { wellfetch } from "$lib/server/deepwell/index.ts"
-import type { Optional } from "$lib/types.ts"
-
-export async function pageEdit(
-  siteId: number,
-  pageId: Optional,
-  slug: string,
-  revisionComments: Optional,
-  wikitext: string,
-  title: string,
-  altTitle: string,
-  tags: string[]
-): Promise {
-  let endpoint = pageId ? "/page" : "/page/create"
-  const response = await wellfetch(endpoint, {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json"
-    },
-    body: JSON.stringify({
-      siteId,
-      page: pageId ?? slug,
-      slug,
-      userId: 1, // TODO: identify user session and pass the user to the API request
-      revisionComments,
-      wikitext,
-      title,
-      altTitle,
-      tags
-    })
-  })
-
-  if (!response.ok) {
-    throw new Error("Unable to save data to server")
-  }
-
-  return response.json()
-}
diff --git a/framerail/src/lib/server/page/history.ts b/framerail/src/lib/server/page/history.ts
deleted file mode 100644
index cb10496c9f..0000000000
--- a/framerail/src/lib/server/page/history.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { wellfetch } from "$lib/server/deepwell/index.ts"
-import type { Optional } from "$lib/types.ts"
-
-export async function pageHistory(
-  siteId: number,
-  pageId: Optional,
-  revisionNumber: Optional,
-  limit: Optional
-): Promise {
-  const response = await wellfetch("/page/revision/range", {
-    method: "PUT",
-    headers: {
-      "Content-Type": "application/json"
-    },
-    body: JSON.stringify({
-      siteId,
-      pageId,
-      revisionNumber,
-      revisionDirection: "before",
-      limit
-    })
-  })
-
-  if (!response.ok) {
-    throw new Error("Unable to get data from server")
-  }
-
-  return response.json()
-}
diff --git a/framerail/src/lib/server/page/index.ts b/framerail/src/lib/server/page/index.ts
deleted file mode 100644
index d67a70d81e..0000000000
--- a/framerail/src/lib/server/page/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export { pageDelete } from "$lib/server/page/delete"
-export { pageEdit } from "$lib/server/page/edit"
-export { pageHistory } from "$lib/server/page/history"
-export { pageMove } from "$lib/server/page/move"
diff --git a/framerail/src/lib/server/page/move.ts b/framerail/src/lib/server/page/move.ts
deleted file mode 100644
index 247468f55f..0000000000
--- a/framerail/src/lib/server/page/move.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { wellfetch } from "$lib/server/deepwell/index.ts"
-import type { Optional } from "$lib/types.ts"
-
-export async function pageMove(
-  siteId: number,
-  pageId: Optional,
-  slug: string,
-  newSlug: string,
-  revisionComments: Optional
-): Promise {
-  const response = await wellfetch("/page/move", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json"
-    },
-    body: JSON.stringify({
-      siteId,
-      page: pageId ?? slug,
-      newSlug,
-      userId: 1, // TODO: identify user session and pass the user to the API request
-      revisionComments
-    })
-  })
-
-  if (!response.ok) {
-    throw new Error("Unable to send data to server")
-  }
-
-  return response.json()
-}
diff --git a/framerail/src/routes/[slug]/[...extra]/+server.ts b/framerail/src/routes/[slug]/[...extra]/+server.ts
index 1f0ee987e0..93200a23bf 100644
--- a/framerail/src/routes/[slug]/[...extra]/+server.ts
+++ b/framerail/src/routes/[slug]/[...extra]/+server.ts
@@ -1,4 +1,6 @@
-import * as page from "$lib/server/page"
+import * as page from "$lib/server/deepwell/page"
+
+// Handling of server events from client
 
 export async function POST(event) {
   let data = await event.request.formData()
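The deleted `$lib/server/page/*` fetch()-based helpers above are superseded by the JSON-RPC wrappers in `framerail/src/lib/server/deepwell/page.ts`, all of which go through the shared `JSONRPCClient` exported from `$lib/server/deepwell/index.ts`. As a rough usage sketch (not part of this diff), a SvelteKit handler might call one of the wrappers as shown below; the form-field names and the `Response` wrapping are assumptions for illustration, and only `pageEdit` and its parameter order come from the change set.

```ts
// Sketch only: invokes the pageEdit() wrapper added in this diff.
// Form-field names ("site-id", "slug", ...) are hypothetical.
import { pageEdit } from "$lib/server/deepwell/page"

export async function POST(event) {
  const form = await event.request.formData()

  // pageEdit() issues a JSON-RPC "page_edit" call, or "page_create" when no
  // pageId is supplied, via the shared JSONRPCClient.
  const response = await pageEdit(
    Number(form.get("site-id")), // siteId
    null, // pageId: null means the page is addressed (or created) by slug
    String(form.get("slug")), // slug
    form.get("comments")?.toString(), // revisionComments
    String(form.get("wikitext")), // wikitext
    String(form.get("title")), // title
    String(form.get("alt-title") ?? ""), // altTitle
    String(form.get("tags") ?? "").split(" ") // tags
  )

  return new Response(JSON.stringify(response))
}
```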

diff --git a/framerail/src/routes/[slug]/[...extra]/page.svelte b/framerail/src/routes/[slug]/[...extra]/page.svelte
index 8bc6611b08..9683534b4d 100644
--- a/framerail/src/routes/[slug]/[...extra]/page.svelte
+++ b/framerail/src/routes/[slug]/[...extra]/page.svelte
@@ -97,7 +97,7 @@ UNTRANSLATED:Response
-        {data.pageRevision.title}
+        {data.page_revision.title}
@@ -114,7 +114,7 @@ Tags
-      {#each data.pageRevision.tags as tag}
+      {#each data.page_revision.tags as tag}
         {tag}
       {/each}
@@ -127,14 +127,14 @@
           class="editor-title"
           placeholder="UT:title"
          type="text"
-          value={data.pageRevision.title}
+          value={data.page_revision.title}
        />