From 24a31196637c1fad26f31e7f1ee47fb69db7e209 Mon Sep 17 00:00:00 2001 From: Laurent Wouters Date: Fri, 25 Oct 2024 16:41:17 +0200 Subject: [PATCH] feature: implementing worker nodes --- ...733daa283ce8a370cb3664e5e1ac052860d45.json | 38 ++ ...1373d8d4c7cc00cc68369301da21483068ff.json} | 10 +- ...11a7ab0d84881a4cdd2af9491ff2197a696d0.json | 12 + ...5bdc12de3e301fc51881e9bb5d2b5e41a6893.json | 20 - ...592b32cdc3fed4a4c9b7bb4586e1a37057ee2.json | 12 - ...2b62ad832b8b9ba1eb81820d26cc4ea4b4dbf.json | 20 + ...c21b45b4253255e4f2343ccae3d2fea5b4e2.json} | 6 +- ...49bb2f5608cd35c0a7f02c8e3a992eaeb453.json} | 30 +- ...9af20a0a7b6751916375bbf8ec09a4a69ad9.json} | 30 +- ...d292c946c49c08ef936feeceddcaadaa34065.json | 12 + ...75b9e21cffd74c7ad8fe0885ce34d5eb57c3.json} | 30 +- ...dd05a3df847d2bbb2fd4f28922a741dc91868.json | 12 - ...fd980df34e6b27cf3f0407a85bcd2a0a61a26.json | 12 + ...0efae1104057c8a50ca75a4b324961535d3c5.json | 32 -- ...6ffaf334a1ebca3c66d9876569fe019bb303.json} | 30 +- ...80d5f67b932082657c7d1ebf6981f92ade736.json | 44 ++ Cargo.lock | 306 +++++----- Cargo.toml | 5 +- docker-compose.yml | 2 + src/application.rs | 123 +++- src/empty.db | Bin 90112 -> 90112 bytes src/main.rs | 39 +- src/migrations/mod.rs | 4 + src/migrations/v1.10.0.sql | 9 + src/model/config.rs | 208 ++++++- src/model/docs.rs | 23 + src/model/mod.rs | 14 +- src/model/packages.rs | 14 +- src/model/worker.rs | 525 ++++++++++++++++++ src/routes.rs | 210 ++++++- src/schema.sql | 6 +- src/services/database/jobs.rs | 30 +- src/services/database/packages.rs | 178 ++++-- src/services/deps.rs | 7 +- src/services/docs.rs | 350 +++++++----- src/services/mod.rs | 5 +- src/tests/mocks.rs | 4 + src/tests/mod.rs | 3 +- src/utils/concurrent.rs | 74 ++- src/webapp/admin-jobs-docgen.html | 155 ++++-- src/webapp/admin-workers.html | 219 ++++++++ src/webapp/admin.html | 3 + src/webapp/api.js | 19 + src/webapp/mod.rs | 1 + src/worker.rs | 204 +++++++ 45 files changed, 2546 insertions(+), 544 deletions(-) create mode 100644 .sqlx/query-0c4a9fc1c579deb5383be4f4b22733daa283ce8a370cb3664e5e1ac052860d45.json rename .sqlx/{query-ed3036952b6035ee5405564b9b39d7d1e17b200c02f0d2c7fea8f3352f390aff.json => query-1809658f299accd1508f8b0e135c1373d8d4c7cc00cc68369301da21483068ff.json} (55%) create mode 100644 .sqlx/query-25a11eaa79fc9d2bdc652dd2ce211a7ab0d84881a4cdd2af9491ff2197a696d0.json delete mode 100644 .sqlx/query-308b534191433ac9f4815dd4a3f5bdc12de3e301fc51881e9bb5d2b5e41a6893.json delete mode 100644 .sqlx/query-4095ed6e6c8c884650d3f2a0b5c592b32cdc3fed4a4c9b7bb4586e1a37057ee2.json create mode 100644 .sqlx/query-525cccfb8c504e5a2f1859303b72b62ad832b8b9ba1eb81820d26cc4ea4b4dbf.json rename .sqlx/{query-3df0a8bc40999444dd48a6eee73036ccf4d1d574aa67c7db7180671fd9a0b96a.json => query-57d52150eff656e039d699a4ee82c21b45b4253255e4f2343ccae3d2fea5b4e2.json} (54%) rename .sqlx/{query-1a56d9cdcfc118eb3d31969355475322c95421d1195765ba7a2490e446bc6e2d.json => query-90bc90ebc40a48197ec82f50cfdd49bb2f5608cd35c0a7f02c8e3a992eaeb453.json} (67%) rename .sqlx/{query-33f4917075aa208e638b04db719431096533afc0e66749dc0a275d4019cba9e2.json => query-a2f900c3beef9c71756144cd41849af20a0a7b6751916375bbf8ec09a4a69ad9.json} (64%) create mode 100644 .sqlx/query-acecad1292fd960a812613923b7d292c946c49c08ef936feeceddcaadaa34065.json rename .sqlx/{query-7d4a876be75a4dbe251c1482199fba8ac54c91d47a0f32e1e4194a802fbe53f4.json => query-be6bdd0bdd130a4c8ccbe22d245a75b9e21cffd74c7ad8fe0885ce34d5eb57c3.json} (67%) delete mode 100644 
.sqlx/query-c8f38288739dafe0f36ac13ba6bdd05a3df847d2bbb2fd4f28922a741dc91868.json create mode 100644 .sqlx/query-dadb1defa6f0225fa6226e1db99fd980df34e6b27cf3f0407a85bcd2a0a61a26.json delete mode 100644 .sqlx/query-e238744f8c6b032977668aa37fa0efae1104057c8a50ca75a4b324961535d3c5.json rename .sqlx/{query-8cde657283afccf23752f3ac0e71d156656d0f955b3bf67314a24bcebc7e6551.json => query-e689423740539eccaf76b7a981266ffaf334a1ebca3c66d9876569fe019bb303.json} (66%) create mode 100644 .sqlx/query-ffc7595568edfe8c3f956640a0180d5f67b932082657c7d1ebf6981f92ade736.json create mode 100644 src/migrations/v1.10.0.sql create mode 100644 src/model/worker.rs create mode 100644 src/webapp/admin-workers.html create mode 100644 src/worker.rs diff --git a/.sqlx/query-0c4a9fc1c579deb5383be4f4b22733daa283ce8a370cb3664e5e1ac052860d45.json b/.sqlx/query-0c4a9fc1c579deb5383be4f4b22733daa283ce8a370cb3664e5e1ac052860d45.json new file mode 100644 index 0000000..1af988d --- /dev/null +++ b/.sqlx/query-0c4a9fc1c579deb5383be4f4b22733daa283ce8a370cb3664e5e1ac052860d45.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "SELECT isDeprecated AS is_deprecated, targets, nativeTargets AS nativetargets, capabilities FROM Package WHERE name = $1 LIMIT 1", + "describe": { + "columns": [ + { + "name": "is_deprecated", + "ordinal": 0, + "type_info": "Bool" + }, + { + "name": "targets", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "nativetargets", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "capabilities", + "ordinal": 3, + "type_info": "Text" + } + ], + "parameters": { + "Right": 1 + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "0c4a9fc1c579deb5383be4f4b22733daa283ce8a370cb3664e5e1ac052860d45" +} diff --git a/.sqlx/query-ed3036952b6035ee5405564b9b39d7d1e17b200c02f0d2c7fea8f3352f390aff.json b/.sqlx/query-1809658f299accd1508f8b0e135c1373d8d4c7cc00cc68369301da21483068ff.json similarity index 55% rename from .sqlx/query-ed3036952b6035ee5405564b9b39d7d1e17b200c02f0d2c7fea8f3352f390aff.json rename to .sqlx/query-1809658f299accd1508f8b0e135c1373d8d4c7cc00cc68369301da21483068ff.json index 7013a0c..9e82521 100644 --- a/.sqlx/query-ed3036952b6035ee5405564b9b39d7d1e17b200c02f0d2c7fea8f3352f390aff.json +++ b/.sqlx/query-1809658f299accd1508f8b0e135c1373d8d4c7cc00cc68369301da21483068ff.json @@ -1,15 +1,15 @@ { "db_name": "SQLite", - "query": "SELECT isDeprecated AS is_deprecated, targets FROM Package WHERE name = $1 LIMIT 1", + "query": "SELECT targets, nativeTargets AS nativetargets FROM Package WHERE name = $1 LIMIT 1", "describe": { "columns": [ { - "name": "is_deprecated", + "name": "targets", "ordinal": 0, - "type_info": "Bool" + "type_info": "Text" }, { - "name": "targets", + "name": "nativetargets", "ordinal": 1, "type_info": "Text" } @@ -22,5 +22,5 @@ false ] }, - "hash": "ed3036952b6035ee5405564b9b39d7d1e17b200c02f0d2c7fea8f3352f390aff" + "hash": "1809658f299accd1508f8b0e135c1373d8d4c7cc00cc68369301da21483068ff" } diff --git a/.sqlx/query-25a11eaa79fc9d2bdc652dd2ce211a7ab0d84881a4cdd2af9491ff2197a696d0.json b/.sqlx/query-25a11eaa79fc9d2bdc652dd2ce211a7ab0d84881a4cdd2af9491ff2197a696d0.json new file mode 100644 index 0000000..9ca37c7 --- /dev/null +++ b/.sqlx/query-25a11eaa79fc9d2bdc652dd2ce211a7ab0d84881a4cdd2af9491ff2197a696d0.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "UPDATE Package SET targets = $2, nativeTargets = $3 WHERE name = $1", + "describe": { + "columns": [], + "parameters": { + "Right": 3 + }, + "nullable": [] + }, + "hash": 
"25a11eaa79fc9d2bdc652dd2ce211a7ab0d84881a4cdd2af9491ff2197a696d0" +} diff --git a/.sqlx/query-308b534191433ac9f4815dd4a3f5bdc12de3e301fc51881e9bb5d2b5e41a6893.json b/.sqlx/query-308b534191433ac9f4815dd4a3f5bdc12de3e301fc51881e9bb5d2b5e41a6893.json deleted file mode 100644 index 66f0005..0000000 --- a/.sqlx/query-308b534191433ac9f4815dd4a3f5bdc12de3e301fc51881e9bb5d2b5e41a6893.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "INSERT INTO DocGenJob (\n package, version, target, state,\n queuedOn, startedOn, finishedOn, lastUpdate,\n triggerUser, triggerEvent, output\n ) VALUES (\n $1, $2, $3, $4,\n $5, $5, $5, $5,\n $6, $7, ''\n ) RETURNING id", - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Integer" - } - ], - "parameters": { - "Right": 7 - }, - "nullable": [ - false - ] - }, - "hash": "308b534191433ac9f4815dd4a3f5bdc12de3e301fc51881e9bb5d2b5e41a6893" -} diff --git a/.sqlx/query-4095ed6e6c8c884650d3f2a0b5c592b32cdc3fed4a4c9b7bb4586e1a37057ee2.json b/.sqlx/query-4095ed6e6c8c884650d3f2a0b5c592b32cdc3fed4a4c9b7bb4586e1a37057ee2.json deleted file mode 100644 index 0329e56..0000000 --- a/.sqlx/query-4095ed6e6c8c884650d3f2a0b5c592b32cdc3fed4a4c9b7bb4586e1a37057ee2.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "INSERT INTO Package (name, lowercase, targets, isDeprecated) VALUES ($1, $2, '', FALSE)", - "describe": { - "columns": [], - "parameters": { - "Right": 2 - }, - "nullable": [] - }, - "hash": "4095ed6e6c8c884650d3f2a0b5c592b32cdc3fed4a4c9b7bb4586e1a37057ee2" -} diff --git a/.sqlx/query-525cccfb8c504e5a2f1859303b72b62ad832b8b9ba1eb81820d26cc4ea4b4dbf.json b/.sqlx/query-525cccfb8c504e5a2f1859303b72b62ad832b8b9ba1eb81820d26cc4ea4b4dbf.json new file mode 100644 index 0000000..e2a0984 --- /dev/null +++ b/.sqlx/query-525cccfb8c504e5a2f1859303b72b62ad832b8b9ba1eb81820d26cc4ea4b4dbf.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "INSERT INTO DocGenJob (\n package, version, target, useNative, capabilities, state,\n queuedOn, startedOn, finishedOn, lastUpdate,\n triggerUser, triggerEvent, output\n ) VALUES (\n $1, $2, $3, $4, $5, $6,\n $7, $7, $7, $7,\n $8, $9, ''\n ) RETURNING id", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 9 + }, + "nullable": [ + false + ] + }, + "hash": "525cccfb8c504e5a2f1859303b72b62ad832b8b9ba1eb81820d26cc4ea4b4dbf" +} diff --git a/.sqlx/query-3df0a8bc40999444dd48a6eee73036ccf4d1d574aa67c7db7180671fd9a0b96a.json b/.sqlx/query-57d52150eff656e039d699a4ee82c21b45b4253255e4f2343ccae3d2fea5b4e2.json similarity index 54% rename from .sqlx/query-3df0a8bc40999444dd48a6eee73036ccf4d1d574aa67c7db7180671fd9a0b96a.json rename to .sqlx/query-57d52150eff656e039d699a4ee82c21b45b4253255e4f2343ccae3d2fea5b4e2.json index 0bdc864..d429934 100644 --- a/.sqlx/query-3df0a8bc40999444dd48a6eee73036ccf4d1d574aa67c7db7180671fd9a0b96a.json +++ b/.sqlx/query-57d52150eff656e039d699a4ee82c21b45b4253255e4f2343ccae3d2fea5b4e2.json @@ -1,10 +1,10 @@ { "db_name": "SQLite", - "query": "SELECT targets FROM Package WHERE name = $1 LIMIT 1", + "query": "SELECT capabilities FROM Package WHERE name = $1 LIMIT 1", "describe": { "columns": [ { - "name": "targets", + "name": "capabilities", "ordinal": 0, "type_info": "Text" } @@ -16,5 +16,5 @@ false ] }, - "hash": "3df0a8bc40999444dd48a6eee73036ccf4d1d574aa67c7db7180671fd9a0b96a" + "hash": "57d52150eff656e039d699a4ee82c21b45b4253255e4f2343ccae3d2fea5b4e2" } diff --git 
a/.sqlx/query-1a56d9cdcfc118eb3d31969355475322c95421d1195765ba7a2490e446bc6e2d.json b/.sqlx/query-90bc90ebc40a48197ec82f50cfdd49bb2f5608cd35c0a7f02c8e3a992eaeb453.json similarity index 67% rename from .sqlx/query-1a56d9cdcfc118eb3d31969355475322c95421d1195765ba7a2490e446bc6e2d.json rename to .sqlx/query-90bc90ebc40a48197ec82f50cfdd49bb2f5608cd35c0a7f02c8e3a992eaeb453.json index 07a7870..bc36e55 100644 --- a/.sqlx/query-1a56d9cdcfc118eb3d31969355475322c95421d1195765ba7a2490e446bc6e2d.json +++ b/.sqlx/query-90bc90ebc40a48197ec82f50cfdd49bb2f5608cd35c0a7f02c8e3a992eaeb453.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id, package, version, target, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n ORDER BY id DESC", + "query": "SELECT id, package, version, target, useNative AS usenative, capabilities, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n ORDER BY id DESC", "describe": { "columns": [ { @@ -24,38 +24,48 @@ "type_info": "Text" }, { - "name": "state", + "name": "usenative", "ordinal": 4, + "type_info": "Bool" + }, + { + "name": "capabilities", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "state", + "ordinal": 6, "type_info": "Integer" }, { "name": "queued_on", - "ordinal": 5, + "ordinal": 7, "type_info": "Datetime" }, { "name": "started_on", - "ordinal": 6, + "ordinal": 8, "type_info": "Datetime" }, { "name": "finished_on", - "ordinal": 7, + "ordinal": 9, "type_info": "Datetime" }, { "name": "last_update", - "ordinal": 8, + "ordinal": 10, "type_info": "Datetime" }, { "name": "trigger_user", - "ordinal": 9, + "ordinal": 11, "type_info": "Integer" }, { "name": "trigger_event", - "ordinal": 10, + "ordinal": 12, "type_info": "Integer" } ], @@ -72,9 +82,11 @@ false, false, false, + false, + false, true, false ] }, - "hash": "1a56d9cdcfc118eb3d31969355475322c95421d1195765ba7a2490e446bc6e2d" + "hash": "90bc90ebc40a48197ec82f50cfdd49bb2f5608cd35c0a7f02c8e3a992eaeb453" } diff --git a/.sqlx/query-33f4917075aa208e638b04db719431096533afc0e66749dc0a275d4019cba9e2.json b/.sqlx/query-a2f900c3beef9c71756144cd41849af20a0a7b6751916375bbf8ec09a4a69ad9.json similarity index 64% rename from .sqlx/query-33f4917075aa208e638b04db719431096533afc0e66749dc0a275d4019cba9e2.json rename to .sqlx/query-a2f900c3beef9c71756144cd41849af20a0a7b6751916375bbf8ec09a4a69ad9.json index e7f09b9..f93097e 100644 --- a/.sqlx/query-33f4917075aa208e638b04db719431096533afc0e66749dc0a275d4019cba9e2.json +++ b/.sqlx/query-a2f900c3beef9c71756144cd41849af20a0a7b6751916375bbf8ec09a4a69ad9.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id, package, version, target, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE state = $1 AND package = $2 AND version = $3 AND target = $4\n ORDER BY id DESC\n LIMIT 1", + "query": "SELECT id, package, version, target, useNative AS usenative, capabilities, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE state = $1 AND package = $2 AND version = $3 AND target = $4\n ORDER BY id DESC\n LIMIT 1", 
"describe": { "columns": [ { @@ -24,38 +24,48 @@ "type_info": "Text" }, { - "name": "state", + "name": "usenative", "ordinal": 4, + "type_info": "Bool" + }, + { + "name": "capabilities", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "state", + "ordinal": 6, "type_info": "Integer" }, { "name": "queued_on", - "ordinal": 5, + "ordinal": 7, "type_info": "Datetime" }, { "name": "started_on", - "ordinal": 6, + "ordinal": 8, "type_info": "Datetime" }, { "name": "finished_on", - "ordinal": 7, + "ordinal": 9, "type_info": "Datetime" }, { "name": "last_update", - "ordinal": 8, + "ordinal": 10, "type_info": "Datetime" }, { "name": "trigger_user", - "ordinal": 9, + "ordinal": 11, "type_info": "Integer" }, { "name": "trigger_event", - "ordinal": 10, + "ordinal": 12, "type_info": "Integer" } ], @@ -72,9 +82,11 @@ false, false, false, + false, + false, true, false ] }, - "hash": "33f4917075aa208e638b04db719431096533afc0e66749dc0a275d4019cba9e2" + "hash": "a2f900c3beef9c71756144cd41849af20a0a7b6751916375bbf8ec09a4a69ad9" } diff --git a/.sqlx/query-acecad1292fd960a812613923b7d292c946c49c08ef936feeceddcaadaa34065.json b/.sqlx/query-acecad1292fd960a812613923b7d292c946c49c08ef936feeceddcaadaa34065.json new file mode 100644 index 0000000..a1fc5d0 --- /dev/null +++ b/.sqlx/query-acecad1292fd960a812613923b7d292c946c49c08ef936feeceddcaadaa34065.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "INSERT INTO Package (name, lowercase, targets, nativeTargets, capabilities, isDeprecated) VALUES ($1, $2, '', '', '', FALSE)", + "describe": { + "columns": [], + "parameters": { + "Right": 2 + }, + "nullable": [] + }, + "hash": "acecad1292fd960a812613923b7d292c946c49c08ef936feeceddcaadaa34065" +} diff --git a/.sqlx/query-7d4a876be75a4dbe251c1482199fba8ac54c91d47a0f32e1e4194a802fbe53f4.json b/.sqlx/query-be6bdd0bdd130a4c8ccbe22d245a75b9e21cffd74c7ad8fe0885ce34d5eb57c3.json similarity index 67% rename from .sqlx/query-7d4a876be75a4dbe251c1482199fba8ac54c91d47a0f32e1e4194a802fbe53f4.json rename to .sqlx/query-be6bdd0bdd130a4c8ccbe22d245a75b9e21cffd74c7ad8fe0885ce34d5eb57c3.json index 2f22361..0e081b7 100644 --- a/.sqlx/query-7d4a876be75a4dbe251c1482199fba8ac54c91d47a0f32e1e4194a802fbe53f4.json +++ b/.sqlx/query-be6bdd0bdd130a4c8ccbe22d245a75b9e21cffd74c7ad8fe0885ce34d5eb57c3.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id, package, version, target, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE state = $1\n ORDER BY id\n LIMIT 1", + "query": "SELECT id, package, version, target, useNative AS usenative, capabilities, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE id = $1\n LIMIT 1", "describe": { "columns": [ { @@ -24,38 +24,48 @@ "type_info": "Text" }, { - "name": "state", + "name": "usenative", "ordinal": 4, + "type_info": "Bool" + }, + { + "name": "capabilities", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "state", + "ordinal": 6, "type_info": "Integer" }, { "name": "queued_on", - "ordinal": 5, + "ordinal": 7, "type_info": "Datetime" }, { "name": "started_on", - "ordinal": 6, + "ordinal": 8, "type_info": "Datetime" }, { "name": "finished_on", - "ordinal": 7, + "ordinal": 9, "type_info": "Datetime" }, { "name": "last_update", - "ordinal": 8, + "ordinal": 10, "type_info": 
"Datetime" }, { "name": "trigger_user", - "ordinal": 9, + "ordinal": 11, "type_info": "Integer" }, { "name": "trigger_event", - "ordinal": 10, + "ordinal": 12, "type_info": "Integer" } ], @@ -72,9 +82,11 @@ false, false, false, + false, + false, true, false ] }, - "hash": "7d4a876be75a4dbe251c1482199fba8ac54c91d47a0f32e1e4194a802fbe53f4" + "hash": "be6bdd0bdd130a4c8ccbe22d245a75b9e21cffd74c7ad8fe0885ce34d5eb57c3" } diff --git a/.sqlx/query-c8f38288739dafe0f36ac13ba6bdd05a3df847d2bbb2fd4f28922a741dc91868.json b/.sqlx/query-c8f38288739dafe0f36ac13ba6bdd05a3df847d2bbb2fd4f28922a741dc91868.json deleted file mode 100644 index 1f15577..0000000 --- a/.sqlx/query-c8f38288739dafe0f36ac13ba6bdd05a3df847d2bbb2fd4f28922a741dc91868.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "UPDATE Package SET targets = $2 WHERE name = $1", - "describe": { - "columns": [], - "parameters": { - "Right": 2 - }, - "nullable": [] - }, - "hash": "c8f38288739dafe0f36ac13ba6bdd05a3df847d2bbb2fd4f28922a741dc91868" -} diff --git a/.sqlx/query-dadb1defa6f0225fa6226e1db99fd980df34e6b27cf3f0407a85bcd2a0a61a26.json b/.sqlx/query-dadb1defa6f0225fa6226e1db99fd980df34e6b27cf3f0407a85bcd2a0a61a26.json new file mode 100644 index 0000000..d3acd24 --- /dev/null +++ b/.sqlx/query-dadb1defa6f0225fa6226e1db99fd980df34e6b27cf3f0407a85bcd2a0a61a26.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "UPDATE Package SET capabilities = $2 WHERE name = $1", + "describe": { + "columns": [], + "parameters": { + "Right": 2 + }, + "nullable": [] + }, + "hash": "dadb1defa6f0225fa6226e1db99fd980df34e6b27cf3f0407a85bcd2a0a61a26" +} diff --git a/.sqlx/query-e238744f8c6b032977668aa37fa0efae1104057c8a50ca75a4b324961535d3c5.json b/.sqlx/query-e238744f8c6b032977668aa37fa0efae1104057c8a50ca75a4b324961535d3c5.json deleted file mode 100644 index 56e9fbc..0000000 --- a/.sqlx/query-e238744f8c6b032977668aa37fa0efae1104057c8a50ca75a4b324961535d3c5.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT package, version, targets\n FROM PackageVersion INNER JOIN Package ON PackageVersion.package = Package.name", - "describe": { - "columns": [ - { - "name": "package", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "version", - "ordinal": 1, - "type_info": "Text" - }, - { - "name": "targets", - "ordinal": 2, - "type_info": "Text" - } - ], - "parameters": { - "Right": 0 - }, - "nullable": [ - false, - false, - false - ] - }, - "hash": "e238744f8c6b032977668aa37fa0efae1104057c8a50ca75a4b324961535d3c5" -} diff --git a/.sqlx/query-8cde657283afccf23752f3ac0e71d156656d0f955b3bf67314a24bcebc7e6551.json b/.sqlx/query-e689423740539eccaf76b7a981266ffaf334a1ebca3c66d9876569fe019bb303.json similarity index 66% rename from .sqlx/query-8cde657283afccf23752f3ac0e71d156656d0f955b3bf67314a24bcebc7e6551.json rename to .sqlx/query-e689423740539eccaf76b7a981266ffaf334a1ebca3c66d9876569fe019bb303.json index 831c339..f06f4c8 100644 --- a/.sqlx/query-8cde657283afccf23752f3ac0e71d156656d0f955b3bf67314a24bcebc7e6551.json +++ b/.sqlx/query-e689423740539eccaf76b7a981266ffaf334a1ebca3c66d9876569fe019bb303.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id, package, version, target, state,\n queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE id = $1\n LIMIT 1", + "query": "SELECT id, package, version, target, useNative AS usenative, capabilities, state,\n queuedOn AS queued_on, 
startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update,\n triggerUser AS trigger_user, triggerEvent AS trigger_event\n FROM DocGenJob\n WHERE state = $1\n ORDER BY id\n LIMIT 1", "describe": { "columns": [ { @@ -24,38 +24,48 @@ "type_info": "Text" }, { - "name": "state", + "name": "usenative", "ordinal": 4, + "type_info": "Bool" + }, + { + "name": "capabilities", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "state", + "ordinal": 6, "type_info": "Integer" }, { "name": "queued_on", - "ordinal": 5, + "ordinal": 7, "type_info": "Datetime" }, { "name": "started_on", - "ordinal": 6, + "ordinal": 8, "type_info": "Datetime" }, { "name": "finished_on", - "ordinal": 7, + "ordinal": 9, "type_info": "Datetime" }, { "name": "last_update", - "ordinal": 8, + "ordinal": 10, "type_info": "Datetime" }, { "name": "trigger_user", - "ordinal": 9, + "ordinal": 11, "type_info": "Integer" }, { "name": "trigger_event", - "ordinal": 10, + "ordinal": 12, "type_info": "Integer" } ], @@ -72,9 +82,11 @@ false, false, false, + false, + false, true, false ] }, - "hash": "8cde657283afccf23752f3ac0e71d156656d0f955b3bf67314a24bcebc7e6551" + "hash": "e689423740539eccaf76b7a981266ffaf334a1ebca3c66d9876569fe019bb303" } diff --git a/.sqlx/query-ffc7595568edfe8c3f956640a0180d5f67b932082657c7d1ebf6981f92ade736.json b/.sqlx/query-ffc7595568edfe8c3f956640a0180d5f67b932082657c7d1ebf6981f92ade736.json new file mode 100644 index 0000000..3d15492 --- /dev/null +++ b/.sqlx/query-ffc7595568edfe8c3f956640a0180d5f67b932082657c7d1ebf6981f92ade736.json @@ -0,0 +1,44 @@ +{ + "db_name": "SQLite", + "query": "SELECT package, version, targets, nativeTargets AS nativetargets, capabilities\n FROM PackageVersion INNER JOIN Package ON PackageVersion.package = Package.name", + "describe": { + "columns": [ + { + "name": "package", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "version", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "targets", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "nativetargets", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "capabilities", + "ordinal": 4, + "type_info": "Text" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "ffc7595568edfe8c3f956640a0180d5f67b932082657c7d1ebf6981f92ade736" +} diff --git a/Cargo.lock b/Cargo.lock index 8d3f7c7..346d35c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] @@ -87,9 +87,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" [[package]] name = "async-trait" @@ -119,18 +119,19 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" 
[[package]] name = "axum" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f43644eed690f5374f1af436ecd6aea01cd201f6fbdf0178adaf6907afb2cec" +checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" dependencies = [ "async-trait", "axum-core", + "base64", "bytes", "futures-util", "http", @@ -149,8 +150,10 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", + "sha1", "sync_wrapper 1.0.1", "tokio", + "tokio-tungstenite", "tower", "tower-layer", "tower-service", @@ -159,9 +162,9 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6b8ba012a258d63c9adfa28b9ddcf66149da6f986c5b5452e629d5ee64bf00" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" dependencies = [ "async-trait", "bytes", @@ -248,15 +251,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" [[package]] name = "cc" -version = "1.1.21" +version = "1.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" +checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" dependencies = [ "shlex", ] @@ -288,7 +291,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", "stacker", ] @@ -411,6 +414,7 @@ dependencies = [ "tar", "tokio", "tokio-stream", + "tokio-tungstenite", "tokio-util", "urlencoding", "uuid", @@ -625,9 +629,9 @@ checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "fern" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9f0c14694cbd524c8720dd69b0e3179344f04ebb5f90f2e4a440c6ea3b2f1ee" +checksum = "69ff9c9d5fb3e6da8ac2f77ab76fe7e8087d512ce095200f8f29ac5b656cf6dc" dependencies = [ "log", ] @@ -652,9 +656,9 @@ checksum = "b3ea1ec5f8307826a5b71094dd91fc04d4ae75d5709b20ad351c7fb4815c86ec" [[package]] name = "flate2" -version = "1.0.33" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" dependencies = [ "crc32fast", "miniz_oxide", @@ -662,9 +666,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", @@ -688,9 +692,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = 
"65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -703,9 +707,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -713,15 +717,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -741,15 +745,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", @@ -758,21 +762,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -821,9 +825,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "gloo-timers" @@ -866,13 +870,19 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" + [[package]] name = "hashlink" version = "0.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -956,9 +966,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -968,9 +978,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", @@ -998,7 +1008,7 @@ dependencies = [ "hyper", "hyper-util", "rustls", - "rustls-native-certs 0.8.0", + "rustls-native-certs", "rustls-pki-types", "tokio", "tokio-rustls", @@ -1190,12 +1200,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.15.0", ] [[package]] @@ -1209,9 +1219,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "itoa" @@ -1221,9 +1231,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] @@ -1239,9 +1249,9 @@ dependencies = [ [[package]] name = "lettre" -version = "0.11.9" +version = "0.11.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69f204773bab09b150320ea1c83db41dc6ee606a4bc36dc1f43005fe7b58ce06" +checksum = "0161e452348e399deb685ba05e55ee116cae9410f4f51fe42d597361444521d9" dependencies = [ "async-trait", "base64", @@ -1269,9 +1279,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.159" +version = "0.2.161" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" [[package]] name = "libm" @@ -1449,18 +1459,18 @@ dependencies = [ [[package]] name = "object" -version = "0.36.4" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "opaque-debug" @@ -1470,9 +1480,9 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "opendal" -version = "0.50.0" +version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36e44fc43be9ffe18dad3e3ef9d61c1ae01991ee6f1c8c026978c35777a711bf" +checksum = "213222b6c86949314d8f51acb26d8241e7c8dd0879b016a79471d49f21ee592f" dependencies = [ "anyhow", "async-trait", @@ -1511,7 +1521,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" dependencies = [ "dlv-list", - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -1566,9 +1576,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -1632,9 +1642,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" dependencies = [ "unicode-ident", ] @@ -1763,9 +1773,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.5" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62871f2d65009c0256aed1b9cfeeb8ac272833c404e13d53d400cd0dad7a2ac0" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags", ] @@ -1800,9 +1810,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" +checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" dependencies = [ "base64", "bytes", @@ -1823,7 +1833,7 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-native-certs 0.7.3", + "rustls-native-certs", "rustls-pemfile", "rustls-pki-types", "serde", @@ -1925,9 +1935,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.13" +version = "0.23.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" dependencies = [ "log", "once_cell", @@ -1938,19 +1948,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls-native-certs" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" -dependencies = [ - "openssl-probe", - "rustls-pemfile", - "rustls-pki-types", - "schannel", - "security-framework", -] - [[package]] name = "rustls-native-certs" version = "0.8.0" @@ -1966,19 +1963,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.3" 
+version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-webpki" @@ -1993,9 +1989,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" @@ -2005,9 +2001,9 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "schannel" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" dependencies = [ "windows-sys 0.59.0", ] @@ -2052,18 +2048,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.210" +version = "1.0.213" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "3ea7893ff5e2466df8d720bb615088341b295f849602c6956047f8f80f0e9bc1" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.213" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "7e85ad2009c50b58e87caa8cd6dac16bdf511bbfb7af6c33df902396aa480fa5" dependencies = [ "proc-macro2", "quote", @@ -2072,9 +2068,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "itoa", "memchr", @@ -2240,7 +2236,7 @@ dependencies = [ "futures-intrusive", "futures-io", "futures-util", - "hashbrown", + "hashbrown 0.14.5", "hashlink", "hex", "indexmap", @@ -2447,9 +2443,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.77" +version = "2.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" dependencies = [ "proc-macro2", "quote", @@ -2495,9 +2491,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" dependencies = [ "cfg-if", "fastrand", @@ -2508,18 +2504,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.64" 
+version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" dependencies = [ "proc-macro2", "quote", @@ -2593,9 +2589,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" dependencies = [ "backtrace", "bytes", @@ -2642,6 +2638,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + [[package]] name = "tokio-util" version = "0.7.12" @@ -2727,6 +2735,24 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand", + "sha1", + "thiserror", + "utf-8", +] + [[package]] name = "typenum" version = "1.17.0" @@ -2735,9 +2761,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" @@ -2756,9 +2782,9 @@ dependencies = [ [[package]] name = "unicode-properties" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ea75f83c0137a9b98608359a5f1af8144876eb67bcb1ce837368e906a9f524" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode_categories" @@ -2799,6 +2825,12 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf16_iter" version = "1.0.5" @@ -2813,9 +2845,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom", "rand", @@ -2857,9 +2889,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", @@ -2868,9 +2900,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", @@ -2883,9 +2915,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.43" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", @@ -2895,9 +2927,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2905,9 +2937,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", @@ -2918,15 +2950,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" dependencies = [ "futures-util", "js-sys", @@ -2937,9 +2969,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index b1e137e..8624470 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ edition = "2021" [dependencies] # logging log = "0.4" -fern = "0.6" +fern = "0.7" # serde serde = "1.0" @@ -47,5 +47,6 @@ tokio-util = {version = "0.7", features = ["io"]} # framework for the application 
lettre = { version = "0.11", default-features = false, features = ["builder", "smtp-transport", "rustls-tls", "tokio1", "tokio1-rustls-tls"] } sqlx = { version = "0.8", default-features = false, features = ["runtime-tokio-rustls", "sqlite", "macros", "chrono"] } -axum = { version = "0.7", features = ["http2"] } +axum = { version = "0.7", features = ["http2", "ws"] } reqwest = { version = "0.12", default-features = false, features = ["stream", "rustls-tls", "rustls-tls-native-roots"] } +tokio-tungstenite = "0.24" diff --git a/docker-compose.yml b/docker-compose.yml index 5ff1761..ff3987a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -55,3 +55,5 @@ services: # REGISTRY_EMAIL_SENDER: # REGISTRY_EMAIL_CC: # REGISTRY_SELF_LOCAL_NAME: localhost + # REGISTRY_NODE_ROLE: standalone + # REGISTRY_NODE_WORKER_TOKEN: super secret token diff --git a/src/application.rs b/src/application.rs index 45d54e3..f3baf0a 100644 --- a/src/application.rs +++ b/src/application.rs @@ -18,8 +18,9 @@ use crate::model::cargo::{ use crate::model::config::Configuration; use crate::model::deps::DepsAnalysis; use crate::model::docs::{DocGenEvent, DocGenJob, DocGenJobSpec, DocGenTrigger}; -use crate::model::packages::CrateInfo; +use crate::model::packages::{CrateInfo, CrateInfoTarget}; use crate::model::stats::{DownloadStats, GlobalStats}; +use crate::model::worker::{WorkerEvent, WorkerPublicData, WorkersManager}; use crate::model::{AppEvent, CrateVersion, RegistryInformation}; use crate::services::database::{db_transaction_read, db_transaction_write, Database}; use crate::services::deps::DepsChecker; @@ -29,7 +30,7 @@ use crate::services::index::Index; use crate::services::rustsec::RustSecChecker; use crate::services::storage::Storage; use crate::services::ServiceProvider; -use crate::utils::apierror::{error_invalid_request, error_unauthorized, specialize, ApiError}; +use crate::utils::apierror::{error_forbidden, error_invalid_request, error_unauthorized, specialize, ApiError}; use crate::utils::axum::auth::{AuthData, Token}; use crate::utils::db::RwSqlitePool; @@ -55,6 +56,8 @@ pub struct Application { service_docs_generator: Arc, /// Sender to use to notify about events that will be asynchronously handled app_events_sender: Sender<AppEvent>, + /// The connected worker nodes + pub worker_nodes: WorkersManager, } /// The empty database @@ -62,9 +65,9 @@ const DB_EMPTY: &[u8] = include_bytes!("empty.db"); impl Application { /// Creates a new application - pub async fn launch<P: ServiceProvider>() -> Result<Arc<Self>, ApiError> { + pub async fn launch<P: ServiceProvider>(configuration: Configuration) -> Result<Arc<Self>, ApiError> { // load configuration - let configuration = Arc::new(P::get_configuration().await?); + let configuration = Arc::new(configuration); // connection pool to the database let db_filename = configuration.get_database_filename(); @@ -80,6 +83,8 @@ impl Application { }) .await?; + let worker_nodes = WorkersManager::default(); + let db_is_empty = db_transaction_read(&service_db_pool, |database| async move { database.get_is_empty().await }).await?; let service_storage = P::get_storage(&configuration.deref().clone()); let service_index = P::get_index(&configuration, db_is_empty).await?; let service_rustsec = P::get_rustsec(&configuration); let service_deps_checker = P::get_deps_checker(configuration.clone(), service_index.clone(), service_rustsec.clone()); let service_email_sender = P::get_email_sender(configuration.clone()); - let service_docs_generator = - P::get_docs_generator(configuration.clone(), service_db_pool.clone(), service_storage.clone()); + let service_docs_generator = 
P::get_docs_generator( + configuration.clone(), + service_db_pool.clone(), + service_storage.clone(), + worker_nodes.clone(), + ); // check undocumented packages let default_target = &configuration.self_toolchain_host; @@ -131,6 +140,7 @@ impl Application { service_email_sender, service_docs_generator, app_events_sender, + worker_nodes, }); let _handle = { @@ -250,11 +260,32 @@ impl Application { Ok(RegistryInformation { registry_name: self.configuration.self_local_name.clone(), toolchain_host: self.configuration.self_toolchain_host.clone(), - toolchain_version: self.configuration.self_toolchain_version.clone(), - toolchain_targets: self.configuration.self_builtin_targets.clone(), + toolchain_version_stable: self.configuration.self_toolchain_version_stable.clone(), + toolchain_version_nightly: self.configuration.self_toolchain_version_nightly.clone(), + toolchain_targets: self.configuration.self_known_targets.clone(), }) } + /// Gets the connected worker nodes + pub async fn get_workers(&self, auth_data: &AuthData) -> Result<Vec<WorkerPublicData>, ApiError> { + let authentication = self.authenticate(auth_data).await?; + if !authentication.can_admin { + return Err(error_forbidden()); + } + Ok(self.worker_nodes.get_workers()) + } + + /// Adds a listener for worker updates + pub async fn get_workers_updates(&self, auth_data: &AuthData) -> Result<Receiver<WorkerEvent>, ApiError> { + let authentication = self.authenticate(auth_data).await?; + if !authentication.can_admin { + return Err(error_forbidden()); + } + let (sender, receiver) = channel(16); + self.worker_nodes.add_listener(sender).await; + Ok(receiver) + } + /// Gets the data about the current user pub async fn get_current_user(&self, auth_data: &AuthData) -> Result { self.db_transaction_read(|app| async move { @@ -404,7 +435,7 @@ impl Application { let package = CrateUploadData::new(content)?; let index_data = package.build_index_data(); - let (user, result, targets) = { + let (user, result, targets, capabilities) = { let package = &package; self.db_transaction_write("publish_crate_version", |app| async move { let authentication = app.authenticate(auth_data).await?; @@ -414,27 +445,33 @@ impl Application { let result = app.database.publish_crate_version(user.id, package).await?; let mut targets = app.database.get_crate_targets(&package.metadata.name).await?; if targets.is_empty() { - targets.push(self.configuration.self_toolchain_host.clone()); + targets.push(CrateInfoTarget { + target: self.configuration.self_toolchain_host.clone(), + docs_use_native: true, + }); } - for target in &targets { + for info in &targets { app.database - .set_crate_documentation(&package.metadata.name, &package.metadata.vers, target, false, false) + .set_crate_documentation(&package.metadata.name, &package.metadata.vers, &info.target, false, false) .await?; } - Ok::<_, ApiError>((user, result, targets)) + let capabilities = app.database.get_crate_required_capabilities(&package.metadata.name).await?; + Ok::<_, ApiError>((user, result, targets, capabilities)) }) .await }?; self.service_storage.store_crate(&package.metadata, package.content).await?; self.service_index.publish_crate_version(&index_data).await?; - for target in targets { + for info in targets { self.service_docs_generator .queue( &DocGenJobSpec { package: index_data.name.clone(), version: index_data.vers.clone(), - target, + target: info.target, + use_native: info.docs_use_native, + capabilities: capabilities.clone(), }, &DocGenTrigger::Upload { by: user.clone() }, ) @@ -566,7 +603,7 @@ impl Application { package: &str, version: &str, ) -> 
Result<Vec<DocGenJob>, ApiError> { - let (user, targets) = self + let (user, targets, capabilities) = self .db_transaction_write("regen_crate_version_doc", |app| async move { let authentication = app.authenticate(auth_data).await?; let principal_uid = app.check_can_manage_crate(&authentication, package).await?; let user = app.database.get_user_profile(principal_uid).await?; let targets = app .database .regen_crate_version_doc(package, version, &self.configuration.self_toolchain_host) .await?; - Ok::<_, ApiError>((user, targets)) + let capabilities = app.database.get_crate_required_capabilities(package).await?; + Ok::<_, ApiError>((user, targets, capabilities)) }) .await?; let mut jobs = Vec::new(); - for target in targets { + for info in targets { jobs.push( self.service_docs_generator .queue( &DocGenJobSpec { package: package.to_string(), version: version.to_string(), - target, + target: info.target, + use_native: info.docs_use_native, + capabilities: capabilities.clone(), }, &DocGenTrigger::Manual { by: user.clone() }, ) @@ -655,7 +695,7 @@ impl Application { } /// Gets the targets for a crate - pub async fn get_crate_targets(&self, auth_data: &AuthData, package: &str) -> Result<Vec<String>, ApiError> { + pub async fn get_crate_targets(&self, auth_data: &AuthData, package: &str) -> Result<Vec<CrateInfoTarget>, ApiError> { self.db_transaction_read(|app| async move { let _authentication = app.authenticate(auth_data).await?; app.database.get_crate_targets(package).await }) .await } /// Sets the targets for a crate - pub async fn set_crate_targets(&self, auth_data: &AuthData, package: &str, targets: &[String]) -> Result<(), ApiError> { + pub async fn set_crate_targets( + &self, + auth_data: &AuthData, + package: &str, + targets: &[CrateInfoTarget], + ) -> Result<(), ApiError> { let (user, jobs) = self .db_transaction_write("set_crate_targets", |app| async move { let authentication = app.authenticate(auth_data).await?; let principal_uid = app.check_can_manage_crate(&authentication, package).await?; let user = app.database.get_user_profile(principal_uid).await?; - for target in targets { - if !self.configuration.self_builtin_targets.contains(target) { - return Err(specialize(error_invalid_request(), format!("Unknown target: {target}"))); + for info in targets { + if !self.configuration.self_known_targets.contains(&info.target) { + return Err(specialize( + error_invalid_request(), + format!("Unknown target: {}", info.target), + )); } } let jobs = app.database.set_crate_targets(package, targets).await?; @@ -692,6 +740,32 @@ impl Application { Ok(()) } + /// Gets the required capabilities for a crate + pub async fn get_crate_required_capabilities(&self, auth_data: &AuthData, package: &str) -> Result<Vec<String>, ApiError> { + self.db_transaction_read(|app| async move { + let _authentication = app.authenticate(auth_data).await?; + app.database.get_crate_required_capabilities(package).await + }) + .await + } + + /// Sets the required capabilities for a crate + pub async fn set_crate_required_capabilities( + &self, + auth_data: &AuthData, + package: &str, + capabilities: &[String], + ) -> Result<(), ApiError> { + self.db_transaction_write("set_crate_required_capabilities", |app| async move { + let authentication = app.authenticate(auth_data).await?; + let _ = app.check_can_manage_crate(&authentication, package).await?; + app.database.set_crate_required_capabilities(package, capabilities).await?; + Ok::<_, ApiError>(()) + }) + .await?; + Ok(()) + } + /// Sets the deprecation status on a crate pub async fn set_crate_deprecation(&self, auth_data: &AuthData, package: 
&str, deprecated: bool) -> Result<(), ApiError> { self.db_transaction_write("set_crate_deprecation", |app| async move { @@ -740,6 +814,7 @@ impl Application { app.database.get_crate_targets(package).await }) .await?; + let targets = targets.into_iter().map(|info| info.target).collect::>(); self.service_deps_checker.check_crate(package, version, &targets).await } } diff --git a/src/empty.db b/src/empty.db index cf52271269b06442df42762a64bb693c123b1a13..02b37e55e8ce9150bb5b0ad40ff91de0a17035ec 100644 GIT binary patch delta 183 zcmZoTz}j$tbwZ{t!(s-06aL+NXL+yiHt`DaEaDF3dceiRagXyd$0YXk>^`hcEZ3PA zGixyXVpu#`oH2#7MO%bjTwb2BV{!}QC63bKRKLWM%(BV(JW`wmiAk9`nI)O2ljn1a zZVqEQ#l~60%`Pr2&Dd-`S(CGyJrATbHDvNEPATr>M7Z+J^EuPmSl%)4zuPRR@PuDL dfRR~RvMjZzI5R)bP|why`JepufAWk>4ghctIn)3E delta 134 zcmV;10D1p_zy*N71(0bi0VDtqG!MHD%?;EIf(;T3q6<|D;0gi>9|_Y4(+BVejRk`R z7z5-3X#slyB$E*kj*+8R34?1C2Nxe70f&}4{vQDXKtMMq&;S4c diff --git a/src/main.rs b/src/main.rs index 392509b..6c89315 100644 --- a/src/main.rs +++ b/src/main.rs @@ -21,6 +21,7 @@ use log::info; use crate::application::Application; use crate::routes::AxumState; +use crate::services::ServiceProvider; use crate::utils::sigterm::waiting_sigterm; pub mod application; @@ -30,6 +31,7 @@ pub mod routes; pub mod services; pub mod utils; pub mod webapp; +pub mod worker; #[cfg(test)] mod tests; @@ -108,7 +110,10 @@ async fn main_serve_app(application: Arc, cookie_key: Key) -> Resul ) .route("/jobs/docgen", get(routes::api_v1_get_doc_gen_jobs)) .route("/jobs/docgen/updates", get(routes::api_v1_get_doc_gen_job_updates)) - .route("/jobs/docgen/:job_id/log", get(routes::api_v1_get_doc_gen_job_log)), + .route("/jobs/docgen/:job_id/log", get(routes::api_v1_get_doc_gen_job_log)) + .route("/workers", get(routes::api_v1_get_workers)) + .route("/workers/updates", get(routes::api_v1_get_workers_updates)) + .route("/workers/connect", get(routes::api_v1_worker_connect)), ) .nest( "/crates", @@ -132,6 +137,11 @@ async fn main_serve_app(application: Arc, cookie_key: Key) -> Resul .route("/:package/owners", delete(routes::api_v1_cargo_remove_crate_owners)) .route("/:package/targets", get(routes::api_v1_get_crate_targets)) .route("/:package/targets", patch(routes::api_v1_set_crate_targets)) + .route("/:package/capabilities", get(routes::api_v1_get_crate_required_capabilities)) + .route( + "/:package/capabilities", + patch(routes::api_v1_set_crate_required_capabilities), + ) .route("/:package/deprecated", patch(routes::api_v1_set_crate_deprecation)), ), ) @@ -180,16 +190,19 @@ fn setup_log() { async fn main() { setup_log(); info!("{} commit={} tag={}", CRATE_NAME, GIT_HASH, GIT_TAG); - - let application = Application::launch::().await.unwrap(); - - let cookie_key = Key::from( - std::env::var("REGISTRY_WEB_COOKIE_SECRET") - .expect("REGISTRY_WEB_COOKIE_SECRET must be set") - .as_bytes(), - ); - - let server = pin!(main_serve_app(application, cookie_key,)); - - let _ = waiting_sigterm(server).await; + let configuration = services::StandardServiceProvider::get_configuration().await.unwrap(); + if configuration.self_role.is_master() { + let application = Application::launch::(configuration) + .await + .unwrap(); + let cookie_key = Key::from( + std::env::var("REGISTRY_WEB_COOKIE_SECRET") + .expect("REGISTRY_WEB_COOKIE_SECRET must be set") + .as_bytes(), + ); + let server = pin!(main_serve_app(application, cookie_key,)); + let _ = waiting_sigterm(server).await; + } else { + let _ = waiting_sigterm(pin!(worker::main_worker(configuration))).await; + } } diff --git a/src/migrations/mod.rs 
b/src/migrations/mod.rs index cd80b26..998de51 100644 --- a/src/migrations/mod.rs +++ b/src/migrations/mod.rs @@ -54,6 +54,10 @@ const MIGRATIONS: &[Migration<'static>] = &[ target: "1.9.0", content: MigrationContent::Sql(include_bytes!("v1.9.0.sql")), }, + Migration { + target: "1.10.0", + content: MigrationContent::Sql(include_bytes!("v1.10.0.sql")), + }, ]; /// Gets the value for the metadata item diff --git a/src/migrations/v1.10.0.sql b/src/migrations/v1.10.0.sql new file mode 100644 index 0000000..a95f0ae --- /dev/null +++ b/src/migrations/v1.10.0.sql @@ -0,0 +1,9 @@ +ALTER TABLE Package + ADD COLUMN nativeTargets TEXT NOT NULL DEFAULT ''; +ALTER TABLE Package + ADD COLUMN capabilities TEXT NOT NULL DEFAULT ''; + +ALTER TABLE DocGenJob + ADD COLUMN useNative BOOLEAN NOT NULL DEFAULT FALSE; +ALTER TABLE DocGenJob + ADD COLUMN capabilities TEXT NOT NULL DEFAULT ''; diff --git a/src/model/config.rs b/src/model/config.rs index d21c8c8..a6ac977 100644 --- a/src/model/config.rs +++ b/src/model/config.rs @@ -17,8 +17,10 @@ use tokio::fs::File; use tokio::io::{AsyncWriteExt, BufWriter}; use tokio::process::Command; +use super::{CHANNEL_NIGHTLY, CHANNEL_STABLE}; use crate::model::errors::MissingEnvVar; -use crate::utils::apierror::ApiError; +use crate::utils::apierror::{error_backend_failure, specialize, ApiError}; +use crate::utils::comma_sep_to_vec; use crate::utils::token::generate_token; /// Gets the value for an environment variable @@ -39,6 +41,18 @@ pub enum ExternalRegistryProtocol { Sparse, } +impl ExternalRegistryProtocol { + /// Gets the protocol + #[must_use] + pub fn new(sparse: bool) -> Self { + if sparse { + Self::Sparse + } else { + Self::Git + } + } +} + /// The configuration for an external registry #[derive(Debug, Serialize, Deserialize, Clone)] pub struct ExternalRegistry { @@ -253,8 +267,82 @@ impl EmailConfig { } } +/// The configuration specific to master nodes +#[derive(Debug, Default, Serialize, Deserialize, Clone)] +pub struct NodeRoleMaster { + /// The token that workers need to use to connect to the master + #[serde(rename = "workerToken")] + pub worker_token: Option, +} + +/// The configuration specific to worker nodes +#[derive(Debug, Default, Serialize, Deserialize, Clone)] +pub struct NodeRoleWorker { + /// The user-friendly name of the worker + pub name: String, + /// The token that workers need to use to connect to the master + #[serde(rename = "workerToken")] + pub worker_token: String, + /// The URI to connect to the master + #[serde(rename = "masterUri")] + pub master_uri: String, + /// The declared capabilities for the worker + pub capabilities: Vec, +} + +/// The configuration about the role of a node +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum NodeRole { + /// For a standalone node, i.e. a master without workers + Standalone, + /// The master-specific configuration + Master(NodeRoleMaster), + /// The worker-specific configuration + Worker(NodeRoleWorker), +} + +impl NodeRole { + /// Loads the configuration for a registry from the environment + fn from_env() -> Result { + let role_name = get_var("REGISTRY_NODE_ROLE").ok(); + match role_name.as_deref() { + Some("master") => Ok(Self::Master(NodeRoleMaster { + worker_token: get_var("REGISTRY_NODE_WORKER_TOKEN").ok(), + })), + Some("worker") => Ok(Self::Worker(NodeRoleWorker { + name: get_var("REGISTRY_NODE_WORKER_NAME")?, + worker_token: get_var("REGISTRY_NODE_WORKER_TOKEN")?, + master_uri: get_var("REGISTRY_NODE_MASTER_URI")?, + capabilities: get_var("REGISTRY_NODE_WORKER_CAPABILITIES") + .ok() + .as_deref() + .map(comma_sep_to_vec) + .unwrap_or_default(), + })), + _ => Ok(Self::Standalone), + } + }
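+    // Illustrative: the environment a worker node could be launched with,
+    // mirroring from_env above (all concrete values here are hypothetical):
+    //   REGISTRY_NODE_ROLE=worker
+    //   REGISTRY_NODE_WORKER_NAME=builder-1
+    //   REGISTRY_NODE_WORKER_TOKEN=<the same secret configured on the master>
+    //   REGISTRY_NODE_MASTER_URI=<the master's public URI>
+    //   REGISTRY_NODE_WORKER_CAPABILITIES=gpu,bigmem
+    // A master that accepts workers instead sets REGISTRY_NODE_ROLE=master and
+    // REGISTRY_NODE_WORKER_TOKEN; any other role value falls back to Standalone.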
+ + /// Gets the token that workers need to use to connect to the master, if any + #[must_use] + pub fn get_worker_token(&self) -> Option<&str> { + match self { + Self::Standalone => None, + Self::Master(master_config) => master_config.worker_token.as_deref(), + Self::Worker(worker_config) => Some(&worker_config.worker_token), + } + } + + /// Gets whether this configuration is for a master node + #[must_use] + pub fn is_master(&self) -> bool { + matches!(self, Self::Master(_)) + } +} + /// A configuration for the registry #[derive(Debug, Serialize, Deserialize, Clone)]
Default for Configuration { self_local_name: String::from("localhost"), self_service_login: String::new(), self_service_token: String::new(), - self_toolchain_version: String::new(), + self_toolchain_version_stable: semver::Version::new(0, 0, 0), + self_toolchain_version_nightly: semver::Version::new(0, 0, 0), self_toolchain_host: String::new(), - self_builtin_targets: Vec::new(), + self_known_targets: Vec::new(), + self_installed_targets: Vec::new(), + self_installable_targets: Vec::new(), + self_role: NodeRole::Master(NodeRoleMaster::default()), } } } @@ -462,6 +570,7 @@ impl Configuration { external_registries.push(registry); external_registry_index += 1; } + let self_role = NodeRole::from_env()?; Ok(Self { log_level: get_var("REGISTRY_LOG_LEVEL").unwrap_or_else(|_| String::from("INFO")), log_datetime_format: get_var("REGISTRY_LOG_DATE_TIME_FORMAT") @@ -497,6 +606,9 @@ impl Configuration { oauth_client_secret: get_var("REGISTRY_OAUTH_CLIENT_SECRET")?, oauth_client_scope: get_var("REGISTRY_OAUTH_CLIENT_SCOPE")?, docs_gen_mock: get_var("REGISTRY_DOCS_GEN_MOCK").map(|v| v == "true").unwrap_or(false), + docs_autoinstall_targets: get_var("REGISTRY_DOCS_AUTOINSTALL_TARGETS") + .map(|v| v == "true") + .unwrap_or(false), deps_check_period: get_var("REGISTRY_DEPS_CHECK_PERIOD") .map(|s| s.parse().expect("invalid REGISTRY_DEPS_CHECK_PERIOD")) .unwrap_or(60), // 1 minute @@ -512,9 +624,13 @@ impl Configuration { self_local_name, self_service_login: generate_token(16), self_service_token: generate_token(64), - self_toolchain_version: get_rustc_version().await, + self_toolchain_version_stable: get_rustc_version(CHANNEL_STABLE).await, + self_toolchain_version_nightly: get_rustc_version(CHANNEL_NIGHTLY).await, self_toolchain_host: get_rustc_host().await, - self_builtin_targets: get_builtin_targets().await, + self_known_targets: get_known_targets().await, + self_installed_targets: get_installed_targets(CHANNEL_NIGHTLY).await, + self_installable_targets: get_installable_targets(CHANNEL_NIGHTLY).await, + self_role, external_registries, }) } @@ -702,24 +818,41 @@ impl Configuration { writer.flush().await?; Ok(()) } + + /// Gets the configuration to connect to this registry from the outside + #[must_use] + pub fn get_self_as_external(&self) -> ExternalRegistry { + ExternalRegistry { + name: self.self_local_name.clone(), + index: if self.index.allow_protocol_sparse { + format!("{}/", self.web_public_uri) + } else { + self.web_public_uri.clone() + }, + protocol: ExternalRegistryProtocol::new(self.index.allow_protocol_sparse), + docs_root: format!("{}/docs", self.web_public_uri), + login: self.self_service_login.clone(), + token: self.self_service_token.clone(), + } + } } /// Gets the rustc version -async fn get_rustc_version() -> String { +async fn get_rustc_version(channel: &'static str) -> semver::Version { let child = Command::new("rustc") - .args(["+stable", "--version"]) + .args([channel, "--version"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); let output = child.wait_with_output().await.unwrap(); let output = String::from_utf8(output.stdout).unwrap(); - output.split_ascii_whitespace().nth(1).unwrap().to_string() + output.split_ascii_whitespace().nth(1).unwrap().parse().unwrap() } async fn get_rustc_host() -> String { let child = Command::new("rustc") - .args(["+stable", "-vV"]) + .args([CHANNEL_STABLE, "-vV"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() @@ -732,9 +865,21 @@ async fn get_rustc_host() -> String { .unwrap() } -async fn get_builtin_targets() -> Vec { 
+async fn get_known_targets() -> Vec { let child = Command::new("rustc") - .args(["+stable", "--print", "target-list"]) + .args([CHANNEL_STABLE, "--print", "target-list"]) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .unwrap(); + let output = child.wait_with_output().await.unwrap(); + let output = String::from_utf8(output.stdout).unwrap(); + output.lines().map(str::to_string).collect() +} + +pub async fn get_installed_targets(channel: &'static str) -> Vec { + let child = Command::new("rustup") + .args([channel, "target", "list", "--installed"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() @@ -743,3 +888,34 @@ async fn get_builtin_targets() -> Vec { let output = String::from_utf8(output.stdout).unwrap(); output.lines().map(str::to_string).collect() } + +async fn get_installable_targets(channel: &'static str) -> Vec { + let child = Command::new("rustup") + .args([channel, "target", "list"]) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .unwrap(); + let output = child.wait_with_output().await.unwrap(); + let output = String::from_utf8(output.stdout).unwrap(); + output.lines().map(str::to_string).collect() +} + +/// Attempts to install a target +pub async fn install_target(channel: &'static str, target: &str) -> Result<(), ApiError> { + let child = Command::new("rustup") + .args([channel, "target", "add", target]) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .unwrap(); + let output = child.wait_with_output().await.unwrap(); + if output.status.success() { + Ok(()) + } else { + Err(specialize( + error_backend_failure(), + format!("Failed to install target {target} for channel {channel}"), + )) + } +} diff --git a/src/model/docs.rs b/src/model/docs.rs index 2fe3036..f048bbb 100644 --- a/src/model/docs.rs +++ b/src/model/docs.rs @@ -8,6 +8,7 @@ use chrono::NaiveDateTime; use serde_derive::{Deserialize, Serialize}; use super::cargo::RegistryUser; +use super::worker::WorkerSelector; /// The specification for a documentation generation job #[derive(Debug, Clone, Serialize, Deserialize)] @@ -18,6 +19,10 @@ pub struct DocGenJobSpec { pub version: String, /// The targets for the crate pub target: String, + /// Whether to use a native toolchain for the target + pub use_native: bool, + /// The required capabilities + pub capabilities: Vec, } /// The state of a documentation generation job @@ -120,6 +125,10 @@ pub struct DocGenJob { pub version: String, /// The targets for the crate pub target: String, + /// Whether to use a native toolchain for the target + pub use_native: bool, + /// The required capabilities + pub capabilities: Vec, /// The state of the job pub state: DocGenJobState, /// Timestamp when the job was queued @@ -138,6 +147,20 @@ pub struct DocGenJob { pub trigger: DocGenTrigger, } +impl DocGenJob { + /// Gets the worker selector for this job + #[must_use] + pub fn get_worker_selector(&self) -> WorkerSelector { + let mut selector = if self.use_native { + WorkerSelector::new_native_target(self.target.clone()) + } else { + WorkerSelector::new_available_target(self.target.clone()) + }; + selector.capabilities.clone_from(&self.capabilities); + selector + } +} + /// An update to a documentation generation job #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DocGenJobUpdate { diff --git a/src/model/mod.rs b/src/model/mod.rs index 8ddf56c..38971e7 100644 --- a/src/model/mod.rs +++ b/src/model/mod.rs @@ -14,6 +14,7 @@ pub mod namegen; pub mod osv; pub mod packages; pub mod stats; +pub mod worker; use auth::TokenUsage; use 
serde_derive::{Deserialize, Serialize}; @@ -34,8 +35,11 @@ pub struct RegistryInformation { #[serde(rename = "registryName")] pub registry_name: String, /// The version of the locally installed toolchain - #[serde(rename = "toolchainVersion")] - pub toolchain_version: String, + #[serde(rename = "toolchainVersionStable")] + pub toolchain_version_stable: semver::Version, + /// The version of the locally installed nightly toolchain + #[serde(rename = "toolchainVersionNightly")] + pub toolchain_version_nightly: semver::Version, /// The host target of the locally installed toolchain #[serde(rename = "toolchainHost")] pub toolchain_host: String, @@ -61,3 +65,9 @@ pub enum AppEvent { /// The download of a crate CrateDownload(CrateVersion), } + +/// The modifier for the stable channel +pub const CHANNEL_STABLE: &str = "+stable"; + +/// The modifier for the nightly channel +pub const CHANNEL_NIGHTLY: &str = "+nightly"; diff --git a/src/model/packages.rs b/src/model/packages.rs index a70339e..8f18b18 100644 --- a/src/model/packages.rs +++ b/src/model/packages.rs @@ -20,7 +20,19 @@ pub struct CrateInfo { /// Gets the versions in the index pub versions: Vec, /// The build targets to use (for docs generation and deps analysis) - pub targets: Vec, + pub targets: Vec, + /// The required capabilities for docs generation + pub capabilities: Vec, +} + +/// A build target to use (for docs generation and deps analysis) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CrateInfoTarget { + /// The target triple + pub target: String, + /// Whether to require a native toolchain for this target + #[serde(rename = "docsUseNative")] + pub docs_use_native: bool, +} /// The data for a crate version diff --git a/src/model/worker.rs b/src/model/worker.rs new file mode 100644 index 0000000..f4f9149 --- /dev/null +++ b/src/model/worker.rs @@ -0,0 +1,525 @@ +/******************************************************************************* + * Copyright (c) 2024 Cénotélie Opérations SAS (cenotelie.fr) + ******************************************************************************/ + +//!
Data model for worker nodes and the protocol to communicate between the master and the workers + +use std::future::Future; +use std::pin::Pin; +use std::sync::{Arc, RwLock}; +use std::task::{Context, Poll, Waker}; + +use log::{error, info}; +use serde_derive::{Deserialize, Serialize}; +use tokio::sync::mpsc::{Receiver, Sender}; +use tokio::sync::Mutex; + +use super::docs::{DocGenJob, DocGenJobUpdate}; +use crate::model::config::{Configuration, NodeRole}; +use crate::utils::apierror::ApiError; +use crate::utils::token::generate_token; + +/// The descriptor of a worker and its capabilities +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct WorkerDescriptor { + /// The unique identifier for the worker + pub identifier: String, + /// The user-friendly name of the worker + pub name: String, + /// The version of the locally installed stable toolchain + #[serde(rename = "toolchainVersionStable")] + pub toolchain_version_stable: semver::Version, + /// The version of the locally installed nightly toolchain + #[serde(rename = "toolchainVersionNightly")] + pub toolchain_version_nightly: semver::Version, + /// The host target of the locally installed toolchain + #[serde(rename = "toolchainHost")] + pub toolchain_host: String, + /// The locally installed targets + #[serde(rename = "toolchainInstalledTargets")] + pub toolchain_installed_targets: Vec, + /// All the potential targets that the node could install + #[serde(rename = "toolchainInstallableTargets")] + pub toolchain_installable_targets: Vec, + /// The declared capabilities of the worker + pub capabilities: Vec, +} + +impl WorkerDescriptor { + /// Gets the descriptor for this worker, based on the specified configuration + #[must_use] + pub fn get_my_descriptor(config: &Configuration) -> WorkerDescriptor { + Self { + identifier: generate_token(32), + name: if let NodeRole::Worker(worker_config) = &config.self_role { + worker_config.name.clone() + } else { + String::new() + }, + toolchain_version_stable: config.self_toolchain_version_stable.clone(), + toolchain_version_nightly: config.self_toolchain_version_nightly.clone(), + toolchain_host: config.self_toolchain_host.clone(), + toolchain_installed_targets: config.self_installed_targets.clone(), + toolchain_installable_targets: if config.docs_autoinstall_targets { + config.self_installable_targets.clone() + } else { + Vec::new() + }, + capabilities: if let NodeRole::Worker(worker_config) = &config.self_role { + worker_config.capabilities.clone() + } else { + Vec::new() + }, + } + } + + /// Gets whether this worker matches the selector + #[must_use] + pub fn matches(&self, selector: &WorkerSelector) -> bool { + if let Some(host) = &selector.toolchain_host { + if &self.toolchain_host != host { + return false; + } + } + if let Some(target) = &selector.toolchain_installed_target { + if !self.toolchain_installed_targets.contains(target) { + return false; + } + } + if let Some(target) = &selector.toolchain_available_target { + if !self.toolchain_installed_targets.contains(target) && !self.toolchain_installable_targets.contains(target) { + return false; + } + } + selector.capabilities.iter().all(|cap| self.capabilities.contains(cap)) + } +}
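+// Illustrative walk-through of the matching rules above, with hypothetical
+// values: a worker with toolchain_host = "x86_64-unknown-linux-gnu", installed
+// targets ["x86_64-unknown-linux-gnu"], installable targets
+// ["wasm32-unknown-unknown"] and capabilities ["gpu"]
+//   - matches WorkerSelector::new_native_target("x86_64-unknown-linux-gnu".into())
+//     (see the constructors further down),
+//   - matches WorkerSelector::new_available_target("wasm32-unknown-unknown".into()),
+//     since an installable target also satisfies toolchain_available_target,
+//   - does not match a selector whose capabilities are ["gpu", "net"], because
+//     every required capability must be declared by the worker.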
+ +/// The data for registering a worker +#[derive(Debug)] +pub struct WorkerRegistrationData { + /// The worker's description + pub descriptor: WorkerDescriptor, + /// The sender to send jobs to the worker + pub job_sender: Sender, + /// The receiver to receive updates from the worker + pub update_receiver: Receiver, +} + +/// The state of a worker +#[derive(Debug)] +enum WorkerState { + /// Available for jobs + Available(Receiver), + /// In use for a job + InUse(JobIdentifier), +} + +impl WorkerState { + /// Checks whether the worker is available + #[must_use] + pub fn is_available(&self) -> bool { + matches!(self, Self::Available(_)) + } +} + +/// The publicly visible state of a worker +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +pub enum WorkerPublicState { + /// Available for jobs + Available, + /// In use for a job + InUse(JobIdentifier), +} + +impl<'a> From<&'a WorkerState> for WorkerPublicState { + fn from(value: &WorkerState) -> Self { + match value { + WorkerState::Available(_) => Self::Available, + WorkerState::InUse(job_id) => Self::InUse(*job_id), + } + } +} + +/// The data for a worker +#[derive(Debug)] +struct WorkerData { + /// The worker's description + descriptor: WorkerDescriptor, + /// The sender to send jobs to the worker + job_sender: Sender, + /// The worker's state + state: WorkerState, +} + +impl WorkerData { + /// Checks out this worker + /// + /// # Panics + /// + /// Raises a panic when the worker is not available + #[must_use] + fn checkout(&mut self, job_id: JobIdentifier) -> Receiver { + let mut old_state = WorkerState::InUse(job_id); + std::mem::swap(&mut self.state, &mut old_state); + let WorkerState::Available(receiver) = old_state else { + panic!("expected an available worker") + }; + receiver + } +} + +/// The public data for a worker +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct WorkerPublicData { + /// The worker's description + pub descriptor: WorkerDescriptor, + /// The worker's state + pub state: WorkerPublicState, +} + +impl<'a> From<&'a WorkerData> for WorkerPublicData { + fn from(value: &'a WorkerData) -> Self { + Self { + descriptor: value.descriptor.clone(), + state: WorkerPublicState::from(&value.state), + } + } +} + +/// Data to select a suitable worker +#[derive(Debug, Default, Clone)] +pub struct WorkerSelector { + /// Requires a specific native target + pub toolchain_host: Option, + /// Requires a target to be installed + pub toolchain_installed_target: Option, + /// Requires a target to be available, but not necessarily installed + pub toolchain_available_target: Option, + /// All the required capabilities + pub capabilities: Vec, +} + +impl WorkerSelector { + /// Builds a selector that requires a native host for a target + #[must_use] + pub fn new_native_target(target: String) -> Self { + Self { + toolchain_host: Some(target), + toolchain_installed_target: None, + toolchain_available_target: None, + capabilities: Vec::new(), + } + } + + /// Builds a selector that requires a target to be available + #[must_use] + pub fn new_available_target(target: String) -> Self { + Self { + toolchain_host: None, + toolchain_installed_target: None, + toolchain_available_target: Some(target), + capabilities: Vec::new(), + } + } +} + +/// A future that waits for a suitable worker +pub struct WorkerWaiter { + /// The parent manager + manager: WorkersManager, + /// The selector to use + selector: WorkerSelector, + /// The identifier of the waiting job + job_id: JobIdentifier, + /// The resolved worker if any + worker: Option, +} + +impl Future for WorkerWaiter { + type Output = WorkerCheckout; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + if let Some(data) = self.worker.take() { + self.manager.send_event(WorkerEvent::WorkerStartedJob { + worker_id: data.descriptor.identifier.clone(), + job_id: self.job_id, + }); + return Poll::Ready(data); + } + let mut inner = self.manager.inner.write().unwrap(); + if let
Some(worker) = self + .manager + .clone() + .try_get_worker_for(&mut inner, &self.selector, self.job_id) + { + self.manager.send_event(WorkerEvent::WorkerStartedJob { + worker_id: worker.descriptor.identifier.clone(), + job_id: self.job_id, + }); + return Poll::Ready(worker); + } + // queue + inner.queue.push(QueuedRequest { + selector: self.selector.clone(), + waker: cx.waker().clone(), + }); + Poll::Pending + } +} + +/// A checkout for a worker while it is in use +#[derive(Debug)] +pub struct WorkerCheckout { + /// The parent manager + manager: WorkersManager, + /// The worker's description + descriptor: WorkerDescriptor, + /// The sender to send jobs to the worker + job_sender: Sender, + /// The receiver to receive updates from the worker + update_receiver: Option>, +} + +impl WorkerCheckout { + /// Gets the job sender + pub fn sender(&mut self) -> &mut Sender { + &mut self.job_sender + } + + /// Gets the update receiver + pub fn update_receiver(&mut self) -> &mut Receiver { + self.update_receiver.as_mut().unwrap() + } +} + +impl Drop for WorkerCheckout { + fn drop(&mut self) { + self.manager.clone().put_worker_back(self); + } +} + +/// The data of a queued request for a worker +#[derive(Debug)] +struct QueuedRequest { + /// The associated selector + selector: WorkerSelector, + /// The waker + waker: Waker, +} + +/// The inner data for a manager of workers +#[derive(Debug, Default)] +struct WorkersManagerInner { + /// The workers themselves + workers: Vec, + /// The queue of requests remaining to be served + queue: Vec, +} + +/// The manager of workers +#[derive(Debug, Default, Clone)] +pub struct WorkersManager { + /// The inner data + inner: Arc>, + /// The active listeners + listeners: Arc>>>, +} + +impl WorkersManager { + /// Gets whether there are connected workers + #[must_use] + pub fn has_workers(&self) -> bool { + !self.inner.read().unwrap().workers.is_empty() + } + + /// Gets all the registered workers + #[must_use] + pub fn get_workers(&self) -> Vec { + self.inner + .read() + .unwrap() + .workers + .iter() + .map(WorkerPublicData::from) + .collect::>() + } + + /// Registers a new worker + pub fn register_worker(&self, data: WorkerRegistrationData) { + info!("=== registering worker {}", data.descriptor.identifier); + let worker_data = WorkerData { + descriptor: data.descriptor, + job_sender: data.job_sender, + state: WorkerState::Available(data.update_receiver), + }; + let event = WorkerEvent::WorkerConnected(Box::new(WorkerPublicData::from(&worker_data))); + self.inner.write().unwrap().workers.push(worker_data); + self.send_event(event); + } + + /// Removes a worker + pub fn remove_worker(&self, worker_id: &str) { + info!("=== removing worker {worker_id}"); + let found = { + let mut inner = self.inner.write().unwrap(); + let size_before = inner.workers.len(); + inner.workers.retain(|w| w.descriptor.identifier != worker_id); + let size_after = inner.workers.len(); + size_before != size_after + }; + if found { + self.send_event(WorkerEvent::WorkerRemoved { + worker_id: worker_id.to_string(), + }); + } + } + + /// Gets a worker for a selector + #[must_use] + pub fn get_worker_for(&self, selector: WorkerSelector, job_id: JobIdentifier) -> WorkerWaiter { + let worker = self + .clone() + .try_get_worker_for(&mut self.inner.write().unwrap(), &selector, job_id); + WorkerWaiter { + manager: self.clone(), + selector, + job_id, + worker, + } + }
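+    // Sketch of the intended checkout lifecycle (illustrative, using the names
+    // defined above; error handling elided):
+    //   let mut checkout = manager.get_worker_for(selector, job_id).await;
+    //   checkout.sender().send(JobSpecification::DocGen(job)).await?;
+    //   while let Some(update) = checkout.update_receiver().recv().await { /* ... */ }
+    //   drop(checkout); // Drop runs put_worker_back below: the worker becomes
+    //                   // Available again and the first queued request whose
+    //                   // selector it matches is woken up.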
+ + /// Puts a worker back as available + fn put_worker_back(&self, checkout: &mut WorkerCheckout) { + let mut inner = self.inner.write().unwrap(); + + let index = if let Some((index, worker)) = inner + .workers + .iter_mut() + .enumerate() + .find(|(_, w)| w.descriptor.identifier == checkout.descriptor.identifier) + { + worker.state = WorkerState::Available(checkout.update_receiver.take().unwrap()); + self.send_event(WorkerEvent::WorkerAvailable { + worker_id: worker.descriptor.identifier.clone(), + }); + Some(index) + } else { + None + }; + + if let Some(worker_index) = index { + // check whether the worker can serve a queued request + let index = inner.queue.iter().enumerate().find_map(|(index, item)| { + if inner.workers[worker_index].descriptor.matches(&item.selector) { + Some(index) + } else { + None + } + }); + if let Some(index) = index { + let item = inner.queue.remove(index); + item.waker.wake(); + } + } + } + + /// Attempts to get a worker for a selector + fn try_get_worker_for( + self, + inner: &mut WorkersManagerInner, + selector: &WorkerSelector, + job_id: JobIdentifier, + ) -> Option { + let target = inner + .workers + .iter_mut() + .find(|w| w.descriptor.matches(selector) && w.state.is_available())?; + Some(WorkerCheckout { + manager: self, + descriptor: target.descriptor.clone(), + job_sender: target.job_sender.clone(), + update_receiver: Some(target.checkout(job_id)), + }) + } + + /// Adds a listener to job updates + pub async fn add_listener(&self, listener: tokio::sync::mpsc::Sender) { + self.listeners.lock().await.push(listener); + } + + /// Sends an event to listeners without blocking + fn send_event(&self, event: WorkerEvent) { + let this = self.clone(); + tokio::spawn(async move { + if let Err(e) = this.do_send_event(event).await { + error!("{e}"); + if let Some(backtrace) = &e.backtrace { + error!("{backtrace}"); + } + } + }); + } + + /// Sends an event to listeners + async fn do_send_event(&self, event: WorkerEvent) -> Result<(), ApiError> { + let mut listeners = self.listeners.lock().await; + let mut index = if listeners.is_empty() { + None + } else { + Some(listeners.len() - 1) + }; + while let Some(i) = index { + if listeners[i].send(event.clone()).await.is_err() { + // remove + listeners.swap_remove(i); + } + index = if i == 0 { None } else { Some(i - 1) }; + } + Ok(()) + } +} + +/// The identifier of a job for a worker +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +pub enum JobIdentifier { + /// A documentation generation job + DocGen(i64), +} + +/// A specification of a job to be executed +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum JobSpecification { + /// A documentation generation job + DocGen(DocGenJob), +} + +impl JobSpecification { + /// Gets the job identifier + #[must_use] + pub fn get_id(&self) -> JobIdentifier { + match self { + Self::DocGen(doc_gen_job) => JobIdentifier::DocGen(doc_gen_job.id), + } + } +} + +/// An update about the execution of a job, for the client +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum JobUpdate { + /// An update about a documentation generation job + DocGen(DocGenJobUpdate), +} + +/// An event about workers +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum WorkerEvent { + /// A worker just connected + WorkerConnected(Box), + /// A worker was removed + WorkerRemoved { worker_id: String }, + /// A worker started a new job + WorkerStartedJob { worker_id: String, job_id: JobIdentifier }, + /// A worker became available + WorkerAvailable { worker_id: String }, +} diff --git a/src/routes.rs b/src/routes.rs index 9917983..c6c0a56 100644 --- a/src/routes.rs +++ b/src/routes.rs @@ -6,20
+6,28 @@ use std::borrow::Cow; use std::collections::HashMap; +use std::future::Future; use std::path::PathBuf; +use std::pin::Pin; use std::str::FromStr; use std::sync::Arc; +use std::time::Duration; use axum::body::{Body, Bytes}; -use axum::extract::{Path, Query, State}; +use axum::extract::ws::{Message, WebSocket}; +use axum::extract::{FromRequest, Path, Query, State, WebSocketUpgrade}; use axum::http::header::{HeaderName, SET_COOKIE}; use axum::http::{header, HeaderValue, Request, StatusCode}; use axum::response::{IntoResponse, Response}; use axum::{BoxError, Json}; use cookie::Key; -use futures::{Stream, StreamExt}; +use futures::future::select_all; +use futures::{SinkExt, Stream, StreamExt}; +use log::error; use serde::Deserialize; use tokio::fs::File; +use tokio::sync::mpsc::channel; +use tokio::sync::Mutex; use tokio_stream::wrappers::ReceiverStream; use tokio_util::io::ReaderStream; @@ -30,11 +38,14 @@ use crate::model::cargo::{ }; use crate::model::deps::DepsAnalysis; use crate::model::docs::{DocGenJob, DocGenJobSpec}; -use crate::model::packages::CrateInfo; +use crate::model::packages::{CrateInfo, CrateInfoTarget}; use crate::model::stats::{DownloadStats, GlobalStats}; +use crate::model::worker::{JobSpecification, JobUpdate, WorkerDescriptor, WorkerPublicData, WorkerRegistrationData}; use crate::model::{AppVersion, CrateVersion, RegistryInformation}; use crate::services::index::Index; -use crate::utils::apierror::{error_invalid_request, error_not_found, specialize, ApiError}; +use crate::utils::apierror::{ + error_backend_failure, error_invalid_request, error_not_found, error_unauthorized, specialize, ApiError, +}; use crate::utils::axum::auth::{AuthData, AxumStateForCookies}; use crate::utils::axum::embedded::{EmbeddedResources, WebappResource}; use crate::utils::axum::extractors::Base64; @@ -280,7 +291,7 @@ pub async fn get_docs_resource( && state .application .configuration - .self_builtin_targets + .self_known_targets .iter() .any(|t| elements[3] == t) { @@ -463,6 +474,167 @@ pub async fn api_v1_get_doc_gen_job_updates( Ok(stream.into_response()) } +/// Gets the connected worker nodes +pub async fn api_v1_get_workers(auth_data: AuthData, State(state): State>) -> ApiResult> { + response(state.application.get_workers(&auth_data).await) +} + +/// Adds a listener to workers updates +pub async fn api_v1_get_workers_updates( + auth_data: AuthData, + State(state): State>, +) -> Result)> { + let receiver = match state.application.get_workers_updates(&auth_data).await { + Ok(r) => r, + Err(e) => return Err(response_error(e)), + }; + let stream = ServerSentEventStream::new(ReceiverStream::new(receiver).map(Event::from_data)); + Ok(stream.into_response()) +} + +/// Endpoint for worker to connect to this host +pub async fn api_v1_worker_connect( + auth_data: AuthData, + State(state): State>, + request: Request, +) -> Result)> { + let token = auth_data.token.as_ref().ok_or_else(|| response_error(error_unauthorized()))?; + if Some(token.secret.as_str()) != state.application.configuration.self_role.get_worker_token() { + return Err(response_error(error_unauthorized())); + } + let ws_upgrade = WebSocketUpgrade::from_request(request, &state) + .await + .map_err(|e| response_error(e.into()))?; + let worker_id = token.id.clone(); + let response = ws_upgrade.on_upgrade(move |socket| worker_connect_handle(socket, state.clone(), worker_id)); + Ok(response) +} + +/// Handles a connection from a worker +async fn worker_connect_handle(web_socket: WebSocket, state: Arc, worker_id: String) { + if let 
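+    // Note on authentication (see api_v1_worker_connect above): the token secret
+    // presented by the worker must equal the master's configured worker token
+    // (NodeRole::get_worker_token), and the token id is used as the worker
+    // identifier, which must also match the `identifier` field of the
+    // WorkerDescriptor sent as the first text message on the socket.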
Err(error) = worker_connect_handle_inner(web_socket, state, worker_id).await { + error!("{error}"); + if let Some(backtrace) = error.backtrace.as_ref() { + error!("{backtrace}"); + } + } +} + +/// The timeout (in milliseconds) for a worker to send a heartbeat +const HEARTBEAT_TIMEOUT: u64 = 150; + +/// Handles a connection from a worker +/// +/// ```text +/// ws_sender <----------------- [ job_bridge ] <------- [ job_receiver ] +/// WS <-- ws_sender <----------------- [ health_checker ] +/// ^ +/// WS ----> [ ws_dispatcher ] -- pong --------+ +/// + update -----> [ updated_sender ] +/// ``` +async fn worker_connect_handle_inner(web_socket: WebSocket, state: Arc, worker_id: String) -> Result<(), ApiError> { + let (mut ws_sender, mut ws_receiver) = web_socket.split(); + let Some(Ok(Message::Text(data))) = ws_receiver.next().await else { + // unexpected message + ws_sender.send(Message::Close(None)).await?; + return Err(specialize( + error_invalid_request(), + String::from("expected the worker descriptor"), + )); + }; + let descriptor = serde_json::from_str::(&data)?; + if worker_id != descriptor.identifier { + ws_sender.send(Message::Close(None)).await?; + return Err(specialize(error_unauthorized(), String::from("unexpected worker identifier"))); + } + + let worker_id = descriptor.identifier.clone(); + let ws_sender = Arc::new(Mutex::new(ws_sender)); + + // send the registry info + ws_sender + .lock() + .await + .send(Message::Text(serde_json::to_string( + &state.application.configuration.get_self_as_external(), + )?)) + .await?; + + // communication channels + let (to_health_checker, mut health_checker_receiver) = channel::>(8); + let (job_sender, mut job_receiver) = channel::(8); + let (updated_sender, update_receiver) = channel::(8); + + // tasks + let ws_dispatcher: Pin> + Send>> = { + Box::pin(async move { + while let Some(message) = ws_receiver.next().await { + match message? { + Message::Text(data) => { + let update = serde_json::from_str(&data)?; + updated_sender.send(update).await?; + } + Message::Binary(data) => { + let update = serde_json::from_slice(&data)?; + updated_sender.send(update).await?; + } + Message::Ping(_) => { /* do nothing */ } + Message::Pong(data) => { + // dispatch to health_checker + to_health_checker.send(data).await?; + } + Message::Close(_) => { + break; + } + } + } + Ok::<_, ApiError>(()) + }) + }; + let health_check = Box::pin(async move { + let mut code: u8 = 0; + loop { + let Some(data) = + tokio::time::timeout(Duration::from_millis(HEARTBEAT_TIMEOUT), health_checker_receiver.recv()).await?
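+            // Heartbeat contract enforced here: every Pong must arrive within
+            // HEARTBEAT_TIMEOUT milliseconds and carry, as its first byte, a u8
+            // counter that starts at 0 and wraps around (0, 1, ..., 255, 0, ...).
+            // The matching Ping/Pong production lives on the worker side
+            // (src/worker.rs, not shown in this hunk).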
+ else { + break; + }; + if data[0] != code { + return Err(specialize( + error_backend_failure(), + format!("invalid heartbeat, expected {code}, got {}", data[0]), + )); + } + code = code.wrapping_add(1); + } + Ok::<_, ApiError>(()) + }); + let job_bridge = { + let ws_sender = ws_sender.clone(); + Box::pin(async move { + while let Some(job) = job_receiver.recv().await { + ws_sender + .lock() + .await + .send(Message::Text(serde_json::to_string(&job)?)) + .await?; + } + Ok::<_, ApiError>(()) + }) + }; + + state.application.worker_nodes.register_worker(WorkerRegistrationData { + descriptor, + job_sender, + update_receiver, + }); + + let (result, _index, _rest) = select_all(vec![ws_dispatcher, health_check, job_bridge]).await; + + state.application.worker_nodes.remove_worker(&worker_id); + result +} + /// Gets the known users pub async fn api_v1_get_users(auth_data: AuthData, State(state): State>) -> ApiResult> { response(state.application.get_users(&auth_data).await) @@ -712,7 +884,7 @@ pub async fn api_v1_get_crate_targets( auth_data: AuthData, State(state): State>, Path(PathInfoCrate { package }): Path, -) -> ApiResult> { +) -> ApiResult> { response(state.application.get_crate_targets(&auth_data, &package).await) } @@ -721,11 +893,35 @@ pub async fn api_v1_set_crate_targets( auth_data: AuthData, State(state): State>, Path(PathInfoCrate { package }): Path, - input: Json>, + input: Json>, ) -> ApiResult<()> { response(state.application.set_crate_targets(&auth_data, &package, &input).await) } +/// Gets the required capabilities for a crate +pub async fn api_v1_get_crate_required_capabilities( + auth_data: AuthData, + State(state): State>, + Path(PathInfoCrate { package }): Path, +) -> ApiResult> { + response(state.application.get_crate_required_capabilities(&auth_data, &package).await) +} + +/// Sets the required capabilities for a crate +pub async fn api_v1_set_crate_required_capabilities( + auth_data: AuthData, + State(state): State>, + Path(PathInfoCrate { package }): Path, + input: Json>, +) -> ApiResult<()> { + response( + state + .application + .set_crate_required_capabilities(&auth_data, &package, &input) + .await, + ) +} + /// Sets the deprecation status on a crate pub async fn api_v1_set_crate_deprecation( auth_data: AuthData, diff --git a/src/schema.sql b/src/schema.sql index 8317dc5..579f3bc 100644 --- a/src/schema.sql +++ b/src/schema.sql @@ -5,7 +5,7 @@ CREATE TABLE IF NOT EXISTS SchemaMetadata ( CREATE INDEX IF NOT EXISTS SchemaMetadataIndex ON SchemaMetadata(name); -INSERT INTO SchemaMetadata VALUES ('version', '1.9.0'); +INSERT INTO SchemaMetadata VALUES ('version', '1.10.0'); CREATE TABLE RegistryUser ( id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, @@ -42,6 +42,8 @@ CREATE TABLE Package ( name TEXT NOT NULL PRIMARY KEY, lowercase TEXT NOT NULL, targets TEXT NOT NULL, + nativeTargets TEXT NOT NULL, + capabilities TEXT NOT NULL, isDeprecated BOOLEAN NOT NULL ); @@ -88,6 +90,8 @@ CREATE TABLE DocGenJob ( package TEXT NOT NULL REFERENCES Package(name), version TEXT NOT NULL, target TEXT NOT NULL, + useNative BOOLEAN NOT NULL, + capabilities TEXT NOT NULL, state INTEGER NOT NULL, queuedOn TIMESTAMP NOT NULL, startedOn TIMESTAMP NOT NULL, diff --git a/src/services/database/jobs.rs b/src/services/database/jobs.rs index 12809c2..f697182 100644 --- a/src/services/database/jobs.rs +++ b/src/services/database/jobs.rs @@ -10,12 +10,13 @@ use chrono::Local; use super::Database; use crate::model::docs::{DocGenJob, DocGenJobSpec, DocGenJobState, DocGenTrigger}; use 
crate::utils::apierror::{error_not_found, ApiError}; +use crate::utils::comma_sep_to_vec; impl Database { /// Gets the documentation generation jobs pub async fn get_docgen_jobs(&self) -> Result, ApiError> { let rows = sqlx::query!( - "SELECT id, package, version, target, state, + "SELECT id, package, version, target, useNative AS usenative, capabilities, state, queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update, triggerUser AS trigger_user, triggerEvent AS trigger_event FROM DocGenJob @@ -30,6 +31,8 @@ impl Database { package: row.package, version: row.version, target: row.target, + use_native: row.usenative, + capabilities: comma_sep_to_vec(&row.capabilities), state: DocGenJobState::from(row.state), queued_on: row.queued_on, started_on: row.started_on, @@ -51,7 +54,7 @@ impl Database { /// Gets a single documentation job pub async fn get_docgen_job(&self, job_id: i64) -> Result { let row = sqlx::query!( - "SELECT id, package, version, target, state, + "SELECT id, package, version, target, useNative AS usenative, capabilities, state, queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update, triggerUser AS trigger_user, triggerEvent AS trigger_event FROM DocGenJob @@ -67,6 +70,8 @@ impl Database { package: row.package, version: row.version, target: row.target, + use_native: row.usenative, + capabilities: comma_sep_to_vec(&row.capabilities), state: DocGenJobState::from(row.state), queued_on: row.queued_on, started_on: row.started_on, @@ -88,7 +93,7 @@ impl Database { // look for already existing queued job let state_value = DocGenJobState::Queued.value(); let row = sqlx::query!( - "SELECT id, package, version, target, state, + "SELECT id, package, version, target, useNative AS usenative, capabilities, state, queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update, triggerUser AS trigger_user, triggerEvent AS trigger_event FROM DocGenJob @@ -109,6 +114,8 @@ impl Database { package: row.package, version: row.version, target: row.target, + use_native: row.usenative, + capabilities: comma_sep_to_vec(&row.capabilities), state: DocGenJobState::from(row.state), queued_on: row.queued_on, started_on: row.started_on, @@ -125,23 +132,26 @@ impl Database { }); } + let capabilities = spec.capabilities.join(","); let trigger_event = trigger.value(); let trigger_user = trigger.by().map(|u| u.id); let now = Local::now().naive_local(); let state_value = DocGenJobState::Queued.value(); let job_id = sqlx::query!( "INSERT INTO DocGenJob ( - package, version, target, state, + package, version, target, useNative, capabilities, state, queuedOn, startedOn, finishedOn, lastUpdate, triggerUser, triggerEvent, output ) VALUES ( - $1, $2, $3, $4, - $5, $5, $5, $5, - $6, $7, '' + $1, $2, $3, $4, $5, $6, + $7, $7, $7, $7, + $8, $9, '' ) RETURNING id", spec.package, spec.version, spec.target, + spec.use_native, + capabilities, state_value, now, trigger_user, @@ -155,6 +165,8 @@ impl Database { package: spec.package.clone(), version: spec.version.clone(), target: spec.target.clone(), + use_native: spec.use_native, + capabilities: spec.capabilities.clone(), state: DocGenJobState::Queued, queued_on: now, started_on: now, @@ -168,7 +180,7 @@ impl Database { pub async fn get_next_docgen_job(&self) -> Result, ApiError> { let state_value = DocGenJobState::Queued.value(); let row = sqlx::query!( - "SELECT id, package, version, target, state, + "SELECT id, package, version, target, useNative AS 
usenative, capabilities, state, queuedOn AS queued_on, startedOn AS started_on, finishedOn AS finished_on, lastUpdate AS last_update, triggerUser AS trigger_user, triggerEvent AS trigger_event FROM DocGenJob @@ -185,6 +197,8 @@ impl Database { package: row.package, version: row.version, target: row.target, + use_native: row.usenative, + capabilities: comma_sep_to_vec(&row.capabilities), state: DocGenJobState::from(row.state), queued_on: row.queued_on, started_on: row.started_on, diff --git a/src/services/database/packages.rs b/src/services/database/packages.rs index cde9ee3..b5d2c14 100644 --- a/src/services/database/packages.rs +++ b/src/services/database/packages.rs @@ -20,7 +20,7 @@ use crate::model::cargo::{ }; use crate::model::deps::{DepsAnalysisJobSpec, DepsAnalysisState}; use crate::model::docs::DocGenJobSpec; -use crate::model::packages::{CrateInfo, CrateInfoVersion, CrateInfoVersionDocs}; +use crate::model::packages::{CrateInfo, CrateInfoTarget, CrateInfoVersion, CrateInfoVersionDocs}; use crate::model::stats::{DownloadStats, SERIES_LENGTH}; use crate::model::CrateVersion; use crate::utils::apierror::{error_invalid_request, error_not_found, specialize, ApiError}; @@ -100,7 +100,7 @@ impl Database { versions_in_index: Vec, ) -> Result { let row = sqlx::query!( - "SELECT isDeprecated AS is_deprecated, targets FROM Package WHERE name = $1 LIMIT 1", + "SELECT isDeprecated AS is_deprecated, targets, nativeTargets AS nativetargets, capabilities FROM Package WHERE name = $1 LIMIT 1", package ) .fetch_optional(&mut *self.transaction.borrow().await) @@ -108,6 +108,8 @@ impl Database { .ok_or_else(error_not_found)?; let is_deprecated = row.is_deprecated; let targets = comma_sep_to_vec(&row.targets); + let native_targets = comma_sep_to_vec(&row.nativetargets); + let capabilities = comma_sep_to_vec(&row.capabilities); let rows = sqlx::query!( "SELECT version, upload, uploadedBy AS uploaded_by, @@ -155,7 +157,14 @@ impl Database { metadata: None, is_deprecated, versions, - targets, + targets: targets + .into_iter() + .map(|target| CrateInfoTarget { + docs_use_native: native_targets.contains(&target), + target, + }) + .collect(), + capabilities, }) } @@ -197,7 +206,7 @@ impl Database { } else { // create the package sqlx::query!( - "INSERT INTO Package (name, lowercase, targets, isDeprecated) VALUES ($1, $2, '', FALSE)", + "INSERT INTO Package (name, lowercase, targets, nativeTargets, capabilities, isDeprecated) VALUES ($1, $2, '', '', '', FALSE)", package.metadata.name, lowercase ) @@ -299,7 +308,8 @@ impl Database { /// Gets the packages that need documentation generation pub async fn get_undocumented_crates(&self, default_target: &str) -> Result, ApiError> { struct PackageData { - targets: Vec, + targets: Vec, + capabilities: Vec, versions: Vec, } struct VersionData { @@ -312,18 +322,31 @@ impl Database { // retrieve all package versions and associated targets { let mut stream = sqlx::query!( - "SELECT package, version, targets + "SELECT package, version, targets, nativeTargets AS nativetargets, capabilities FROM PackageVersion INNER JOIN Package ON PackageVersion.package = Package.name" ) .fetch(&mut *transaction); while let Some(Ok(row)) = stream.next().await { - let data = packages.entry(row.package).or_insert_with(|| PackageData { - targets: if row.targets.is_empty() { - vec![default_target.to_string()] - } else { - comma_sep_to_vec(&row.targets) - }, - versions: Vec::new(), + let data = packages.entry(row.package).or_insert_with(|| { + let native_targets = 
comma_sep_to_vec(&row.nativetargets); + PackageData { + targets: if row.targets.is_empty() { + vec![CrateInfoTarget { + target: default_target.to_string(), + docs_use_native: true, + }] + } else { + comma_sep_to_vec(&row.targets) + .into_iter() + .map(|target| CrateInfoTarget { + docs_use_native: native_targets.contains(&target), + target, + }) + .collect() + }, + capabilities: comma_sep_to_vec(&row.capabilities), + versions: Vec::new(), + } }); data.versions.push(VersionData { version: row.version, @@ -341,7 +364,7 @@ impl Database { .fetch(&mut *transaction); while let Some(Ok(row)) = stream.next().await { if let Some(data) = packages.get_mut(&row.package) { - let target_index = data.targets.iter().position(|t| t == &row.target); + let target_index = data.targets.iter().position(|info| info.target == row.target); let version_data = data.versions.iter_mut().find(|d| d.version == row.version); if let (Some(target_index), Some(version_data)) = (target_index, version_data) { version_data.docs |= 1 << target_index; @@ -353,13 +376,15 @@ impl Database { let mut jobs = Vec::new(); for (package, data) in packages { for version in data.versions { - for (index, target) in data.targets.iter().enumerate() { + for (index, info) in data.targets.iter().enumerate() { let is_missing = (version.docs & (1 << index)) == 0; if is_missing { jobs.push(DocGenJobSpec { package: package.clone(), version: version.version.clone(), - target: target.clone(), + target: info.target.clone(), + use_native: info.docs_use_native, + capabilities: data.capabilities.clone(), }); } } @@ -411,23 +436,37 @@ impl Database { package: &str, version: &str, default_target: &str, - ) -> Result, ApiError> { + ) -> Result, ApiError> { self.check_crate_exists(package, version).await?; - let targets = sqlx::query!("SELECT targets FROM Package WHERE name = $1 LIMIT 1", package) - .fetch_optional(&mut *self.transaction.borrow().await) - .await? - .ok_or_else(error_not_found)? - .targets; - let targets = comma_sep_to_vec(&targets); + let row = sqlx::query!( + "SELECT targets, nativeTargets AS nativetargets FROM Package WHERE name = $1 LIMIT 1", + package + ) + .fetch_optional(&mut *self.transaction.borrow().await) + .await? + .ok_or_else(error_not_found)?; + let targets = comma_sep_to_vec(&row.targets); + let native_targets = comma_sep_to_vec(&row.nativetargets); + let targets = targets + .into_iter() + .map(|target| CrateInfoTarget { + docs_use_native: native_targets.contains(&target), + target, + }) + .collect::>(); let targets = if targets.is_empty() { - vec![default_target.to_string()] + vec![CrateInfoTarget { + target: default_target.to_string(), + docs_use_native: true, + }] } else { targets }; - for target in &targets { - self.set_crate_documentation(package, version, target, false, false).await?; + for info in &targets { + self.set_crate_documentation(package, version, &info.target, false, false) + .await?; } Ok(targets) } @@ -664,50 +703,105 @@ impl Database { } /// Gets the targets for a crate - pub async fn get_crate_targets(&self, package: &str) -> Result, ApiError> { - let row = sqlx::query!("SELECT targets FROM Package WHERE name = $1 LIMIT 1", package) - .fetch_optional(&mut *self.transaction.borrow().await) - .await? 
- .ok_or_else(error_not_found)?; - Ok(comma_sep_to_vec(&row.targets)) + pub async fn get_crate_targets(&self, package: &str) -> Result, ApiError> { + let row = sqlx::query!( + "SELECT targets, nativeTargets AS nativetargets FROM Package WHERE name = $1 LIMIT 1", + package + ) + .fetch_optional(&mut *self.transaction.borrow().await) + .await? + .ok_or_else(error_not_found)?; + let targets = comma_sep_to_vec(&row.targets); + let native_targets = comma_sep_to_vec(&row.nativetargets); + Ok(targets + .into_iter() + .map(|target| CrateInfoTarget { + docs_use_native: native_targets.contains(&target), + target, + }) + .collect()) } /// Sets the targets for a crate - pub async fn set_crate_targets(&self, package: &str, targets: &[String]) -> Result, ApiError> { + pub async fn set_crate_targets(&self, package: &str, targets: &[CrateInfoTarget]) -> Result, ApiError> { let old_targets = self.get_crate_targets(package).await?; let added_targets = targets .iter() - .filter_map(|target| { - if old_targets.contains(target) { + .filter_map(|info| { + if old_targets.iter().any(|t| t.target == info.target) { None } else { - Some(target.clone()) + Some(info.clone()) } }) .collect::>(); - let new_targets = targets.join(","); - sqlx::query!("UPDATE Package SET targets = $2 WHERE name = $1", package, new_targets) - .execute(&mut *self.transaction.borrow().await) - .await?; + let new_targets = targets.iter().map(|info| info.target.as_str()).collect::>(); + let new_targets = new_targets.join(","); + let native_targets = targets + .iter() + .filter_map(|info| { + if info.docs_use_native { + Some(info.target.as_str()) + } else { + None + } + }) + .collect::>(); + let native_targets = native_targets.join(","); + sqlx::query!( + "UPDATE Package SET targets = $2, nativeTargets = $3 WHERE name = $1", + package, + new_targets, + native_targets + ) + .execute(&mut *self.transaction.borrow().await) + .await?; // get versions + let capabilities = comma_sep_to_vec( + &sqlx::query!("SELECT capabilities FROM Package WHERE name = $1 LIMIT 1", package) + .fetch_one(&mut *self.transaction.borrow().await) + .await? + .capabilities, + ); let rows = sqlx::query!("SELECT version FROM PackageVersion WHERE package = $1", package) .fetch_all(&mut *self.transaction.borrow().await) .await?; let mut jobs = Vec::new(); for row in rows { - for target in &added_targets { + for info in &added_targets { jobs.push(DocGenJobSpec { package: package.to_string(), version: row.version.clone(), - target: target.clone(), + target: info.target.clone(), + use_native: info.docs_use_native, + capabilities: capabilities.clone(), }); } } Ok(jobs) } + /// Gets the required capabilities for a crate + pub async fn get_crate_required_capabilities(&self, package: &str) -> Result, ApiError> { + let row = sqlx::query!("SELECT capabilities FROM Package WHERE name = $1 LIMIT 1", package) + .fetch_optional(&mut *self.transaction.borrow().await) + .await? 
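+        // Capabilities are persisted as one comma-separated TEXT column: storing
+        // (hypothetical) capabilities ["gpu", "net"] writes the string "gpu,net"
+        // via join(",") in set_crate_required_capabilities below, and
+        // comma_sep_to_vec parses it back here (the schema default '' is expected
+        // to parse to an empty list).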
+ .ok_or_else(error_not_found)?; + Ok(comma_sep_to_vec(&row.capabilities)) + } + + /// Sets the required capabilities for a crate + pub async fn set_crate_required_capabilities(&self, package: &str, capabilities: &[String]) -> Result<(), ApiError> { + let _ = self.get_crate_required_capabilities(package).await?; + let capabilities = capabilities.join(","); + sqlx::query!("UPDATE Package SET capabilities = $2 WHERE name = $1", package, capabilities) + .execute(&mut *self.transaction.borrow().await) + .await?; + Ok(()) + } + /// Sets the deprecation status on a crate pub async fn set_crate_deprecation(&self, package: &str, deprecated: bool) -> Result<(), ApiError> { sqlx::query!("UPDATE Package SET isDeprecated = $2 WHERE name = $1", package, deprecated) diff --git a/src/services/deps.rs b/src/services/deps.rs index 95bd6a8..2c24c4f 100644 --- a/src/services/deps.rs +++ b/src/services/deps.rs @@ -342,7 +342,10 @@ impl DepsCheckerImpl { async fn get_dependency_versions(&self, registry: Option<&str>, name: &str) -> Result, ApiError> { if let Some(registry) = registry { if registry == BUILTIN_CRATES_REGISTRY_URI { - Ok(Self::generate_for_built_in(name, &self.configuration.self_toolchain_version)) + Ok(Self::generate_for_built_in( + name, + &self.configuration.self_toolchain_version_stable, + )) } else if registry == CRATES_IO_REGISTRY_URI { self.get_dependency_info_git(name, CRATES_IO_NAME, CRATES_IO_REGISTRY_URI) .await @@ -374,7 +377,7 @@ impl DepsCheckerImpl { } /// Generates the versions vector for a built-in crate - fn generate_for_built_in(name: &str, toolchain_version: &str) -> Vec { + fn generate_for_built_in(name: &str, toolchain_version: &semver::Version) -> Vec { vec![IndexCrateMetadata { name: name.to_string(), vers: toolchain_version.to_string(), diff --git a/src/services/docs.rs b/src/services/docs.rs index 6710e2c..a0ed967 100644 --- a/src/services/docs.rs +++ b/src/services/docs.rs @@ -21,9 +21,11 @@ use tokio::time::interval; use crate::model::config::Configuration; use crate::model::docs::{DocGenEvent, DocGenJob, DocGenJobSpec, DocGenJobState, DocGenJobUpdate, DocGenTrigger}; +use crate::model::worker::{JobIdentifier, JobSpecification, JobUpdate, WorkersManager}; +use crate::model::CHANNEL_NIGHTLY; use crate::services::database::{db_transaction_read, db_transaction_write}; use crate::services::storage::Storage; -use crate::utils::apierror::{error_backend_failure, specialize, ApiError}; +use crate::utils::apierror::{error_backend_failure, error_invalid_request, specialize, ApiError}; use crate::utils::concurrent::n_at_a_time; use crate::utils::db::RwSqlitePool; use crate::utils::FaillibleFuture; @@ -48,12 +50,14 @@ pub fn get_service( configuration: Arc, service_db_pool: RwSqlitePool, service_storage: Arc, + worker_nodes: WorkersManager, ) -> Arc { let service = Arc::new(DocsGeneratorImpl { configuration, service_db_pool, service_storage, listeners: Arc::new(Mutex::new(Vec::new())), + worker_nodes, }); // launch workers let _handle = tokio::spawn({ @@ -76,6 +80,8 @@ struct DocsGeneratorImpl { service_storage: Arc, /// The active listeners listeners: Arc>>>, + /// The worker nodes + worker_nodes: WorkersManager, } impl DocsGenerator for DocsGeneratorImpl { @@ -97,7 +103,7 @@ impl DocsGenerator for DocsGeneratorImpl { database.get_docgen_job(job_id).await }) .await?; - let data = self.service_storage.download_doc_file(&Self::job_log_location(&job)).await?; + let data = self.service_storage.download_doc_file(&job_log_location(&job)).await?; Ok(String::from_utf8(data)?) 
-        let data = self.service_storage.download_doc_file(&Self::job_log_location(&job)).await?;
+        let data = self.service_storage.download_doc_file(&job_log_location(&job)).await?;
         Ok(String::from_utf8(data)?)
         })
     }
 
@@ -124,11 +130,6 @@ impl DocsGenerator for DocsGeneratorImpl {
 }
 
 impl DocsGeneratorImpl {
-    /// Gets the location in storage of the log for a documentation job
-    fn job_log_location(job: &DocGenJob) -> String {
-        format!("logs/job_{:06}", job.id)
-    }
-
     /// Send an event to listeners
     async fn send_event(&self, event: DocGenEvent) -> Result<(), ApiError> {
         let mut listeners = self.listeners.lock().await;
@@ -221,7 +222,7 @@ impl DocsGeneratorImpl {
             write!(log, "{backtrace}").unwrap();
         }
         self.service_storage
-            .store_doc_data(&Self::job_log_location(job), log.as_bytes().to_vec())
+            .store_doc_data(&job_log_location(job), log.as_bytes().to_vec())
             .await?;
     }
     Ok(())
@@ -229,145 +230,230 @@ impl DocsGeneratorImpl {
 
     /// Executes a documentation generation job
     async fn docs_worker_execute_job(&self, job: &DocGenJob) -> Result<(), ApiError> {
-        info!("generating doc for {} {}", job.package, job.version);
-        self.update_job(job, DocGenJobState::Working, None).await?;
-
-        let content = self.service_storage.download_crate(&job.package, &job.version).await?;
-        let temp_folder = Self::extract_content(&job.package, &job.version, &content)?;
-        let project_folder = Self::get_project_folder_in(&temp_folder).await?;
-
-        let (final_state, output) = if self.configuration.docs_gen_mock {
-            (DocGenJobState::Success, String::from("mocked"))
+        if self.worker_nodes.has_workers() {
+            self.docs_worker_execute_job_remote(job).await
         } else {
-            match self.do_generate_doc(&project_folder, &job.target).await {
-                Ok(log) => {
-                    self.service_storage
-                        .store_doc_data(&Self::job_log_location(job), log.as_bytes().to_vec())
-                        .await?;
-                    let mut project_folder = project_folder.clone();
-                    project_folder.push("target");
-                    project_folder.push(&job.target);
-                    project_folder.push("doc");
-                    let doc_folder = project_folder;
-                    self.upload_package(&doc_folder, &format!("{}/{}/{}", &job.package, &job.version, &job.target))
-                        .await?;
-                    (DocGenJobState::Success, log)
-                }
-                Err(e) => {
-                    // upload the log
-                    let log = e.details.unwrap();
-                    self.service_storage
-                        .store_doc_data(&Self::job_log_location(job), log.as_bytes().to_vec())
-                        .await?;
-                    (DocGenJobState::Failure, log)
-                }
+            self.docs_worker_execute_job_local(job).await
+        }
+    }
+
+    /// Executes a documentation generation job on a remote worker node
+    async fn docs_worker_execute_job_remote(&self, job: &DocGenJob) -> Result<(), ApiError> {
+        let mut worker = self
+            .worker_nodes
+            .get_worker_for(job.get_worker_selector(), JobIdentifier::DocGen(job.id))
+            .await;
+        self.update_job(job, DocGenJobState::Working, None).await?;
+        worker.sender().send(JobSpecification::DocGen(job.clone())).await?;
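+        // relay the worker's progress updates until the job reaches a final state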
+        while let Some(update) = worker.update_receiver().recv().await {
+            let JobUpdate::DocGen(update) = update;
+            self.update_job(job, update.state, update.log.as_deref()).await?;
+            if update.state.is_final() {
+                return Ok(());
             }
-        };
-        tokio::fs::remove_dir_all(&temp_folder).await?;
+        }
+        Err(specialize(
+            error_backend_failure(),
+            String::from("worker unexpectedly disconnected"),
+        ))
+    }
+
+    /// Executes a documentation generation job locally, on this node
+    async fn docs_worker_execute_job_local(&self, job: &DocGenJob) -> Result<(), ApiError> {
+        self.update_job(job, DocGenJobState::Working, None).await?;
+        let (final_state, output) = generate_doc_for_job(&self.configuration, self.service_storage.clone(), job).await?;
         self.update_job(job, final_state, Some(&output)).await?;
         Ok(())
     }
+}
+
+/// Gets the location in storage of the log for a documentation job
+fn job_log_location(job: &DocGenJob) -> String {
+    format!("logs/job_{:06}", job.id)
+}
+
+/// Generates the documentation for a job, returning the final state and the build log
+pub async fn generate_doc_for_job(
+    configuration: &Configuration,
+    service_storage: Arc<dyn Storage + Send + Sync>,
+    job: &DocGenJob,
+) -> Result<(DocGenJobState, String), ApiError> {
+    info!("generating doc for {} {}", job.package, job.version);
+    on_job_check_target(configuration, job).await?;
+    let content = service_storage.download_crate(&job.package, &job.version).await?;
+    let temp_folder = extract_content(&job.package, &job.version, &content)?;
+    let project_folder = get_project_folder_in(&temp_folder).await?;
+
+    let (final_state, output) = if configuration.docs_gen_mock {
+        (DocGenJobState::Success, String::from("mocked"))
+    } else {
+        match do_generate_doc(configuration, &project_folder, &job.target).await {
+            Ok(log) => {
+                service_storage
+                    .store_doc_data(&job_log_location(job), log.as_bytes().to_vec())
+                    .await?;
+                let mut project_folder = project_folder.clone();
+                project_folder.push("target");
+                project_folder.push(&job.target);
+                project_folder.push("doc");
+                let doc_folder = project_folder;
+                upload_package(
+                    service_storage.clone(),
+                    &doc_folder,
+                    &format!("{}/{}/{}", &job.package, &job.version, &job.target),
+                )
+                .await?;
+                (DocGenJobState::Success, log)
+            }
+            Err(e) => {
+                // upload the log
+                let log = e.details.unwrap();
+                service_storage
+                    .store_doc_data(&job_log_location(job), log.as_bytes().to_vec())
+                    .await?;
+                (DocGenJobState::Failure, log)
+            }
+        }
+    };
+    tokio::fs::remove_dir_all(&temp_folder).await?;
+    Ok((final_state, output))
+}
 
-    /// Generates and upload the documentation for a crate
-    fn extract_content(name: &str, version: &str, content: &[u8]) -> Result<PathBuf, ApiError> {
-        let decoder = GzDecoder::new(content);
-        let mut archive = Archive::new(decoder);
-        let target = format!("/tmp/{name}_{version}");
-        archive.unpack(&target)?;
-        Ok(PathBuf::from(target))
+/// Checks that the required target is installed before generating documentation, installing it when allowed
+async fn on_job_check_target(configuration: &Configuration, job: &DocGenJob) -> Result<(), ApiError> {
+    if configuration.self_installed_targets.iter().any(|t| t == &job.target) {
+        // target is installed
+        return Ok(());
+    }
+    if !configuration.docs_autoinstall_targets {
+        // cannot install the target
+        return Err(specialize(
+            error_invalid_request(),
+            String::from("target is not installed and installation is deactivated in configuration"),
+        ));
     }
+    if configuration.self_installable_targets.iter().all(|t| t != &job.target) {
+        // the target cannot be installed
+        return Err(specialize(
+            error_invalid_request(),
+            String::from("target is not installed and cannot be installed on this host"),
+        ));
+    }
+    // check whether it was actually installed after launch
+    let actually_installed = crate::model::config::get_installed_targets(CHANNEL_NIGHTLY).await;
+    if actually_installed.iter().any(|t| t == &job.target) {
+        // the target was actually installed (after the app launched)
+        return Ok(());
+    }
+    // try to install
+    crate::model::config::install_target(CHANNEL_NIGHTLY, &job.target).await?;
+    Ok(())
+}
 
-    /// Gets the project folder in the specified temp
-    async fn get_project_folder_in(temp_folder: &Path) -> Result<PathBuf, ApiError> {
-        let temp_folder = temp_folder.to_path_buf();
-        // get the first sub dir
-        let mut dir = tokio::fs::read_dir(&temp_folder).await?;
-        Ok(dir.next_entry().await?.unwrap().path())
+/// Extracts the content of a crate package (a gzipped tar archive) to a temporary folder
+fn extract_content(name: &str, version: &str, content: &[u8]) -> Result<PathBuf, ApiError> {
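+    // unpack under /tmp, in a folder named after the package and version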
+    let decoder = GzDecoder::new(content);
+    let mut archive = Archive::new(decoder);
+    let target = format!("/tmp/{name}_{version}");
+    archive.unpack(&target)?;
+    Ok(PathBuf::from(target))
+}
+
+/// Gets the project folder in the specified temp folder
+async fn get_project_folder_in(temp_folder: &Path) -> Result<PathBuf, ApiError> {
+    let temp_folder = temp_folder.to_path_buf();
+    // get the first sub dir
+    let mut dir = tokio::fs::read_dir(&temp_folder).await?;
+    Ok(dir.next_entry().await?.unwrap().path())
+}
+
+/// Generates the documentation for the package in a specific folder
+async fn do_generate_doc(configuration: &Configuration, project_folder: &Path, target: &str) -> Result<String, ApiError> {
+    let mut command = Command::new("cargo");
+    command
+        .current_dir(project_folder)
+        .arg(CHANNEL_NIGHTLY)
+        .arg("rustdoc")
+        .arg("-Zunstable-options")
+        .arg("-Zrustdoc-map")
+        .arg("--all-features")
+        .arg("--target")
+        .arg(target)
+        .arg("--config")
+        .arg("build.rustdocflags=[\"-Zunstable-options\",\"--extern-html-root-takes-precedence\"]")
+        .arg("--config")
+        .arg(format!(
+            "doc.extern-map.registries.{}=\"{}/docs\"",
+            configuration.self_local_name, configuration.web_public_uri
+        ));
+    if configuration.index.allow_protocol_git && configuration.index.allow_protocol_sparse {
+        // both git and sparse => add specialized sparse
+        command.arg(format!(
+            "doc.extern-map.registries.{}sparse=\"{}/docs\"",
+            configuration.self_local_name, configuration.web_public_uri
+        ));
+    }
+    for external in &configuration.external_registries {
+        command.arg("--config").arg(format!(
+            "doc.extern-map.registries.{}=\"{}\"",
+            external.name, external.docs_root
+        ));
    }
+    let mut child = command
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .spawn()?;
+    drop(child.stdin.take()); // close stdin
+    let output = child.wait_with_output().await?;
+    let stdout = String::from_utf8_lossy(&output.stdout);
+    let stderr = String::from_utf8_lossy(&output.stderr);
+    let log = format!("-- stdout\n{stdout}\n\n-- stderr\n{stderr}");
 
-    /// Generate the documentation for the package in a specific folder
-    async fn do_generate_doc(&self, project_folder: &Path, target: &str) -> Result<String, ApiError> {
-        let mut command = Command::new("cargo");
-        command
-            .current_dir(project_folder)
-            .arg("rustdoc")
-            .arg("-Zunstable-options")
-            .arg("-Zrustdoc-map")
-            .arg("--all-features")
-            .arg("--target")
-            .arg(target)
-            .arg("--config")
-            .arg("build.rustdocflags=[\"-Zunstable-options\",\"--extern-html-root-takes-precedence\"]")
-            .arg("--config")
-            .arg(format!(
-                "doc.extern-map.registries.{}=\"{}/docs\"",
-                self.configuration.self_local_name, self.configuration.web_public_uri
-            ));
-        if self.configuration.index.allow_protocol_git && self.configuration.index.allow_protocol_sparse {
-            // both git and sparse => add specialized sparse
-            command.arg(format!(
-                "doc.extern-map.registries.{}sparse=\"{}/docs\"",
-                self.configuration.self_local_name, self.configuration.web_public_uri
-            ));
-        }
-        for external in &self.configuration.external_registries {
-            command.arg("--config").arg(format!(
-                "doc.extern-map.registries.{}=\"{}\"",
-                external.name, external.docs_root
-            ));
-        }
-        let mut child = command
-            .stdin(Stdio::piped())
-            .stdout(Stdio::piped())
-            .stderr(Stdio::piped())
-            .spawn()?;
-        drop(child.stdin.take()); // close stdin
-        let output = child.wait_with_output().await?;
-        let stdout = String::from_utf8_lossy(&output.stdout);
-        let stderr = String::from_utf8_lossy(&output.stderr);
-        let log = format!("-- stdout\n{stdout}\n\n-- stderr\n{stderr}");
-
-        if output.status.success() {
-            Ok(log)
-        } else {
-            Err(specialize(error_backend_failure(), log))
-        }
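+    // a non-zero exit status from cargo marks the generation as failed; the log is returned either way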
+    if output.status.success() {
+        Ok(log)
+    } else {
+        Err(specialize(error_backend_failure(), log))
     }
+}
 
-    /// Uploads the documentation for package
-    async fn upload_package(&self, doc_folder: &Path, key_prefix: &str) -> Result<(), ApiError> {
-        let files = Self::upload_package_find_files(doc_folder, key_prefix).await?;
-        let results = n_at_a_time(
-            files.into_iter().map(|(key, path)| {
-                let service_storage = self.service_storage.clone();
-                Box::pin(async move { service_storage.store_doc_file(&key, &path).await })
-            }),
-            8,
-            Result::is_err,
-        )
-        .await;
-        for result in results {
-            result?;
-        }
-        Ok(())
+/// Uploads the documentation for a package
+async fn upload_package(
+    service_storage: Arc<dyn Storage + Send + Sync>,
+    doc_folder: &Path,
+    key_prefix: &str,
+) -> Result<(), ApiError> {
+    let files = upload_package_find_files(doc_folder, key_prefix).await?;
+    let results = n_at_a_time(
+        files.into_iter().map(|(key, path)| {
+            let service_storage = service_storage.clone();
+            Box::pin(async move { service_storage.store_doc_file(&key, &path).await })
+        }),
+        8,
+        Result::is_err,
+    )
+    .await;
+    for result in results {
+        result?;
     }
+    Ok(())
+}
 
-    /// Find target to upload in a folder and its sub-folders
-    async fn upload_package_find_files(folder: &Path, key_prefix: &str) -> Result<Vec<(String, PathBuf)>, std::io::Error> {
-        let mut results = Vec::new();
-        let mut to_explore = vec![(folder.to_path_buf(), key_prefix.to_string())];
-        while let Some((folder, prefix)) = to_explore.pop() {
-            let mut dir = tokio::fs::read_dir(folder).await?;
-            while let Some(entry) = dir.next_entry().await? {
-                let entry_path = entry.path();
-                let entry_type = entry.file_type().await?;
-                if entry_type.is_file() {
-                    results.push((format!("{prefix}/{}", entry.file_name().to_str().unwrap()), entry_path));
-                } else if entry_type.is_dir() {
-                    to_explore.push((entry_path, format!("{prefix}/{}", entry.file_name().to_str().unwrap())));
-                }
+/// Finds the files to upload in a folder and its sub-folders
+async fn upload_package_find_files(folder: &Path, key_prefix: &str) -> Result<Vec<(String, PathBuf)>, std::io::Error> {
+    let mut results = Vec::new();
+    let mut to_explore = vec![(folder.to_path_buf(), key_prefix.to_string())];
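+    // iteratively walk the folder tree, mapping each file to a storage key under the prefix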
+    while let Some((folder, prefix)) = to_explore.pop() {
+        let mut dir = tokio::fs::read_dir(folder).await?;
+        while let Some(entry) = dir.next_entry().await? {
+            let entry_path = entry.path();
+            let entry_type = entry.file_type().await?;
+            if entry_type.is_file() {
+                results.push((format!("{prefix}/{}", entry.file_name().to_str().unwrap()), entry_path));
+            } else if entry_type.is_dir() {
+                to_explore.push((entry_path, format!("{prefix}/{}", entry.file_name().to_str().unwrap())));
             }
         }
-        Ok(results)
     }
+    Ok(results)
 }
diff --git a/src/services/mod.rs b/src/services/mod.rs
index 2237b95..bbc55b3 100644
--- a/src/services/mod.rs
+++ b/src/services/mod.rs
@@ -7,6 +7,7 @@
 use std::sync::Arc;
 
 use crate::model::config::Configuration;
+use crate::model::worker::WorkersManager;
 use crate::utils::apierror::ApiError;
 use crate::utils::db::RwSqlitePool;
@@ -48,6 +49,7 @@ pub trait ServiceProvider {
         configuration: Arc<Configuration>,
         service_db_pool: RwSqlitePool,
         service_storage: Arc<dyn Storage + Send + Sync>,
+        worker_nodes: WorkersManager,
     ) -> Arc<dyn DocsGenerator + Send + Sync>;
 }
 
@@ -96,7 +98,8 @@ impl ServiceProvider for StandardServiceProvider {
         configuration: Arc<Configuration>,
         service_db_pool: RwSqlitePool,
         service_storage: Arc<dyn Storage + Send + Sync>,
+        worker_nodes: WorkersManager,
     ) -> Arc<dyn DocsGenerator + Send + Sync> {
-        docs::get_service(configuration, service_db_pool, service_storage)
+        docs::get_service(configuration, service_db_pool, service_storage, worker_nodes)
     }
 }
diff --git a/src/tests/mocks.rs b/src/tests/mocks.rs
index 435564e..61e1f02 100644
--- a/src/tests/mocks.rs
+++ b/src/tests/mocks.rs
@@ -17,6 +17,7 @@ use crate::model::config::Configuration;
 use crate::model::deps::DepsAnalysis;
 use crate::model::docs::{DocGenEvent, DocGenJob, DocGenJobSpec, DocGenJobState, DocGenTrigger};
 use crate::model::osv::SimpleAdvisory;
+use crate::model::worker::WorkersManager;
 use crate::services::deps::DepsChecker;
 use crate::services::docs::DocsGenerator;
 use crate::services::emails::EmailSender;
@@ -76,6 +77,7 @@ impl ServiceProvider for MockService {
         _configuration: Arc<Configuration>,
         _service_db_pool: RwSqlitePool,
         _service_storage: Arc<dyn Storage + Send + Sync>,
+        _worker_nodes: WorkersManager,
     ) -> Arc<dyn DocsGenerator + Send + Sync> {
         Arc::new(MockService)
     }
@@ -134,6 +136,8 @@ impl DocsGenerator for MockService {
             package: spec.package.clone(),
             version: spec.version.clone(),
             target: spec.target.clone(),
+            use_native: false,
+            capabilities: Vec::new(),
             state: DocGenJobState::Queued,
             queued_on: NaiveDateTime::default(),
             started_on: NaiveDateTime::default(),
diff --git a/src/tests/mod.rs b/src/tests/mod.rs
index 25d3e9b..887426b 100644
--- a/src/tests/mod.rs
+++ b/src/tests/mod.rs
@@ -12,6 +12,7 @@ use tokio::runtime::Builder;
 
 use crate::application::Application;
 use crate::model::auth::ROLE_ADMIN;
+use crate::services::ServiceProvider;
 use crate::utils::apierror::ApiError;
 use crate::utils::axum::auth::{AuthData, Token};
 use crate::utils::token::{generate_token, hash_token};
@@ -30,7 +31,7 @@ where
 {
     let runtime = Builder::new_current_thread().enable_all().build()?;
     runtime.block_on(async move {
-        let application = Application::launch::<mocks::MockService>().await?;
+        let application = Application::launch::<mocks::MockService>(mocks::MockService::get_configuration().await?).await?;
         println!("data_dir={}", &application.configuration.data_dir);
         // create the first user as admin and get its token
         setup_create_admin(&application, ADMIN_NAME).await?;
diff --git a/src/utils/concurrent.rs b/src/utils/concurrent.rs
index 1a17c7e..9761936 100644
--- a/src/utils/concurrent.rs
+++ b/src/utils/concurrent.rs
@@ -5,10 +5,11 @@
 //! Utility to run at most n concurrent jobs
 
 use std::future::Future;
-use std::pin::pin;
+use std::pin::{pin, Pin};
+use std::task::{Context, Poll};
 
-use futures::future::{select, select_all, Either};
-use futures::{Stream, StreamExt};
+use futures::future::{select, select_all, Either, FusedFuture};
+use futures::{FutureExt, Stream, StreamExt};
 
 /// Takes an iterator of futures and executes them concurrently, with at most n concurrent futures.
 /// This is similar to the `futures::future::join_all` function, except that instead of executing them all,
@@ -114,3 +115,70 @@
     results
 }
+
+/// A future that may be absent, and that never resolves when it is
+pub struct MaybeOrNever<F> {
+    /// The inner future
+    inner: Option<F>,
+    /// Whether the inner future is terminated
+    is_terminated: bool,
+}
+
+impl<F> Default for MaybeOrNever<F> {
+    fn default() -> Self {
+        Self {
+            inner: None,
+            is_terminated: false,
+        }
+    }
+}
+
+impl<F> MaybeOrNever<F> {
+    /// Creates a new future
+    pub fn new(inner: F) -> Self {
+        MaybeOrNever {
+            inner: Some(inner),
+            is_terminated: false,
+        }
+    }
+
+    /// Gets whether there is no future inside
+    pub fn is_never(&self) -> bool {
+        self.inner.is_none()
+    }
+}
+
+impl<F, O> FusedFuture for MaybeOrNever<F>
+where
+    F: Future<Output = O> + Unpin,
+{
+    fn is_terminated(&self) -> bool {
+        self.is_terminated
+    }
+}
+
+/// Transforms a future into a maybe missing one
+pub trait MaybeFutureExt: Sized {
+    /// Transforms this future into a maybe missing one
+    fn maybe(self) -> MaybeOrNever<Self>;
+}
+
+impl<T> MaybeFutureExt for T {
+    fn maybe(self) -> MaybeOrNever<T> {
+        MaybeOrNever::new(self)
+    }
+}
+
+impl<F, O> Future for MaybeOrNever<F>
+where
+    F: Future<Output = O> + Unpin,
+{
+    type Output = O;
+
+    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<O> {
+        if self.inner.is_none() {
+            Poll::Pending
+        } else {
+            let r = self.as_mut().inner.as_mut().unwrap().poll_unpin(cx);
+            self.is_terminated = r.is_ready();
+            r
+        }
+    }
+}
diff --git a/src/webapp/admin-jobs-docgen.html b/src/webapp/admin-jobs-docgen.html
index 703aab5..e05dc26 100644
--- a/src/webapp/admin-jobs-docgen.html
+++ b/src/webapp/admin-jobs-docgen.html
@@ -40,32 +40,48 @@
 [hunk body garbled in extraction: after the "Back to admin" link, the page gains a "Workers" summary table (column: Node) above the existing "Documentation generation jobs" table (columns: Crate / Version, Status, Updates, Trigger, Actions); the "Log" panel is kept]
diff --git a/src/webapp/admin-workers.html b/src/webapp/admin-workers.html
new file mode 100644
 [file content garbled in extraction: a new "Worker nodes" admin page with a table (columns: Node, Host, Versions, Installed targets, Capabilities, Status) and the standard "Version, Copyright © Cénotélie. All Rights Reserved." footer]
diff --git a/src/webapp/admin.html b/src/webapp/admin.html
index 3e5cb46..9a105b6 100644
--- a/src/webapp/admin.html
+++ b/src/webapp/admin.html
@@ -42,6 +42,9 @@

See documentation generation jobs +
  • + Manage worker nodes +
  • Manage users
  • diff --git a/src/webapp/api.js b/src/webapp/api.js index 23da2d1..271dd38 100644 --- a/src/webapp/api.js +++ b/src/webapp/api.js @@ -89,6 +89,11 @@ async function apiGetDocGenJobLog(jobId) { return await onResponseJson(response); } +async function apiGetWorkers() { + const response = await fetch(`/api/v1/admin/workers`); + return await onResponseJson(response); +} + async function apiGetUsers() { const response = await fetch("/api/v1/admin/users"); return await onResponseJson(response); @@ -206,6 +211,20 @@ async function apiSetCrateTargets(crate, targets) { return await onResponseJson(response); } +async function apiGetCrateCapabilities(crate) { + const response = await fetch(`/api/v1/crates/${crate}/capabilities`); + return await onResponseJson(response); +} + +async function apiSetCrateCapabilities(crate, capabilities) { + const response = await fetch(`/api/v1/crates/${crate}/capabilities`, { + method: "PATCH", + body: JSON.stringify(capabilities), + headers: [["content-type", "application/json"]], + }); + return await onResponseJson(response); +} + async function apiSetCrateDeprecation(crate, isDeprecated) { const response = await fetch(`/api/v1/crates/${crate}/deprecated`, { method: "PATCH", diff --git a/src/webapp/mod.rs b/src/webapp/mod.rs index 72e541f..abe1df7 100644 --- a/src/webapp/mod.rs +++ b/src/webapp/mod.rs @@ -31,6 +31,7 @@ pub fn get_resources() -> EmbeddedResources { add!(resources, "admin-users.html"); add!(resources, "admin-tokens.html"); add!(resources, "admin-jobs-docgen.html"); + add!(resources, "admin-workers.html"); add!(resources, "crate.html"); add!(resources, "oauthcallback.html"); // CSS diff --git a/src/worker.rs b/src/worker.rs new file mode 100644 index 0000000..0781ec3 --- /dev/null +++ b/src/worker.rs @@ -0,0 +1,204 @@ +/******************************************************************************* + * Copyright (c) 2024 Cénotélie Opérations SAS (cenotelie.fr) + ******************************************************************************/ + +//! 
+//! Main application for worker nodes
+
+use std::sync::Arc;
+use std::time::Duration;
+
+use base64::engine::general_purpose::STANDARD;
+use base64::Engine;
+use chrono::Local;
+use futures::{select, FutureExt, Sink, SinkExt, StreamExt};
+use log::{error, info, warn};
+use tokio::net::TcpStream;
+use tokio::sync::Mutex;
+use tokio::time::MissedTickBehavior;
+use tokio_tungstenite::tungstenite::{ClientRequestBuilder, Message};
+use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
+
+use crate::model::config::{Configuration, ExternalRegistry, NodeRole, NodeRoleWorker};
+use crate::model::docs::{DocGenJobState, DocGenJobUpdate};
+use crate::model::worker::{JobSpecification, JobUpdate, WorkerDescriptor};
+use crate::services::{ServiceProvider, StandardServiceProvider};
+use crate::utils::apierror::{error_backend_failure, specialize, ApiError};
+use crate::utils::concurrent::{MaybeFutureExt, MaybeOrNever};
+
+/// The interval between heartbeats, in milliseconds
+const HEARTBEAT_INTERVAL: u64 = 100;
+
+/// The main entry point for a worker node
+pub async fn main_worker(config: Configuration) {
+    let descriptor = WorkerDescriptor::get_my_descriptor(&config);
+    let NodeRole::Worker(worker_config) = &config.self_role else {
+        panic!("expected worker role config");
+    };
+    let ws = main_worker_connect(worker_config, &descriptor).await.unwrap();
+    main_loop(ws, &descriptor, config).await.unwrap();
+}
+
+/// Establishes the connection to the server
+async fn main_worker_connect(
+    config: &NodeRoleWorker,
+    descriptor: &WorkerDescriptor,
+) -> Result<WebSocketStream<MaybeTlsStream<TcpStream>>, ApiError> {
+    info!("connecting to server ...");
+    loop {
+        let request = ClientRequestBuilder::new(format!("{}/api/v1/admin/workers/connect", config.master_uri).parse()?)
+            .with_header(
+                "Authorization",
+                format!(
+                    "Basic {}",
+                    STANDARD.encode(format!("{}:{}", descriptor.identifier, config.worker_token))
+                ),
+            );
+        if let Ok((ws, response)) = tokio_tungstenite::connect_async(request).await {
+            if response.status().as_u16() != 101 {
+                return Err(specialize(
+                    error_backend_failure(),
+                    format!("Error from controller when connecting: {}", response.status().as_u16()),
+                ));
+            }
+            return Ok(ws);
+        }
+    }
+}
+
+/// The main loop of a worker node
+async fn main_loop(
+    ws: WebSocketStream<MaybeTlsStream<TcpStream>>,
+    descriptor: &WorkerDescriptor,
+    mut config: Configuration,
+) -> Result<(), ApiError> {
+    let (mut sender, mut receiver) = ws.split();
+    // handshake: send the worker's descriptor
+    sender.send(Message::Text(serde_json::to_string(descriptor)?)).await?;
+    // handshake: receive the connection data for the master's registry
+    let message = receiver
+        .next()
+        .await
+        .ok_or_else(|| specialize(error_backend_failure(), String::from("expected configuration from server")))??;
+    let Message::Text(message) = message else {
+        return Err(specialize(
+            error_backend_failure(),
+            String::from("expected configuration from server"),
+        ));
+    };
+    let external_config = serde_json::from_str::<ExternalRegistry>(&message)?;
+    config.external_registries.push(external_config);
+    config.write_auth_config().await?;
+    let config = &config;
+
+    info!("connected as {}-{}, waiting for jobs", descriptor.name, descriptor.identifier);
+    let sender = Arc::new(Mutex::new(sender));
+
+    let mut receiver_next = receiver.next().fuse();
+    let mut current_job = MaybeOrNever::default();
+    let mut heartbeat = {
+        let sender = sender.clone();
+        Box::pin(async move {
+            let mut code: u8 = 0;
+            let mut last = std::time::Instant::now();
+
+            let mut ticks_interval = tokio::time::interval(Duration::from_millis(HEARTBEAT_INTERVAL));
+            ticks_interval.set_missed_tick_behavior(MissedTickBehavior::Delay);
+            loop {
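+                // wait for the next tick; the Delay behavior avoids a burst of heartbeats after a stall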
+                ticks_interval.tick().await;
+                let elapsed = last.elapsed();
+                if elapsed.as_millis() > (HEARTBEAT_INTERVAL + HEARTBEAT_INTERVAL / 2).into() {
+                    warn!("heartbeat: waited too long: {}ms", elapsed.as_millis());
+                }
+                // send the heartbeat
+                sender.lock().await.send(Message::Pong(vec![code])).await?;
+                code = code.wrapping_add(1);
+                last = std::time::Instant::now();
+            }
+        })
+        .fuse()
+    };
+    loop {
+        select! {
+            message = receiver_next => {
+                if let Some(message) = message {
+                    let message = message?;
+                    match message {
+                        Message::Ping(_) | Message::Pong(_) | Message::Frame(_) => { /* do nothing */ }
+                        Message::Close(_) => {
+                            sender.lock().await.send(Message::Close(None)).await?;
+                            return Ok(());
+                        }
+                        Message::Binary(bytes) => {
+                            if let Ok(job) = serde_json::from_slice::<JobSpecification>(&bytes) {
+                                current_job = Box::pin(worker_on_job(sender.clone(), job, config)).maybe();
+                            }
+                        }
+                        Message::Text(data) => {
+                            if let Ok(job) = serde_json::from_str::<JobSpecification>(&data) {
+                                current_job = Box::pin(worker_on_job(sender.clone(), job, config)).maybe();
+                            }
+                        }
+                    }
+                    receiver_next = receiver.next().fuse();
+                } else {
+                    // end of socket
+                    return Ok(());
+                }
+            }
+            result = current_job => {
+                if let Err(error) = result {
+                    error!("{error}");
+                    if let Some(backtrace) = &error.backtrace {
+                        error!("{backtrace}");
+                    }
+                }
+                current_job = MaybeOrNever::default();
+            }
+            result = heartbeat => {
+                let result: Result<(), ApiError> = result;
+                if let Err(error) = result {
+                    error!("{error}");
+                    if let Some(backtrace) = &error.backtrace {
+                        error!("{backtrace}");
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Handles a job received from the server, sending back a final update
+async fn worker_on_job<S>(sender: Arc<Mutex<S>>, job: JobSpecification, config: &Configuration) -> Result<(), ApiError>
+where
+    S: Sink<Message, Error = tokio_tungstenite::tungstenite::Error> + Unpin,
+{
+    let JobSpecification::DocGen(job) = job;
+    let service_storage = StandardServiceProvider::get_storage(config);
+    match crate::services::docs::generate_doc_for_job(config, service_storage, &job).await {
+        Ok((state, log)) => {
+            let now = Local::now().naive_local();
+            sender
+                .lock()
+                .await
+                .send(Message::Text(serde_json::to_string(&JobUpdate::DocGen(DocGenJobUpdate {
+                    job_id: job.id,
+                    state,
+                    last_update: now,
+                    log: Some(log),
+                }))?))
+                .await?;
+        }
+        Err(error) => {
+            let now = Local::now().naive_local();
+            sender
+                .lock()
+                .await
+                .send(Message::Text(serde_json::to_string(&JobUpdate::DocGen(DocGenJobUpdate {
+                    job_id: job.id,
+                    state: DocGenJobState::Failure,
+                    last_update: now,
+                    log: Some(format!("{error}")),
+                }))?))
+                .await?;
+        }
+    }
+    Ok(())
+}