feat: using libcurl to download
namchuai committed Sep 13, 2024
1 parent 653439b commit 457e5a3
Showing 21 changed files with 581 additions and 378 deletions.
2 changes: 2 additions & 0 deletions engine/CMakeLists.txt
@@ -106,6 +106,7 @@ find_package(CLI11 CONFIG REQUIRED)
find_package(unofficial-minizip CONFIG REQUIRED)
find_package(LibArchive REQUIRED)
find_package(tabulate CONFIG REQUIRED)
find_package(CURL REQUIRED)

# Build using CMAKE-JS
if(DEFINED CMAKE_JS_INC)
@@ -150,6 +151,7 @@ target_link_libraries(${PROJECT_NAME} PRIVATE CLI11::CLI11)
target_link_libraries(${PROJECT_NAME} PRIVATE unofficial::minizip::minizip)
target_link_libraries(${PROJECT_NAME} PRIVATE LibArchive::LibArchive)
target_link_libraries(${PROJECT_NAME} PRIVATE tabulate::tabulate)
target_link_libraries(${PROJECT_NAME} PRIVATE CURL::libcurl)

# Build using CMAKE-JS
if(DEFINED CMAKE_JS_INC)
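The CMakeLists.txt change above pulls in libcurl with find_package(CURL REQUIRED) and links the target against CURL::libcurl. The DownloadService internals that actually call libcurl live in other files of this commit and are not shown in this excerpt; as orientation only, a minimal sketch of a libcurl file download (plain easy API, no progress reporting, not the repository's actual implementation) could look like this:

// Minimal sketch of a libcurl download; NOT the code added by this commit.
#include <curl/curl.h>
#include <cstdio>
#include <string>

namespace {
size_t WriteToFile(void* ptr, size_t size, size_t nmemb, void* userdata) {
  // libcurl delivers raw bytes; append them to the open FILE*.
  // libcurl documents size == 1, so the item count equals the byte count.
  return std::fwrite(ptr, size, nmemb, static_cast<std::FILE*>(userdata));
}
}  // namespace

bool DownloadFile(const std::string& url, const std::string& local_path) {
  CURL* curl = curl_easy_init();
  if (curl == nullptr) return false;

  std::FILE* file = std::fopen(local_path.c_str(), "wb");
  if (file == nullptr) {
    curl_easy_cleanup(curl);
    return false;
  }

  curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);  // release assets redirect
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteToFile);
  curl_easy_setopt(curl, CURLOPT_WRITEDATA, file);

  CURLcode res = curl_easy_perform(curl);

  std::fclose(file);
  curl_easy_cleanup(curl);
  return res == CURLE_OK;
}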
139 changes: 64 additions & 75 deletions engine/commands/cortex_upd_cmd.cc
@@ -1,6 +1,3 @@
// clang-format off
#include "utils/cortex_utils.h"
// clang-format on
#include "cortex_upd_cmd.h"
#include "httplib.h"
#include "nlohmann/json.hpp"
@@ -10,11 +7,10 @@
#include "utils/file_manager_utils.h"
#include "utils/logging_utils.h"
#include "utils/system_info_utils.h"
#include "utils/url_parser.h"

namespace commands {

CortexUpdCmd::CortexUpdCmd() {}

void CortexUpdCmd::Exec(std::string v) {
{
auto config = file_manager_utils::GetCortexConfig();
@@ -38,14 +34,7 @@ void CortexUpdCmd::Exec(std::string v) {
}

bool CortexUpdCmd::GetStableAndBeta(const std::string& v) {
// Check if the architecture and OS are supported
auto system_info = system_info_utils::GetSystemInfo();
if (system_info.arch == system_info_utils::kUnsupported ||
system_info.os == system_info_utils::kUnsupported) {
CTL_ERR("Unsupported OS or architecture: " << system_info.os << ", "
<< system_info.arch);
return false;
}
CTL_INF("OS: " << system_info.os << ", Arch: " << system_info.arch);

// Download file
@@ -84,38 +73,35 @@ bool CortexUpdCmd::GetStableAndBeta(const std::string& v) {
for (auto& asset : assets) {
auto asset_name = asset["name"].get<std::string>();
if (asset_name == matched_variant) {
std::string host{"https://github.com"};

auto full_url = asset["browser_download_url"].get<std::string>();
std::string path = full_url.substr(host.length());

auto fileName = asset["name"].get<std::string>();
CTL_INF("URL: " << full_url);

auto download_task = DownloadTask{.id = "cortex",
.type = DownloadType::Cortex,
.error = std::nullopt,
.items = {DownloadItem{
.id = "cortex",
.host = host,
.fileName = fileName,
.type = DownloadType::Cortex,
.path = path,
}}};

DownloadService download_service;
download_service.AddDownloadTask(
download_task,
[this](const std::string& absolute_path, bool unused) {
auto download_url =
asset["browser_download_url"].get<std::string>();
auto file_name = asset["name"].get<std::string>();
CTL_INF("Download url: " << download_url);

auto local_path =
file_manager_utils::GetExecutableFolderContainerPath() /
"cortex" / asset_name;
auto download_task{DownloadTask{.id = "cortex",
.type = DownloadType::Cortex,
.items = {DownloadItem{
.id = "cortex",
.downloadUrl = download_url,
.localPath = local_path,
}}}};

DownloadService().AddDownloadTask(
download_task, [](const DownloadTask& finishedTask) {
// try to unzip the downloaded file
std::filesystem::path download_path{absolute_path};
CTL_INF("Downloaded engine path: " << download_path.string());
CTL_INF("Downloaded engine path: "
<< finishedTask.items[0].localPath.string());

std::filesystem::path extract_path =
download_path.parent_path().parent_path();
auto extract_path = finishedTask.items[0]
.localPath.parent_path()
.parent_path();

archive_utils::ExtractArchive(download_path.string(),
extract_path.string());
archive_utils::ExtractArchive(
finishedTask.items[0].localPath.string(),
extract_path.string());

CTL_INF("Finished!");
});
@@ -145,56 +131,59 @@ bool CortexUpdCmd::GetStableAndBeta(const std::string& v) {
}

bool CortexUpdCmd::GetNightly(const std::string& v) {
// Check if the architecture and OS are supported
auto system_info = system_info_utils::GetSystemInfo();
if (system_info.arch == system_info_utils::kUnsupported ||
system_info.os == system_info_utils::kUnsupported) {
CTL_ERR("Unsupported OS or architecture: " << system_info.os << ", "
<< system_info.arch);
return false;
}
CTL_INF("OS: " << system_info.os << ", Arch: " << system_info.arch);

// Download file
std::string version = v.empty() ? "latest" : std::move(v);
std::ostringstream release_path;
release_path << "cortex/" << version << "/" << system_info.os << "-"
<< system_info.arch << "/" << kNightlyFileName;
CTL_INF("Engine release path: " << kNightlyHost << "/" << release_path.str());

auto download_task = DownloadTask{.id = "cortex",
.type = DownloadType::Cortex,
.error = std::nullopt,
.items = {DownloadItem{
.id = "cortex",
.host = kNightlyHost,
.fileName = kNightlyFileName,
.type = DownloadType::Cortex,
.path = release_path.str(),
}}};

DownloadService download_service;
download_service.AddDownloadTask(
download_task, [this](const std::string& absolute_path, bool unused) {
std::string os_arch{system_info.os + "-" + system_info.arch};
const char* paths[] = {
"cortex",
version.c_str(),
os_arch.c_str(),
kNightlyFileName,
};
std::vector<std::string> path_list(paths, std::end(paths));
auto url_obj = url_parser::Url{
.protocol = "https",
.host = kNightlyHost,
.pathParams = path_list,
};

CTL_INF("Engine release path: " << url_parser::FromUrl(url_obj));

std::filesystem::path localPath =
file_manager_utils::GetExecutableFolderContainerPath() / "cortex" /
path_list.back();
auto download_task =
DownloadTask{.id = "cortex",
.type = DownloadType::Cortex,
.items = {DownloadItem{
.id = "cortex",
.downloadUrl = url_parser::FromUrl(url_obj),
.localPath = localPath,
}}};

DownloadService().AddDownloadTask(
download_task, [](const DownloadTask& finishedTask) {
// try to unzip the downloaded file
std::filesystem::path download_path{absolute_path};
CTL_INF("Downloaded engine path: " << download_path.string());
CTL_INF("Downloaded engine path: "
<< finishedTask.items[0].localPath.string());

std::filesystem::path extract_path =
download_path.parent_path().parent_path();
auto extract_path =
finishedTask.items[0].localPath.parent_path().parent_path();

archive_utils::ExtractArchive(download_path.string(),
archive_utils::ExtractArchive(finishedTask.items[0].localPath.string(),
extract_path.string());

CTL_INF("Finished!");
});

// Replace binay file
// Replace binary file
auto executable_path = file_manager_utils::GetExecutableFolderContainerPath();
auto src = std::filesystem::temp_directory_path() / "cortex" / kCortexBinary /
GetCortexBinary();
auto dst = executable_path / GetCortexBinary();
return ReplaceBinaryInflight(src, dst);
}

} // namespace commands
} // namespace commands
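Beyond the URL handling, the cortex_upd_cmd.cc diff above also changes the AddDownloadTask completion callback: the old overload passed the downloaded file's absolute path as a string (plus an unused bool), while the new one passes the finished DownloadTask, so callers read finishedTask.items[0].localPath instead. A hedged sketch of a caller written against the new shape, using only the type and field names visible in this diff (the include paths and the exact AddDownloadTask signature are assumptions):

// Sketch of a caller against the new DownloadService API shown in this commit.
// Only DownloadTask/DownloadItem field names visible in the diff are used.
#include <filesystem>

void DownloadAndExtractExample() {
  auto local_path =
      std::filesystem::temp_directory_path() / "cortex" / "cortex.tar.gz";

  auto task = DownloadTask{.id = "cortex",
                           .type = DownloadType::Cortex,
                           .items = {DownloadItem{
                               .id = "cortex",
                               // Hypothetical URL, for illustration only.
                               .downloadUrl = "https://example.com/cortex.tar.gz",
                               .localPath = local_path,
                           }}};

  DownloadService().AddDownloadTask(task, [](const DownloadTask& finished) {
    // New-style callback: the finished task carries the local path directly,
    // replacing the old (const std::string& absolute_path, bool) signature.
    auto downloaded = finished.items[0].localPath;
    archive_utils::ExtractArchive(downloaded.string(),
                                  downloaded.parent_path().string());
  });
}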
7 changes: 2 additions & 5 deletions engine/commands/cortex_upd_cmd.h
@@ -1,5 +1,4 @@
#pragma once
#include <optional>
#include <string>

#include "httplib.h"
@@ -11,7 +10,7 @@ namespace commands {
#ifndef CORTEX_VARIANT
#define CORTEX_VARIANT file_manager_utils::kProdVariant
#endif
constexpr const auto kNightlyHost = "https://delta.jan.ai";
constexpr const auto kNightlyHost = "delta.jan.ai";
constexpr const auto kNightlyFileName = "cortex-nightly.tar.gz";
const std::string kCortexBinary = "cortex";

@@ -113,12 +112,10 @@ inline bool ReplaceBinaryInflight(const std::filesystem::path& src,

class CortexUpdCmd {
public:
CortexUpdCmd();
void Exec(std::string version);

private:
bool GetStableAndBeta(const std::string& v);
bool GetNightly(const std::string& v);
};

} // namespace commands
} // namespace commands
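The kNightlyHost constant drops its "https://" prefix because the scheme now lives in url_parser::Url::protocol and the full URL is rebuilt by url_parser::FromUrl, as in GetNightly above. A small sketch of the intended composition, assuming FromUrl simply joins protocol, host, and path params (its exact behavior is not shown in this excerpt):

// Assumed behavior of url_parser::FromUrl, inferred from its call site in GetNightly.
auto url_obj = url_parser::Url{
    .protocol = "https",
    .host = kNightlyHost,  // now "delta.jan.ai", without the scheme
    // "linux-amd64" is an example os-arch value, not necessarily cortex's naming.
    .pathParams = {"cortex", "latest", "linux-amd64", kNightlyFileName},
};
// Expected result:
// "https://delta.jan.ai/cortex/latest/linux-amd64/cortex-nightly.tar.gz"
auto url = url_parser::FromUrl(url_obj);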