diff --git a/engine/commands/engine_init_cmd.cc b/engine/commands/engine_init_cmd.cc
index b0c633042..350f3c6b1 100644
--- a/engine/commands/engine_init_cmd.cc
+++ b/engine/commands/engine_init_cmd.cc
@@ -7,7 +7,6 @@
 #include "utils/archive_utils.h"
 #include "utils/system_info_utils.h"
 // clang-format on
-#include "utils/cortex_utils.h"
 #include "utils/cuda_toolkit_utils.h"
 #include "utils/engine_matcher_utils.h"
 #include "utils/file_manager_utils.h"
@@ -125,6 +124,11 @@ bool EngineInitCmd::Exec() const {
         return true;
       }
 
+      if (cuda_driver_version.empty()) {
+        CTL_WRN("No cuda driver, continue with CPU");
+        return true;
+      }
+
       // download cuda toolkit
       const std::string jan_host = "https://catalog.jan.ai";
       const std::string cuda_toolkit_file_name = "cuda.tar.gz";
@@ -164,35 +168,30 @@ bool EngineInitCmd::Exec() const {
                        << cuda_driver_version << "/" << system_info.os << "/"
                        << cuda_toolkit_file_name;
-      LOG_DEBUG << "Cuda toolkit download url: " << jan_host
+      LOG_DEBUG << "Cuda toolkit download url: "
                 << cuda_toolkit_url.str();
-      auto cuda_toollkit_local_path =
-          file_manager_utils::GetExecutableFolderContainerPath() /
+      auto cuda_toolkit_local_path =
+          file_manager_utils::GetContainerFolderPath(
+              file_manager_utils::DownloadTypeToString(
+                  DownloadType::CudaToolkit)) /
           cuda_toolkit_file_name;
+      LOG_DEBUG << "Download to: " << cuda_toolkit_local_path.string();
 
       auto downloadCudaToolkitTask{DownloadTask{
           .id = download_id,
           .type = DownloadType::CudaToolkit,
           .items = {DownloadItem{.id = download_id,
                                  .downloadUrl = cuda_toolkit_url.str(),
-                                 .localPath = cuda_toollkit_local_path}},
+                                 .localPath = cuda_toolkit_local_path}},
       }};
 
       download_service.AddDownloadTask(
           downloadCudaToolkitTask, [&](const DownloadTask& finishedTask) {
-            // TODO(any) This is a temporary fix. The issue will be fixed when we has CIs
-            // to pack CUDA dependecies into engine release
-            auto get_engine_path = [](std::string_view e) {
-              if (e == "cortex.llamacpp") {
-                return cortex_utils::kLlamaLibPath;
-              } else {
-                return cortex_utils::kTensorrtLlmPath;
-              }
-            };
-            std::string engine_path =
-                file_manager_utils::GetCortexDataPath().string() +
-                get_engine_path(engineName_);
+            auto engine_path =
+                file_manager_utils::GetEnginesContainerPath() /
+                engineName_;
             archive_utils::ExtractArchive(
-                finishedTask.items[0].localPath.string(), engine_path);
+                finishedTask.items[0].localPath.string(),
+                engine_path.string());
 
             try {
               std::filesystem::remove(finishedTask.items[0].localPath);
diff --git a/engine/utils/file_manager_utils.h b/engine/utils/file_manager_utils.h
index e9ab6d515..8e449b3d2 100644
--- a/engine/utils/file_manager_utils.h
+++ b/engine/utils/file_manager_utils.h
@@ -229,24 +229,24 @@ inline std::filesystem::path GetEnginesContainerPath() {
 
 inline std::filesystem::path GetContainerFolderPath(
     const std::string_view type) {
-  const auto current_path{GetExecutableFolderContainerPath()};
-  auto container_folder_path = std::filesystem::path{};
+  std::filesystem::path container_folder_path;
 
   if (type == "Model") {
     container_folder_path = GetModelsContainerPath();
   } else if (type == "Engine") {
     container_folder_path = GetEnginesContainerPath();
   } else if (type == "CudaToolkit") {
-    container_folder_path = current_path;
+    container_folder_path =
+        std::filesystem::temp_directory_path() / "cuda-dependencies";
   } else if (type == "Cortex") {
     container_folder_path = std::filesystem::temp_directory_path() / "cortex";
   } else {
-    container_folder_path = current_path / "misc";
+    container_folder_path = std::filesystem::temp_directory_path() / "misc";
   }
 
   if (!std::filesystem::exists(container_folder_path)) {
     CTL_INF("Creating folder: " << container_folder_path.string() << "\n");
-    std::filesystem::create_directory(container_folder_path);
+    std::filesystem::create_directories(container_folder_path);
   }
 
   return container_folder_path;