diff --git a/engine/commands/chat_cmd.cc b/engine/commands/chat_cmd.cc
index 6de88ed28..e535fa704 100644
--- a/engine/commands/chat_cmd.cc
+++ b/engine/commands/chat_cmd.cc
@@ -2,6 +2,7 @@
 
 #include "httplib.h"
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
 namespace {
@@ -48,12 +49,12 @@ void ChatCmd::Exec(std::string msg) {
         data_str.data(), data_str.size(), "application/json");
     if (res) {
       if (res->status != httplib::StatusCode::OK_200) {
-        LOG_INFO << res->body;
+        CTL_ERR(res->body);
         return;
       }
     } else {
       auto err = res.error();
-      LOG_WARN << "HTTP error: " << httplib::to_string(err);
+      CTL_ERR("HTTP error: " << httplib::to_string(err));
       return;
     }
   }
diff --git a/engine/commands/cmd_info.cc b/engine/commands/cmd_info.cc
index bf13c6bc7..a25697b8d 100644
--- a/engine/commands/cmd_info.cc
+++ b/engine/commands/cmd_info.cc
@@ -1,6 +1,7 @@
 #include "cmd_info.h"
 #include 
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
 namespace {
@@ -33,7 +34,7 @@ void CmdInfo::Parse(std::string model_id) {
   } else {
     auto res = split(model_id, kDelimiter);
     if (res.size() != 2) {
-      LOG_ERROR << "model_id does not valid";
+      CTL_ERR(" does not valid");
       return;
     } else {
       model_name = std::move(res[0]);
@@ -45,7 +46,7 @@ void CmdInfo::Parse(std::string model_id) {
     } else if (branch.find("gguf") != std::string::npos) {
       engine_name = "cortex.llamacpp";
     } else {
-      LOG_ERROR << "Not a valid branch model_name " << branch;
+      CTL_ERR("Not a valid branch model_name " << branch);
     }
   }
 }
diff --git a/engine/commands/engine_init_cmd.cc b/engine/commands/engine_init_cmd.cc
index c354c8373..e0b05804f 100644
--- a/engine/commands/engine_init_cmd.cc
+++ b/engine/commands/engine_init_cmd.cc
@@ -20,7 +20,7 @@ EngineInitCmd::EngineInitCmd(std::string engineName, std::string version)
 
 bool EngineInitCmd::Exec() const {
   if (engineName_.empty()) {
-    LOG_ERROR << "Engine name is required";
+    CTL_ERR("Engine name is required");
     return false;
   }
 
@@ -28,16 +28,16 @@ bool EngineInitCmd::Exec() const {
   auto system_info = system_info_utils::GetSystemInfo();
   if (system_info.arch == system_info_utils::kUnsupported ||
       system_info.os == system_info_utils::kUnsupported) {
-    LOG_ERROR << "Unsupported OS or architecture: " << system_info.os << ", "
-              << system_info.arch;
+    CTL_ERR("Unsupported OS or architecture: " << system_info.os << ", "
+            << system_info.arch);
     return false;
   }
-  LOG_INFO << "OS: " << system_info.os << ", Arch: " << system_info.arch;
+  CTL_INF("OS: " << system_info.os << ", Arch: " << system_info.arch);
 
   // check if engine is supported
   if (std::find(supportedEngines_.begin(), supportedEngines_.end(),
                 engineName_) == supportedEngines_.end()) {
-    LOG_ERROR << "Engine not supported";
+    CTL_ERR("Engine not supported");
     return false;
   }
 
@@ -46,7 +46,7 @@ bool EngineInitCmd::Exec() const {
   std::ostringstream engineReleasePath;
   engineReleasePath << "/repos/janhq/" << engineName_ << "/releases/"
                     << version;
-  LOG_INFO << "Engine release path: " << gitHubHost << engineReleasePath.str();
+  CTL_INF("Engine release path: " << gitHubHost << engineReleasePath.str());
 
   using namespace nlohmann;
   httplib::Client cli(gitHubHost);
@@ -64,10 +64,10 @@ bool EngineInitCmd::Exec() const {
         }
 
         auto cuda_driver_version = system_info_utils::GetCudaVersion();
-        LOG_INFO << "Engine: " << engineName_
-                 << ", CUDA driver version: " << cuda_driver_version;
+        CTL_INF("engineName_: " << engineName_);
+        CTL_INF("CUDA version: " << cuda_driver_version);
 
-        std::string matched_variant{""};
std::string matched_variant{""}; if (engineName_ == "cortex.tensorrt-llm") { matched_variant = engine_matcher_utils::ValidateTensorrtLlm( variants, system_info.os, cuda_driver_version); @@ -80,9 +80,9 @@ bool EngineInitCmd::Exec() const { variants, system_info.os, system_info.arch, suitable_avx, cuda_driver_version); } - LOG_INFO << "Matched variant: " << matched_variant; + CTL_INF("Matched variant: " << matched_variant); if (matched_variant.empty()) { - LOG_ERROR << "No variant found for " << os_arch; + CTL_ERR("No variant found for " << os_arch); return false; } @@ -95,7 +95,7 @@ bool EngineInitCmd::Exec() const { std::string path = full_url.substr(host.length()); auto fileName = asset["name"].get(); - LOG_INFO << "URL: " << full_url; + CTL_INF("URL: " << full_url); auto downloadTask = DownloadTask{.id = engineName_, .type = DownloadType::Engine, @@ -115,8 +115,8 @@ bool EngineInitCmd::Exec() const { bool unused) { // try to unzip the downloaded file std::filesystem::path downloadedEnginePath{absolute_path}; - LOG_INFO << "Downloaded engine path: " - << downloadedEnginePath.string(); + CTL_INF( + "Downloaded engine path: " << downloadedEnginePath.string()); std::filesystem::path extract_path = downloadedEnginePath.parent_path().parent_path(); @@ -156,9 +156,9 @@ bool EngineInitCmd::Exec() const { try { std::filesystem::remove(absolute_path); } catch (const std::exception& e) { - LOG_ERROR << "Could not delete file: " << e.what(); + CTL_WRN("Could not delete file: " << e.what()); } - LOG_INFO << "Finished!"; + CTL_INF("Finished!"); }); if (system_info.os == "mac" || engineName_ == "cortex.onnx") { // mac and onnx engine does not require cuda toolkit @@ -192,9 +192,9 @@ bool EngineInitCmd::Exec() const { // cuda driver version should be greater than toolkit version to ensure compatibility if (semantic_version_utils::CompareSemanticVersion( cuda_driver_version, suitable_toolkit_version) < 0) { - LOG_ERROR << "Your Cuda driver version " << cuda_driver_version + CTL_ERR("Your Cuda driver version " << cuda_driver_version << " is not compatible with cuda toolkit version " - << suitable_toolkit_version; + << suitable_toolkit_version); return false; } @@ -233,7 +233,7 @@ bool EngineInitCmd::Exec() const { try { std::filesystem::remove(absolute_path); } catch (std::exception& e) { - LOG_ERROR << "Error removing downloaded file: " << e.what(); + CTL_ERR("Error removing downloaded file: " << e.what()); } }); @@ -245,12 +245,12 @@ bool EngineInitCmd::Exec() const { return false; } } else { - LOG_ERROR << "HTTP error: " << res->status; + CTL_ERR("HTTP error: " << res->status); return false; } } else { auto err = res.error(); - LOG_ERROR << "HTTP error: " << httplib::to_string(err); + CTL_ERR("HTTP error: " << httplib::to_string(err)); return false; } return true; diff --git a/engine/commands/model_get_cmd.cc b/engine/commands/model_get_cmd.cc index 82691ea32..cc6639d33 100644 --- a/engine/commands/model_get_cmd.cc +++ b/engine/commands/model_get_cmd.cc @@ -2,9 +2,11 @@ #include #include #include +#include "cmd_info.h" #include "config/yaml_config.h" #include "trantor/utils/Logger.h" #include "utils/cortex_utils.h" +#include "utils/logging_utils.h" namespace commands { @@ -14,12 +16,15 @@ ModelGetCmd::ModelGetCmd(std::string model_handle) void ModelGetCmd::Exec() { if (std::filesystem::exists(cortex_utils::models_folder) && std::filesystem::is_directory(cortex_utils::models_folder)) { + CmdInfo ci(model_handle_); + std::string model_file = + ci.branch == "main" ? 
+        ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
     bool found_model = false;
     // Iterate through directory
     for (const auto& entry :
          std::filesystem::directory_iterator(cortex_utils::models_folder)) {
-      if (entry.is_regular_file() && entry.path().stem() == model_handle_ &&
+      if (entry.is_regular_file() && entry.path().stem() == model_file &&
           entry.path().extension() == ".yaml") {
         try {
           config::YamlHandler handler;
@@ -131,11 +136,16 @@ void ModelGetCmd::Exec() {
           found_model = true;
           break;
         } catch (const std::exception& e) {
-          LOG_ERROR << "Error reading yaml file '" << entry.path().string()
-                    << "': " << e.what();
+          CTL_ERR("Error reading yaml file '" << entry.path().string()
+                  << "': " << e.what());
         }
       }
     }
+    if (!found_model) {
+      CLI_LOG("Model not found!");
+    }
+  } else {
+    CLI_LOG("Model not found!");
   }
 }
 }; // namespace commands
\ No newline at end of file
diff --git a/engine/commands/model_list_cmd.cc b/engine/commands/model_list_cmd.cc
index 199ca25f8..c2a04f06f 100644
--- a/engine/commands/model_list_cmd.cc
+++ b/engine/commands/model_list_cmd.cc
@@ -8,6 +8,7 @@
 #include 
 #include "config/yaml_config.h"
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
 
 void ModelListCmd::Exec() {
@@ -30,8 +31,8 @@ void ModelListCmd::Exec() {
         table.add_row({std::to_string(count), model_config.id,
                        model_config.engine, model_config.version});
       } catch (const std::exception& e) {
-        LOG_ERROR << "Error reading yaml file '" << entry.path().string()
-                  << "': " << e.what();
+        CTL_ERR("Error reading yaml file '" << entry.path().string()
+                << "': " << e.what());
       }
     }
   }
diff --git a/engine/commands/model_pull_cmd.cc b/engine/commands/model_pull_cmd.cc
index f8e3a7947..b058bd305 100644
--- a/engine/commands/model_pull_cmd.cc
+++ b/engine/commands/model_pull_cmd.cc
@@ -4,6 +4,7 @@
 #include "trantor/utils/Logger.h"
 #include "utils/cortexso_parser.h"
 #include "utils/model_callback_utils.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
 ModelPullCmd::ModelPullCmd(std::string model_handle, std::string branch)
@@ -15,10 +16,10 @@ bool ModelPullCmd::Exec() {
     DownloadService downloadService;
     downloadService.AddDownloadTask(downloadTask.value(),
                                     model_callback_utils::DownloadModelCb);
-    std::cout << "Download finished" << std::endl;
+    CTL_INF("Download finished");
     return true;
   } else {
-    std::cout << "Model not found" << std::endl;
+    CTL_ERR("Model not found");
     return false;
   }
 }
diff --git a/engine/commands/model_start_cmd.cc b/engine/commands/model_start_cmd.cc
index 0342c3d35..db64c7ee3 100644
--- a/engine/commands/model_start_cmd.cc
+++ b/engine/commands/model_start_cmd.cc
@@ -2,6 +2,7 @@
 #include "httplib.h"
 #include "nlohmann/json.hpp"
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
 ModelStartCmd::ModelStartCmd(std::string host, int port,
@@ -32,11 +33,11 @@ bool ModelStartCmd::Exec() {
       data_str.data(), data_str.size(), "application/json");
   if (res) {
     if (res->status == httplib::StatusCode::OK_200) {
-      LOG_INFO << res->body;
+      CLI_LOG("Model loaded!");
     }
   } else {
     auto err = res.error();
-    LOG_WARN << "HTTP error: " << httplib::to_string(err);
+    CTL_ERR("HTTP error: " << httplib::to_string(err));
     return false;
   }
   return true;
diff --git a/engine/commands/stop_model_cmd.cc b/engine/commands/model_stop_cmd.cc
similarity index 72%
rename from engine/commands/stop_model_cmd.cc
rename to engine/commands/model_stop_cmd.cc
index 628007efe..0f4816dad 100644
--- a/engine/commands/stop_model_cmd.cc
+++ b/engine/commands/model_stop_cmd.cc
@@ -1,14 +1,15 @@
-#include "stop_model_cmd.h"
+#include "model_stop_cmd.h"
 #include "httplib.h"
 #include "nlohmann/json.hpp"
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
-StopModelCmd::StopModelCmd(std::string host, int port,
+ModelStopCmd::ModelStopCmd(std::string host, int port,
                            const config::ModelConfig& mc)
     : host_(std::move(host)), port_(port), mc_(mc) {}
 
-void StopModelCmd::Exec() {
+void ModelStopCmd::Exec() {
   httplib::Client cli(host_ + ":" + std::to_string(port_));
   nlohmann::json json_data;
   json_data["model"] = mc_.name;
@@ -20,11 +21,12 @@ void StopModelCmd::Exec() {
       data_str.data(), data_str.size(), "application/json");
   if (res) {
     if (res->status == httplib::StatusCode::OK_200) {
-      LOG_INFO << res->body;
+      // LOG_INFO << res->body;
+      CLI_LOG("Model unloaded!");
     }
   } else {
     auto err = res.error();
-    LOG_WARN << "HTTP error: " << httplib::to_string(err);
+    CTL_ERR("HTTP error: " << httplib::to_string(err));
   }
 }
diff --git a/engine/commands/stop_model_cmd.h b/engine/commands/model_stop_cmd.h
similarity index 74%
rename from engine/commands/stop_model_cmd.h
rename to engine/commands/model_stop_cmd.h
index 9ead32370..9ac36e36d 100644
--- a/engine/commands/stop_model_cmd.h
+++ b/engine/commands/model_stop_cmd.h
@@ -5,9 +5,9 @@
 
 namespace commands {
 
-class StopModelCmd{
+class ModelStopCmd{
  public:
-  StopModelCmd(std::string host, int port, const config::ModelConfig& mc);
+  ModelStopCmd(std::string host, int port, const config::ModelConfig& mc);
   void Exec();
 
  private:
diff --git a/engine/commands/run_cmd.cc b/engine/commands/run_cmd.cc
index a84393652..7d3734805 100644
--- a/engine/commands/run_cmd.cc
+++ b/engine/commands/run_cmd.cc
@@ -34,8 +34,10 @@ void RunCmd::Exec() {
   {
     if (!IsEngineExisted(ci.engine_name)) {
       EngineInitCmd eic(ci.engine_name, "");
-      if (!eic.Exec())
+      if (!eic.Exec()) {
+        LOG_INFO << "Failed to install engine";
         return;
+      }
     }
   }
 
diff --git a/engine/commands/stop_server_cmd.cc b/engine/commands/server_stop_cmd.cc
similarity index 57%
rename from engine/commands/stop_server_cmd.cc
rename to engine/commands/server_stop_cmd.cc
index cb312ef99..f3d83d6d2 100644
--- a/engine/commands/stop_server_cmd.cc
+++ b/engine/commands/server_stop_cmd.cc
@@ -1,19 +1,20 @@
-#include "stop_server_cmd.h"
+#include "server_stop_cmd.h"
 #include "httplib.h"
 #include "trantor/utils/Logger.h"
+#include "utils/logging_utils.h"
 
 namespace commands {
-StopServerCmd::StopServerCmd(std::string host, int port)
+ServerStopCmd::ServerStopCmd(std::string host, int port)
     : host_(std::move(host)), port_(port) {}
 
-void StopServerCmd::Exec() {
+void ServerStopCmd::Exec() {
   httplib::Client cli(host_ + ":" + std::to_string(port_));
   auto res = cli.Delete("/processManager/destroy");
 
   if (res) {
-    LOG_INFO << res->body;
+    CLI_LOG("Server stopped!");
   } else {
     auto err = res.error();
-    LOG_WARN << "HTTP error: " << httplib::to_string(err);
+    CTL_ERR("HTTP error: " << httplib::to_string(err));
   }
 }
diff --git a/engine/commands/stop_server_cmd.h b/engine/commands/server_stop_cmd.h
similarity index 69%
rename from engine/commands/stop_server_cmd.h
rename to engine/commands/server_stop_cmd.h
index 03735d81c..4beb0d05f 100644
--- a/engine/commands/stop_server_cmd.h
+++ b/engine/commands/server_stop_cmd.h
@@ -3,9 +3,9 @@
 
 namespace commands {
 
-class StopServerCmd{
+class ServerStopCmd{
  public:
-  StopServerCmd(std::string host, int port);
+  ServerStopCmd(std::string host, int port);
   void Exec();
 
  private:
diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index 339fbcd46..12e1db88c 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -8,10 +8,11 @@
 #include "commands/model_pull_cmd.h"
 #include "commands/model_start_cmd.h"
 #include "commands/run_cmd.h"
-#include "commands/stop_model_cmd.h"
-#include "commands/stop_server_cmd.h"
+#include "commands/model_stop_cmd.h"
+#include "commands/server_stop_cmd.h"
 #include "config/yaml_config.h"
 #include "utils/cortex_utils.h"
+#include "utils/logging_utils.h"
 
 CommandLineParser::CommandLineParser() : app_("Cortex.cpp CLI") {}
 
@@ -47,7 +48,7 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
           config::YamlHandler yaml_handler;
           yaml_handler.ModelConfigFromFile(cortex_utils::GetCurrentPath() +
                                            "/models/" + model_file + ".yaml");
-          commands::StopModelCmd smc("127.0.0.1", 3928,
+          commands::ModelStopCmd smc("127.0.0.1", 3928,
                                      yaml_handler.GetModelConfig());
           smc.Exec();
         });
@@ -146,10 +147,12 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
 
   stop_cmd->callback([] {
     // TODO get info from config file
-    commands::StopServerCmd ssc("127.0.0.1", 3928);
+    commands::ServerStopCmd ssc("127.0.0.1", 3928);
     ssc.Exec();
   });
 
+  app_.add_flag("--verbose", log_verbose, "Verbose logging");
+
   CLI11_PARSE(app_, argc, argv);
   return true;
 }
diff --git a/engine/main.cc b/engine/main.cc
index 4b966b3f6..7bd780488 100644
--- a/engine/main.cc
+++ b/engine/main.cc
@@ -6,6 +6,7 @@
 #include "utils/archive_utils.h"
 #include "utils/cortex_utils.h"
 #include "utils/dylib.h"
+#include "utils/logging_utils.h"
 #if defined(__APPLE__) && defined(__MACH__)
 #include   // for dirname()
@@ -22,6 +23,7 @@
 #error "Unsupported platform!"
 #endif
 
+
 void RunServer() {
   // Create logs/ folder and setup log to file
   std::filesystem::create_directory(cortex_utils::logs_folder);
diff --git a/engine/services/download_service.cc b/engine/services/download_service.cc
index 97c16d650..5f261918f 100644
--- a/engine/services/download_service.cc
+++ b/engine/services/download_service.cc
@@ -7,6 +7,7 @@
 
 #include "download_service.h"
 #include "utils/file_manager_utils.h"
+#include "utils/logging_utils.h"
 
 void DownloadService::AddDownloadTask(const DownloadTask& task,
                                       std::optional callback) {
@@ -32,28 +33,29 @@ void DownloadService::AddAsyncDownloadTask(
 
 void DownloadService::StartDownloadItem(
     const std::string& downloadId, const DownloadItem& item,
     std::optional callback) {
-  LOG_INFO << "Downloading item: " << downloadId;
+  CTL_INF("Downloading item: " << downloadId);
 
   auto containerFolderPath{file_manager_utils::GetContainerFolderPath(
       file_manager_utils::downloadTypeToString(item.type))};
-  LOG_INFO << "Container folder path: " << containerFolderPath.string() << "\n";
+  CTL_INF("Container folder path: " << containerFolderPath.string()
+                                    << "\n");
 
   auto itemFolderPath{containerFolderPath / std::filesystem::path(downloadId)};
-  LOG_INFO << "itemFolderPath: " << itemFolderPath.string();
+  CTL_INF("itemFolderPath: " << itemFolderPath.string());
 
   if (!std::filesystem::exists(itemFolderPath)) {
-    LOG_INFO << "Creating " << itemFolderPath.string();
+    CTL_INF("Creating " << itemFolderPath.string());
     std::filesystem::create_directory(itemFolderPath);
   }
 
   auto outputFilePath{itemFolderPath / std::filesystem::path(item.fileName)};
-  LOG_INFO << "Absolute file output: " << outputFilePath.string();
+  CTL_INF("Absolute file output: " << outputFilePath.string());
 
   uint64_t last = 0;
   uint64_t tot = 0;
 
   std::ofstream outputFile(outputFilePath, std::ios::binary);
   auto downloadUrl{item.host + "/" + item.path};
-  LOG_INFO << "Downloading url: " << downloadUrl;
+  CLI_LOG("Downloading url: " << downloadUrl);
 
   httplib::Client client(item.host);
 
@@ -76,12 +78,12 @@ void DownloadService::StartDownloadItem(
           uint64_t current, uint64_t total) {
         if (current - last > kUpdateProgressThreshold) {
           last = current;
-          LOG_INFO << "Downloading: " << current << " / " << total;
+          CLI_LOG("Downloading: " << current << " / " << total);
         }
 
         if (current == total) {
           outputFile.flush();
-          LOG_INFO << "Done download: "
-                   << static_cast(total) / 1024 / 1024 << " MiB";
+          CLI_LOG("Done download: "
+                  << static_cast(total) / 1024 / 1024 << " MiB");
 
           if (callback.has_value()) {
             auto need_parse_gguf =
                 item.path.find("cortexso") == std::string::npos;
diff --git a/engine/utils/archive_utils.h b/engine/utils/archive_utils.h
index 6b2f5767d..fb52bdb9e 100644
--- a/engine/utils/archive_utils.h
+++ b/engine/utils/archive_utils.h
@@ -5,6 +5,7 @@
 #include 
 #include 
 #include 
+#include "logging_utils.h"
 
 namespace archive_utils {
 inline bool UnzipFile(const std::string& input_zip_path,
@@ -139,8 +140,8 @@ inline bool UntarFile(const std::string& input_tar_path,
   }
 
   archive_read_free(tar_archive);
-  LOG_INFO << "Extracted successfully " << input_tar_path << " to "
-           << destination_path << "\n";
+  CTL_INF("Extracted successfully " << input_tar_path << " to "
+          << destination_path << "\n");
   return true;
 }
 }  // namespace archive_utils
\ No newline at end of file
diff --git a/engine/utils/engine_matcher_utils.h b/engine/utils/engine_matcher_utils.h
index ed7304d9c..77baf1f72 100644
--- a/engine/utils/engine_matcher_utils.h
+++ b/engine/utils/engine_matcher_utils.h
@@ -5,6 +5,7 @@
 #include 
 #include 
 #include 
 #include "utils/cpuid/cpu_info.h"
+#include "utils/logging_utils.h"
 
 namespace engine_matcher_utils {
 // for testing purpose
@@ -48,7 +49,7 @@ const std::vector cortex_tensorrt_variants{
 
 inline std::string GetSuitableAvxVariant() {
   cortex::cpuid::CpuInfo cpu_info;
-  LOG_INFO << "GetSuitableAvxVariant:" << "\n" << cpu_info.to_string();
+  CTL_INF("GetSuitableAvxVariant:" << "\n" << cpu_info.to_string());
 
   if (cpu_info.has_avx512_f())
     return "avx512";
diff --git a/engine/utils/file_manager_utils.h b/engine/utils/file_manager_utils.h
index 334116fe7..a3c2d39c6 100644
--- a/engine/utils/file_manager_utils.h
+++ b/engine/utils/file_manager_utils.h
@@ -1,5 +1,5 @@
 #pragma once
-
+#include "logging_utils.h"
 #include 
 #include 
 #include 
@@ -42,7 +42,7 @@ inline std::filesystem::path GetExecutableFolderContainerPath() {
   // TODO: haven't tested
   char buffer[MAX_PATH];
   GetModuleFileNameA(NULL, buffer, MAX_PATH);
-  LOG_INFO << "Executable path: " << buffer;
+  CTL_INF("Executable path: " << buffer);
   return std::filesystem::path{buffer}.parent_path();
 #else
   LOG_ERROR << "Unsupported platform!";
@@ -66,7 +66,7 @@ inline std::filesystem::path GetContainerFolderPath(
   }
 
   if (!std::filesystem::exists(container_folder_path)) {
-    LOG_INFO << "Creating folder: " << container_folder_path.string() << "\n";
+    CTL_INF("Creating folder: " << container_folder_path.string() << "\n");
     std::filesystem::create_directory(container_folder_path);
   }
 
diff --git a/engine/utils/logging_utils.h b/engine/utils/logging_utils.h
index 346b0945c..62c44421c 100644
--- a/engine/utils/logging_utils.h
+++ b/engine/utils/logging_utils.h
@@ -1,5 +1,32 @@
 #pragma once
-#define LOG_INFO_REQUEST(RID) LOG_INFO << "Request " << RID << ": "
-#define LOG_WARN_REQUEST(RID) LOG_WARN << "Request " << RID << ": "
-#define LOG_ERROR_REQUEST(RID) LOG_ERROR << "Request " << RID << ": "
+#include "trantor/utils/Logger.h" +// if verbose log all to console +// if not verbose only log result to console +inline bool log_verbose = false; + +// Only use trantor log +#define CTL_INF(msg) \ + if (log_verbose) { \ + LOG_INFO << msg; \ + } + +#define CTL_WRN(msg) \ + if (log_verbose) { \ + LOG_WARN << msg; \ + } + +// Use std::cout if not verbose, use trantor log if verbose +#define CTL_ERR(msg) \ + if (log_verbose) { \ + LOG_ERROR << msg; \ + } else { \ + std::cout << msg << std::endl; \ + } + +#define CLI_LOG(msg) \ + if (log_verbose) { \ + LOG_INFO << msg; \ + } else { \ + std::cout << msg << std::endl; \ + } \ No newline at end of file diff --git a/engine/utils/model_callback_utils.h b/engine/utils/model_callback_utils.h index f5504cda3..9f54d20d4 100644 --- a/engine/utils/model_callback_utils.h +++ b/engine/utils/model_callback_utils.h @@ -8,6 +8,7 @@ #include "config/gguf_parser.h" #include "config/yaml_config.h" #include "utils/file_manager_utils.h" +#include "utils/logging_utils.h" namespace model_callback_utils { inline void DownloadModelCb(const std::string& path, bool need_parse_gguf) { @@ -21,7 +22,7 @@ inline void DownloadModelCb(const std::string& path, bool need_parse_gguf) { config::ModelConfig model_config = handler.GetModelConfig(); model_config.id = path_obj.parent_path().filename().string(); - LOG_INFO << "Updating model config in " << path; + CTL_INF("Updating model config in " << path); handler.UpdateModelConfig(model_config); handler.WriteYamlFile(path_obj.parent_path().parent_path().string() + "/" + model_config.id + ".yaml"); diff --git a/engine/utils/system_info_utils.h b/engine/utils/system_info_utils.h index 16a9570b7..e57725301 100644 --- a/engine/utils/system_info_utils.h +++ b/engine/utils/system_info_utils.h @@ -4,6 +4,7 @@ #include #include #include "utils/command_executor.h" +#include "utils/logging_utils.h" #ifdef _WIN32 #include #endif @@ -180,7 +181,7 @@ inline bool IsNvidiaSmiAvailable() { inline std::string GetDriverVersion() { if (!IsNvidiaSmiAvailable()) { - LOG_INFO << "nvidia-smi is not available!"; + CTL_INF("nvidia-smi is not available!"); return ""; } try { @@ -205,7 +206,7 @@ inline std::string GetDriverVersion() { inline std::string GetCudaVersion() { if (!IsNvidiaSmiAvailable()) { - LOG_INFO << "nvidia-smi is not available!"; + CTL_INF("nvidia-smi is not available!"); return ""; } try {