Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
namchuai committed Sep 5, 2024
1 parent 5715cb0 commit 8b51f6e
Show file tree
Hide file tree
Showing 5 changed files with 104 additions and 11 deletions.
60 changes: 59 additions & 1 deletion engine/commands/engine_get_cmd.cc
Original file line number Diff line number Diff line change
@@ -1,6 +1,64 @@
#include "engine_get_cmd.h"

#include <filesystem>
#include <iostream>
#include <string>
#include <vector>

#include <tabulate/table.hpp>

#include "utils/file_manager_utils.h"
#include "utils/logging_utils.h"

namespace commands {

void EngineGetCmd::Exec() const {}
// Prints a one-row table describing the engine named in engine_ (name,
// description, version, product name, install status) to stdout.
//
// Fixes vs. previous revision:
//  - log tag typo "EneingeGetCmd" -> "EngineGetCmd"
//  - `tenssort_status` was assigned inside the #ifdef branches but the
//    declared variable was `tenssorrt_status`, an undeclared-identifier
//    compile error on Windows and Linux; unified as `tensorrt_status`.
void EngineGetCmd::Exec() const {
  CTL_INF("[EngineGetCmd] engine: " << engine_);

  auto ecp = file_manager_utils::GetEnginesContainerPath();

  // Platform support matrix: llama.cpp ships on every platform; ONNX is
  // Windows-only; TensorRT-LLM is Windows and Linux only. "ready" means the
  // engine directory exists under the engines container path.
  std::string onnx_status{"not_supported"};
  std::string llamacpp_status = std::filesystem::exists(ecp / "cortex.llamacpp")
                                    ? "ready"
                                    : "not_initialized";
  std::string tensorrt_status{"not_supported"};

#ifdef _WIN32
  onnx_status = std::filesystem::exists(ecp / "cortex.onnx")
                    ? "ready"
                    : "not_initialized";
  tensorrt_status = std::filesystem::exists(ecp / "cortex.tensorrt-llm")
                        ? "ready"
                        : "not_initialized";
#elif defined(__linux__)
  tensorrt_status = std::filesystem::exists(ecp / "cortex.tensorrt-llm")
                        ? "ready"
                        : "not_initialized";
#endif

  std::vector<EngineInfo> engines = {
      {.name = "cortex.onnx",
       .description = "This extension enables chat completion API calls using "
                      "the Onnx engine",
       .version = "0.0.1",
       .product_name = "Onnx Inference Engine",
       .status = onnx_status},
      {.name = "cortex.llamacpp",
       .description = "This extension enables chat completion API calls using "
                      "the LlamaCPP engine",
       .version = "0.0.1",
       .product_name = "LlamaCPP Inference Engine",
       .status = llamacpp_status},
      {.name = "cortex.tensorrt-llm",
       .description = "This extension enables chat completion API calls using "
                      "the TensorrtLLM engine",
       .version = "0.0.1",
       .product_name = "TensorrtLLM Inference Engine",
       .status = tensorrt_status},
  };

  tabulate::Table table;
  table.add_row({"name", "description", "version", "product name", "status"});
  table.format().font_color(tabulate::Color::green);
  // Only the row matching the requested engine is rendered; an unknown
  // engine name yields a header-only table (unchanged behavior).
  for (const auto& engine : engines) {
    if (engine.name == engine_) {
      table.add_row({engine.name, engine.description, engine.version,
                     engine.product_name, engine.status});
    }
  }

  std::cout << table << std::endl;
}
}; // namespace commands
14 changes: 14 additions & 0 deletions engine/commands/engine_get_cmd.h
Original file line number Diff line number Diff line change
@@ -1,9 +1,23 @@
#pragma once
#include <string>

namespace commands {
// CLI command: prints a status table for a single named inference engine.
// Constructed with the engine's canonical name, e.g. "cortex.llamacpp".
class EngineGetCmd {
  // Plain-data row describing one engine; rendered by Exec() via tabulate.
  struct EngineInfo {
    std::string name;
    std::string description;
    std::string version;
    std::string product_name;
    std::string status;
  };

 public:
  // `explicit` prevents accidental implicit std::string -> EngineGetCmd
  // conversion; also drops the stray ';' after the constructor body.
  explicit EngineGetCmd(const std::string& engine) : engine_{engine} {}

  // Prints a one-row table for engine_ to stdout (defined in the .cc file).
  void Exec() const;

 private:
  std::string engine_;  // canonical engine name this command reports on
};

} // namespace commands
2 changes: 1 addition & 1 deletion engine/commands/engine_list_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ bool EngineListCmd::Exec() {
tenssort_status = std::filesystem::exists(ecp / "cortex.tensorrt-llm")
? "ready"
: "not_initialized";
#elif __linux__
#elif defined(__linux__)
tenssort_status = std::filesystem::exists(ecp / "cortex.tensorrt-llm")
? "ready"
: "not_initialized";
Expand Down
29 changes: 22 additions & 7 deletions engine/controllers/command_line_parser.cc
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#include "command_line_parser.h"
#include "commands/chat_cmd.h"
#include "commands/cmd_info.h"
#include "commands/engine_get_cmd.h"
#include "commands/engine_init_cmd.h"
#include "commands/engine_list_cmd.h"
#include "commands/engine_uninstall_cmd.h"
Expand Down Expand Up @@ -125,11 +126,11 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
command.Exec();
});

auto get_engine_cmd = engines_cmd->add_subcommand("get", "Get an engine");
EngineManagement(engines_cmd, "cortex.llamacpp", version);
EngineManagement(engines_cmd, "cortex.onnx", version);
EngineManagement(engines_cmd, "cortex.tensorrt-llm", version);

EngineInstall(engines_cmd, "cortex.llamacpp", version);
EngineInstall(engines_cmd, "cortex.onnx", version);
EngineInstall(engines_cmd, "cortex.tensorrt-llm", version);
EngineGet(engines_cmd);
}

{
Expand Down Expand Up @@ -158,9 +159,9 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
return true;
}

void CommandLineParser::EngineInstall(CLI::App* parent,
const std::string& engine_name,
std::string& version) {
void CommandLineParser::EngineManagement(CLI::App* parent,
const std::string& engine_name,
std::string& version) {
auto engine_cmd =
parent->add_subcommand(engine_name, "Manage " + engine_name + " engine");

Expand All @@ -181,3 +182,17 @@ void CommandLineParser::EngineInstall(CLI::App* parent,
cmd.Exec();
});
}

// Registers the `get` subcommand under `parent` plus one nested subcommand
// per supported engine, each dispatching to commands::EngineGetCmd.
void CommandLineParser::EngineGet(CLI::App* parent) {
  auto get_cmd = parent->add_subcommand("get", "Get an engine info");

  for (const auto& supported : supportedEngines_) {
    const std::string name{supported};
    auto sub = get_cmd->add_subcommand(supported, "Get " + name + " status");
    // Capture the name by value: the callback outlives this loop iteration.
    sub->callback([name] {
      commands::EngineGetCmd cmd(name);
      cmd.Exec();
    });
  }
}
10 changes: 8 additions & 2 deletions engine/controllers/command_line_parser.h
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,14 @@ class CommandLineParser {
bool SetupCommand(int argc, char** argv);

private:
void EngineInstall(CLI::App* parent, const std::string& engine_name,
std::string& version);
void EngineManagement(CLI::App* parent, const std::string& engine_name,
std::string& version);

void EngineGet(CLI::App* parent);

CLI::App app_;

// TODO: move this one to somewhere else
static constexpr std::array<const char*, 3> supportedEngines_ = {
"cortex.llamacpp", "cortex.onnx", "cortex.tensorrt-llm"};
};

0 comments on commit 8b51f6e

Please sign in to comment.