Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 670a477

Browse files
add engine list cmd (#1050)
1 parent e963e63 commit 670a477

File tree

8 files changed

+202
-7
lines changed

8 files changed

+202
-7
lines changed

engine/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,7 @@ find_package(nlohmann_json CONFIG REQUIRED)
9595
find_package(CLI11 CONFIG REQUIRED)
9696
find_package(unofficial-minizip CONFIG REQUIRED)
9797
find_package(LibArchive REQUIRED)
98+
find_package(tabulate CONFIG REQUIRED)
9899

99100
# Build using CMAKE-JS
100101
if(DEFINED CMAKE_JS_INC)
@@ -136,6 +137,7 @@ target_link_libraries(${PROJECT_NAME} PRIVATE jinja2cpp)
136137
target_link_libraries(${PROJECT_NAME} PRIVATE CLI11::CLI11)
137138
target_link_libraries(${PROJECT_NAME} PRIVATE unofficial::minizip::minizip)
138139
target_link_libraries(${PROJECT_NAME} PRIVATE LibArchive::LibArchive)
140+
target_link_libraries(${PROJECT_NAME} PRIVATE tabulate::tabulate)
139141

140142
# Build using CMAKE-JS
141143
if(DEFINED CMAKE_JS_INC)

engine/commands/engine_list_cmd.cc

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
// clang-format off
2+
#include "utils/cortex_utils.h"
3+
// clang-format on
4+
#include "engine_list_cmd.h"
5+
#include <filesystem>
6+
#include <tabulate/table.hpp>
7+
#include <utility>
8+
#include "trantor/utils/Logger.h"
9+
10+
namespace commands {
11+
12+
bool EngineListCmd::Exec() {
13+
tabulate::Table table;
14+
table.add_row(
15+
{"(Index)", "name", "description", "version", "product name", "status"});
16+
table.format().font_color(tabulate::Color::green);
17+
#ifdef _WIN32
18+
if (std::filesystem::exists(std::filesystem::current_path().string() +
19+
cortex_utils::kOnnxLibPath)) {
20+
table.add_row({"1", "cortex.onnx",
21+
"This extension enables chat completion API calls using the "
22+
"Onnx engine",
23+
"0.0.1", "Onnx Inference Engine", "ready"});
24+
} else {
25+
table.add_row({"1", "cortex.onnx",
26+
"This extension enables chat completion API calls using the "
27+
"Onnx engine",
28+
"0.0.1", "Onnx Inference Engine", "not_initialized"});
29+
}
30+
31+
#else
32+
table.add_row(
33+
{"1", "cortex.onnx",
34+
"This extension enables chat completion API calls using the Onnx engine",
35+
"0.0.1", "Onnx Inference Engine", "not_supported"});
36+
#endif
37+
// lllamacpp
38+
if (std::filesystem::exists(std::filesystem::current_path().string() +
39+
cortex_utils::kLlamaLibPath)) {
40+
table.add_row({"2", "cortex.llamacpp",
41+
"This extension enables chat completion API calls using the "
42+
"LlamaCPP engine",
43+
"0.0.1", "LlamaCPP Inference Engine", "ready"});
44+
} else {
45+
table.add_row({"2", "cortex.llamacpp",
46+
"This extension enables chat completion API calls using the "
47+
"LlamaCPP engine",
48+
"0.0.1", "LlamaCPP Inference Engine", "not_initialized"});
49+
}
50+
// tensorrt llm
51+
if (std::filesystem::exists(std::filesystem::current_path().string() +
52+
cortex_utils::kTensorrtLlmPath)) {
53+
table.add_row({"3", "cortex.tensorrt-llm",
54+
"This extension enables chat completion API calls using the "
55+
"TensorrtLLM engine",
56+
"0.0.1", "TensorrtLLM Inference Engine", "ready"});
57+
} else {
58+
table.add_row({"3", "cortex.tensorrt-llm",
59+
"This extension enables chat completion API calls using the "
60+
"TensorrtLLM engine",
61+
"0.0.1", "TensorrtLLM Inference Engine", "not_initialized"});
62+
}
63+
for (int i = 0; i < 6; i++) {
64+
table[0][i]
65+
.format()
66+
.font_color(tabulate::Color::white) // Set font color
67+
.font_style({tabulate::FontStyle::bold})
68+
.font_align(tabulate::FontAlign::center);
69+
}
70+
for (int i = 1; i < 4; i++) {
71+
table[i][0]
72+
.format()
73+
.font_color(tabulate::Color::white) // Set font color
74+
.font_align(tabulate::FontAlign::center);
75+
}
76+
77+
std::cout << table << std::endl;
78+
return true;
79+
}
80+
81+
}; // namespace commands

engine/commands/engine_list_cmd.h

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
#pragma once
2+
3+
#include <string>
4+
5+
namespace commands {

// CLI command that prints a table of the supported cortex engines
// (cortex.onnx, cortex.llamacpp, cortex.tensorrt-llm) with their
// description, version, product name and install status.
class EngineListCmd {
 public:
  // Renders the engine table to stdout. Always returns true.
  bool Exec();
};

}  // namespace commands

engine/commands/model_list_cmd.cc

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,30 +1,57 @@
1+
// clang-format off
2+
#include "utils/cortex_utils.h"
3+
// clang-format on
14
#include "model_list_cmd.h"
25
#include <filesystem>
36
#include <iostream>
7+
#include <tabulate/table.hpp>
48
#include <vector>
5-
#include "utils/cortex_utils.h"
69
#include "config/yaml_config.h"
710
#include "trantor/utils/Logger.h"
811
namespace commands {
912

1013
void ModelListCmd::Exec() {
1114
if (std::filesystem::exists(cortex_utils::models_folder) &&
1215
std::filesystem::is_directory(cortex_utils::models_folder)) {
16+
tabulate::Table table;
17+
18+
table.add_row({"(Index)", "ID", "engine", "version"});
19+
table.format().font_color(tabulate::Color::green);
20+
int count = 0;
1321
// Iterate through directory
1422
for (const auto& entry :
1523
std::filesystem::directory_iterator(cortex_utils::models_folder)) {
1624
if (entry.is_regular_file() && entry.path().extension() == ".yaml") {
1725
try {
18-
config::YamlHandler handler;
19-
handler.ModelConfigFromFile(entry.path().string());
20-
std::cout<<"Model ID: "<< entry.path().stem().string() <<", Engine: "<< handler.GetModelConfig().engine <<std::endl;
21-
26+
count += 1;
27+
config::YamlHandler handler;
28+
handler.ModelConfigFromFile(entry.path().string());
29+
const auto& model_config = handler.GetModelConfig();
30+
table.add_row({std::to_string(count), model_config.id,
31+
model_config.engine, model_config.version});
2232
} catch (const std::exception& e) {
2333
LOG_ERROR << "Error reading yaml file '" << entry.path().string()
2434
<< "': " << e.what();
2535
}
2636
}
2737
}
38+
for (int i = 0; i < 4; i++) {
39+
table[0][i]
40+
.format()
41+
.font_color(tabulate::Color::white) // Set font color
42+
.font_style({tabulate::FontStyle::bold})
43+
.font_align(tabulate::FontAlign::center);
44+
}
45+
for (int i = 1; i <= count; i++) {
46+
table[i][0] //index value
47+
.format()
48+
.font_color(tabulate::Color::white) // Set font color
49+
.font_align(tabulate::FontAlign::center);
50+
table[i][3] //version value
51+
.format()
52+
.font_align(tabulate::FontAlign::center);
53+
}
54+
std::cout << table << std::endl;
2855
}
2956
}
3057
}; // namespace commands

engine/controllers/command_line_parser.cc

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
#include "commands/chat_cmd.h"
33
#include "commands/cmd_info.h"
44
#include "commands/engine_init_cmd.h"
5+
#include "commands/engine_list_cmd.h"
56
#include "commands/model_get_cmd.h"
67
#include "commands/model_list_cmd.h"
78
#include "commands/model_pull_cmd.h"
@@ -117,6 +118,11 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
117118
auto engines_cmd = app_.add_subcommand("engines", "Get cortex engines");
118119
auto list_engines_cmd =
119120
engines_cmd->add_subcommand("list", "List all cortex engines");
121+
list_engines_cmd->callback([]() {
122+
commands::EngineListCmd command;
123+
command.Exec();
124+
});
125+
120126
auto get_engine_cmd = engines_cmd->add_subcommand("get", "Get an engine");
121127

122128
EngineInstall(engines_cmd, "cortex.llamacpp", version);

engine/controllers/engines.cc

Lines changed: 65 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,8 @@ void Engines::InitEngine(const HttpRequestPtr& req,
6868
}}};
6969

7070
DownloadService().AddAsyncDownloadTask(
71-
downloadTask, [](const std::string& absolute_path, bool unused) {
71+
downloadTask,
72+
[](const std::string& absolute_path, bool unused) {
7273
// try to unzip the downloaded file
7374
std::filesystem::path downloadedEnginePath{absolute_path};
7475
LOG_INFO << "Downloaded engine path: "
@@ -108,4 +109,67 @@ void Engines::InitEngine(const HttpRequestPtr& req,
108109
auto err = res.error();
109110
LOG_ERROR << "HTTP error: " << httplib::to_string(err);
110111
}
112+
}
113+
114+
void Engines::ListEngine(
115+
const HttpRequestPtr& req,
116+
std::function<void(const HttpResponsePtr&)>&& callback) const {
117+
Json::Value ret;
118+
ret["object"] = "list";
119+
Json::Value data(Json::arrayValue);
120+
Json::Value obj_onnx, obj_llamacpp, obj_tensorrt;
121+
obj_onnx["name"] = "cortex.onnx";
122+
obj_onnx["description"] =
123+
"This extension enables chat completion API calls using the Onnx engine";
124+
obj_onnx["version"] = "0.0.1";
125+
obj_onnx["productName"] = "Onnx Inference Engine";
126+
127+
obj_llamacpp["name"] = "cortex.llamacpp";
128+
obj_llamacpp["description"] =
129+
"This extension enables chat completion API calls using the LlamaCPP "
130+
"engine";
131+
obj_llamacpp["version"] = "0.0.1";
132+
obj_llamacpp["productName"] = "LlamaCPP Inference Engine";
133+
134+
obj_tensorrt["name"] = "cortex.tensorrt-llm";
135+
obj_tensorrt["description"] =
136+
"This extension enables chat completion API calls using the TensorrtLLM "
137+
"engine";
138+
obj_tensorrt["version"] = "0.0.1";
139+
obj_tensorrt["productName"] = "TensorrtLLM Inference Engine";
140+
141+
#ifdef _WIN32
142+
if (std::filesystem::exists(std::filesystem::current_path().string() +
143+
cortex_utils::kOnnxLibPath)) {
144+
obj_onnx["status"] = "ready";
145+
} else {
146+
obj_onnx["status"] = "not_initialized";
147+
}
148+
#else
149+
obj_onnx["status"] = "not_supported";
150+
#endif
151+
// lllamacpp
152+
if (std::filesystem::exists(std::filesystem::current_path().string() +
153+
cortex_utils::kLlamaLibPath)) {
154+
155+
obj_llamacpp["status"] = "ready";
156+
} else {
157+
obj_llamacpp["status"] = "not_initialized";
158+
}
159+
// tensorrt llm
160+
if (std::filesystem::exists(std::filesystem::current_path().string() +
161+
cortex_utils::kTensorrtLlmPath)) {
162+
obj_tensorrt["status"] = "ready";
163+
} else {
164+
obj_tensorrt["status"] = "not_initialized";
165+
}
166+
167+
data.append(std::move(obj_onnx));
168+
data.append(std::move(obj_llamacpp));
169+
data.append(std::move(obj_tensorrt));
170+
ret["data"] = data;
171+
ret["result"] = "OK";
172+
auto resp = cortex_utils::CreateCortexHttpJsonResponse(ret);
173+
resp->setStatusCode(k200OK);
174+
callback(resp);
111175
}

engine/controllers/engines.h

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,12 @@ class Engines : public drogon::HttpController<Engines> {
1313
public:
1414
METHOD_LIST_BEGIN
1515
METHOD_ADD(Engines::InitEngine, "/{1}/init", Post);
16+
METHOD_ADD(Engines::ListEngine, "/list", Get);
1617
METHOD_LIST_END
1718

1819
void InitEngine(const HttpRequestPtr& req,
1920
std::function<void(const HttpResponsePtr&)>&& callback,
2021
const std::string& engine) const;
22+
void ListEngine(const HttpRequestPtr& req,
23+
std::function<void(const HttpResponsePtr&)>&& callback) const;
2124
};

engine/vcpkg.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
"minizip",
1414
"nlohmann-json",
1515
"yaml-cpp",
16-
"libarchive"
16+
"libarchive",
17+
"tabulate"
1718
]
1819
}

0 commit comments

Comments
 (0)