From 631a535fa511e2bed727619144eb3c163caf87c2 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Mon, 23 Sep 2024 13:50:50 +0700
Subject: [PATCH 1/4] feat: cortex chat/run/models start with new model data
 structure

---
 engine/commands/chat_cmd.cc               | 34 ++++++++++----
 engine/commands/chat_cmd.h                |  9 ++--
 engine/commands/model_start_cmd.cc        | 56 +++++++++++++++--------
 engine/commands/model_start_cmd.h         |  9 +---
 engine/commands/model_status_cmd.cc       | 17 +++++++
 engine/commands/model_status_cmd.h        |  2 +
 engine/commands/run_cmd.cc                | 38 +++++++--------
 engine/controllers/command_line_parser.cc | 29 +++---------
 8 files changed, 114 insertions(+), 80 deletions(-)

diff --git a/engine/commands/chat_cmd.cc b/engine/commands/chat_cmd.cc
index da232a321..e4d0eda3d 100644
--- a/engine/commands/chat_cmd.cc
+++ b/engine/commands/chat_cmd.cc
@@ -6,6 +6,7 @@
 #include "server_start_cmd.h"
 #include "trantor/utils/Logger.h"
 #include "utils/logging_utils.h"
+#include "utils/modellist_utils.h"
 
 namespace commands {
 namespace {
@@ -36,23 +37,36 @@ struct ChunkParser {
   }
 };
 
-ChatCmd::ChatCmd(std::string host, int port, const config::ModelConfig& mc)
-    : host_(std::move(host)), port_(port), mc_(mc) {}
+void ChatCmd::Exec(const std::string& host, int port,
+                   const std::string& model_handle, std::string msg) {
+  modellist_utils::ModelListUtils modellist_handler;
+  config::YamlHandler yaml_handler;
+  try {
+    auto model_entry = modellist_handler.GetModelInfo(model_handle);
+    yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
+    auto mc = yaml_handler.GetModelConfig();
+    Exec(host, port, mc, std::move(msg));
+  } catch (const std::exception& e) {
+    CLI_LOG("Fail to start model information with ID '" + model_handle +
+            "': " + e.what());
+  }
+}
 
-void ChatCmd::Exec(std::string msg) {
+void ChatCmd::Exec(const std::string& host, int port,
+                   const config::ModelConfig& mc, std::string msg) {
+  auto address = host + ":" + std::to_string(port);
   // Check if server is started
   {
-    if (!commands::IsServerAlive(host_, port_)) {
+    if (!commands::IsServerAlive(host, port)) {
       CLI_LOG("Server is not started yet, please run `"
               << commands::GetCortexBinary() << " start` to start server!");
       return;
     }
   }
 
-  auto address = host_ + ":" + std::to_string(port_);
   // Only check if llamacpp engine
-  if ((mc_.engine.find("llamacpp") != std::string::npos) &&
-      !commands::ModelStatusCmd().IsLoaded(host_, port_, mc_)) {
+  if ((mc.engine.find("llamacpp") != std::string::npos) &&
+      !commands::ModelStatusCmd().IsLoaded(host, port, mc)) {
     CLI_LOG("Model is not loaded yet!");
     return;
   }
@@ -78,12 +92,12 @@
       new_data["role"] = kUser;
       new_data["content"] = user_input;
       histories_.push_back(std::move(new_data));
-      json_data["engine"] = mc_.engine;
+      json_data["engine"] = mc.engine;
       json_data["messages"] = histories_;
-      json_data["model"] = mc_.name;
+      json_data["model"] = mc.name;
       //TODO: support non-stream
       json_data["stream"] = true;
-      json_data["stop"] = mc_.stop;
+      json_data["stop"] = mc.stop;
       auto data_str = json_data.dump();
       // std::cout << data_str << std::endl;
       cli.set_read_timeout(std::chrono::seconds(60));
diff --git a/engine/commands/chat_cmd.h b/engine/commands/chat_cmd.h
index d5b48927c..596cfce2d 100644
--- a/engine/commands/chat_cmd.h
+++ b/engine/commands/chat_cmd.h
@@ -7,13 +7,12 @@
 namespace commands {
 class ChatCmd {
  public:
-  ChatCmd(std::string host, int port, const config::ModelConfig& mc);
-  void Exec(std::string msg);
+  void Exec(const std::string& host, int port, const std::string& model_handle,
+            std::string msg);
+  void Exec(const std::string& host, int port, const config::ModelConfig& mc,
+            std::string msg);
 
  private:
-  std::string host_;
-  int port_;
-  const config::ModelConfig& mc_;
   std::vector<nlohmann::json> histories_;
 };
 }  // namespace commands
\ No newline at end of file
diff --git a/engine/commands/model_start_cmd.cc b/engine/commands/model_start_cmd.cc
index 1a96b4fee..1340614d9 100644
--- a/engine/commands/model_start_cmd.cc
+++ b/engine/commands/model_start_cmd.cc
@@ -7,43 +7,59 @@
 #include "trantor/utils/Logger.h"
 #include "utils/file_manager_utils.h"
 #include "utils/logging_utils.h"
+#include "utils/modellist_utils.h"
 
 namespace commands {
-ModelStartCmd::ModelStartCmd(std::string host, int port,
-                             const config::ModelConfig& mc)
-    : host_(std::move(host)), port_(port), mc_(mc) {}
+bool ModelStartCmd::Exec(const std::string& host, int port,
+                         const std::string& model_handle) {
 
-bool ModelStartCmd::Exec() {
+  modellist_utils::ModelListUtils modellist_handler;
+  config::YamlHandler yaml_handler;
+  try {
+    auto model_entry = modellist_handler.GetModelInfo(model_handle);
+    yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
+    auto mc = yaml_handler.GetModelConfig();
+    return Exec(host, port, mc);
+  } catch (const std::exception& e) {
+    CLI_LOG("Fail to start model information with ID '" + model_handle +
+            "': " + e.what());
+    return false;
+  }
+}
+
+bool ModelStartCmd::Exec(const std::string& host, int port,
+                         const config::ModelConfig& mc) {
   // Check if server is started
-  if (!commands::IsServerAlive(host_, port_)) {
+  if (!commands::IsServerAlive(host, port)) {
    CLI_LOG("Server is not started yet, please run `"
            << commands::GetCortexBinary() << " start` to start server!");
    return false;
  }
+
   // Only check for llamacpp for now
-  if ((mc_.engine.find("llamacpp") != std::string::npos) &&
-      commands::ModelStatusCmd().IsLoaded(host_, port_, mc_)) {
+  if ((mc.engine.find("llamacpp") != std::string::npos) &&
+      commands::ModelStatusCmd().IsLoaded(host, port, mc)) {
     CLI_LOG("Model has already been started!");
     return true;
   }
 
-  httplib::Client cli(host_ + ":" + std::to_string(port_));
+  httplib::Client cli(host + ":" + std::to_string(port));
 
   nlohmann::json json_data;
-  if (mc_.files.size() > 0) {
+  if (mc.files.size() > 0) {
     // TODO(sang) support multiple files
-    json_data["model_path"] = mc_.files[0];
+    json_data["model_path"] = mc.files[0];
   } else {
     LOG_WARN << "model_path is empty";
     return false;
   }
-  json_data["model"] = mc_.name;
-  json_data["system_prompt"] = mc_.system_template;
-  json_data["user_prompt"] = mc_.user_template;
-  json_data["ai_prompt"] = mc_.ai_template;
-  json_data["ctx_len"] = mc_.ctx_len;
-  json_data["stop"] = mc_.stop;
-  json_data["engine"] = mc_.engine;
+  json_data["model"] = mc.name;
+  json_data["system_prompt"] = mc.system_template;
+  json_data["user_prompt"] = mc.user_template;
+  json_data["ai_prompt"] = mc.ai_template;
+  json_data["ctx_len"] = mc.ctx_len;
+  json_data["stop"] = mc.stop;
+  json_data["engine"] = mc.engine;
 
   auto data_str = json_data.dump();
   cli.set_read_timeout(std::chrono::seconds(60));
@@ -52,13 +68,17 @@ bool ModelStartCmd::Exec() {
   if (res) {
     if (res->status == httplib::StatusCode::OK_200) {
       CLI_LOG("Model loaded!");
+      return true;
+    } else {
+      CTL_ERR("Model failed to load with status code: " << res->status);
+      return false;
     }
   } else {
     auto err = res.error();
     CTL_ERR("HTTP error: " << httplib::to_string(err));
     return false;
   }
-  return true;
+  return false;
 }
 };  // namespace commands
diff --git a/engine/commands/model_start_cmd.h b/engine/commands/model_start_cmd.h
index 26daf9d0e..fbf3c0645 100644
--- a/engine/commands/model_start_cmd.h
+++ b/engine/commands/model_start_cmd.h
@@ -6,13 +6,8 @@
 namespace commands {
 
 class ModelStartCmd {
  public:
-  explicit ModelStartCmd(std::string host, int port,
-                         const config::ModelConfig& mc);
-  bool Exec();
+  bool Exec(const std::string& host, int port, const std::string& model_handle);
 
- private:
-  std::string host_;
-  int port_;
-  const config::ModelConfig& mc_;
+  bool Exec(const std::string& host, int port, const config::ModelConfig& mc);
 };
 }  // namespace commands
diff --git a/engine/commands/model_status_cmd.cc b/engine/commands/model_status_cmd.cc
index f54aa9100..e6ba9bbe0 100644
--- a/engine/commands/model_status_cmd.cc
+++ b/engine/commands/model_status_cmd.cc
@@ -3,8 +3,25 @@
 #include "httplib.h"
 #include "nlohmann/json.hpp"
 #include "utils/logging_utils.h"
+#include "utils/modellist_utils.h"
 
 namespace commands {
+bool ModelStatusCmd::IsLoaded(const std::string& host, int port,
+                              const std::string& model_handle) {
+  modellist_utils::ModelListUtils modellist_handler;
+  config::YamlHandler yaml_handler;
+  try {
+    auto model_entry = modellist_handler.GetModelInfo(model_handle);
+    yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
+    auto mc = yaml_handler.GetModelConfig();
+    return IsLoaded(host, port, mc);
+  } catch (const std::exception& e) {
+    CLI_LOG("Fail to get model status with ID '" + model_handle +
+            "': " + e.what());
+    return false;
+  }
+}
+
 bool ModelStatusCmd::IsLoaded(const std::string& host, int port,
                               const config::ModelConfig& mc) {
   httplib::Client cli(host + ":" + std::to_string(port));
diff --git a/engine/commands/model_status_cmd.h b/engine/commands/model_status_cmd.h
index 2ef44a41d..273d73ef9 100644
--- a/engine/commands/model_status_cmd.h
+++ b/engine/commands/model_status_cmd.h
@@ -6,6 +6,8 @@
 namespace commands {
 class ModelStatusCmd {
  public:
+  bool IsLoaded(const std::string& host, int port,
+                const std::string& model_handle);
   bool IsLoaded(const std::string& host, int port,
                 const config::ModelConfig& mc);
 };
diff --git a/engine/commands/run_cmd.cc b/engine/commands/run_cmd.cc
index 16b496b0d..805edab47 100644
--- a/engine/commands/run_cmd.cc
+++ b/engine/commands/run_cmd.cc
@@ -6,6 +6,7 @@
 #include "model_status_cmd.h"
 #include "server_start_cmd.h"
 #include "utils/file_manager_utils.h"
+#include "utils/modellist_utils.h"
 
 namespace commands {
 
@@ -48,28 +49,29 @@ void RunCmd::Exec() {
     }
   }
 
-  config::YamlHandler yaml_handler;
-  yaml_handler.ModelConfigFromFile(
-      file_manager_utils::GetModelsContainerPath().string() + "/" + model_file +
-      ".yaml");
-  auto mc = yaml_handler.GetModelConfig();
+  // TODO(sang) refactor after `cortex pull` done with new data structure
+  try {
+    modellist_utils::ModelListUtils modellist_handler;
+    config::YamlHandler yaml_handler;
+    auto model_entry = modellist_handler.GetModelInfo(model_id_);
+    yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
+    auto mc = yaml_handler.GetModelConfig();
 
-  // Always start model if not llamacpp
-  // If it is llamacpp, then check model status first
-  {
-    if ((mc.engine.find("llamacpp") == std::string::npos) ||
-        !commands::ModelStatusCmd().IsLoaded(host_, port_, mc)) {
-      ModelStartCmd msc(host_, port_, mc);
-      if (!msc.Exec()) {
-        return;
+    // Always start model if not llamacpp
+    // If it is llamacpp, then check model status first
+    {
+      if ((mc.engine.find("llamacpp") == std::string::npos) ||
+          !commands::ModelStatusCmd().IsLoaded(host_, port_, mc)) {
+        if (!ModelStartCmd().Exec(host_, port_, mc)) {
+          return;
+        }
       }
     }
-  }
 
-  // Chat
-  {
-    ChatCmd cc(host_, port_, mc);
-    cc.Exec("");
+    // Chat
+    ChatCmd().Exec(host_, port_, mc, "");
+  } catch (const std::exception& e) {
+    CLI_LOG("Fail to run model with ID '" + model_id_ + "': " + e.what());
   }
 }
 };  // namespace commands
diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index f57efb7a2..68e9b4d50 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -131,17 +131,10 @@ void CommandLineParser::SetupCommonCommands() {
       CLI_LOG(chat_cmd->help());
       return;
     }
-    commands::CmdInfo ci(cml_data_.model_id);
-    std::string model_file =
-        ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
-    config::YamlHandler yaml_handler;
-    yaml_handler.ModelConfigFromFile(
-        file_manager_utils::GetModelsContainerPath().string() + "/" +
-        model_file + ".yaml");
-    commands::ChatCmd cc(cml_data_.config.apiServerHost,
-                         std::stoi(cml_data_.config.apiServerPort),
-                         yaml_handler.GetModelConfig());
-    cc.Exec(cml_data_.msg);
+
+    commands::ChatCmd().Exec(cml_data_.config.apiServerHost,
+        std::stoi(cml_data_.config.apiServerPort), cml_data_.model_id,
+        cml_data_.msg);
   });
 }
 
@@ -177,17 +170,9 @@ void CommandLineParser::SetupModelCommands() {
       CLI_LOG(model_start_cmd->help());
       return;
     };
-    commands::CmdInfo ci(cml_data_.model_id);
-    std::string model_file =
-        ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
-    config::YamlHandler yaml_handler;
-    yaml_handler.ModelConfigFromFile(
-        file_manager_utils::GetModelsContainerPath().string() + "/" +
-        model_file + ".yaml");
-    commands::ModelStartCmd msc(cml_data_.config.apiServerHost,
-                                std::stoi(cml_data_.config.apiServerPort),
-                                yaml_handler.GetModelConfig());
-    msc.Exec();
+    commands::ModelStartCmd().Exec(cml_data_.config.apiServerHost,
+                                   std::stoi(cml_data_.config.apiServerPort),
+                                   cml_data_.model_id);
   });
 
   auto stop_model_cmd =
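Note: patch 1 makes every command resolve a model handle the same way: look the handle up in the model list, load the model YAML file the entry points to, and hand the parsed config to the ModelConfig overload. A minimal sketch of that shared pattern follows; the include paths and the helper name are assumptions, not part of the patch:

  #include <string>
  #include "config/yaml_config.h"        // assumed location of config::YamlHandler
  #include "utils/modellist_utils.h"

  // Resolve a model handle to its ModelConfig -- the lookup repeated in
  // ChatCmd, ModelStartCmd and ModelStatusCmd above. GetModelInfo throws
  // on an unknown handle, which is why every call site wraps it in try/catch.
  inline config::ModelConfig ResolveModelConfig(const std::string& model_handle) {
    modellist_utils::ModelListUtils modellist_handler;
    config::YamlHandler yaml_handler;
    auto model_entry = modellist_handler.GetModelInfo(model_handle);
    yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
    return yaml_handler.GetModelConfig();
  }

A hypothetical helper like this would also collapse the three duplicated catch blocks into one place.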
From 0324e4d71f0fce552cfc7ef66c8eb49f97be1e7f Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Tue, 24 Sep 2024 08:30:09 +0700
Subject: [PATCH 2/4] refactor: resolve model id from handle and check model
 existence before download

---
 engine/commands/run_cmd.cc                   | 76 ++++++++++---------
 engine/commands/run_cmd.h                    |  6 +-
 .../test/components/test_modellist_utils.cc |  8 ++
 engine/utils/cortex_utils.h                  | 24 ++++++
 engine/utils/modellist_utils.cc              | 15 ++++
 engine/utils/modellist_utils.h               |  1 +
 engine/utils/url_parser.h                    |  2 +
 7 files changed, 92 insertions(+), 40 deletions(-)

diff --git a/engine/commands/run_cmd.cc b/engine/commands/run_cmd.cc
index 805edab47..40fd28832 100644
--- a/engine/commands/run_cmd.cc
+++ b/engine/commands/run_cmd.cc
@@ -7,56 +7,58 @@
 #include "server_start_cmd.h"
 #include "utils/file_manager_utils.h"
 #include "utils/modellist_utils.h"
-
+#include "utils/cortex_utils.h"
 namespace commands {
 
 void RunCmd::Exec() {
-  auto address = host_ + ":" + std::to_string(port_);
-  CmdInfo ci(model_id_);
-  std::string model_file =
-      ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
-  // TODO should we clean all resource if something fails?
-  // Check if model existed. If not, download it
-  {
-    auto model_conf = model_service_.GetDownloadedModel(model_file + ".yaml");
-    if (!model_conf.has_value()) {
-      model_service_.DownloadModel(model_id_);
-    }
-  }
+  auto model_id = cortex_utils::GetModelIdFromHandle(model_handle_);
 
-  // Check if engine existed. If not, download it
-  {
-    auto required_engine = engine_service_.GetEngineInfo(ci.engine_name);
-    if (!required_engine.has_value()) {
-      throw std::runtime_error("Engine not found: " + ci.engine_name);
-    }
-    if (required_engine.value().status == EngineService::kIncompatible) {
-      throw std::runtime_error("Engine " + ci.engine_name + " is incompatible");
-    }
-    if (required_engine.value().status == EngineService::kNotInstalled) {
-      engine_service_.InstallEngine(ci.engine_name);
-    }
+  if (!model_id.has_value()) {
+    CTL_ERR("Could not get model_id from handle: " << model_handle_);
+    return;
   }
+  modellist_utils::ModelListUtils modellist_handler;
+  config::YamlHandler yaml_handler;
+  auto address = host_ + ":" + std::to_string(port_);
 
-  // Start server if it is not running
+// Download model if it does not exist
   {
-    if (!commands::IsServerAlive(host_, port_)) {
-      CLI_LOG("Starting server ...");
-      commands::ServerStartCmd ssc;
-      if (!ssc.Exec(host_, port_)) {
-        return;
-      }
+    if (!modellist_handler.HasModel(*model_id)) {
+      model_service_.DownloadModel(model_handle_);
     }
   }
 
-  // TODO(sang) refactor after `cortex pull` done with new data structure
   try {
-    modellist_utils::ModelListUtils modellist_handler;
-    config::YamlHandler yaml_handler;
-    auto model_entry = modellist_handler.GetModelInfo(model_id_);
+    auto model_entry = modellist_handler.GetModelInfo(*model_id);
     yaml_handler.ModelConfigFromFile(model_entry.path_to_model_yaml);
     auto mc = yaml_handler.GetModelConfig();
 
+    // Check if engine existed. If not, download it
+    {
+      auto required_engine = engine_service_.GetEngineInfo(mc.engine);
+      if (!required_engine.has_value()) {
+        throw std::runtime_error("Engine not found: " + mc.engine);
+      }
+      if (required_engine.value().status == EngineService::kIncompatible) {
+        throw std::runtime_error("Engine " + mc.engine +
+                                 " is incompatible");
+      }
+      if (required_engine.value().status == EngineService::kNotInstalled) {
+        engine_service_.InstallEngine(mc.engine);
+      }
+    }
+
+    // Start server if it is not running
+    {
+      if (!commands::IsServerAlive(host_, port_)) {
+        CLI_LOG("Starting server ...");
+        commands::ServerStartCmd ssc;
+        if (!ssc.Exec(host_, port_)) {
+          return;
+        }
+      }
+    }
+
     // Always start model if not llamacpp
     // If it is llamacpp, then check model status first
     {
@@ -71,7 +73,7 @@ void RunCmd::Exec() {
     // Chat
     ChatCmd().Exec(host_, port_, mc, "");
   } catch (const std::exception& e) {
-    CLI_LOG("Fail to run model with ID '" + model_id_ + "': " + e.what());
+    CLI_LOG("Fail to run model with ID '" + model_handle_ + "': " + e.what());
   }
 }
 };  // namespace commands
diff --git a/engine/commands/run_cmd.h b/engine/commands/run_cmd.h
index c862926a6..136800102 100644
--- a/engine/commands/run_cmd.h
+++ b/engine/commands/run_cmd.h
@@ -6,10 +6,10 @@
 namespace commands {
 class RunCmd {
  public:
-  explicit RunCmd(std::string host, int port, std::string model_id)
+  explicit RunCmd(std::string host, int port, std::string model_handle)
       : host_{std::move(host)},
         port_{port},
-        model_id_{std::move(model_id)},
+        model_handle_{std::move(model_handle)},
         model_service_{ModelService()} {};
 
   void Exec();
@@ -17,7 +17,7 @@ class RunCmd {
  private:
   std::string host_;
   int port_;
-  std::string model_id_;
+  std::string model_handle_;
   ModelService model_service_;
   EngineService engine_service_;
diff --git a/engine/test/components/test_modellist_utils.cc b/engine/test/components/test_modellist_utils.cc
index 2a7abc05a..68b06483d 100644
--- a/engine/test/components/test_modellist_utils.cc
+++ b/engine/test/components/test_modellist_utils.cc
@@ -120,4 +120,12 @@ TEST_F(ModelListUtilsTestSuite, TestUpdateModelAlias) {
   // Clean up
   model_list_.DeleteModelEntry("test_model_id");
   model_list_.DeleteModelEntry("another_model_id");
+}
+
+TEST_F(ModelListUtilsTestSuite, TestHasModel) {
+  model_list_.AddModelEntry(kTestModel);
+
+  EXPECT_TRUE(model_list_.HasModel("test_model_id"));
+  EXPECT_TRUE(model_list_.HasModel("test_alias"));
+  EXPECT_FALSE(model_list_.HasModel("non_existent_model"));
 }
\ No newline at end of file
diff --git a/engine/utils/cortex_utils.h b/engine/utils/cortex_utils.h
index 9673f0c1a..2d4f401d5 100644
--- a/engine/utils/cortex_utils.h
+++ b/engine/utils/cortex_utils.h
@@ -5,12 +5,16 @@
 #include
 #include
 #include
+#include <optional>
 #include
 #include
 #include
 #include
 #include
 
+#include "utils/string_utils.h"
+#include "utils/url_parser.h"
+
 // Include platform-specific headers
 #ifdef _WIN32
 #include
@@ -342,4 +346,24 @@ inline std::string GetCurrentPath() {
 }
 #endif
 
+inline std::optional<std::string> GetModelIdFromHandle(
+    const std::string& model_handle) {
+  if (string_utils::StartsWith(model_handle, "https://")) {
+    auto url_obj = url_parser::FromUrlString(model_handle);
+    return url_obj.pathParams[1];
+  }
+
+  if (model_handle.find("/") != std::string::npos) {
+    auto parsed = string_utils::SplitBy(model_handle, "/");
+    if (parsed.size() != 2) {
+      return std::nullopt;
+    }
+
+    auto model_name = parsed[1];
+    return model_name;
+  }
+
+  return model_handle;
+}
+
 }  // namespace cortex_utils
diff --git a/engine/utils/modellist_utils.cc b/engine/utils/modellist_utils.cc
index 7e1a43833..d577519f3 100644
--- a/engine/utils/modellist_utils.cc
+++ b/engine/utils/modellist_utils.cc
@@ -238,4 +238,19 @@ bool ModelListUtils::DeleteModelEntry(const std::string& identifier) {
   }
   return false;  // Entry not found or not in READY state
 }
+
+bool ModelListUtils::HasModel(const std::string& identifier) const {
+  std::lock_guard<std::mutex> lock(mutex_);
+  auto entries = LoadModelList();
+  auto it = std::find_if(
+      entries.begin(), entries.end(), [&identifier](const ModelEntry& entry) {
+        return entry.model_id == identifier || entry.model_alias == identifier;
+      });
+
+  if (it != entries.end()) {
+    return true;
+  } else {
+    return false;
+  }
+}
 }  // namespace modellist_utils
diff --git a/engine/utils/modellist_utils.h b/engine/utils/modellist_utils.h
index b7aaca81a..113591f25 100644
--- a/engine/utils/modellist_utils.h
+++ b/engine/utils/modellist_utils.h
@@ -43,5 +43,6 @@ class ModelListUtils {
   bool DeleteModelEntry(const std::string& identifier);
   bool UpdateModelAlias(const std::string& model_id,
                         const std::string& model_alias);
+  bool HasModel(const std::string& identifier) const;
 };
 }  // namespace modellist_utils
diff --git a/engine/utils/url_parser.h b/engine/utils/url_parser.h
index 97d499a97..90b62143e 100644
--- a/engine/utils/url_parser.h
+++ b/engine/utils/url_parser.h
@@ -1,3 +1,5 @@
+#pragma once
+
 #include
 #include
 #include
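Note: ModelListUtils::HasModel matches its argument against both the model_id and the model_alias columns (see the test above), and cortex_utils::GetModelIdFromHandle normalizes a bare id, an org/model pair, or an https URL down to the trailing model name. A short sketch of how patch 2 combines the two in RunCmd::Exec; the helper name and the example handles are illustrative only:

  #include <string>
  #include "utils/cortex_utils.h"
  #include "utils/modellist_utils.h"

  // "tinyllama", "cortexso/tinyllama", and an https URL whose second path
  // segment is "tinyllama" all normalize to the same id here.
  inline bool NeedsDownload(const std::string& model_handle) {
    auto model_id = cortex_utils::GetModelIdFromHandle(model_handle);
    if (!model_id.has_value()) {
      return false;  // e.g. "a/b/c": SplitBy yields three parts, so nullopt
    }
    modellist_utils::ModelListUtils modellist_handler;
    // Download only when neither a model_id nor an alias matches.
    return !modellist_handler.HasModel(*model_id);
  }

Patch 3 below drops GetModelIdFromHandle again in favor of the id returned by ModelService::DownloadModel.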
From 7fc0bbe3b78d8497a61eeb378f9ec35da6279529 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Tue, 24 Sep 2024 10:44:39 +0700
Subject: [PATCH 3/4] fix: use model_id from model service and std::getline

---
 engine/commands/run_cmd.cc          | 23 ++++++++++++-----------
 engine/services/download_service.cc |  4 ++--
 engine/utils/cli_selection_utils.h  |  2 +-
 engine/utils/cortex_utils.h         | 24 ------------------------
 4 files changed, 15 insertions(+), 38 deletions(-)

diff --git a/engine/commands/run_cmd.cc b/engine/commands/run_cmd.cc
index 40fd28832..d17d91e9f 100644
--- a/engine/commands/run_cmd.cc
+++ b/engine/commands/run_cmd.cc
@@ -5,26 +5,28 @@
 #include "model_start_cmd.h"
 #include "model_status_cmd.h"
 #include "server_start_cmd.h"
+#include "utils/cortex_utils.h"
 #include "utils/file_manager_utils.h"
 #include "utils/modellist_utils.h"
-#include "utils/cortex_utils.h"
 namespace commands {
 
 void RunCmd::Exec() {
-  auto model_id = cortex_utils::GetModelIdFromHandle(model_handle_);
+  std::optional<std::string> model_id = model_handle_;
 
-  if (!model_id.has_value()) {
-    CTL_ERR("Could not get model_id from handle: " << model_handle_);
-    return;
-  }
   modellist_utils::ModelListUtils modellist_handler;
   config::YamlHandler yaml_handler;
   auto address = host_ + ":" + std::to_string(port_);
 
-// Download model if it does not exist
+  // Download model if it does not exist
   {
-    if (!modellist_handler.HasModel(*model_id)) {
-      model_service_.DownloadModel(model_handle_);
+    if (!modellist_handler.HasModel(model_handle_)) {
+      model_id = model_service_.DownloadModel(model_handle_);
+      if (!model_id.has_value()) {
+        CTL_ERR("Error: Could not get model_id from handle: " << model_handle_);
+        return;
+      } else {
+        CTL_INF("model_id: " << model_id.value());
+      }
     }
   }
 
@@ -40,8 +42,7 @@ void RunCmd::Exec() {
         throw std::runtime_error("Engine not found: " + mc.engine);
       }
       if (required_engine.value().status == EngineService::kIncompatible) {
-        throw std::runtime_error("Engine " + mc.engine +
-                                 " is incompatible");
+        throw std::runtime_error("Engine " + mc.engine + " is incompatible");
       }
       if (required_engine.value().status == EngineService::kNotInstalled) {
         engine_service_.InstallEngine(mc.engine);
diff --git a/engine/services/download_service.cc b/engine/services/download_service.cc
index 1cf8b68c4..496d01116 100644
--- a/engine/services/download_service.cc
+++ b/engine/services/download_service.cc
@@ -113,7 +113,7 @@ void DownloadService::Download(const std::string& download_id,
                 << " need to be downloaded.");
     std::cout << "Continue download [Y/n]: " << std::flush;
     std::string answer{""};
-    std::cin >> answer;
+    std::getline(std::cin, answer);
     if (answer == "Y" || answer == "y" || answer.empty()) {
       mode = "ab";
       CLI_LOG("Resuming download..");
@@ -126,7 +126,7 @@ void DownloadService::Download(const std::string& download_id,
     std::cout << "Re-download? [Y/n]: " << std::flush;
     std::string answer = "";
-    std::cin >> answer;
+    std::getline(std::cin, answer);
     if (answer == "Y" || answer == "y" || answer.empty()) {
       CLI_LOG("Re-downloading..");
     } else {
diff --git a/engine/utils/cli_selection_utils.h b/engine/utils/cli_selection_utils.h
index d3848c5bb..0c2453478 100644
--- a/engine/utils/cli_selection_utils.h
+++ b/engine/utils/cli_selection_utils.h
@@ -20,7 +20,7 @@ inline std::optional PrintSelection(
   std::string selection{""};
   PrintMenu(options);
   std::cout << "Select an option (" << 1 << "-" << options.size() << "): ";
-  std::cin >> selection;
+  std::getline(std::cin, selection);
 
   if (selection.empty()) {
     return std::nullopt;
diff --git a/engine/utils/cortex_utils.h b/engine/utils/cortex_utils.h
index 2d4f401d5..9673f0c1a 100644
--- a/engine/utils/cortex_utils.h
+++ b/engine/utils/cortex_utils.h
@@ -5,16 +5,12 @@
 #include
 #include
 #include
-#include <optional>
 #include
 #include
 #include
 #include
 #include
 
-#include "utils/string_utils.h"
-#include "utils/url_parser.h"
-
 // Include platform-specific headers
 #ifdef _WIN32
 #include
@@ -346,24 +342,4 @@ inline std::string GetCurrentPath() {
 }
 #endif
 
-inline std::optional<std::string> GetModelIdFromHandle(
-    const std::string& model_handle) {
-  if (string_utils::StartsWith(model_handle, "https://")) {
-    auto url_obj = url_parser::FromUrlString(model_handle);
-    return url_obj.pathParams[1];
-  }
-
-  if (model_handle.find("/") != std::string::npos) {
-    auto parsed = string_utils::SplitBy(model_handle, "/");
-    if (parsed.size() != 2) {
-      return std::nullopt;
-    }
-
-    auto model_name = parsed[1];
-    return model_name;
-  }
-
-  return model_handle;
-}
-
 }  // namespace cortex_utils
From a61daac6ddddcdb562170b7504e23c3da4a94688 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Tue, 24 Sep 2024 11:03:07 +0700
Subject: [PATCH 4/4] fix: log host and port after file logger is initialized

---
 engine/main.cc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/engine/main.cc b/engine/main.cc
index e7fe9bd22..c461342c9 100644
--- a/engine/main.cc
+++ b/engine/main.cc
@@ -29,8 +29,8 @@ void RunServer() {
   auto config = file_manager_utils::GetCortexConfig();
-  LOG_INFO << "Host: " << config.apiServerHost
-           << " Port: " << config.apiServerPort << "\n";
+  std::cout << "Host: " << config.apiServerHost
+            << " Port: " << config.apiServerPort << "\n";
 
   // Create logs/ folder and setup log to file
   std::filesystem::create_directories(
@@ -46,6 +46,8 @@ void RunServer() {
         asyncFileLogger.output_(msg, len);
       },
       [&]() { asyncFileLogger.flush(); });
+  LOG_INFO << "Host: " << config.apiServerHost
+           << " Port: " << config.apiServerPort << "\n";
   // Number of cortex.cpp threads
   // if (argc > 1) {
   //   thread_num = std::atoi(argv[1]);
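Note: the std::getline change in patch 3 is what makes the [Y/n] prompts actually default to yes. "std::cin >> answer" skips whitespace and blocks until a non-empty token arrives, so answer.empty() could never be true when the user just presses Enter. A self-contained illustration of the difference:

  #include <iostream>
  #include <string>

  int main() {
    std::string answer;
    std::cout << "Continue download [Y/n]: " << std::flush;
    // With "std::cin >> answer;" a bare Enter would keep blocking here:
    // operator>> consumes leading whitespace (including newlines) and only
    // returns once it has read a non-empty token.
    std::getline(std::cin, answer);  // a bare Enter yields an empty string
    if (answer.empty() || answer == "Y" || answer == "y") {
      std::cout << "Resuming download..\n";
    }
    return 0;
  }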