From fd9930c7807fe2ced6bd67d9e0648ed055493553 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Fri, 20 Sep 2024 13:35:06 +0700
Subject: [PATCH 1/4] fix: show models help when required model_id or subcommand is missing

---
 engine/controllers/command_line_parser.cc | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index 25557fd41..75ea79ee0 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -28,6 +28,7 @@ constexpr const auto kInferenceGroup = "Inference";
 constexpr const auto kModelsGroup = "Models";
 constexpr const auto kEngineGroup = "Engines";
 constexpr const auto kSystemGroup = "System";
+constexpr const auto kSubcommnds = "Subcommands";
 } // namespace
 CommandLineParser::CommandLineParser()
     : app_("Cortex.cpp CLI"), engine_service_{EngineService()} {}
@@ -93,14 +94,26 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   // Models group commands
   auto models_cmd =
       app_.add_subcommand("models", "Subcommands for managing models");
+  models_cmd->usage(commands::GetCortexBinary() + " models [OPTIONS] [SUBCOMMAND]");
   models_cmd->group(kModelsGroup);
-  models_cmd->require_subcommand();
+
+  // models_cmd->require_subcommand();
+  models_cmd->callback([&]{
+    if(models_cmd->get_subcommands().size() == 0) {
+      std::cout << models_cmd->help() << std::endl;
+    }
+  });
 
   auto model_start_cmd =
       models_cmd->add_subcommand("start", "Start a model by ID");
   model_start_cmd->add_option("model_id", model_id, "");
-  model_start_cmd->require_option();
-  model_start_cmd->callback([&model_id, &config]() {
+  model_start_cmd->group(kSubcommnds);
+  // model_start_cmd->require_option();
+  model_start_cmd->callback([&model_start_cmd, &model_id, &config]() {
+    if(model_id.empty()) {
+      std::cout << model_start_cmd->help() << std::endl;
+      return;
+    };
     commands::CmdInfo ci(model_id);
     std::string model_file =
         ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;

From f8a6664c2fe8519a17cfa6c3bf00e6c278eec93b Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Fri, 20 Sep 2024 14:12:04 +0700
Subject: [PATCH 2/4] fix: add usage strings, group subcommands, and show help on missing arguments

---
 engine/controllers/command_line_parser.cc | 168 +++++++++++++++++-----
 1 file changed, 133 insertions(+), 35 deletions(-)

diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index 75ea79ee0..66cc77236 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -28,13 +28,14 @@ constexpr const auto kInferenceGroup = "Inference";
 constexpr const auto kModelsGroup = "Models";
 constexpr const auto kEngineGroup = "Engines";
 constexpr const auto kSystemGroup = "System";
-constexpr const auto kSubcommnds = "Subcommands";
+constexpr const auto kSubcommands = "Subcommands";
 } // namespace
 CommandLineParser::CommandLineParser()
     : app_("Cortex.cpp CLI"), engine_service_{EngineService()} {}

 bool CommandLineParser::SetupCommand(int argc, char** argv) {
-  app_.usage(commands::GetCortexBinary() + " [OPTIONS] [SUBCOMMAND]");
+  app_.usage("Usage:\n" + commands::GetCortexBinary() +
+             " [options] [subcommand]");
   auto config = file_manager_utils::GetCortexConfig();
   std::string model_id;
   std::string msg;
@@ -44,9 +45,15 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
       "Download a model by URL (or HuggingFace ID) "
       "See built-in models: https://huggingface.co/cortexso");
   model_pull_cmd->group(kCommonCommandsGroup);
+  model_pull_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                        " pull [options] [model_id]");
   model_pull_cmd->add_option("model_id", model_id, "");
-  model_pull_cmd->require_option();
-  model_pull_cmd->callback([&model_id]() {
+  model_pull_cmd->callback([model_pull_cmd, &model_id]() {
+    if (model_id.empty()) {
+      CLI_LOG("[model_id] is required\n");
+      CLI_LOG(model_pull_cmd->help());
+      return;
+    }
     try {
       commands::ModelPullCmd().Exec(model_id);
     } catch (const std::exception& e) {
@@ -57,9 +64,15 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   auto run_cmd =
       app_.add_subcommand("run", "Shortcut to start a model and chat");
   run_cmd->group(kCommonCommandsGroup);
+  run_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+              " run [options] [model_id]");
   run_cmd->add_option("model_id", model_id, "");
-  run_cmd->require_option();
-  run_cmd->callback([&model_id, &config] {
+  run_cmd->callback([run_cmd, &model_id, &config] {
+    if (model_id.empty()) {
+      CLI_LOG("[model_id] is required\n");
+      CLI_LOG(run_cmd->help());
+      return;
+    }
     commands::RunCmd rc(config.apiServerHost, std::stoi(config.apiServerPort),
                         model_id);
     rc.Exec();
@@ -67,12 +80,14 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   auto chat_cmd =
       app_.add_subcommand("chat", "Send a chat completion request");
   chat_cmd->group(kCommonCommandsGroup);
+  chat_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+              " chat [model_id] [options]");
   chat_cmd->add_option("model_id", model_id, "");
-  chat_cmd->require_option();
   chat_cmd->add_option("-m,--message", msg, "Message to chat with model");
-  chat_cmd->callback([&model_id, &msg, &config] {
+  chat_cmd->callback([chat_cmd, &model_id, &msg, &config] {
     if (model_id.empty()) {
-      CLI_LOG("Please input [model_id] in command!");
+      CLI_LOG("[model_id] is required\n");
+      CLI_LOG(chat_cmd->help());
       return;
     }
     commands::CmdInfo ci(model_id);
@@ -94,24 +109,26 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   // Models group commands
   auto models_cmd =
app_.add_subcommand("models", "Subcommands for managing models"); - models_cmd->usage(commands::GetCortexBinary() + " models [OPTIONS] [SUBCOMMAND]"); + models_cmd->usage("Usage:\n" + commands::GetCortexBinary() + + " models [options] [subcommand]"); models_cmd->group(kModelsGroup); - // models_cmd->require_subcommand(); - models_cmd->callback([&]{ - if(models_cmd->get_subcommands().size() == 0) { - std::cout << models_cmd->help() << std::endl; + models_cmd->callback([&] { + if (models_cmd->get_subcommands().empty()) { + CLI_LOG(models_cmd->help()); } }); auto model_start_cmd = models_cmd->add_subcommand("start", "Start a model by ID"); + model_start_cmd->usage("Usage:\n" + commands::GetCortexBinary() + + " models start [model_id]"); model_start_cmd->add_option("model_id", model_id, ""); - model_start_cmd->group(kSubcommnds); - // model_start_cmd->require_option(); + model_start_cmd->group(kSubcommands); model_start_cmd->callback([&model_start_cmd, &model_id, &config]() { - if(model_id.empty()) { - std::cout << model_start_cmd->help() << std::endl; + if (model_id.empty()) { + CLI_LOG("[model_id] is required\n"); + CLI_LOG(model_start_cmd->help()); return; }; commands::CmdInfo ci(model_id); @@ -129,9 +146,16 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { auto stop_model_cmd = models_cmd->add_subcommand("stop", "Stop a model by ID"); + stop_model_cmd->usage("Usage:\n" + commands::GetCortexBinary() + + " models stop [model_id]"); + stop_model_cmd->group(kSubcommands); stop_model_cmd->add_option("model_id", model_id, ""); - stop_model_cmd->require_option(); - stop_model_cmd->callback([&model_id, &config]() { + stop_model_cmd->callback([&stop_model_cmd, &model_id, &config]() { + if (model_id.empty()) { + CLI_LOG("[model_id] is required\n"); + CLI_LOG(stop_model_cmd->help()); + return; + }; commands::CmdInfo ci(model_id); std::string model_file = ci.branch == "main" ? 
         ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
@@ -147,20 +171,36 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {

   auto list_models_cmd =
       models_cmd->add_subcommand("list", "List all models locally");
+  list_models_cmd->group(kSubcommands);
   list_models_cmd->callback([]() { commands::ModelListCmd().Exec(); });

   auto get_models_cmd =
       models_cmd->add_subcommand("get", "Get info of {model_id} locally");
+  get_models_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                        " models get [model_id]");
+  get_models_cmd->group(kSubcommands);
   get_models_cmd->add_option("model_id", model_id, "");
-  get_models_cmd->require_option();
-  get_models_cmd->callback(
-      [&model_id]() { commands::ModelGetCmd().Exec(model_id); });
+  get_models_cmd->callback([&get_models_cmd, &model_id]() {
+    if (model_id.empty()) {
+      CLI_LOG("[model_id] is required\n");
+      CLI_LOG(get_models_cmd->help());
+      return;
+    };
+    commands::ModelGetCmd().Exec(model_id);
+  });

   auto model_del_cmd =
       models_cmd->add_subcommand("delete", "Delete a model by ID locally");
+  model_del_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                       " models delete [model_id]");
+  model_del_cmd->group(kSubcommands);
   model_del_cmd->add_option("model_id", model_id, "");
-  model_del_cmd->require_option();
-  model_del_cmd->callback([&model_id]() {
+  model_del_cmd->callback([&model_del_cmd, &model_id]() {
+    if (model_id.empty()) {
+      CLI_LOG("[model_id] is required\n");
+      CLI_LOG(model_del_cmd->help());
+      return;
+    };
     commands::ModelDelCmd mdc;
     mdc.Exec(model_id);
   });
@@ -168,27 +208,44 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   std::string model_alias;
   auto model_alias_cmd =
       models_cmd->add_subcommand("alias", "Add alias name for a modelID");
-  model_alias_cmd->add_option("--model_id", model_id, "Can be modelID or model alias to identify model");
-  model_alias_cmd->require_option();
+  model_alias_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+      " models alias --model_id [model_id] --alias [alias]");
+  model_alias_cmd->group(kSubcommands);
+  model_alias_cmd->add_option(
+      "--model_id", model_id,
+      "Can be modelID or model alias to identify model");
   model_alias_cmd->add_option("--alias", model_alias, "new alias to be set");
-  model_alias_cmd->require_option();
-  model_alias_cmd->callback([&model_id, &model_alias]() {
+  model_alias_cmd->callback([&model_alias_cmd, &model_id, &model_alias]() {
+    if (model_id.empty() || model_alias.empty()) {
+      CLI_LOG("[model_id] and [alias] are required\n");
+      CLI_LOG(model_alias_cmd->help());
+      return;
+    }
     commands::ModelAliasCmd mdc;
     mdc.Exec(model_id, model_alias);
   });

   auto model_update_cmd =
       models_cmd->add_subcommand("update", "Update configuration of a model");
+  model_update_cmd->group(kSubcommands);

   std::string model_path;
   auto model_import_cmd = models_cmd->add_subcommand(
       "import", "Import a gguf model from local file");
+  model_import_cmd->usage(
+      "Usage:\n" + commands::GetCortexBinary() +
+      " models import --model_id [model_id] --model_path [model_path]");
+  model_import_cmd->group(kSubcommands);
   model_import_cmd->add_option("--model_id", model_id, "");
   model_import_cmd->add_option("--model_path", model_path,
                                "Absolute path to .gguf model, the path should "
                                "include the gguf file name");
-  model_import_cmd->require_option(2);
-  model_import_cmd->callback([&model_id,&model_path]() {
+  model_import_cmd->callback([&model_import_cmd, &model_id, &model_path]() {
+    if (model_id.empty() || model_path.empty()) {
+      CLI_LOG("[model_id] and [model_path] are required\n");
+      CLI_LOG(model_import_cmd->help());
+      return;
+    }
     commands::ModelImportCmd command(model_id, model_path);
     command.Exec();
   });
@@ -198,18 +255,34 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   // engines group commands
   auto engines_cmd =
       app_.add_subcommand("engines", "Subcommands for managing engines");
+  engines_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                     " engines [options] [subcommand]");
   engines_cmd->group(kEngineGroup);
-  engines_cmd->require_subcommand();
+  engines_cmd->callback([&] {
+    if (engines_cmd->get_subcommands().empty()) {
+      CLI_LOG("A subcommand is required\n");
+      CLI_LOG(engines_cmd->help());
+    }
+  });

   auto list_engines_cmd =
       engines_cmd->add_subcommand("list", "List all cortex engines");
+  list_engines_cmd->group(kSubcommands);
   list_engines_cmd->callback([]() {
     commands::EngineListCmd command;
     command.Exec();
   });

   auto install_cmd = engines_cmd->add_subcommand("install", "Install engine");
-  install_cmd->require_subcommand();
+  install_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                     " engines install [engine_name] [options]");
+  install_cmd->group(kSubcommands);
+  install_cmd->callback([&install_cmd] {
+    if (install_cmd->get_subcommands().empty()) {
+      CLI_LOG("[engine_name] is required\n");
+      CLI_LOG(install_cmd->help());
+    }
+  });
   for (auto& engine : engine_service_.kSupportEngines) {
     std::string engine_name{engine};
     EngineInstall(install_cmd, engine_name, version);
@@ -217,7 +290,15 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   auto uninstall_cmd =
       engines_cmd->add_subcommand("uninstall", "Uninstall engine");
-  uninstall_cmd->require_subcommand();
+  uninstall_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                       " engines uninstall [engine_name] [options]");
+  uninstall_cmd->callback([&uninstall_cmd] {
+    if (uninstall_cmd->get_subcommands().empty()) {
+      CLI_LOG("[engine_name] is required\n");
+      CLI_LOG(uninstall_cmd->help());
+    }
+  });
+  uninstall_cmd->group(kSubcommands);
   for (auto& engine : engine_service_.kSupportEngines) {
     std::string engine_name{engine};
     EngineUninstall(uninstall_cmd, engine_name);
@@ -297,6 +378,9 @@ void CommandLineParser::EngineInstall(CLI::App* parent,
                                       const std::string& engine_name,
                                       std::string& version) {
   auto install_engine_cmd = parent->add_subcommand(engine_name, "");
+  install_engine_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                            " engines install " + engine_name + " [options]");
+  install_engine_cmd->group(kEngineGroup);
   install_engine_cmd->add_option("-v, --version", version,
                                  "Engine version to download");
@@ -313,6 +397,9 @@ void CommandLineParser::EngineInstall(CLI::App* parent,
 void CommandLineParser::EngineUninstall(CLI::App* parent,
                                         const std::string& engine_name) {
   auto uninstall_engine_cmd = parent->add_subcommand(engine_name, "");
+  uninstall_engine_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                              " engines uninstall " + engine_name + " [options]");
+  uninstall_engine_cmd->group(kEngineGroup);

   uninstall_engine_cmd->callback([engine_name] {
     try {
@@ -325,13 +412,24 @@ void CommandLineParser::EngineUninstall(CLI::App* parent,

 void CommandLineParser::EngineGet(CLI::App* parent) {
   auto get_cmd = parent->add_subcommand("get", "Get an engine info");
-  get_cmd->require_subcommand();
+  get_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                 " engines get [engine_name] [options]");
+  get_cmd->group(kSubcommands);
+  get_cmd->callback([get_cmd] {
+    if (get_cmd->get_subcommands().empty()) {
+      CLI_LOG("[engine_name] is required\n");
+      CLI_LOG(get_cmd->help());
+    }
+  });

   for (auto& engine : engine_service_.kSupportEngines) {
     std::string engine_name{engine};
     std::string desc = "Get " + engine_name + " status";

     auto engine_get_cmd = get_cmd->add_subcommand(engine_name, desc);
+    engine_get_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                          " engines get " + engine_name + " [options]");
+    engine_get_cmd->group(kEngineGroup);
     engine_get_cmd->callback(
         [engine_name] { commands::EngineGetCmd().Exec(engine_name); });
   }

From 5543cea79b56e34b445c828894c38362e1dc2aef Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Sat, 21 Sep 2024 08:52:43 +0700
Subject: [PATCH 3/4] feat: split command setup into per-group methods backed by CmlData

---
 engine/controllers/command_line_parser.cc | 229 ++++++++++++----------
 engine/controllers/command_line_parser.h  |  23 +++
 2 files changed, 147 insertions(+), 105 deletions(-)

diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index 66cc77236..3fd85d849 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -36,10 +36,49 @@ CommandLineParser::CommandLineParser()
 bool CommandLineParser::SetupCommand(int argc, char** argv) {
   app_.usage("Usage:\n" + commands::GetCortexBinary() +
              " [options] [subcommand]");
-  auto config = file_manager_utils::GetCortexConfig();
+  cml_data_.config = file_manager_utils::GetCortexConfig();
   std::string model_id;
   std::string msg;

+  SetupCommonCommands();
+
+  SetupInferenceCommands();
+
+  SetupModelCommands();
+
+  SetupEngineCommands();
+
+  SetupSystemCommands();
+
+  app_.add_flag("--verbose", log_verbose, "Verbose logging");
+
+  // cortex version
+  auto cb = [&](int c) {
+#ifdef CORTEX_CPP_VERSION
+    CLI_LOG(CORTEX_CPP_VERSION);
+#else
+    CLI_LOG("default");
+#endif
+  };
+  app_.add_flag_function("-v,--version", cb, "Cortex version");
+
+  CLI11_PARSE(app_, argc, argv);
+  if (argc == 1) {
+    CLI_LOG(app_.help());
+    return true;
+  }
+
+  // Check new update, only check for stable release for now
+#ifdef CORTEX_CPP_VERSION
+  if (cml_data_.check_upd) {
+    commands::CheckNewUpdate();
+  }
+#endif
+
+  return true;
+}
+
+void CommandLineParser::SetupCommonCommands() {
   auto model_pull_cmd = app_.add_subcommand(
       "pull",
       "Download a model by URL (or HuggingFace ID) "
       "See built-in models: https://huggingface.co/cortexso");
   model_pull_cmd->group(kCommonCommandsGroup);
   model_pull_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
                         " pull [options] [model_id]");
-  model_pull_cmd->add_option("model_id", model_id, "");
-  model_pull_cmd->callback([model_pull_cmd, &model_id]() {
-    if (model_id.empty()) {
+  model_pull_cmd->add_option("model_id", cml_data_.model_id, "");
+  model_pull_cmd->callback([this, model_pull_cmd]() {
+    if (cml_data_.model_id.empty()) {
       CLI_LOG("[model_id] is required\n");
       CLI_LOG(model_pull_cmd->help());
       return;
     }
     try {
-      commands::ModelPullCmd().Exec(model_id);
+      commands::ModelPullCmd().Exec(cml_data_.model_id);
     } catch (const std::exception& e) {
       CLI_LOG(e.what());
     }
@@ -65,47 +104,54 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   auto run_cmd =
       app_.add_subcommand("run", "Shortcut to start a model and chat");
   run_cmd->group(kCommonCommandsGroup);
   run_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
-              " run [options] [model_id]");
-  run_cmd->add_option("model_id", model_id, "");
-  run_cmd->callback([run_cmd, &model_id, &config] {
-    if (model_id.empty()) {
+                 " run [options] [model_id]");
+  run_cmd->add_option("model_id", cml_data_.model_id, "");
+  run_cmd->callback([this, run_cmd] {
+    if (cml_data_.model_id.empty()) {
CLI_LOG("[model_id] is required\n"); CLI_LOG(run_cmd->help()); return; } - commands::RunCmd rc(config.apiServerHost, std::stoi(config.apiServerPort), - model_id); + commands::RunCmd rc(cml_data_.config.apiServerHost, + std::stoi(cml_data_.config.apiServerPort), + cml_data_.model_id); rc.Exec(); }); auto chat_cmd = app_.add_subcommand("chat", "Send a chat completion request"); chat_cmd->group(kCommonCommandsGroup); chat_cmd->usage("Usage:\n" + commands::GetCortexBinary() + - " chat [model_id] [options]"); - chat_cmd->add_option("model_id", model_id, ""); - chat_cmd->add_option("-m,--message", msg, "Message to chat with model"); - chat_cmd->callback([chat_cmd, &model_id, &msg, &config] { - if (model_id.empty()) { + " chat [model_id] [options]"); + chat_cmd->add_option("model_id", cml_data_.model_id, ""); + chat_cmd->add_option("-m,--message", cml_data_.msg, + "Message to chat with model"); + chat_cmd->callback([this, chat_cmd] { + if (cml_data_.model_id.empty()) { CLI_LOG("[model_id] is required\n"); CLI_LOG(chat_cmd->help()); return; } - commands::CmdInfo ci(model_id); + commands::CmdInfo ci(cml_data_.model_id); std::string model_file = ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch; config::YamlHandler yaml_handler; yaml_handler.ModelConfigFromFile( file_manager_utils::GetModelsContainerPath().string() + "/" + model_file + ".yaml"); - commands::ChatCmd cc(config.apiServerHost, std::stoi(config.apiServerPort), + commands::ChatCmd cc(cml_data_.config.apiServerHost, + std::stoi(cml_data_.config.apiServerPort), yaml_handler.GetModelConfig()); - cc.Exec(msg); + cc.Exec(cml_data_.msg); }); +} +void CommandLineParser::SetupInferenceCommands() { auto embeddings_cmd = app_.add_subcommand( "embeddings", "Creates an embedding vector representing the input text"); embeddings_cmd->group(kInferenceGroup); +} +void CommandLineParser::SetupModelCommands() { // Models group commands auto models_cmd = app_.add_subcommand("models", "Subcommands for managing models"); @@ -123,23 +169,23 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { models_cmd->add_subcommand("start", "Start a model by ID"); model_start_cmd->usage("Usage:\n" + commands::GetCortexBinary() + " models start [model_id]"); - model_start_cmd->add_option("model_id", model_id, ""); + model_start_cmd->add_option("model_id", cml_data_.model_id, ""); model_start_cmd->group(kSubcommands); - model_start_cmd->callback([&model_start_cmd, &model_id, &config]() { - if (model_id.empty()) { + model_start_cmd->callback([this, model_start_cmd]() { + if (cml_data_.model_id.empty()) { CLI_LOG("[model_id] is required\n"); CLI_LOG(model_start_cmd->help()); return; }; - commands::CmdInfo ci(model_id); + commands::CmdInfo ci(cml_data_.model_id); std::string model_file = ci.branch == "main" ? 
         ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
     config::YamlHandler yaml_handler;
     yaml_handler.ModelConfigFromFile(
         file_manager_utils::GetModelsContainerPath().string() + "/" +
         model_file + ".yaml");
-    commands::ModelStartCmd msc(config.apiServerHost,
-                                std::stoi(config.apiServerPort),
+    commands::ModelStartCmd msc(cml_data_.config.apiServerHost,
+                                std::stoi(cml_data_.config.apiServerPort),
                                 yaml_handler.GetModelConfig());
     msc.Exec();
   });
@@ -149,22 +195,22 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   stop_model_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
                         " models stop [model_id]");
   stop_model_cmd->group(kSubcommands);
-  stop_model_cmd->add_option("model_id", model_id, "");
-  stop_model_cmd->callback([&stop_model_cmd, &model_id, &config]() {
-    if (model_id.empty()) {
+  stop_model_cmd->add_option("model_id", cml_data_.model_id, "");
+  stop_model_cmd->callback([this, stop_model_cmd]() {
+    if (cml_data_.model_id.empty()) {
       CLI_LOG("[model_id] is required\n");
       CLI_LOG(stop_model_cmd->help());
       return;
     };
-    commands::CmdInfo ci(model_id);
+    commands::CmdInfo ci(cml_data_.model_id);
     std::string model_file =
         ci.branch == "main" ? ci.model_name : ci.model_name + "-" + ci.branch;
     config::YamlHandler yaml_handler;
     yaml_handler.ModelConfigFromFile(
         file_manager_utils::GetModelsContainerPath().string() + "/" +
         model_file + ".yaml");
-    commands::ModelStopCmd smc(config.apiServerHost,
-                               std::stoi(config.apiServerPort),
+    commands::ModelStopCmd smc(cml_data_.config.apiServerHost,
+                               std::stoi(cml_data_.config.apiServerPort),
                                yaml_handler.GetModelConfig());
     smc.Exec();
   });
@@ -179,14 +225,14 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   get_models_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
                         " models get [model_id]");
   get_models_cmd->group(kSubcommands);
-  get_models_cmd->add_option("model_id", model_id, "");
-  get_models_cmd->callback([&get_models_cmd, &model_id]() {
-    if (model_id.empty()) {
+  get_models_cmd->add_option("model_id", cml_data_.model_id, "");
+  get_models_cmd->callback([this, get_models_cmd]() {
+    if (cml_data_.model_id.empty()) {
       CLI_LOG("[model_id] is required\n");
       CLI_LOG(get_models_cmd->help());
       return;
     };
-    commands::ModelGetCmd().Exec(model_id);
+    commands::ModelGetCmd().Exec(cml_data_.model_id);
   });

   auto model_del_cmd =
@@ -194,15 +240,15 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
   model_del_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
                        " models delete [model_id]");
   model_del_cmd->group(kSubcommands);
-  model_del_cmd->add_option("model_id", model_id, "");
-  model_del_cmd->callback([&model_del_cmd, &model_id]() {
-    if (model_id.empty()) {
+  model_del_cmd->add_option("model_id", cml_data_.model_id, "");
+  model_del_cmd->callback([this, model_del_cmd]() {
+    if (cml_data_.model_id.empty()) {
       CLI_LOG("[model_id] is required\n");
       CLI_LOG(model_del_cmd->help());
       return;
     };
     commands::ModelDelCmd mdc;
-    mdc.Exec(model_id);
+    mdc.Exec(cml_data_.model_id);
   });

   std::string model_alias;
   auto model_alias_cmd =
@@ -212,17 +258,18 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
       models_cmd->add_subcommand("alias", "Add alias name for a modelID");
-  model_alias_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
-      " models alias --model_id [model_id] --alias [alias]");
+  model_alias_cmd->usage("Usage:\n" + commands::GetCortexBinary() +
+                         " models alias --model_id [model_id] --alias [alias]");
   model_alias_cmd->group(kSubcommands);
   model_alias_cmd->add_option(
-      "--model_id", model_id,
+      "--model_id", cml_data_.model_id,
       "Can be modelID or model alias to identify model");
-  model_alias_cmd->add_option("--alias", model_alias, "new alias to be set");
-  model_alias_cmd->callback([&model_alias_cmd, &model_id, &model_alias]() {
-    if (model_id.empty() || model_alias.empty()) {
model_alias_cmd->add_option("--alias", cml_data_.model_alias, + "new alias to be set"); + model_alias_cmd->callback([this, model_alias_cmd]() { + if (cml_data_.model_id.empty() || cml_data_.model_alias.empty()) { CLI_LOG("[model_id] and [alias] are required\n"); CLI_LOG(model_alias_cmd->help()); return; } commands::ModelAliasCmd mdc; - mdc.Exec(model_id, model_alias); + mdc.Exec(cml_data_.model_id, cml_data_.model_alias); }); auto model_update_cmd = @@ -236,29 +283,28 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { "Usage:\n" + commands::GetCortexBinary() + " models import --model_id [model_id] --model_path [model_path]"); model_import_cmd->group(kSubcommands); - model_import_cmd->add_option("--model_id", model_id, ""); - model_import_cmd->add_option("--model_path", model_path, + model_import_cmd->add_option("--model_id", cml_data_.model_id, ""); + model_import_cmd->add_option("--model_path", cml_data_.model_path, "Absolute path to .gguf model, the path should " "include the gguf file name"); - model_import_cmd->callback([&model_import_cmd, &model_id, &model_path]() { - if (model_id.empty() || model_path.empty()) { + model_import_cmd->callback([this, model_import_cmd]() { + if (cml_data_.model_id.empty() || cml_data_.model_path.empty()) { CLI_LOG("[model_id] and [model_path] are required\n"); CLI_LOG(model_import_cmd->help()); return; } - commands::ModelImportCmd command(model_id, model_path); + commands::ModelImportCmd command(cml_data_.model_id, cml_data_.model_path); command.Exec(); }); +} - // Default version is latest - std::string version{"latest"}; - // engines group commands +void CommandLineParser::SetupEngineCommands() { auto engines_cmd = app_.add_subcommand("engines", "Subcommands for managing engines"); engines_cmd->usage("Usage:\n" + commands::GetCortexBinary() + " engines [options] [subcommand]"); engines_cmd->group(kEngineGroup); - engines_cmd->callback([&] { + engines_cmd->callback([engines_cmd] { if (engines_cmd->get_subcommands().empty()) { CLI_LOG("A subcommand is required\n"); CLI_LOG(engines_cmd->help()); @@ -277,7 +323,7 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { install_cmd->usage("Usage:\n" + commands::GetCortexBinary() + " engines install [engine_name] [options]"); install_cmd->group(kSubcommands); - install_cmd->callback([&install_cmd] { + install_cmd->callback([install_cmd] { if (install_cmd->get_subcommands().empty()) { CLI_LOG("[engine_name] is required\n"); CLI_LOG(install_cmd->help()); @@ -285,14 +331,14 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { }); for (auto& engine : engine_service_.kSupportEngines) { std::string engine_name{engine}; - EngineInstall(install_cmd, engine_name, version); + EngineInstall(install_cmd, engine_name, cml_data_.engine_version); } auto uninstall_cmd = engines_cmd->add_subcommand("uninstall", "Uninstall engine"); uninstall_cmd->usage("Usage:\n" + commands::GetCortexBinary() + " engines uninstall [engine_name] [options]"); - uninstall_cmd->callback([&uninstall_cmd] { + uninstall_cmd->callback([uninstall_cmd] { if (uninstall_cmd->get_subcommands().empty()) { CLI_LOG("[engine_name] is required\n"); CLI_LOG(uninstall_cmd->help()); @@ -305,73 +351,46 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) { } EngineGet(engines_cmd); +} +void CommandLineParser::SetupSystemCommands() { auto start_cmd = app_.add_subcommand("start", "Start the API server"); start_cmd->group(kSystemGroup); - int port = std::stoi(config.apiServerPort); - start_cmd->add_option("-p, --port", 
port, "Server port to listen"); - start_cmd->callback([&config, &port] { - if (port != stoi(config.apiServerPort)) { - CTL_INF("apiServerPort changed from " << config.apiServerPort << " to " - << port); + cml_data_.port = std::stoi(cml_data_.config.apiServerPort); + start_cmd->add_option("-p, --port", cml_data_.port, "Server port to listen"); + start_cmd->callback([this] { + if (cml_data_.port != stoi(cml_data_.config.apiServerPort)) { + CTL_INF("apiServerPort changed from " << cml_data_.config.apiServerPort + << " to " << cml_data_.port); auto config_path = file_manager_utils::GetConfigurationPath(); - config.apiServerPort = std::to_string(port); - config_yaml_utils::DumpYamlConfig(config, config_path.string()); + cml_data_.config.apiServerPort = std::to_string(cml_data_.port); + config_yaml_utils::DumpYamlConfig(cml_data_.config, config_path.string()); } commands::ServerStartCmd ssc; - ssc.Exec(config.apiServerHost, std::stoi(config.apiServerPort)); + ssc.Exec(cml_data_.config.apiServerHost, + std::stoi(cml_data_.config.apiServerPort)); }); auto stop_cmd = app_.add_subcommand("stop", "Stop the API server"); stop_cmd->group(kSystemGroup); - stop_cmd->callback([&config] { - commands::ServerStopCmd ssc(config.apiServerHost, - std::stoi(config.apiServerPort)); + stop_cmd->callback([this] { + commands::ServerStopCmd ssc(cml_data_.config.apiServerHost, + std::stoi(cml_data_.config.apiServerPort)); ssc.Exec(); }); - app_.add_flag("--verbose", log_verbose, "Verbose logging"); - - // cortex version - auto cb = [&](int c) { -#ifdef CORTEX_CPP_VERSION - CLI_LOG(CORTEX_CPP_VERSION); -#else - CLI_LOG("default"); -#endif - }; - app_.add_flag_function("-v,--version", cb, "Cortex version"); - - std::string cortex_version; - bool check_update = true; + auto ps_cmd = + app_.add_subcommand("ps", "Show running models and their status"); + ps_cmd->group(kSystemGroup); auto update_cmd = app_.add_subcommand("update", "Update cortex version"); update_cmd->group(kSystemGroup); - update_cmd->add_option("-v", cortex_version, ""); - update_cmd->callback([&cortex_version, &check_update] { + update_cmd->add_option("-v", cml_data_.cortex_version, ""); + update_cmd->callback([this] { commands::CortexUpdCmd cuc; - cuc.Exec(cortex_version); - check_update = false; + cuc.Exec(cml_data_.cortex_version); + cml_data_.check_upd = false; }); - - auto ps_cmd = - app_.add_subcommand("ps", "Show running models and their status"); - ps_cmd->group(kSystemGroup); - - CLI11_PARSE(app_, argc, argv); - if (argc == 1) { - CLI_LOG(app_.help()); - return true; - } - - // Check new update, only check for stable release for now -#ifdef CORTEX_CPP_VERSION - if (check_update) { - commands::CheckNewUpdate(); - } -#endif - - return true; } void CommandLineParser::EngineInstall(CLI::App* parent, diff --git a/engine/controllers/command_line_parser.h b/engine/controllers/command_line_parser.h index e4a2f47c5..98f437098 100644 --- a/engine/controllers/command_line_parser.h +++ b/engine/controllers/command_line_parser.h @@ -2,6 +2,7 @@ #include "CLI/CLI.hpp" #include "services/engine_service.h" +#include "utils/config_yaml_utils.h" class CommandLineParser { public: @@ -9,6 +10,16 @@ class CommandLineParser { bool SetupCommand(int argc, char** argv); private: + void SetupCommonCommands(); + + void SetupInferenceCommands(); + + void SetupModelCommands(); + + void SetupEngineCommands(); + + void SetupSystemCommands(); + void EngineInstall(CLI::App* parent, const std::string& engine_name, std::string& version); @@ -18,4 +29,16 @@ class CommandLineParser 
   CLI::App app_;
   EngineService engine_service_;
+  struct CmlData{
+    std::string model_id;
+    std::string msg;
+    std::string model_alias;
+    std::string model_path;
+    std::string engine_version = "latest";
+    std::string cortex_version;
+    bool check_upd = true;
+    int port;
+    config_yaml_utils::CortexConfig config;
+  };
+  CmlData cml_data_;
 };

From 6a982e63983220773931b7f342580d7f37f416e3 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Sat, 21 Sep 2024 09:04:39 +0700
Subject: [PATCH 4/4] fix: capture models_cmd by value and propagate pytest exit code

---
 engine/controllers/command_line_parser.cc | 2 +-
 engine/e2e-test/main.py                   | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/engine/controllers/command_line_parser.cc b/engine/controllers/command_line_parser.cc
index 3fd85d849..f57efb7a2 100644
--- a/engine/controllers/command_line_parser.cc
+++ b/engine/controllers/command_line_parser.cc
@@ -159,7 +159,7 @@ void CommandLineParser::SetupModelCommands() {
                     " models [options] [subcommand]");
   models_cmd->group(kModelsGroup);

-  models_cmd->callback([&] {
+  models_cmd->callback([models_cmd] {
     if (models_cmd->get_subcommands().empty()) {
       CLI_LOG(models_cmd->help());
     }
diff --git a/engine/e2e-test/main.py b/engine/e2e-test/main.py
index f5a1c65ff..37725a2fa 100644
--- a/engine/e2e-test/main.py
+++ b/engine/e2e-test/main.py
@@ -1,4 +1,5 @@
 import pytest
+import sys
 from test_api_engine_list import TestApiEngineList
 from test_cli_engine_get import TestCliEngineGet
 from test_cli_engine_install import TestCliEngineInstall
@@ -12,4 +13,4 @@ from test_cli_model_import import TestCliModelImport

 if __name__ == "__main__":
-    pytest.main([__file__, "-v"])
+    sys.exit(pytest.main([__file__, "-v"]))
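Reviewer note: the recurring pattern in this series is to drop CLI11's require_option()/require_subcommand() and instead check the bound value inside the subcommand's callback, printing that subcommand's help text when a required positional is missing. The sketch below is illustrative only — the "demo" app and the "start" subcommand are placeholder names, not part of these patches — and it uses only CLI11 calls that already appear above (add_subcommand, add_option, callback, help, CLI11_PARSE):

#include <CLI/CLI.hpp>
#include <iostream>
#include <string>

int main(int argc, char** argv) {
  CLI::App app{"demo"};
  std::string model_id;

  // Positional argument is optional at parse time; the callback enforces it.
  auto* start_cmd = app.add_subcommand("start", "Start a model by ID");
  start_cmd->add_option("model_id", model_id, "");
  start_cmd->callback([start_cmd, &model_id]() {
    if (model_id.empty()) {
      // Missing required positional: print this subcommand's help and return
      // instead of letting CLI11 abort parsing with its own error message.
      std::cout << "[model_id] is required\n" << start_cmd->help() << std::endl;
      return;
    }
    std::cout << "starting " << model_id << std::endl;
  });

  CLI11_PARSE(app, argc, argv);
  return 0;
}

Capturing the subcommand pointer by value (as PATCH 3 and 4 do) is what makes this safe once setup code moves into helper methods: the pointer stays valid because the CLI::App owns its subcommands, whereas capturing a local pointer variable by reference dangles after the setup function returns.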