44#include < iostream>
55#include < string>
66#include " utils/logging_utils.h"
7+ #include " utils/result.hpp"
78#include " yaml-cpp/yaml.h"
89
910namespace config_yaml_utils {
@@ -21,11 +22,12 @@ struct CortexConfig {
2122 std::string latestRelease;
2223
2324 std::string huggingFaceToken;
24-
2525 /**
2626 * GitHub's API requires a user-agent string.
2727 */
2828 std::string gitHubUserAgent;
29+ std::string llamacppVariant;
30+ std::string llamacppVersion;
2931};
3032
3133const std::string kDefaultHost {" 127.0.0.1" };
@@ -34,8 +36,8 @@ const int kDefaultMaxLines{100000};
3436constexpr const uint64_t kDefaultCheckedForUpdateAt = 0u ;
3537constexpr const auto kDefaultLatestRelease = " default_version" ;
3638
37- inline void DumpYamlConfig (const CortexConfig& config,
38- const std::string& path) {
39+ inline cpp::result< void , std::string> DumpYamlConfig (const CortexConfig& config,
40+ const std::string& path) {
3941 std::filesystem::path config_file_path{path};
4042
4143 try {
@@ -56,12 +58,15 @@ inline void DumpYamlConfig(const CortexConfig& config,
5658 node[" latestRelease" ] = config.latestRelease ;
5759 node[" huggingFaceToken" ] = config.huggingFaceToken ;
5860 node[" gitHubUserAgent" ] = config.gitHubUserAgent ;
61+ node[" llamacppVariant" ] = config.llamacppVariant ;
62+ node[" llamacppVersion" ] = config.llamacppVersion ;
5963
6064 out_file << node;
6165 out_file.close ();
66+ return {};
6267 } catch (const std::exception& e) {
6368 CTL_ERR (" Error writing to file: " << e.what ());
64- throw ;
69+ return cpp::fail ( " Error writing to file: " + std::string (e. what ())) ;
6570 }
6671}
6772
@@ -80,7 +85,8 @@ inline CortexConfig FromYaml(const std::string& path,
8085 !node[" apiServerPort" ] || !node[" checkedForUpdateAt" ] ||
8186 !node[" latestRelease" ] || !node[" logLlamaCppPath" ] ||
8287 !node[" logOnnxPath" ] || !node[" logTensorrtLLMPath" ] ||
83- !node[" huggingFaceToken" ] || !node[" gitHubUserAgent" ]);
88+ !node[" huggingFaceToken" ] || !node[" gitHubUserAgent" ] ||
89+ !node[" llamacppVariant" ] || !node[" llamacppVersion" ]);
8490
8591 CortexConfig config = {
8692 .logFolderPath = node[" logFolderPath" ]
@@ -118,9 +124,18 @@ inline CortexConfig FromYaml(const std::string& path,
118124 .gitHubUserAgent = node[" gitHubUserAgent" ]
119125 ? node[" gitHubUserAgent" ].as <std::string>()
120126 : " " ,
127+ .llamacppVariant = node[" llamacppVariant" ]
128+ ? node[" llamacppVariant" ].as <std::string>()
129+ : " " ,
130+ .llamacppVersion = node[" llamacppVersion" ]
131+ ? node[" llamacppVersion" ].as <std::string>()
132+ : " " ,
121133 };
122134 if (should_update_config) {
123- DumpYamlConfig (config, path);
135+ auto result = DumpYamlConfig (config, path);
136+ if (result.has_error ()) {
137+ CTL_ERR (" Failed to update config file: " << result.error ());
138+ }
124139 }
125140 return config;
126141 } catch (const YAML::BadFile& e) {