diff --git a/server/src/banner.rs b/server/src/banner.rs
index ff2b0b0a5..a7e1c8690 100644
--- a/server/src/banner.rs
+++ b/server/src/banner.rs
@@ -59,16 +59,23 @@ fn status_info(config: &Config, scheme: &str, id: Uid) {
         credentials = "\"Using default creds admin, admin. Please set credentials with P_USERNAME and P_PASSWORD.\"".red().to_string();
     }
 
+    let llm_status = match &config.parseable.open_ai_key {
+        Some(_) => "OpenAI Configured".green(),
+        None => "Not Configured".grey(),
+    };
+
     eprintln!(
         "
     {}
         URL:                {}
         Credentials:        {}
-        Deployment UID:     \"{}\"",
+        Deployment UID:     \"{}\"
+        LLM Status:         \"{}\"",
         "Server:".to_string().bold(),
         url,
         credentials,
         id.to_string(),
+        llm_status
     );
 }
diff --git a/server/src/handlers/http.rs b/server/src/handlers/http.rs
index 804317821..1ccde0830 100644
--- a/server/src/handlers/http.rs
+++ b/server/src/handlers/http.rs
@@ -231,19 +231,13 @@ pub fn configure_routes(cfg: &mut web::ServiceConfig) {
             ),
         );
 
-    let llm_query_api = web::scope("/llm")
-        .service(
-            web::resource("").route(
-                web::post()
-                    .to(llm::make_llm_request)
-                    .authorize(Action::Query),
-            ),
-        )
-        .service(
-            // to check if the API key for an LLM has been set up as env var
-            web::resource("isactive")
-                .route(web::post().to(llm::is_llm_active).authorize(Action::Query)),
-        );
+    let llm_query_api = web::scope("/llm").service(
+        web::resource("").route(
+            web::post()
+                .to(llm::make_llm_request)
+                .authorize(Action::Query),
+        ),
+    );
 
     // Deny request if username is same as the env variable P_USERNAME.
     cfg.service(
diff --git a/server/src/handlers/http/about.rs b/server/src/handlers/http/about.rs
index ba6feb188..40a0e4d93 100644
--- a/server/src/handlers/http/about.rs
+++ b/server/src/handlers/http/about.rs
@@ -40,7 +40,10 @@ pub async fn about() -> Json<serde_json::Value> {
     let deployment_id = meta.deployment_id.to_string();
     let mode = CONFIG.mode_string();
     let staging = CONFIG.staging_dir();
+    let store = CONFIG.storage().get_endpoint();
+    let is_llm_active = &CONFIG.parseable.open_ai_key.is_some();
+    let llm_provider = is_llm_active.then_some("OpenAI");
 
     Json(json!({
         "version": current_version,
@@ -48,6 +51,8 @@
         "deploymentId": deployment_id,
         "updateAvailable": update_available,
         "latestVersion": latest_release,
+        "llmActive": is_llm_active,
+        "llmProvider": llm_provider,
         "license": "AGPL-3.0-only",
         "mode": mode,
         "staging": staging,
diff --git a/server/src/handlers/http/llm.rs b/server/src/handlers/http/llm.rs
index 1ed3dc6eb..9f7955f9a 100644
--- a/server/src/handlers/http/llm.rs
+++ b/server/src/handlers/http/llm.rs
@@ -139,13 +139,6 @@ pub async fn make_llm_request(body: web::Json<AiPrompt>) -> Result<HttpResponse, LLMError> {
-pub async fn is_llm_active() -> HttpResponse {
-    let is_active = matches!(&CONFIG.parseable.open_ai_key, Some(api_key) if api_key.len() > 3);
-    HttpResponse::Ok()
-        .content_type("application/json")
-        .json(json!({"is_active": is_active}))
-}
-
 #[derive(Debug, thiserror::Error)]
 pub enum LLMError {
     #[error("Either OpenAI key was not provided or was invalid")]