diff --git a/packages/kbot/cpp/packages/kbot/llm_client.cpp b/packages/kbot/cpp/packages/kbot/llm_client.cpp
index 4b3611c7..4a399daf 100644
--- a/packages/kbot/cpp/packages/kbot/llm_client.cpp
+++ b/packages/kbot/cpp/packages/kbot/llm_client.cpp
@@ -43,16 +43,17 @@ LLMClient::~LLMClient() = default;
 
 LLMResponse LLMClient::execute_chat(const std::string& prompt) {
     LLMResponse res;
+    logger::info("LLMClient::execute_chat: Starting. api_key length: " + std::to_string(api_key_.length()));
     if (api_key_.empty()) {
         res.success = false;
         res.error = "API Key is empty.";
         return res;
     }
 
-    oai::OpenAI oai_impl;
+    logger::info("LLMClient::execute_chat: base_url_: " + base_url_);
+    liboai::OpenAI oai_impl(base_url_.empty() ? "https://api.openai.com/v1" : base_url_);
 
-    // Use liboai Auth component.
-    // If we need a custom base_url, liboai uses oai_impl.auth.SetBaseUrl() if it exists.
+    logger::info("LLMClient::execute_chat: Setting API Key");
     bool success = oai_impl.auth.SetKey(api_key_);
     if (!success) {
         res.success = false;
@@ -60,26 +61,40 @@ LLMResponse LLMClient::execute_chat(const std::string& prompt) {
         return res;
     }
 
-    // Set custom base URL for OpenRouter/DeepSeek.
-    oai_impl.auth.SetBaseUrl(base_url_);
-
     std::string target_model = model_.empty() ? "gpt-4o" : model_;
+    logger::info("LLMClient::execute_chat: Target model: " + target_model);
 
+    logger::info("LLMClient::execute_chat: Init Conversation");
+    liboai::Conversation convo;
+    convo.AddUserData(prompt);
+
+    logger::info("LLMClient::execute_chat: Calling create()");
     try {
-        oai::Response response = oai_impl.ChatCompletion->create(
+        liboai::Response response = oai_impl.ChatCompletion->create(
             target_model,
-            {
-                {{"role", "user"}, {"content", prompt}}
-            }
+            convo
         );
+        logger::info("LLMClient::execute_chat: Got response with status: " + std::to_string(response.status_code));
+
+        if (!response.raw_json.contains("choices") || response.raw_json["choices"].empty()) {
+            res.success = false;
+            if (response.raw_json.contains("error")) {
+                res.error = "API Error: " + response.raw_json["error"].dump();
+            } else {
+                res.error = "Invalid response format: no choices found. Raw: " + response.content;
+            }
+            return res;
+        }
 
         res.success = true;
-        res.text = response["choices"][0]["message"]["content"].get<std::string>();
+        res.text = response.raw_json["choices"][0]["message"]["content"].get<std::string>();
     } catch (std::exception& e) {
+        logger::error("LLMClient::execute_chat: Exception caught: " + std::string(e.what()));
         res.success = false;
         res.error = e.what();
     } catch (...) {
+        logger::error("LLMClient::execute_chat: Unknown exception caught");
        res.success = false;
        res.error = "Unknown error occurred inside LLMClient execute_chat.";
     }
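
For reference, here is the new liboai call pattern in isolation, as a minimal standalone sketch. It assumes a liboai build whose `liboai::OpenAI` constructor accepts a root URL (as the diff above relies on); the base URL, model name, and environment variable are illustrative placeholders, not values from this repository:

```cpp
// Minimal sketch of the liboai call pattern used in execute_chat().
// Assumes liboai is installed; the base URL, model, and env var are placeholders.
#include <cstdlib>
#include <iostream>
#include <string>

#include "liboai.h"

int main() {
    const char* key = std::getenv("OPENAI_API_KEY");
    if (!key) {
        std::cerr << "OPENAI_API_KEY not set\n";
        return 1;
    }

    // Root URL passed at construction, mirroring the diff; swap in an
    // OpenRouter/DeepSeek-style gateway URL here if needed.
    liboai::OpenAI oai("https://api.openai.com/v1");
    if (!oai.auth.SetKey(key)) {
        std::cerr << "SetKey failed\n";
        return 1;
    }

    liboai::Conversation convo;
    convo.AddUserData("Say hello in one short sentence.");

    try {
        liboai::Response response = oai.ChatCompletion->create("gpt-4o", convo);

        // Same defensive parsing as execute_chat(): never index into
        // "choices" before confirming the server actually returned them.
        if (!response.raw_json.contains("choices") || response.raw_json["choices"].empty()) {
            std::cerr << "No choices in response: " << response.content << "\n";
            return 1;
        }
        std::cout << response.raw_json["choices"][0]["message"]["content"].get<std::string>()
                  << "\n";
    } catch (const std::exception& e) {
        std::cerr << "liboai error: " << e.what() << "\n";
        return 1;
    }
    return 0;
}
```

The guard on `raw_json` is the important part of the change: some gateways report failures as an HTTP 200 carrying an `error` object rather than a non-2xx status, so indexing `choices` unconditionally would throw instead of surfacing the server's error message.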