yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
openai_ai_service.cc
Go to the documentation of this file.
2
#include <atomic>
#include <cstdlib>
#include <iostream>
#include <map>
#include <mutex>
#include <sstream>
#include <string>
#include <vector>

#include "absl/strings/match.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "util/platform_paths.h"
22
23#if defined(__APPLE__)
24#include <TargetConditionals.h>
25#endif
26
27#if defined(__APPLE__) && \
28 (TARGET_OS_IPHONE == 1 || TARGET_IPHONE_SIMULATOR == 1)
30#define YAZE_AI_IOS_URLSESSION 1
31#endif
32
33#ifdef YAZE_WITH_JSON
34#include <filesystem>
35#include <fstream>
36
37#include "httplib.h"
38#include "nlohmann/json.hpp"
39
40// OpenSSL initialization for HTTPS support
41#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
42#include <openssl/crypto.h>
43#include <openssl/err.h>
44#include <openssl/ssl.h>
45
46// OpenSSL initialization guards (local to this TU)
47static std::atomic<bool> g_openssl_initialized{false};
48static std::mutex g_openssl_init_mutex;
49
50static void EnsureOpenSSLInitialized() {
51 std::lock_guard<std::mutex> lock(g_openssl_init_mutex);
52 if (!g_openssl_initialized.exchange(true)) {
53 OPENSSL_init_ssl(
54 OPENSSL_INIT_LOAD_SSL_STRINGS | OPENSSL_INIT_LOAD_CRYPTO_STRINGS,
55 nullptr);
56 std::cerr << "✓ OpenSSL initialized for HTTPS support" << std::endl;
57 }
58}
59#endif
60#endif
61
62namespace yaze {
63namespace cli {
64
65#ifdef YAZE_AI_RUNTIME_AVAILABLE
66
67namespace {
68
69absl::StatusOr<nlohmann::json> BuildOpenAIToolPayload(
70 const PromptBuilder& prompt_builder) {
71 auto declarations_or =
73 if (!declarations_or.ok()) {
74 return declarations_or.status();
75 }
76 return ToolSchemaBuilder::BuildOpenAITools(*declarations_or);
77}
78
79} // namespace
80
// Constructs the service from |config|.
//
// Initialization is best-effort; failures degrade gracefully:
//   1. Initializes OpenSSL when HTTPS support is compiled in.
//   2. Loads the agent command catalogue into the prompt builder (catalogue
//      file selected by config.prompt_version; a load failure only warns).
//   3. Resolves the system instruction: an explicit config.system_instruction
//      wins; otherwise a version-specific prompt file is read from assets;
//      as a last resort BuildSystemInstruction() synthesizes one.
OpenAIAIService::OpenAIAIService(const OpenAIConfig& config)
    : function_calling_enabled_(config.use_function_calling), config_(config) {
  if (config_.verbose) {
    std::cerr << "[DEBUG] Initializing OpenAI service..." << std::endl;
    std::cerr << "[DEBUG] Model: " << config_.model << std::endl;
    std::cerr << "[DEBUG] Function calling: "
              << (function_calling_enabled_ ? "enabled" : "disabled")
              << std::endl;
  }

#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
  EnsureOpenSSLInitialized();
  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenSSL initialized for HTTPS" << std::endl;
  }
#endif

  // Load command documentation into prompt builder. v2 gets its own
  // catalogue; every other version (including v3) uses the default file.
  std::string catalogue_path = config_.prompt_version == "v2"
                                   ? "assets/agent/prompt_catalogue_v2.yaml"
                                   : "assets/agent/prompt_catalogue.yaml";
  if (auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
      !status.ok()) {
    // Non-fatal: the service still works without the catalogue.
    std::cerr << "⚠️ Failed to load agent prompt catalogue: "
              << status.message() << std::endl;
  }

  if (config_.system_instruction.empty()) {
    // Load system prompt file matching the configured prompt version.
    std::string prompt_file;
    if (config_.prompt_version == "v3") {
      prompt_file = "agent/system_prompt_v3.txt";
    } else if (config_.prompt_version == "v2") {
      prompt_file = "agent/system_prompt_v2.txt";
    } else {
      prompt_file = "agent/system_prompt.txt";
    }

    auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
    if (prompt_path.ok()) {
      std::ifstream file(prompt_path->string());
      if (file.good()) {
        // Slurp the whole prompt file into the instruction string.
        std::stringstream buffer;
        buffer << file.rdbuf();
        config_.system_instruction = buffer.str();
        if (config_.verbose) {
          std::cerr << "[DEBUG] Loaded prompt: " << prompt_path->string()
                    << std::endl;
        }
      }
    }

    // Fallback: synthesize an instruction from the prompt builder when no
    // file could be read (or the file was empty).
    if (config_.system_instruction.empty()) {
      config_.system_instruction = BuildSystemInstruction();
    }
  }

  if (config_.verbose) {
    std::cerr << "[DEBUG] OpenAI service initialized" << std::endl;
  }
}
142
// Enables or disables inclusion of function-calling tool schemas in
// subsequent GenerateResponse() requests.
void OpenAIAIService::EnableFunctionCalling(bool enable) {
  function_calling_enabled_ = enable;
}
146
147std::vector<std::string> OpenAIAIService::GetAvailableTools() const {
148 return {"resource-list", "resource-search",
149 "dungeon-list-sprites", "dungeon-describe-room",
150 "overworld-find-tile", "overworld-describe-map",
151 "overworld-list-warps"};
152}
153
// Synthesizes a system instruction from the prompt builder's loaded
// catalogue; used as a fallback when no prompt file or explicit
// system_instruction is configured.
std::string OpenAIAIService::BuildSystemInstruction() {
  return prompt_builder_.BuildSystemInstruction();
}
157
// Points the prompt builder at |rom| so prompts can reference loaded ROM
// data. Raw pointer pass-through — no ownership taken; presumably stored,
// so |rom| should outlive this service (confirm in PromptBuilder::SetRom).
void OpenAIAIService::SetRomContext(Rom* rom) {
  prompt_builder_.SetRom(rom);
}
161
// Queries the configured endpoint's /v1/models listing and maps it to
// ModelInfo entries.
//
// Fallback behavior: if the API key is missing (cloud endpoint) or the
// request fails / returns an unexpected payload, a hard-coded list of
// well-known GPT models is returned so the UI stays usable. Local servers
// (any base_url not containing "api.openai.com") accept every model id;
// the OpenAI cloud is filtered to chat-capable families.
absl::StatusOr<std::vector<ModelInfo>> OpenAIAIService::ListAvailableModels() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("OpenAI AI service requires JSON support");
#else
  const bool is_openai_cloud =
      absl::StrContains(config_.base_url, "api.openai.com");
  if (config_.api_key.empty() && is_openai_cloud) {
    // Return default known models if API key is missing
    std::vector<ModelInfo> defaults = {
        {.name = "gpt-4o",
         .display_name = "GPT-4o",
         .provider = kProviderOpenAi,
         .description = "Most capable GPT-4 model"},
        {.name = "gpt-4o-mini",
         .display_name = "GPT-4o Mini",
         .provider = kProviderOpenAi,
         .description = "Fast and cost-effective"},
        {.name = "gpt-4-turbo",
         .display_name = "GPT-4 Turbo",
         .provider = kProviderOpenAi,
         .description = "GPT-4 with larger context"},
        {.name = "gpt-3.5-turbo",
         .display_name = "GPT-3.5 Turbo",
         .provider = kProviderOpenAi,
         .description = "Fast and efficient"}};
    return defaults;
  }

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Listing OpenAI models..." << std::endl;
    }

    std::string response_str;
#if defined(YAZE_AI_IOS_URLSESSION)
    // iOS transport: NSURLSession with an 8s timeout.
    std::map<std::string, std::string> headers;
    if (!config_.api_key.empty()) {
      headers.emplace("Authorization", "Bearer " + config_.api_key);
    }
    auto resp_or = ios::UrlSessionHttpRequest(
        "GET", config_.base_url + "/v1/models", headers, "", 8000);
    if (!resp_or.ok()) {
      if (config_.verbose) {
        std::cerr << "[DEBUG] OpenAI /v1/models failed: "
                  << resp_or.status().message() << std::endl;
      }
      // Return defaults on failure so the UI remains usable.
      std::vector<ModelInfo> defaults = {{.name = "gpt-4o-mini",
                                          .display_name = "GPT-4o Mini",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-4o",
                                          .display_name = "GPT-4o",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-3.5-turbo",
                                          .display_name = "GPT-3.5 Turbo",
                                          .provider = kProviderOpenAi}};
      return defaults;
    }
    if (resp_or->status_code != 200) {
      if (config_.verbose) {
        std::cerr << "[DEBUG] OpenAI /v1/models HTTP " << resp_or->status_code
                  << std::endl;
      }
      // Same graceful fallback on a non-200 status.
      std::vector<ModelInfo> defaults = {{.name = "gpt-4o-mini",
                                          .display_name = "GPT-4o Mini",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-4o",
                                          .display_name = "GPT-4o",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-3.5-turbo",
                                          .display_name = "GPT-3.5 Turbo",
                                          .provider = kProviderOpenAi}};
      return defaults;
    }
    response_str = resp_or->body;
#else
    // Use curl to list models from the API.
    // NOTE(review): the API key is spliced into a shell command, making it
    // visible in the process list; consider an in-process HTTP client here.
    std::string auth_header =
        config_.api_key.empty()
            ? ""
            : "-H 'Authorization: Bearer " + config_.api_key + "' ";
    std::string curl_cmd = "curl -s -X GET '" + config_.base_url +
                           "/v1/models' " + auth_header + "2>&1";

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    // Drain the subprocess's stdout into response_str.
    char buffer[4096];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }

#ifdef _WIN32
    _pclose(pipe);
#else
    pclose(pipe);
#endif
#endif  // YAZE_AI_IOS_URLSESSION

    // Lenient parse (no exceptions); is_discarded() flags malformed JSON.
    auto models_json = nlohmann::json::parse(response_str, nullptr, false);
    if (models_json.is_discarded()) {
      return absl::InternalError("Failed to parse OpenAI models JSON");
    }

    if (!models_json.contains("data")) {
      // Return defaults on error
      std::vector<ModelInfo> defaults = {{.name = "gpt-4o-mini",
                                          .display_name = "GPT-4o Mini",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-4o",
                                          .display_name = "GPT-4o",
                                          .provider = kProviderOpenAi},
                                         {.name = "gpt-3.5-turbo",
                                          .display_name = "GPT-3.5 Turbo",
                                          .provider = kProviderOpenAi}};
      return defaults;
    }

    std::vector<ModelInfo> models;
    for (const auto& m : models_json["data"]) {
      std::string id = m.value("id", "");

      // Filter for chat models (gpt-4*, gpt-3.5-turbo*, o1*, chatgpt*)
      // For local servers (LM Studio), we accept all models.
      bool is_local = !absl::StrContains(config_.base_url, "api.openai.com");

      if (is_local || absl::StartsWith(id, "gpt-4") ||
          absl::StartsWith(id, "gpt-3.5") || absl::StartsWith(id, "o1") ||
          absl::StartsWith(id, "chatgpt")) {
        ModelInfo info;
        info.name = id;
        info.display_name = id;
        info.provider = kProviderOpenAi;
        info.family = is_local ? "local" : "gpt";
        info.is_local = is_local;

        // Prettify display names for the well-known models; unknown ids
        // keep the raw model id as their display name.
        if (id == "gpt-4o")
          info.display_name = "GPT-4o";
        else if (id == "gpt-4o-mini")
          info.display_name = "GPT-4o Mini";
        else if (id == "gpt-4-turbo")
          info.display_name = "GPT-4 Turbo";
        else if (id == "gpt-3.5-turbo")
          info.display_name = "GPT-3.5 Turbo";
        else if (id == "o1-preview")
          info.display_name = "o1 Preview";
        else if (id == "o1-mini")
          info.display_name = "o1 Mini";

        models.push_back(std::move(info));
      }
    }
    return models;

  } catch (const std::exception& e) {
    return absl::InternalError(
        absl::StrCat("Failed to list models: ", e.what()));
  }
#endif
}
329
330absl::Status OpenAIAIService::CheckAvailability() {
331#ifndef YAZE_WITH_JSON
332 return absl::UnimplementedError(
333 "OpenAI AI service requires JSON support. Build with "
334 "-DYAZE_WITH_JSON=ON");
335#else
336 try {
337 // LMStudio and other local servers don't require API keys
338 bool is_local_server = config_.base_url != "https://api.openai.com";
339 if (config_.api_key.empty() && !is_local_server) {
340 return absl::FailedPreconditionError(
341 "❌ OpenAI API key not configured\n"
342 " Set OPENAI_API_KEY environment variable\n"
343 " Get your API key at: https://platform.openai.com/api-keys\n"
344 " For LMStudio, use --openai_base_url=http://localhost:1234");
345 }
346
347 // Test API connectivity with a simple request
348#if defined(YAZE_AI_IOS_URLSESSION)
349 std::map<std::string, std::string> headers;
350 if (!config_.api_key.empty()) {
351 headers.emplace("Authorization", "Bearer " + config_.api_key);
352 }
353 auto resp_or = ios::UrlSessionHttpRequest(
354 "GET", config_.base_url + "/v1/models", headers, "", 8000);
355 if (!resp_or.ok()) {
356 return absl::UnavailableError(absl::StrCat(
357 "❌ Cannot reach OpenAI API\n ", resp_or.status().message()));
358 }
359 if (resp_or->status_code == 401) {
360 return absl::PermissionDeniedError(
361 "❌ Invalid OpenAI API key\n"
362 " Verify your key at: https://platform.openai.com/api-keys");
363 }
364 if (resp_or->status_code != 200) {
365 return absl::InternalError(
366 absl::StrCat("❌ OpenAI API error: ", resp_or->status_code, "\n ",
367 resp_or->body));
368 }
369#else
370 httplib::Client cli(config_.base_url);
371 cli.set_connection_timeout(5, 0);
372
373 httplib::Headers headers = {};
374 if (!config_.api_key.empty()) {
375 headers.emplace("Authorization", "Bearer " + config_.api_key);
376 }
377
378 auto res = cli.Get("/v1/models", headers);
379
380 if (!res) {
381 return absl::UnavailableError(
382 "❌ Cannot reach OpenAI API\n"
383 " Check your internet connection");
384 }
385
386 if (res->status == 401) {
387 return absl::PermissionDeniedError(
388 "❌ Invalid OpenAI API key\n"
389 " Verify your key at: https://platform.openai.com/api-keys");
390 }
391
392 if (res->status != 200) {
393 return absl::InternalError(absl::StrCat(
394 "❌ OpenAI API error: ", res->status, "\n ", res->body));
395 }
396#endif
397
398 return absl::OkStatus();
399 } catch (const std::exception& e) {
400 return absl::InternalError(
401 absl::StrCat("Exception during availability check: ", e.what()));
402 }
403#endif
404}
405
406absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
407 const std::string& prompt) {
408 return GenerateResponse(
409 {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
410}
411
// Sends |history| to the chat-completions endpoint and returns the parsed
// agent response.
//
// Only the last 10 history messages (plus the system instruction) are
// forwarded, to bound the context window. The transport is chosen at
// compile time: NSURLSession on iOS, otherwise an external `curl`
// subprocess. Returns InvalidArgument for an empty history,
// FailedPrecondition when a required API key is missing, and
// PermissionDenied/Internal for transport or API failures.
absl::StatusOr<AgentResponse> OpenAIAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "OpenAI AI service requires JSON support. Build with "
      "-DYAZE_WITH_JSON=ON");
#else
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }

  // An API key is only mandatory for the official OpenAI cloud; local
  // OpenAI-compatible servers (LM Studio, etc.) work without one.
  const bool is_openai_cloud =
      absl::StrContains(config_.base_url, "api.openai.com");
  if (config_.api_key.empty() && is_openai_cloud) {
    return absl::FailedPreconditionError("OpenAI API key not configured");
  }

  // Start the latency clock before any network work.
  absl::Time request_start = absl::Now();

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Using curl for OpenAI HTTPS request" << std::endl;
      std::cerr << "[DEBUG] Processing " << history.size()
                << " messages in history" << std::endl;
    }

    // Build messages array for OpenAI format
    nlohmann::json messages = nlohmann::json::array();

    // Add system message
    messages.push_back(
        {{"role", "system"}, {"content", config_.system_instruction}});

    // Add conversation history (up to last 10 messages for context window)
    int start_idx = std::max(0, static_cast<int>(history.size()) - 10);
    // Signed/unsigned comparison below is benign: start_idx >= 0.
    for (size_t i = start_idx; i < history.size(); ++i) {
      const auto& msg = history[i];
      std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
                             ? "user"
                             : "assistant";

      messages.push_back({{"role", role}, {"content", msg.message}});
    }

    // Build request body
    nlohmann::json request_body = {{"model", config_.model},
                                   {"messages", messages},
                                   {"temperature", config_.temperature},
                                   {"max_tokens", config_.max_output_tokens}};

    // Add function calling tools if enabled. Schema-resolution failures are
    // logged (verbose) and ignored so plain text generation still works.
    if (function_calling_enabled_) {
      auto tools_or = BuildOpenAIToolPayload(prompt_builder_);
      if (!tools_or.ok()) {
        if (config_.verbose) {
          std::cerr << "[DEBUG] Function calling schemas unavailable: "
                    << tools_or.status().message() << std::endl;
        }
      } else if (!tools_or->empty()) {
        if (config_.verbose) {
          std::string tools_str = tools_or->dump();
          std::cerr << "[DEBUG] Function calling schemas: "
                    << tools_str.substr(0, 200) << "..." << std::endl;
        }

        request_body["tools"] = *tools_or;
      }
    }

    if (config_.verbose) {
      std::cerr << "[DEBUG] Sending " << messages.size()
                << " messages to OpenAI" << std::endl;
    }

    std::string response_str;
#if defined(YAZE_AI_IOS_URLSESSION)
    // iOS transport: NSURLSession with a 60s timeout.
    std::map<std::string, std::string> headers;
    headers.emplace("Content-Type", "application/json");
    if (!config_.api_key.empty()) {
      headers.emplace("Authorization", "Bearer " + config_.api_key);
    }
    auto resp_or = ios::UrlSessionHttpRequest(
        "POST", config_.base_url + "/v1/chat/completions", headers,
        request_body.dump(), 60000);
    if (!resp_or.ok()) {
      return resp_or.status();
    }
    if (resp_or->status_code == 401) {
      return absl::PermissionDeniedError(
          "❌ Invalid OpenAI API key\n"
          " Verify your key at: https://platform.openai.com/api-keys");
    }
    if (resp_or->status_code != 200) {
      return absl::InternalError(
          absl::StrCat("❌ OpenAI API error: ", resp_or->status_code, "\n ",
                       resp_or->body));
    }
    response_str = resp_or->body;
#else
    // Write request body to temp file (keeps the payload off the shell
    // command line).
    // NOTE(review): this fixed path is world-readable, races between
    // concurrent processes, and does not exist on Windows even though the
    // _WIN32 branch below compiles — consider a unique name under
    // std::filesystem::temp_directory_path().
    std::string temp_file = "/tmp/openai_request.json";
    std::ofstream out(temp_file);
    out << request_body.dump();
    out.close();

    // Use curl to make the request.
    // NOTE(review): the API key appears on the shell command line and is
    // therefore visible in the process list.
    std::string auth_header =
        config_.api_key.empty()
            ? ""
            : "-H 'Authorization: Bearer " + config_.api_key + "' ";
    std::string curl_cmd = "curl -s -X POST '" + config_.base_url +
                           "/v1/chat/completions' "
                           "-H 'Content-Type: application/json' " +
                           auth_header + "-d @" + temp_file + " 2>&1";

    if (config_.verbose) {
      std::cerr << "[DEBUG] Executing OpenAI API request..." << std::endl;
    }

#ifdef _WIN32
    FILE* pipe = _popen(curl_cmd.c_str(), "r");
#else
    FILE* pipe = popen(curl_cmd.c_str(), "r");
#endif
    if (!pipe) {
      return absl::InternalError("Failed to execute curl command");
    }

    // Drain the subprocess's stdout into response_str.
    char buffer[4096];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
      response_str += buffer;
    }

#ifdef _WIN32
    int status = _pclose(pipe);
#else
    int status = pclose(pipe);
#endif
    std::remove(temp_file.c_str());

    // NOTE(review): on POSIX this is the raw wait status, not curl's exit
    // code; non-zero still reliably indicates failure.
    if (status != 0) {
      return absl::InternalError(
          absl::StrCat("Curl failed with status ", status));
    }
#endif  // YAZE_AI_IOS_URLSESSION

    if (response_str.empty()) {
      return absl::InternalError("Empty response from OpenAI API");
    }

    // Verbose: dump the first 500 bytes of the raw response for debugging.
    if (config_.verbose) {
      std::cout << "\n"
                << "\033[35m"
                << "🔍 Raw OpenAI API Response:"
                << "\033[0m"
                << "\n"
                << "\033[2m" << response_str.substr(0, 500) << "\033[0m"
                << "\n\n";
    }

    if (config_.verbose) {
      std::cerr << "[DEBUG] Parsing response..." << std::endl;
    }

    auto parsed_or = ParseOpenAIResponse(response_str);
    if (!parsed_or.ok()) {
      return parsed_or.status();
    }

    // Attach provenance and generation parameters for telemetry/UI.
    AgentResponse agent_response = std::move(parsed_or.value());
    agent_response.provider = kProviderOpenAi;
    agent_response.model = config_.model;
    agent_response.latency_seconds =
        absl::ToDoubleSeconds(absl::Now() - request_start);
    agent_response.parameters["prompt_version"] = config_.prompt_version;
    agent_response.parameters["temperature"] =
        absl::StrFormat("%.2f", config_.temperature);
    agent_response.parameters["max_output_tokens"] =
        absl::StrFormat("%d", config_.max_output_tokens);
    agent_response.parameters["function_calling"] =
        function_calling_enabled_ ? "true" : "false";

    return agent_response;

  } catch (const std::exception& e) {
    if (config_.verbose) {
      std::cerr << "[ERROR] Exception: " << e.what() << std::endl;
    }
    return absl::InternalError(
        absl::StrCat("Exception during generation: ", e.what()));
  }
#endif
}
605
606absl::StatusOr<AgentResponse> OpenAIAIService::ParseOpenAIResponse(
607 const std::string& response_body) {
608#ifndef YAZE_WITH_JSON
609 return absl::UnimplementedError("JSON support required");
610#else
611 AgentResponse agent_response;
612
613 auto response_json = nlohmann::json::parse(response_body, nullptr, false);
614 if (response_json.is_discarded()) {
615 return absl::InternalError("❌ Failed to parse OpenAI response JSON");
616 }
617
618 // Check for errors
619 if (response_json.contains("error")) {
620 std::string error_msg =
621 response_json["error"].value("message", "Unknown error");
622 return absl::InternalError(
623 absl::StrCat("❌ OpenAI API error: ", error_msg));
624 }
625
626 // Navigate OpenAI's response structure
627 if (!response_json.contains("choices") || response_json["choices"].empty()) {
628 return absl::InternalError("❌ No choices in OpenAI response");
629 }
630
631 const auto& choice = response_json["choices"][0];
632 if (!choice.contains("message")) {
633 return absl::InternalError("❌ No message in OpenAI response");
634 }
635
636 const auto& message = choice["message"];
637
638 // Extract text content
639 if (message.contains("content") && !message["content"].is_null()) {
640 std::string text_content = message["content"].get<std::string>();
641
642 if (config_.verbose) {
643 std::cout << "\n"
644 << "\033[35m"
645 << "🔍 Raw LLM Response:"
646 << "\033[0m"
647 << "\n"
648 << "\033[2m" << text_content << "\033[0m"
649 << "\n\n";
650 }
651
652 // Strip markdown code blocks if present
653 text_content = std::string(absl::StripAsciiWhitespace(text_content));
654 if (absl::StartsWith(text_content, "```json")) {
655 text_content = text_content.substr(7);
656 } else if (absl::StartsWith(text_content, "```")) {
657 text_content = text_content.substr(3);
658 }
659 if (absl::EndsWith(text_content, "```")) {
660 text_content = text_content.substr(0, text_content.length() - 3);
661 }
662 text_content = std::string(absl::StripAsciiWhitespace(text_content));
663
664 // Try to parse as JSON object
665 auto parsed_text = nlohmann::json::parse(text_content, nullptr, false);
666 if (!parsed_text.is_discarded()) {
667 // Extract text_response
668 if (parsed_text.contains("text_response") &&
669 parsed_text["text_response"].is_string()) {
670 agent_response.text_response =
671 parsed_text["text_response"].get<std::string>();
672 }
673
674 // Extract reasoning
675 if (parsed_text.contains("reasoning") &&
676 parsed_text["reasoning"].is_string()) {
677 agent_response.reasoning = parsed_text["reasoning"].get<std::string>();
678 }
679
680 // Extract commands
681 if (parsed_text.contains("commands") &&
682 parsed_text["commands"].is_array()) {
683 for (const auto& cmd : parsed_text["commands"]) {
684 if (cmd.is_string()) {
685 std::string command = cmd.get<std::string>();
686 if (absl::StartsWith(command, "z3ed ")) {
687 command = command.substr(5);
688 }
689 agent_response.commands.push_back(command);
690 }
691 }
692 }
693
694 // Extract tool_calls from parsed JSON
695 if (parsed_text.contains("tool_calls") &&
696 parsed_text["tool_calls"].is_array()) {
697 for (const auto& call : parsed_text["tool_calls"]) {
698 if (call.contains("tool_name") && call["tool_name"].is_string()) {
699 ToolCall tool_call;
700 tool_call.tool_name = call["tool_name"].get<std::string>();
701 if (call.contains("args") && call["args"].is_object()) {
702 for (auto& [key, value] : call["args"].items()) {
703 if (value.is_string()) {
704 tool_call.args[key] = value.get<std::string>();
705 } else if (value.is_number()) {
706 tool_call.args[key] = std::to_string(value.get<double>());
707 } else if (value.is_boolean()) {
708 tool_call.args[key] = value.get<bool>() ? "true" : "false";
709 }
710 }
711 }
712 agent_response.tool_calls.push_back(tool_call);
713 }
714 }
715 }
716 } else {
717 // Use raw text as response
718 agent_response.text_response = text_content;
719 }
720 }
721
722 // Handle native OpenAI tool calls
723 if (message.contains("tool_calls") && message["tool_calls"].is_array()) {
724 for (const auto& call : message["tool_calls"]) {
725 if (call.contains("function")) {
726 const auto& func = call["function"];
727 ToolCall tool_call;
728 tool_call.tool_name = func.value("name", "");
729
730 if (func.contains("arguments") && func["arguments"].is_string()) {
731 auto args_json = nlohmann::json::parse(
732 func["arguments"].get<std::string>(), nullptr, false);
733 if (!args_json.is_discarded() && args_json.is_object()) {
734 for (auto& [key, value] : args_json.items()) {
735 if (value.is_string()) {
736 tool_call.args[key] = value.get<std::string>();
737 } else if (value.is_number()) {
738 tool_call.args[key] = std::to_string(value.get<double>());
739 }
740 }
741 }
742 }
743 agent_response.tool_calls.push_back(tool_call);
744 }
745 }
746 }
747
748 if (agent_response.text_response.empty() && agent_response.commands.empty() &&
749 agent_response.tool_calls.empty()) {
750 return absl::InternalError(
751 "❌ No valid response extracted from OpenAI\n"
752 " Expected at least one of: text_response, commands, or tool_calls");
753 }
754
755 return agent_response;
756#endif
757}
758
759#endif // YAZE_AI_RUNTIME_AVAILABLE
760
761} // namespace cli
762} // namespace yaze
OpenAIAIService(const OpenAIConfig &)
static absl::StatusOr< nlohmann::json > ResolveFunctionDeclarations(const PromptBuilder &prompt_builder)
static nlohmann::json BuildOpenAITools(const nlohmann::json &function_declarations)
constexpr char kProviderOpenAi[]