11#include "absl/strings/str_cat.h"
12#include "absl/strings/str_format.h"
13#include "absl/strings/str_split.h"
14#include "absl/strings/strip.h"
15#include "absl/time/clock.h"
16#include "absl/time/time.h"
23#include <TargetConditionals.h>
26#if defined(__APPLE__) && \
27 (TARGET_OS_IPHONE == 1 || TARGET_IPHONE_SIMULATOR == 1)
29#define YAZE_AI_IOS_URLSESSION 1
37#include "nlohmann/json.hpp"
43#ifdef YAZE_AI_RUNTIME_AVAILABLE
// Builds the Anthropic "tools" JSON payload from the prompt builder's
// function-calling declarations, propagating any failure status.
// NOTE(review): the expression that initializes declarations_or (and the
// success path that assembles the payload) is elided from this chunk —
// verify against the full file before editing.
47absl::StatusOr<nlohmann::json> BuildAnthropicToolPayload(
48 const PromptBuilder& prompt_builder) {
49 auto declarations_or =
51 if (!declarations_or.ok()) {
52 return declarations_or.status();
// Constructor body (signature line is elided from this chunk): caches the
// config, optionally logs startup info, loads the versioned prompt
// catalogue, and resolves the system instruction.
60 : function_calling_enabled_(config.use_function_calling), config_(config) {
61 if (config_.verbose) {
62 std::cerr <<
"[DEBUG] Initializing Anthropic service..." << std::endl;
63 std::cerr <<
"[DEBUG] Model: " << config_.model << std::endl;
// Pick the resource catalogue matching the configured prompt version.
67 std::string catalogue_path = config_.prompt_version ==
"v2"
68 ?
"assets/agent/prompt_catalogue_v2.yaml"
69 :
"assets/agent/prompt_catalogue.yaml";
// Catalogue load failure is non-fatal: warn on stderr and continue.
70 if (
auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path);
72 std::cerr <<
"⚠️ Failed to load agent prompt catalogue: "
73 << status.message() << std::endl;
// If the caller supplied no system instruction, try to read a versioned
// prompt file (v3, v2, or default) from the asset directory.
76 if (config_.system_instruction.empty()) {
78 std::string prompt_file;
79 if (config_.prompt_version ==
"v3") {
80 prompt_file =
"agent/system_prompt_v3.txt";
81 }
else if (config_.prompt_version ==
"v2") {
82 prompt_file =
"agent/system_prompt_v2.txt";
84 prompt_file =
"agent/system_prompt.txt";
87 auto prompt_path = util::PlatformPaths::FindAsset(prompt_file);
88 if (prompt_path.ok()) {
89 std::ifstream file(prompt_path->string());
// Slurp the whole prompt file into the system instruction.
91 std::stringstream buffer;
92 buffer << file.rdbuf();
93 config_.system_instruction = buffer.str();
94 if (config_.verbose) {
95 std::cerr <<
"[DEBUG] Loaded prompt: " << prompt_path->string()
// Last resort: build the system instruction programmatically.
101 if (config_.system_instruction.empty()) {
102 config_.system_instruction = BuildSystemInstruction();
106 if (config_.verbose) {
107 std::cerr <<
"[DEBUG] Anthropic service initialized" << std::endl;
111void AnthropicAIService::EnableFunctionCalling(
bool enable) {
112 function_calling_enabled_ = enable;
115std::vector<std::string> AnthropicAIService::GetAvailableTools()
const {
116 return {
"resource-list",
"resource-search",
117 "dungeon-list-sprites",
"dungeon-describe-room",
118 "overworld-find-tile",
"overworld-describe-map",
119 "overworld-list-warps"};
122std::string AnthropicAIService::BuildSystemInstruction() {
123 return prompt_builder_.BuildSystemInstruction();
126void AnthropicAIService::SetRomContext(Rom* rom) {
127 prompt_builder_.SetRom(rom);
130absl::StatusOr<std::vector<ModelInfo>>
131AnthropicAIService::ListAvailableModels() {
// Hard-coded fallback catalogue of Claude models.
// NOTE(review): the code that returns `defaults` (or augments it, e.g. via a
// live API query) is elided from this chunk — confirm against the full file.
134 std::vector<ModelInfo> defaults = {
135 {.name =
"claude-3-5-sonnet-20241022",
136 .display_name =
"Claude 3.5 Sonnet",
138 .description =
"Most intelligent model"},
139 {.name =
"claude-3-5-haiku-20241022",
140 .display_name =
"Claude 3.5 Haiku",
142 .description =
"Fastest and most cost-effective"},
143 {.name =
"claude-3-opus-20240229",
144 .display_name =
"Claude 3 Opus",
146 .description =
"Strong reasoning model"}};
150absl::Status AnthropicAIService::CheckAvailability() {
151#ifndef YAZE_WITH_JSON
152 return absl::UnimplementedError(
153 "Anthropic AI service requires JSON support. Build with "
154 "-DYAZE_WITH_JSON=ON");
156 if (config_.api_key.empty()) {
157 return absl::FailedPreconditionError(
158 "❌ Anthropic API key not configured\n"
159 " Set ANTHROPIC_API_KEY environment variable\n"
160 " Get your API key at: https://console.anthropic.com/");
162 return absl::OkStatus();
166absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
167 const std::string& prompt) {
168 return GenerateResponse(
169 {{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
// Sends the (truncated) chat history to the Anthropic Messages API and
// returns the parsed agent response. Uses an iOS URLSession transport when
// YAZE_AI_IOS_URLSESSION is defined, otherwise shells out to curl.
// NOTE(review): many lines (else-branches, the curl `-H 'x-api-key'` header,
// the fgets buffer declaration, try/#endif structure) are elided from this
// chunk — do not restructure without the full file.
172absl::StatusOr<AgentResponse> AnthropicAIService::GenerateResponse(
173 const std::vector<agent::ChatMessage>& history) {
174#ifndef YAZE_WITH_JSON
175 return absl::UnimplementedError(
176 "Anthropic AI service requires JSON support. Build with "
177 "-DYAZE_WITH_JSON=ON");
// Validate preconditions before doing any work.
179 if (history.empty()) {
180 return absl::InvalidArgumentError(
"History cannot be empty.");
183 if (config_.api_key.empty()) {
184 return absl::FailedPreconditionError(
"Anthropic API key not configured");
// Timestamp used to compute latency_seconds below.
187 absl::Time request_start = absl::Now();
190 if (config_.verbose) {
191 std::cerr <<
"[DEBUG] Using curl for Anthropic HTTPS request"
196 nlohmann::json messages = nlohmann::json::array();
// Only the last 10 history messages are sent to bound request size.
// NOTE(review): `i` is size_t but start_idx is int — signed/unsigned mix.
199 int start_idx = std::max(0,
static_cast<int>(history.size()) - 10);
200 for (
size_t i = start_idx; i < history.size(); ++i) {
201 const auto& msg = history[i];
202 std::string role = (msg.sender == agent::ChatMessage::Sender::kUser)
206 messages.push_back({{
"role", role}, {
"content", msg.message}});
// Assemble the Messages API request body.
210 nlohmann::json request_body = {{
"model", config_.model},
211 {
"max_tokens", config_.max_output_tokens},
212 {
"system", config_.system_instruction},
213 {
"messages", messages}};
// Attach tool schemas when function calling is enabled; schema-build
// failure is non-fatal (request proceeds without tools).
216 if (function_calling_enabled_) {
217 auto tools_or = BuildAnthropicToolPayload(prompt_builder_);
218 if (!tools_or.ok()) {
219 if (config_.verbose) {
220 std::cerr <<
"[DEBUG] Function calling schemas unavailable: "
221 << tools_or.status().message() << std::endl;
223 }
else if (!tools_or->empty()) {
224 if (config_.verbose) {
225 std::string tools_str = tools_or->dump();
226 std::cerr <<
"[DEBUG] Function calling schemas: "
227 << tools_str.substr(0, 200) <<
"..." << std::endl;
230 request_body[
"tools"] = *tools_or;
234 if (config_.verbose) {
235 std::cerr <<
"[DEBUG] Sending " << messages.size()
236 <<
" messages to Anthropic" << std::endl;
239 std::string response_str;
// iOS transport: native URLSession with a 60-second (60000 ms) timeout.
240#if defined(YAZE_AI_IOS_URLSESSION)
241 std::map<std::string, std::string> headers;
242 headers.emplace(
"x-api-key", config_.api_key);
243 headers.emplace(
"anthropic-version",
"2023-06-01");
244 headers.emplace(
"content-type",
"application/json");
245 auto resp_or = ios::UrlSessionHttpRequest(
246 "POST",
"https://api.anthropic.com/v1/messages", headers,
247 request_body.dump(), 60000);
249 return resp_or.status();
251 if (resp_or->status_code != 200) {
252 return absl::InternalError(absl::StrCat(
253 "Anthropic API error: ", resp_or->status_code,
"\n", resp_or->body));
255 response_str = resp_or->body;
// Desktop transport: write the request body to a temp file and POST it
// with curl. NOTE(review): fixed, predictable /tmp path — the request body
// (system prompt + history) is briefly world-readable; consider mkstemp.
258 std::string temp_file =
"/tmp/anthropic_request.json";
259 std::ofstream out(temp_file);
260 out << request_body.dump();
264 std::string curl_cmd =
265 "curl -s -X POST 'https://api.anthropic.com/v1/messages' "
269 "-H 'anthropic-version: 2023-06-01' "
270 "-H 'content-type: application/json' "
274 if (config_.verbose) {
275 std::cerr <<
"[DEBUG] Executing Anthropic API request..." << std::endl;
// _popen on Windows, popen elsewhere (platform #if lines elided here).
279 FILE* pipe = _popen(curl_cmd.c_str(),
"r");
281 FILE* pipe = popen(curl_cmd.c_str(),
"r");
284 return absl::InternalError(
"Failed to execute curl command");
// Accumulate curl's stdout into response_str.
288 while (fgets(buffer,
sizeof(buffer), pipe) !=
nullptr) {
289 response_str += buffer;
293 int status = _pclose(pipe);
295 int status = pclose(pipe);
// Best-effort cleanup of the temp request file.
297 std::remove(temp_file.c_str());
300 return absl::InternalError(
301 absl::StrCat(
"Curl failed with status ", status));
305 if (response_str.empty()) {
306 return absl::InternalError(
"Empty response from Anthropic API");
// Verbose mode: echo a dimmed 500-char preview of the raw response.
309 if (config_.verbose) {
312 <<
"🔍 Raw Anthropic API Response:"
315 <<
"\033[2m" << response_str.substr(0, 500) <<
"\033[0m"
319 if (config_.verbose) {
320 std::cerr <<
"[DEBUG] Parsing response..." << std::endl;
323 auto parsed_or = ParseAnthropicResponse(response_str);
324 if (!parsed_or.ok()) {
325 return parsed_or.status();
328 AgentResponse agent_response = std::move(parsed_or.value());
// Decorate the response with request metadata for telemetry/UI.
330 agent_response.model = config_.model;
331 agent_response.latency_seconds =
332 absl::ToDoubleSeconds(absl::Now() - request_start);
333 agent_response.parameters[
"prompt_version"] = config_.prompt_version;
334 agent_response.parameters[
"temperature"] =
335 absl::StrFormat(
"%.2f", config_.temperature);
336 agent_response.parameters[
"max_output_tokens"] =
337 absl::StrFormat(
"%d", config_.max_output_tokens);
338 agent_response.parameters[
"function_calling"] =
339 function_calling_enabled_ ?
"true" :
"false";
341 return agent_response;
// Catch-all: surface any exception (e.g. from nlohmann::json) as a status.
343 }
catch (
const std::exception& e) {
344 if (config_.verbose) {
345 std::cerr <<
"[ERROR] Exception: " << e.what() << std::endl;
347 return absl::InternalError(
348 absl::StrCat(
"Exception during generation: ", e.what()));
// Parses the raw Anthropic Messages API JSON into an AgentResponse:
// extracts "text" content blocks (optionally structured JSON with
// text_response/commands) and "tool_use" blocks.
// NOTE(review): several lines (the ToolCall declaration, verbose-print
// statements, closing braces) are elided from this chunk — do not
// restructure without the full file.
353absl::StatusOr<AgentResponse> AnthropicAIService::ParseAnthropicResponse(
354 const std::string& response_body) {
355#ifndef YAZE_WITH_JSON
356 return absl::UnimplementedError(
"JSON support required");
358 AgentResponse agent_response;
// Non-throwing parse: allow_exceptions=false yields a discarded value.
360 auto response_json = nlohmann::json::parse(response_body,
nullptr,
false);
361 if (response_json.is_discarded()) {
362 return absl::InternalError(
"❌ Failed to parse Anthropic response JSON");
// Surface API-level errors reported in the payload.
366 if (response_json.contains(
"error")) {
367 std::string error_msg =
368 response_json[
"error"].value(
"message",
"Unknown error");
369 return absl::InternalError(
370 absl::StrCat(
"❌ Anthropic API error: ", error_msg));
374 if (!response_json.contains(
"content") ||
375 !response_json[
"content"].is_array()) {
376 return absl::InternalError(
"❌ No content in Anthropic response");
// Walk every content block; Anthropic mixes text and tool_use blocks.
379 for (
const auto& block : response_json[
"content"]) {
380 std::string type = block.value(
"type",
"");
382 if (type ==
"text") {
383 std::string text_content = block.value(
"text",
"");
385 if (config_.verbose) {
391 <<
"\033[2m" << text_content <<
"\033[0m"
// Strip surrounding whitespace and markdown code fences (```json / ```)
// so that models wrapping JSON in fences still parse.
399 std::string clean_text =
400 std::string(absl::StripAsciiWhitespace(text_content));
401 if (absl::StartsWith(clean_text,
"```json")) {
402 clean_text = clean_text.substr(7);
403 }
else if (absl::StartsWith(clean_text,
"```")) {
404 clean_text = clean_text.substr(3);
406 if (absl::EndsWith(clean_text,
"```")) {
407 clean_text = clean_text.substr(0, clean_text.length() - 3);
409 clean_text = std::string(absl::StripAsciiWhitespace(clean_text));
// If the text is itself JSON, pull out structured fields.
412 auto parsed_text = nlohmann::json::parse(clean_text,
nullptr,
false);
413 if (!parsed_text.is_discarded()) {
414 if (parsed_text.contains(
"text_response") &&
415 parsed_text[
"text_response"].is_string()) {
416 agent_response.text_response =
417 parsed_text[
"text_response"].get<std::string>();
419 if (parsed_text.contains(
"commands") &&
420 parsed_text[
"commands"].is_array()) {
421 for (
const auto& cmd : parsed_text[
"commands"]) {
422 if (cmd.is_string()) {
423 std::string command = cmd.get<std::string>();
// Drop a leading "z3ed " prefix (5 chars) so commands are CLI-relative.
424 if (absl::StartsWith(command,
"z3ed ")) {
425 command = command.substr(5);
427 agent_response.commands.push_back(command);
// Plain text: use it directly, or append to earlier text blocks.
433 if (agent_response.text_response.empty()) {
434 agent_response.text_response = text_content;
436 agent_response.text_response +=
"\n\n" + text_content;
439 }
else if (type ==
"tool_use") {
441 tool_call.tool_name = block.value(
"name",
"");
// Flatten tool input values to strings. NOTE(review): numbers go through
// std::to_string(double), so integers render as e.g. "1.000000" — confirm
// downstream consumers expect that format.
443 if (block.contains(
"input") && block[
"input"].is_object()) {
444 for (
auto& [key, value] : block[
"input"].items()) {
445 if (value.is_string()) {
446 tool_call.args[
key] = value.get<std::string>();
447 }
else if (value.is_number()) {
448 tool_call.args[
key] = std::to_string(value.get<
double>());
449 }
else if (value.is_boolean()) {
450 tool_call.args[
key] = value.get<
bool>() ?
"true" :
"false";
454 agent_response.tool_calls.push_back(tool_call);
// Reject responses that yielded neither text, nor commands, nor tools.
458 if (agent_response.text_response.empty() && agent_response.commands.empty() &&
459 agent_response.tool_calls.empty()) {
460 return absl::InternalError(
461 "❌ No valid response extracted from Anthropic\n"
462 " Expected text or tool use");
465 return agent_response;
AnthropicAIService(const AnthropicConfig &)
constexpr char kProviderAnthropic[]