yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
conversational_agent_service.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cctype>
5#include <iostream>
6#include <optional>
7#include <set>
8#include <sstream>
9#include <string>
10#include <vector>
11
12#include "absl/flags/declare.h"
13#include "absl/flags/flag.h"
14#include "absl/status/status.h"
15#include "absl/status/statusor.h"
16#include "absl/strings/ascii.h"
17#include "absl/strings/str_cat.h"
18#include "absl/strings/str_format.h"
19#include "absl/strings/str_join.h"
20#include "absl/strings/str_split.h"
21#include "absl/strings/string_view.h"
22#include "absl/time/clock.h"
23#include "absl/time/time.h"
24#ifdef YAZE_AI_RUNTIME_AVAILABLE
27#endif
32#include "nlohmann/json.hpp"
33#include "rom/rom.h"
36#include "zelda3/dungeon/room.h"
37
38#ifdef SendMessage
39#undef SendMessage
40#endif
41
42ABSL_DECLARE_FLAG(std::string, ai_provider);
43
44namespace yaze {
45namespace cli {
46namespace agent {
47
48namespace {
49
// Returns `input` with leading and trailing ASCII whitespace removed.
// An all-whitespace or empty input yields an empty string.
std::string TrimWhitespace(const std::string& input) {
  // Cast through unsigned char: std::isspace on a negative char is UB.
  const auto is_space = [](unsigned char c) { return std::isspace(c) != 0; };
  std::size_t first = 0;
  while (first < input.size() && is_space(input[first])) {
    ++first;
  }
  std::size_t last = input.size();
  while (last > first && is_space(input[last - 1])) {
    --last;
  }
  return input.substr(first, last - first);
}
63
64std::string JsonValueToString(const nlohmann::json& value) {
65 if (value.is_string()) {
66 return value.get<std::string>();
67 }
68 if (value.is_boolean()) {
69 return value.get<bool>() ? "true" : "false";
70 }
71 if (value.is_number()) {
72 return value.dump();
73 }
74 if (value.is_null()) {
75 return "null";
76 }
77 return value.dump();
78}
79
80std::set<std::string> CollectObjectKeys(const nlohmann::json& array) {
81 std::set<std::string> keys;
82 for (const auto& item : array) {
83 if (!item.is_object()) {
84 continue;
85 }
86 for (const auto& [key, _] : item.items()) {
87 keys.insert(key);
88 }
89 }
90 return keys;
91}
92
93std::optional<ChatMessage::TableData> BuildTableData(
94 const nlohmann::json& data) {
95 using TableData = ChatMessage::TableData;
96
97 if (data.is_object()) {
98 TableData table;
99 table.headers = {"Key", "Value"};
100 table.rows.reserve(data.size());
101 for (const auto& [key, value] : data.items()) {
102 table.rows.push_back({key, JsonValueToString(value)});
103 }
104 return table;
105 }
106
107 if (data.is_array()) {
108 TableData table;
109 if (data.empty()) {
110 table.headers = {"Value"};
111 return table;
112 }
113
114 const bool all_objects = std::all_of(
115 data.begin(), data.end(),
116 [](const nlohmann::json& item) { return item.is_object(); });
117
118 if (all_objects) {
119 auto keys = CollectObjectKeys(data);
120 if (keys.empty()) {
121 table.headers = {"Value"};
122 for (const auto& item : data) {
123 table.rows.push_back({JsonValueToString(item)});
124 }
125 return table;
126 }
127
128 table.headers.assign(keys.begin(), keys.end());
129 table.rows.reserve(data.size());
130 for (const auto& item : data) {
131 std::vector<std::string> row;
132 row.reserve(table.headers.size());
133 for (const auto& key : table.headers) {
134 if (item.contains(key)) {
135 row.push_back(JsonValueToString(item.at(key)));
136 } else {
137 row.emplace_back("-");
138 }
139 }
140 table.rows.push_back(std::move(row));
141 }
142 return table;
143 }
144
145 table.headers = {"Value"};
146 table.rows.reserve(data.size());
147 for (const auto& item : data) {
148 table.rows.push_back({JsonValueToString(item)});
149 }
150 return table;
151 }
152
153 return std::nullopt;
154}
155
156bool IsExecutableCommand(absl::string_view command) {
157 return !command.empty() && command.front() != '#';
158}
159
160int CountExecutableCommands(const std::vector<std::string>& commands) {
161 int count = 0;
162 for (const auto& command : commands) {
163 if (IsExecutableCommand(command)) {
164 ++count;
165 }
166 }
167 return count;
168}
169
170bool IsLikelyOracleRom(const Rom* rom) {
171 if (rom == nullptr) {
172 return false;
173 }
174 const std::string rom_path = absl::AsciiStrToLower(rom->filename());
175 return absl::StrContains(rom_path, "oracle") ||
176 absl::StrContains(rom_path, "oos");
177}
178
179bool IsOracleDebugIntent(const std::string& user_message) {
180 const std::string lowered = absl::AsciiStrToLower(user_message);
181 static constexpr absl::string_view kOracleDebugKeywords[] = {
182 "oracle", "oos", "mesen", "dungeon", "collision", "minecart",
183 "preflight", "smoke", "room", "hook", "sprite", "water"};
184 for (const auto keyword : kOracleDebugKeywords) {
185 if (absl::StrContains(lowered, keyword)) {
186 return true;
187 }
188 }
189 return false;
190}
191
193 const std::string& user_message) {
194 if (rom == nullptr || !rom->is_loaded() || !IsLikelyOracleRom(rom) ||
195 !IsOracleDebugIntent(user_message)) {
196 return "";
197 }
198
199 const bool expanded =
201
203 structural_opts.require_water_fill_reserved_region = true;
204 structural_opts.require_custom_collision_write_support = false;
205 structural_opts.validate_water_fill_table = true;
206 structural_opts.validate_custom_collision_maps = false;
207 structural_opts.max_collision_errors = 0;
208 const auto structural =
209 zelda3::RunOracleRomSafetyPreflight(rom, structural_opts);
210
211 bool d4_required_rooms_ok = false;
212 bool d3_required_room_ok = false;
213 std::string d4_check_state = "skipped";
214 std::string d3_check_state = "skipped";
215 if (expanded) {
219 d4_opts.validate_water_fill_table = false;
220 d4_opts.validate_custom_collision_maps = false;
221 d4_opts.room_ids_requiring_custom_collision = {0x25, 0x27};
222 const auto d4 = zelda3::RunOracleRomSafetyPreflight(rom, d4_opts);
223 d4_required_rooms_ok = d4.ok();
224 d4_check_state = "ran";
225
229 d3_opts.validate_water_fill_table = false;
230 d3_opts.validate_custom_collision_maps = false;
232 const auto d3 = zelda3::RunOracleRomSafetyPreflight(rom, d3_opts);
233 d3_required_room_ok = d3.ok();
234 d3_check_state = "ran";
235 }
236
237 int d6_track_rooms = 0;
238 for (int room_id : {0xA8, 0xB8, 0xD8, 0xDA}) {
239 zelda3::Room room = zelda3::LoadRoomFromRom(rom, room_id);
240 bool has_track_object = false;
241 for (const auto& object : room.GetTileObjects()) {
242 if (object.id_ == 0x31) {
243 has_track_object = true;
244 break;
245 }
246 }
247 if (has_track_object) {
248 ++d6_track_rooms;
249 }
250 }
251
252 std::ostringstream hook;
253 hook << "[AUTO_ORACLE_STATE_HOOK]\n";
254 hook << "UserRequest: " << user_message << "\n";
255 hook << "OracleRom: " << rom->filename() << "\n";
256 hook << "OracleStructuralOk: " << (structural.ok() ? "true" : "false")
257 << "\n";
258 hook << "CustomCollisionWriteSupport: " << (expanded ? "true" : "false")
259 << "\n";
260 hook << "D4RequiredRoomsCheck: " << d4_check_state << "\n";
261 if (d4_check_state == "ran") {
262 hook << "D4RequiredRoomsOk: " << (d4_required_rooms_ok ? "true" : "false")
263 << "\n";
264 }
265 hook << "D6TrackRoomsFound: " << d6_track_rooms << "/4\n";
266 hook << "D3ReadinessCheck: " << d3_check_state << "\n";
267 if (d3_check_state == "ran") {
268 hook << "D3ReadinessOk: " << (d3_required_room_ok ? "true" : "false")
269 << "\n";
270 }
271 hook << "Guidance: Prefer oracle-smoke-check, dungeon-oracle-preflight, "
272 "dungeon-room-graph, and mesen-* commands when diagnosing Oracle "
273 "runtime state.\n";
274 hook << "[/AUTO_ORACLE_STATE_HOOK]";
275 return hook.str();
276}
277
279 const std::string& content) {
280 ChatMessage message;
281 message.sender = sender;
282 message.message = content;
283 message.timestamp = absl::Now();
284
285 if (sender == ChatMessage::Sender::kAgent) {
286 const std::string trimmed = TrimWhitespace(content);
287 if (!trimmed.empty() &&
288 (trimmed.front() == '{' || trimmed.front() == '[')) {
289 try {
290 nlohmann::json parsed = nlohmann::json::parse(trimmed);
291 message.table_data = BuildTableData(parsed);
292 message.json_pretty = parsed.dump(2);
293 } catch (const nlohmann::json::parse_error&) {
294 // Ignore parse errors, fall back to raw text.
295 }
296 }
297 }
298
299 return message;
300}
301
302} // namespace
303
305 // Default to a lightweight mock provider to avoid slow network checks during
306 // startup (especially on mac-ai builds). The real provider is created when
307 // ConfigureProvider is called from the UI.
309 ai_service_ = std::make_unique<MockAIService>();
311
312#ifdef Z3ED_AI
313 // Initialize advanced features
314 auto learn_status = learned_knowledge_.Initialize();
315 if (!learn_status.ok() && config_.verbose) {
316 std::cerr << "Warning: Failed to initialize learned knowledge: "
317 << learn_status.message() << std::endl;
318 }
319
320 auto todo_status = todo_manager_.Initialize();
321 if (!todo_status.ok() && config_.verbose) {
322 std::cerr << "Warning: Failed to initialize TODO manager: "
323 << todo_status.message() << std::endl;
324 }
325#endif
326}
327
329 const AgentConfig& config)
330 : config_(config) {
331 // Avoid auto-detecting providers (which can block on network) until the UI
332 // applies an explicit configuration.
334 ai_service_ = std::make_unique<MockAIService>();
336
337#ifdef Z3ED_AI
338 // Initialize advanced features
339 auto learn_status = learned_knowledge_.Initialize();
340 if (!learn_status.ok() && config_.verbose) {
341 std::cerr << "Warning: Failed to initialize learned knowledge: "
342 << learn_status.message() << std::endl;
343 }
344
345 auto todo_status = todo_manager_.Initialize();
346 if (!todo_status.ok() && config_.verbose) {
347 std::cerr << "Warning: Failed to initialize TODO manager: "
348 << todo_status.message() << std::endl;
349 }
350#endif
351}
352
361
363 const std::map<std::string, core::AsarSymbol>* table) {
365}
366
371
374 return;
375 }
376
377 while (history_.size() > config_.max_history_messages) {
378 history_.erase(history_.begin());
379 }
380}
381
400
404
410
412 const AgentResponse& agent_response) {
413 // Process the response similar to the internal loop
414 // 1. Check for tool calls
415 // 2. Execute tools
416 // 3. Create proposal if needed
417 // 4. Append Agent message to history
418 // 5. If tools executed, call external driver again (loop)
419
420 bool executed_tool = false;
421 std::vector<std::string> executed_tools;
422
423 if (!agent_response.tool_calls.empty()) {
424 for (const auto& tool_call : agent_response.tool_calls) {
425 // Format tool arguments for display
426 std::vector<std::string> arg_parts;
427 for (const auto& [key, value] : tool_call.args) {
428 arg_parts.push_back(absl::StrCat(key, "=", value));
429 }
430 std::string args_str = absl::StrJoin(arg_parts, ", ");
431
432 util::PrintToolCall(tool_call.tool_name, args_str);
433
434 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
435 std::string tool_output;
436 if (!tool_result_or.ok()) {
437 tool_output =
438 absl::StrCat("Error: ", tool_result_or.status().message());
439 util::PrintError(tool_output);
440 } else {
441 tool_output = tool_result_or.value();
442 util::PrintSuccess("Tool executed successfully");
443 }
444
445 if (!tool_output.empty()) {
447 // Add tool result as internal message
448 std::string marked_output = absl::StrCat(
449 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
450 "The tool returned the following data:\n", tool_output, "\n\n",
451 "Please provide a text_response field in your JSON to summarize "
452 "this information for the user.");
453 auto tool_result_msg =
454 CreateMessage(ChatMessage::Sender::kUser, marked_output);
455 tool_result_msg.is_internal = true;
456 history_.push_back(tool_result_msg);
457 }
458 executed_tool = true;
459 executed_tools.push_back(tool_call.tool_name);
460 }
461 }
462
463 // If tools were executed, we need to loop back to the AI
464 if (executed_tool && has_external_driver_) {
466 return; // Wait for next response
467 }
468
469 // Final text response processing
470 std::optional<ProposalCreationResult> proposal_result;
471 absl::Status proposal_status = absl::OkStatus();
472 bool attempted_proposal = false;
473
474 if (!agent_response.commands.empty()) {
475 attempted_proposal = true;
478 // Use last user message as prompt context if available
479 if (!history_.empty()) {
480 for (auto it = history_.rbegin(); it != history_.rend(); ++it) {
481 if (it->sender == ChatMessage::Sender::kUser && !it->is_internal) {
482 request.prompt = it->message;
483 break;
484 }
485 }
486 }
487 request.response = &agent_response;
488 request.rom = rom_context_;
489 request.sandbox_label = "agent-chat";
491
492 auto creation_or = CreateProposalFromAgentResponse(request);
493 if (!creation_or.ok()) {
494 proposal_status = creation_or.status();
495 util::PrintError(absl::StrCat("Failed to create proposal: ",
496 proposal_status.message()));
497 } else {
498 proposal_result = std::move(creation_or.value());
499 }
500 }
501 }
502
503 // Construct text response
504 std::string response_text = agent_response.text_response;
505 if (!agent_response.reasoning.empty()) {
506 if (!response_text.empty())
507 response_text.append("\n\n");
508 response_text.append("Reasoning: ").append(agent_response.reasoning);
509 }
510
511 if (!agent_response.commands.empty()) {
512 if (!response_text.empty())
513 response_text.append("\n\n");
514 response_text.append("Commands:\n")
515 .append(absl::StrJoin(agent_response.commands, "\n"));
516 }
518 CountExecutableCommands(agent_response.commands);
519
520 if (proposal_result.has_value()) {
521 const auto& metadata = proposal_result->metadata;
522 if (!response_text.empty())
523 response_text.append("\n\n");
524 response_text.append(
525 absl::StrFormat("✅ Proposal %s ready with %d change%s (%d command%s).",
526 metadata.id, proposal_result->change_count,
527 proposal_result->change_count == 1 ? "" : "s",
528 proposal_result->executed_commands,
529 proposal_result->executed_commands == 1 ? "" : "s"));
531 } else if (attempted_proposal && !proposal_status.ok()) {
532 if (!response_text.empty())
533 response_text.append("\n\n");
534 response_text.append(absl::StrCat("⚠️ Failed to prepare proposal: ",
535 proposal_status.message()));
536 }
537
538 // Remove the "Thinking..." placeholder if present
539 if (!history_.empty() &&
540 history_.back().sender == ChatMessage::Sender::kAgent &&
541 history_.back().message == "Thinking...") {
542 history_.pop_back();
543 }
544
545 // Add final message
546 ChatMessage chat_response =
547 CreateMessage(ChatMessage::Sender::kAgent, response_text);
548 if (proposal_result.has_value()) {
550 summary.id = proposal_result->metadata.id;
551 summary.change_count = proposal_result->change_count;
552 summary.executed_commands = proposal_result->executed_commands;
553 chat_response.proposal = summary;
554 }
555
556 // Metadata
559 meta.model = "gemini"; // Could get this from JS
560 meta.tool_names = executed_tools;
561 chat_response.model_metadata = meta;
562
563 history_.push_back(chat_response);
565
568}
569
570absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
571 const std::string& message) {
572 if (message.empty() && history_.empty()) {
573 return absl::InvalidArgumentError(
574 "Conversation must start with a non-empty message.");
575 }
576
577 if (!message.empty()) {
578#ifdef Z3ED_AI
579 const std::string auto_hook =
580 BuildAutoOracleStateHook(rom_context_, message);
581 if (!auto_hook.empty()) {
582 auto hook_message = CreateMessage(ChatMessage::Sender::kUser, auto_hook);
583 hook_message.is_internal = true;
584 history_.push_back(std::move(hook_message));
586 }
587#endif
588 history_.push_back(CreateMessage(ChatMessage::Sender::kUser, message));
591 }
592
593 // External Driver Path (WASM/Sidecar)
596 // Return a placeholder that indicates waiting
597 // The UI should handle this update gracefully via callbacks
598 return CreateMessage(ChatMessage::Sender::kAgent, "Thinking...");
599 }
600
601 const int max_iterations = config_.max_tool_iterations;
602 bool waiting_for_text_response = false;
603 absl::Time turn_start = absl::Now();
604 std::vector<std::string> executed_tools;
605
606 if (config_.verbose) {
607 util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations,
608 " iterations)"));
610 absl::StrCat("History size: ", history_.size(), " messages"));
611 }
612
613 for (int iteration = 0; iteration < max_iterations; ++iteration) {
614 if (config_.verbose) {
616 std::cout << util::colors::kCyan << "Iteration " << (iteration + 1) << "/"
617 << max_iterations << util::colors::kReset << std::endl;
618 }
619
620 // Show loading indicator while waiting for AI response
622 waiting_for_text_response ? "Generating final response..."
623 : "Thinking...",
624 !config_.verbose); // Hide spinner in verbose mode
625 loader.Start();
626
627 auto response_or = ai_service_->GenerateResponse(history_);
628 loader.Stop();
629
630 if (!response_or.ok()) {
631 util::PrintError(absl::StrCat("Failed to get AI response: ",
632 response_or.status().message()));
633 return absl::InternalError(absl::StrCat("Failed to get AI response: ",
634 response_or.status().message()));
635 }
636
637 const auto& agent_response = response_or.value();
638
639 if (config_.verbose) {
640 util::PrintInfo("Received agent response:");
641 std::cout << util::colors::kDim
642 << " - Tool calls: " << agent_response.tool_calls.size()
643 << util::colors::kReset << std::endl;
644 std::cout << util::colors::kDim
645 << " - Commands: " << agent_response.commands.size()
646 << util::colors::kReset << std::endl;
647 std::cout << util::colors::kDim << " - Text response: "
648 << (agent_response.text_response.empty() ? "empty" : "present")
649 << util::colors::kReset << std::endl;
650 if (!agent_response.reasoning.empty() && config_.show_reasoning) {
651 std::cout << util::colors::kYellow
652 << " 💭 Reasoning: " << util::colors::kDim
653 << agent_response.reasoning << util::colors::kReset
654 << std::endl;
655 }
656 }
657
658 if (!agent_response.tool_calls.empty()) {
659 // Check if we were waiting for a text response but got more tool calls
660 // instead
661 if (waiting_for_text_response) {
663 absl::StrCat("LLM called tools again instead of providing final "
664 "response (Iteration: ",
665 iteration + 1, "/", max_iterations, ")"));
666 }
667
668 bool executed_tool = false;
669 for (const auto& tool_call : agent_response.tool_calls) {
670 // Format tool arguments for display
671 std::vector<std::string> arg_parts;
672 for (const auto& [key, value] : tool_call.args) {
673 arg_parts.push_back(absl::StrCat(key, "=", value));
674 }
675 std::string args_str = absl::StrJoin(arg_parts, ", ");
676
677 util::PrintToolCall(tool_call.tool_name, args_str);
678
679 auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
680 if (!tool_result_or.ok()) {
681 util::PrintError(absl::StrCat("Tool execution failed: ",
682 tool_result_or.status().message()));
683 return absl::InternalError(absl::StrCat(
684 "Tool execution failed: ", tool_result_or.status().message()));
685 }
686
687 const std::string& tool_output = tool_result_or.value();
688 if (!tool_output.empty()) {
689 util::PrintSuccess("Tool executed successfully");
691
692 if (config_.verbose) {
693 std::cout << util::colors::kDim
694 << "Tool output (truncated):" << util::colors::kReset
695 << std::endl;
696 std::string preview = tool_output.substr(
697 0, std::min(size_t(200), tool_output.size()));
698 if (tool_output.size() > 200)
699 preview += "...";
700 std::cout << util::colors::kDim << preview << util::colors::kReset
701 << std::endl;
702 }
703
704 // Add tool result with a clear marker for the LLM
705 // Format as plain text to avoid confusing the LLM with nested JSON
706 std::string marked_output = absl::StrCat(
707 "[TOOL RESULT for ", tool_call.tool_name, "]\n",
708 "The tool returned the following data:\n", tool_output, "\n\n",
709 "Please provide a text_response field in your JSON to summarize "
710 "this information for the user.");
711 auto tool_result_msg =
712 CreateMessage(ChatMessage::Sender::kUser, marked_output);
713 tool_result_msg.is_internal =
714 true; // Don't show this to the human user
715 history_.push_back(tool_result_msg);
716 }
717 executed_tool = true;
718 executed_tools.push_back(tool_call.tool_name);
719 }
720
721 if (executed_tool) {
722 // Now we're waiting for the LLM to provide a text response
723 waiting_for_text_response = true;
724 // Re-query the AI with updated context.
725 continue;
726 }
727 }
728
729 // Check if we received a text response after tool execution
730 if (waiting_for_text_response && agent_response.text_response.empty() &&
731 agent_response.commands.empty()) {
733 absl::StrCat("LLM did not provide text_response after receiving tool "
734 "results (Iteration: ",
735 iteration + 1, "/", max_iterations, ")"));
736 // Continue to give it another chance
737 continue;
738 }
739
740 std::optional<ProposalCreationResult> proposal_result;
741 absl::Status proposal_status = absl::OkStatus();
742 bool attempted_proposal = false;
743
744 if (!agent_response.commands.empty()) {
745 attempted_proposal = true;
746
747 if (rom_context_ == nullptr) {
748 proposal_status = absl::FailedPreconditionError(
749 "No ROM context available for proposal creation");
751 "Cannot create proposal because no ROM context is active.");
752 } else if (!rom_context_->is_loaded()) {
753 proposal_status =
754 absl::FailedPreconditionError("ROM context is not loaded");
756 "Cannot create proposal because the ROM context is not loaded.");
757 } else {
759 request.prompt = message;
760 request.response = &agent_response;
761 request.rom = rom_context_;
762 request.sandbox_label = "agent-chat";
763 request.ai_provider = absl::GetFlag(FLAGS_ai_provider);
764
765 auto creation_or = CreateProposalFromAgentResponse(request);
766 if (!creation_or.ok()) {
767 proposal_status = creation_or.status();
768 util::PrintError(absl::StrCat("Failed to create proposal: ",
769 proposal_status.message()));
770 } else {
771 proposal_result = std::move(creation_or.value());
772 if (config_.verbose) {
773 util::PrintSuccess(absl::StrCat(
774 "Created proposal ", proposal_result->metadata.id, " with ",
775 proposal_result->change_count, " change(s)."));
776 }
777 }
778 }
779 }
780
781 std::string response_text = agent_response.text_response;
782 if (!agent_response.reasoning.empty()) {
783 if (!response_text.empty()) {
784 response_text.append("\n\n");
785 }
786 response_text.append("Reasoning: ");
787 response_text.append(agent_response.reasoning);
788 }
789 const int executable_commands =
790 CountExecutableCommands(agent_response.commands);
791 if (!agent_response.commands.empty()) {
792 if (!response_text.empty()) {
793 response_text.append("\n\n");
794 }
795 response_text.append("Commands:\n");
796 response_text.append(absl::StrJoin(agent_response.commands, "\n"));
797 }
798 metrics_.commands_generated += executable_commands;
799
800 if (proposal_result.has_value()) {
801 const auto& metadata = proposal_result->metadata;
802 if (!response_text.empty()) {
803 response_text.append("\n\n");
804 }
805 response_text.append(absl::StrFormat(
806 "✅ Proposal %s ready with %d change%s (%d command%s).\n"
807 "Review it in the Proposal drawer or run `z3ed agent diff "
808 "--proposal-id %s`.\n"
809 "Sandbox ROM: %s\nProposal JSON: %s",
810 metadata.id, proposal_result->change_count,
811 proposal_result->change_count == 1 ? "" : "s",
812 proposal_result->executed_commands,
813 proposal_result->executed_commands == 1 ? "" : "s", metadata.id,
814 metadata.sandbox_rom_path.string(),
815 proposal_result->proposal_json_path.string()));
817 } else if (attempted_proposal && !proposal_status.ok()) {
818 if (!response_text.empty()) {
819 response_text.append("\n\n");
820 }
821 response_text.append(
822 absl::StrCat("⚠️ Failed to prepare a proposal automatically: ",
823 proposal_status.message()));
824 }
825 ChatMessage chat_response =
826 CreateMessage(ChatMessage::Sender::kAgent, response_text);
827 if (proposal_result.has_value()) {
829 summary.id = proposal_result->metadata.id;
830 summary.change_count = proposal_result->change_count;
831 summary.executed_commands = proposal_result->executed_commands;
832 summary.sandbox_rom_path = proposal_result->metadata.sandbox_rom_path;
833 summary.proposal_json_path = proposal_result->proposal_json_path;
834 chat_response.proposal = summary;
835 }
838 metrics_.total_latency += absl::Now() - turn_start;
839 chat_response.metrics = BuildMetricsSnapshot();
840 if (!agent_response.warnings.empty()) {
841 chat_response.warnings = agent_response.warnings;
842 }
844 meta.provider = !agent_response.provider.empty()
845 ? agent_response.provider
847 meta.model = !agent_response.model.empty() ? agent_response.model
849 meta.latency_seconds =
850 agent_response.latency_seconds > 0.0
851 ? agent_response.latency_seconds
852 : absl::ToDoubleSeconds(absl::Now() - turn_start);
854 meta.tool_names = executed_tools;
855 meta.parameters = agent_response.parameters;
856 chat_response.model_metadata = meta;
857 history_.push_back(chat_response);
859 return chat_response;
860 }
861
862 return absl::InternalError(
863 "Agent did not produce a response after executing tools.");
864}
865
867 const AIServiceConfig& config) {
868 AIServiceConfig effective_config = config;
869 if (effective_config.rom_context == nullptr) {
870 effective_config.rom_context = rom_context_;
871 }
872 if (effective_config.rom_path_hint.empty() && rom_context_ != nullptr) {
873 effective_config.rom_path_hint = rom_context_->filename();
874 }
875
876 auto service_or = CreateAIServiceStrict(effective_config);
877 if (!service_or.ok()) {
878 // Keep the existing service running and fall back to mock so the UI stays
879 // responsive.
880 std::cerr << "Provider configuration failed: " << service_or.status()
881 << " — falling back to mock" << std::endl;
882 ai_service_ = std::make_unique<MockAIService>();
884 if (rom_context_) {
885 ai_service_->SetRomContext(rom_context_);
886 }
887 return service_or.status();
888 }
889
890 ai_service_ = std::move(service_or.value());
891 provider_config_ = effective_config;
892 if (rom_context_) {
893 ai_service_->SetRomContext(rom_context_);
894 }
895 return absl::OkStatus();
896}
897
903
904const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
905 return history_;
906}
907
909 std::vector<ChatMessage> history) {
910 history_ = std::move(history);
913}
914
917
919 bool has_snapshot = false;
920
921 for (const auto& message : history_) {
922 if (message.sender == ChatMessage::Sender::kUser) {
924 } else if (message.sender == ChatMessage::Sender::kAgent) {
927 }
928
929 if (message.proposal.has_value()) {
931 }
932
933 if (message.metrics.has_value()) {
934 snapshot = *message.metrics;
935 has_snapshot = true;
936 }
937 }
938
939 if (has_snapshot) {
940 metrics_.user_messages = snapshot.total_user_messages;
941 metrics_.agent_messages = snapshot.total_agent_messages;
942 metrics_.tool_calls = snapshot.total_tool_calls;
943 metrics_.commands_generated = snapshot.total_commands;
944 metrics_.proposals_created = snapshot.total_proposals;
945 metrics_.turns_completed = snapshot.turn_index;
946 metrics_.total_latency = absl::Seconds(snapshot.total_elapsed_seconds);
947 }
948}
949
950#ifdef Z3ED_AI
951// === Advanced Feature Integration ===
952
953std::string ConversationalAgentService::BuildEnhancedPrompt(
954 const std::string& user_message) {
955 std::ostringstream enhanced;
956
957 // Inject pretraining on first message
958 if (inject_pretraining_ && !pretraining_injected_ && rom_context_) {
959 enhanced << InjectPretraining() << "\n\n";
960 pretraining_injected_ = true;
961 }
962
963 // Inject learned context
964 if (inject_learned_context_) {
965 enhanced << InjectLearnedContext(user_message) << "\n";
966 }
967
968 enhanced << user_message;
969 return enhanced.str();
970}
971
972std::string ConversationalAgentService::InjectLearnedContext(
973 const std::string& message) {
974 std::ostringstream context;
975
976 // Add relevant preferences
977 auto prefs = learned_knowledge_.GetAllPreferences();
978 if (!prefs.empty() && prefs.size() <= 5) { // Don't overwhelm with too many
979 context << "[User Preferences: ";
980 std::vector<std::string> pref_strings;
981 for (const auto& [key, value] : prefs) {
982 pref_strings.push_back(absl::StrCat(key, "=", value));
983 }
984 context << absl::StrJoin(pref_strings, ", ") << "]\n";
985 }
986
987 // Add ROM-specific patterns
989 // TODO: Get ROM hash
990 // auto patterns = learned_knowledge_.QueryPatterns("", rom_hash);
991 }
992
993 // Add recent relevant memories
994 std::vector<std::string> keywords;
995 // Extract keywords from message (simple word splitting)
996 for (const auto& word : absl::StrSplit(message, ' ')) {
997 if (word.length() > 4) { // Only meaningful words
998 keywords.push_back(std::string(word));
999 }
1000 }
1001
1002 if (!keywords.empty()) {
1003 auto memories = learned_knowledge_.SearchMemories(keywords[0]);
1004 if (!memories.empty() && memories.size() <= 3) {
1005 context << "[Relevant Past Context:\n";
1006 for (const auto& mem : memories) {
1007 context << "- " << mem.topic << ": " << mem.summary << "\n";
1008 }
1009 context << "]\n";
1010 }
1011 }
1012
1013 return context.str();
1014}
1015
1016std::string ConversationalAgentService::InjectPretraining() {
1017 if (!rom_context_) {
1018 return "";
1019 }
1020
1021 std::ostringstream pretraining;
1022 pretraining << "[SYSTEM KNOWLEDGE INJECTION - Read this first]\n\n";
1023#ifdef YAZE_AI_RUNTIME_AVAILABLE
1025#else
1026 pretraining << "AI Runtime not available - pretraining disabled.\n";
1027#endif
1028 pretraining << "\n[END KNOWLEDGE INJECTION]\n";
1029
1030 return pretraining.str();
1031}
1032
1033ChatMessage ConversationalAgentService::EnhanceResponse(
1034 const ChatMessage& response, const std::string& user_message) {
1035 // Use AdvancedRouter to enhance tool-based responses
1036 // This would synthesize multi-tool results into coherent insights
1037
1038 // For now, return response as-is
1039 // TODO: Integrate AdvancedRouter here
1040 return response;
1041}
1042#endif // Z3ED_AI
1043
1044} // namespace agent
1045} // namespace cli
1046} // namespace yaze
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:28
auto filename() const
Definition rom.h:145
const auto & vector() const
Definition rom.h:143
bool is_loaded() const
Definition rom.h:132
static std::string GeneratePretrainingPrompt(Rom *rom)
Generate pre-training prompt for agent.
absl::StatusOr< ChatMessage > SendMessage(const std::string &message)
absl::Status ConfigureProvider(const AIServiceConfig &config)
void SetAssemblySymbolTable(const std::map< std::string, core::AsarSymbol > *table)
void SetToolPreferences(const ToolDispatcher::ToolPreferences &prefs)
const std::vector< ChatMessage > & GetHistory() const
void HandleExternalResponse(const AgentResponse &response)
std::function< void(const std::vector< ChatMessage > &history)> ExternalDriverCallback
void ReplaceHistory(std::vector< ChatMessage > history)
void SetToolPreferences(const ToolPreferences &prefs)
void SetAssemblySymbolTable(const std::map< std::string, core::AsarSymbol > *table)
absl::StatusOr< std::string > Dispatch(const ::yaze::cli::ToolCall &tool_call)
const std::vector< RoomObject > & GetTileObjects() const
Definition room.h:330
ABSL_DECLARE_FLAG(std::string, ai_provider)
std::string BuildAutoOracleStateHook(Rom *rom, const std::string &user_message)
ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string &content)
std::optional< ChatMessage::TableData > BuildTableData(const nlohmann::json &data)
std::string TrimWhitespace(absl::string_view value)
absl::StatusOr< ProposalCreationResult > CreateProposalFromAgentResponse(const ProposalCreationRequest &)
constexpr const char * kDim
constexpr const char * kYellow
constexpr const char * kReset
constexpr const char * kCyan
void PrintWarning(const std::string &message)
void PrintToolCall(const std::string &tool_name, const std::string &details="")
void PrintInfo(const std::string &message)
void PrintSuccess(const std::string &message)
void PrintError(const std::string &message)
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
constexpr char kProviderMock[]
Definition provider_ids.h:7
constexpr char kProviderExternal[]
OracleRomSafetyPreflightResult RunOracleRomSafetyPreflight(Rom *rom, const OracleRomSafetyPreflightOptions &options)
Room LoadRoomFromRom(Rom *rom, int room_id)
Definition room.cc:325
constexpr bool HasCustomCollisionWriteSupport(std::size_t rom_size)
std::vector< std::string > commands
Definition common.h:26
std::string reasoning
Definition common.h:29
std::vector< ToolCall > tool_calls
Definition common.h:23
std::string text_response
Definition common.h:20
std::optional< ModelMetadata > model_metadata
std::optional< std::string > json_pretty
std::optional< ProposalSummary > proposal
std::optional< SessionMetrics > metrics