yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
service_factory.cc
Go to the documentation of this file.
2
3#include <cstdlib>
4#include <fstream>
5#include <iostream>
6#include <set>
7#include <utility>
8
9#include "absl/flags/declare.h"
10#include "absl/flags/flag.h"
11#include "absl/strings/ascii.h"
12#include "absl/strings/match.h"
13#include "absl/strings/str_format.h"
19#include "rom/rom.h"
20#include "util/platform_paths.h"
21
22#ifdef YAZE_WITH_JSON
26#endif
27
28namespace {
29
// Public OpenAI endpoint; a base URL that differs from this is treated as a
// local/proxy OpenAI-compatible server elsewhere in this file.
constexpr char kDefaultOpenAiBaseUrl[] = "https://api.openai.com";
// Default local Ollama endpoint; a differing configured host counts as an
// Ollama hint (see HasOllamaHint).
constexpr char kDefaultOllamaHost[] = "http://localhost:11434";
// Asset-relative path of the Oracle of Secrets system-prompt guide.
constexpr char kOraclePromptAsset[] = "agent/oracle_of_secrets_guide.txt";
33
// Collapses user-facing provider aliases ("claude"/"sonnet"/"opus",
// "chatgpt"/"gpt"/"lmstudio", "google", ...) to canonical provider ids.
// Unrecognized names are passed through unchanged.
// NOTE(review): this listing came from a lossy extraction — the `return`
// statements inside the alias branches (and some alias comparisons) were
// dropped; confirm the canonical ids against the repository source.
std::string NormalizeProviderAlias(std::string provider) {
  // Matching is case-insensitive.
  provider = absl::AsciiStrToLower(provider);
  if (provider == yaze::cli::kProviderClaude ||
      provider == yaze::cli::kProviderSonnet ||
      provider == yaze::cli::kProviderOpus) {
    // (dropped line: presumably `return yaze::cli::kProviderAnthropic;`)
  }
  if (provider == yaze::cli::kProviderChatGpt ||
      provider == yaze::cli::kProviderGpt ||
      provider == yaze::cli::kProviderLmStudio ||
    // (dropped lines: further aliases and presumably a return of the
    // canonical OpenAI id)
  }
  if (provider == yaze::cli::kProviderGoogle ||
    // (dropped lines: presumably kProviderGoogleGemini and a return of the
    // canonical Gemini id)
  }
  return provider;
}
56
57bool IsLikelyOracleRomPath(absl::string_view rom_path) {
58 if (rom_path.empty()) {
59 return false;
60 }
61 const std::string lowered = absl::AsciiStrToLower(std::string(rom_path));
62 return absl::StrContains(lowered, "oracle") ||
63 absl::StrContains(lowered, "oos");
64}
65
66std::string ReadAssetFile(absl::string_view relative_path) {
67 auto asset_path =
68 yaze::util::PlatformPaths::FindAsset(std::string(relative_path));
69 if (!asset_path.ok()) {
70 return "";
71 }
72 std::ifstream file(asset_path->string());
73 if (!file.good()) {
74 return "";
75 }
76 return std::string(std::istreambuf_iterator<char>(file),
77 std::istreambuf_iterator<char>());
78}
79
  // bool HasOllamaHint(const yaze::cli::AIServiceConfig& config)
  // -- signature line lost in extraction; see the file's symbol index.
  // True when the configured Ollama host differs from the built-in default,
  // or when either OLLAMA_HOST or OLLAMA_MODEL is set to a non-empty value.
  if (config.ollama_host != kDefaultOllamaHost) {
    return true;
  }
  const char* env_ollama_host = std::getenv("OLLAMA_HOST");
  if (env_ollama_host && *env_ollama_host) {
    return true;
  }
  // A model-only hint still counts: the default host will be used.
  const char* env_ollama_model = std::getenv("OLLAMA_MODEL");
  return env_ollama_model && *env_ollama_model;
}
91
  // Fragment of HasOpenAiEndpointHint(const yaze::cli::AIServiceConfig&)
  // (see the symbol index): true when a non-empty OpenAI base URL is
  // configured. NOTE(review): the right-hand operand of this conjunction was
  // dropped by extraction — presumably a comparison against
  // kDefaultOpenAiBaseUrl; confirm against the repository source.
  return !config.openai_base_url.empty() &&
}
96
  // void ApplyEnvironmentFallbacks(yaze::cli::AIServiceConfig& config)
  // -- signature line lost in extraction; see the file's symbol index.
  // Fills unset credential/endpoint fields from well-known environment
  // variables, never overriding an explicitly configured value.
  if (config.gemini_api_key.empty()) {
    if (const char* env_key = std::getenv("GEMINI_API_KEY")) {
      config.gemini_api_key = env_key;
    }
  }
  if (config.anthropic_api_key.empty()) {
    if (const char* env_key = std::getenv("ANTHROPIC_API_KEY")) {
      config.anthropic_api_key = env_key;
    }
  }
  if (config.openai_api_key.empty()) {
    if (const char* openai_key = std::getenv("OPENAI_API_KEY")) {
      config.openai_api_key = openai_key;
    }
  }
  // NOTE(review): the second operand of this condition was dropped by
  // extraction — likely a comparison against kDefaultOpenAiBaseUrl; confirm
  // against the repository source.
  if (config.openai_base_url.empty() ||
    // OPENAI_BASE_URL takes precedence; OPENAI_API_BASE is the legacy alias.
    const char* env_openai_base = std::getenv("OPENAI_BASE_URL");
    if (!env_openai_base || !*env_openai_base) {
      env_openai_base = std::getenv("OPENAI_API_BASE");
    }
    if (env_openai_base && *env_openai_base) {
      config.openai_base_url = env_openai_base;
    }
  }
  // The default host may be replaced by OLLAMA_HOST; an explicitly set
  // non-default host is kept.
  if (config.ollama_host.empty() || config.ollama_host == kDefaultOllamaHost) {
    if (const char* env_ollama_host = std::getenv("OLLAMA_HOST");
        env_ollama_host && *env_ollama_host) {
      config.ollama_host = env_ollama_host;
    }
  }
  if (config.model.empty()) {
    if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
      config.model = env_model;
    }
  }
}
135
  // AIServiceConfig NormalizeConfig(AIServiceConfig config)
  // -- signature line lost in extraction; see the file's symbol index.
  // Canonicalizes the provider name and the OpenAI base URL before use.
  config.provider = NormalizeProviderAlias(std::move(config.provider));
  if (config.provider.empty()) {
    // (dropped line: presumably assigns a default provider such as
    // kProviderAuto — confirm against the repository source)
  }
  config.openai_base_url =
    // (dropped line: presumably NormalizeOpenAiBaseUrl(...), which appears
    // in the symbol index — confirm against the repository source)
  return config;
}
145
154
155std::unique_ptr<yaze::cli::AIService> FinalizeService(
156 std::unique_ptr<yaze::cli::AIService> service,
157 const yaze::cli::AIServiceConfig& config) {
158 if (service != nullptr && config.rom_context != nullptr) {
159 service->SetRomContext(config.rom_context);
160 }
161 return service;
162}
163
164} // namespace
165
166ABSL_DECLARE_FLAG(std::string, ai_provider);
167ABSL_DECLARE_FLAG(std::string, ai_model);
168ABSL_DECLARE_FLAG(std::string, gemini_api_key);
169ABSL_DECLARE_FLAG(std::string, anthropic_api_key);
170ABSL_DECLARE_FLAG(std::string, ollama_host);
171ABSL_DECLARE_FLAG(std::string, openai_base_url);
172ABSL_DECLARE_FLAG(std::string, prompt_version);
173ABSL_DECLARE_FLAG(bool, use_function_calling);
174ABSL_DECLARE_FLAG(std::string, rom);
175
176namespace yaze {
177namespace cli {
178
  // AIServiceConfig BuildAIServiceConfigFromFlags()
  // -- signature line lost in extraction; see the file's symbol index.
  // Assembles a config from the ABSL command-line flags, layers in
  // environment-variable fallbacks, and returns the normalized result.
  AIServiceConfig config;
  config.provider = absl::GetFlag(FLAGS_ai_provider);
  config.model = absl::GetFlag(FLAGS_ai_model);
  config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
  config.anthropic_api_key = absl::GetFlag(FLAGS_anthropic_api_key);
  config.ollama_host = absl::GetFlag(FLAGS_ollama_host);
  config.openai_base_url = absl::GetFlag(FLAGS_openai_base_url);
  // Used later by DetectPromptProfile to sniff Oracle ROMs by path.
  config.rom_path_hint = absl::GetFlag(FLAGS_rom);
  // Env vars only fill fields the flags left empty/default.
  ApplyEnvironmentFallbacks(config);
  return NormalizeConfig(std::move(config));
}
191
  // AgentPromptProfile DetectPromptProfile(const AIServiceConfig& config)
  // -- signature line lost in extraction; see the file's symbol index.
  // Checks the loaded ROM's filename first, then the --rom path hint, for
  // Oracle-of-Secrets markers. NOTE(review): the return statements inside
  // both branches (and the trailing default return) were dropped by
  // extraction — confirm against the repository source.
  if (config.rom_context != nullptr &&
      IsLikelyOracleRomPath(config.rom_context->filename())) {
    // (dropped line: presumably returns the Oracle prompt profile)
  }
  if (IsLikelyOracleRomPath(config.rom_path_hint)) {
    // (dropped line: presumably returns the Oracle prompt profile)
  }
  // (dropped line: presumably returns the default prompt profile)
}
202
203std::vector<AIServiceConfig> DiscoverModelRegistryConfigs(
204 const AIServiceConfig& base_config) {
205 const AIServiceConfig effective_config = NormalizeConfig(base_config);
206 std::vector<AIServiceConfig> configs;
207 std::set<std::string> seen_providers;
208
209 auto append_provider = [&](absl::string_view provider_name) {
210 const std::string canonical =
211 NormalizeProviderAlias(std::string(provider_name));
212 if (canonical.empty() || !seen_providers.insert(canonical).second) {
213 return;
214 }
215 AIServiceConfig provider_config = effective_config;
216 provider_config.provider = canonical;
217 configs.push_back(std::move(provider_config));
218 };
219
220 if (effective_config.provider != kProviderAuto) {
221 append_provider(effective_config.provider);
222 return configs;
223 }
224
225 if (!effective_config.gemini_api_key.empty()) {
226 append_provider(kProviderGemini);
227 }
228 if (HasOpenAiEndpointHint(effective_config) ||
229 !effective_config.openai_api_key.empty()) {
230 append_provider(kProviderOpenAi);
231 }
232 if (!effective_config.anthropic_api_key.empty()) {
233 append_provider(kProviderAnthropic);
234 }
235 if (HasOllamaHint(effective_config)) {
236 append_provider(kProviderOllama);
237 }
238 return configs;
239}
240
// Flag-driven convenience overload. NOTE(review): the body line was lost in
// extraction; it presumably delegates to
// CreateAIService(BuildAIServiceConfigFromFlags()) — confirm against the
// repository source.
std::unique_ptr<AIService> CreateAIService() {
}
244
245std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
246 AIServiceConfig effective_config = NormalizeConfig(config);
247
248 if (effective_config.provider == kProviderAuto) {
249 if (!effective_config.gemini_api_key.empty()) {
250 std::cout << "🤖 Auto-detecting AI provider...\n";
251 std::cout << " Found Gemini API key, using Gemini\n";
252 effective_config.provider = kProviderGemini;
253 } else if (HasOpenAiEndpointHint(effective_config)) {
254 std::cout << "🤖 Auto-detecting AI provider...\n";
255 std::cout << " Found OpenAI-compatible base URL, using OpenAI\n";
256 if (effective_config.model.empty()) {
257 std::cout << " Tip: Set --ai_model for local servers\n";
258 }
259 effective_config.provider = kProviderOpenAi;
260 } else if (!effective_config.anthropic_api_key.empty()) {
261 std::cout << "🤖 Auto-detecting AI provider...\n";
262 std::cout << " Found Anthropic API key, using Anthropic\n";
263 effective_config.provider = kProviderAnthropic;
264 } else if (!effective_config.openai_api_key.empty()) {
265 std::cout << "🤖 Auto-detecting AI provider...\n";
266 std::cout << " Found OpenAI API key, using OpenAI\n";
267 effective_config.provider = kProviderOpenAi;
268 if (effective_config.model.empty()) {
269 effective_config.model = "gpt-4o-mini";
270 }
271 } else if (HasOllamaHint(effective_config)) {
272 std::cout << "🤖 Auto-detecting AI provider...\n";
273 std::cout << " Found Ollama configuration, using Ollama\n";
274 effective_config.provider = kProviderOllama;
275 } else {
276 std::cout << "🤖 No AI provider configured, using MockAIService\n";
277 std::cout
278 << " Tip: Set GEMINI_API_KEY, ANTHROPIC_API_KEY, OPENAI_API_KEY,"
279 " OPENAI_BASE_URL, or OLLAMA_HOST/OLLAMA_MODEL\n";
280 effective_config.provider = kProviderMock;
281 }
282 }
283
284 if (effective_config.provider != kProviderMock) {
285 std::cout << "🤖 AI Provider: " << effective_config.provider << "\n";
286 }
287
288 auto service_or = CreateAIServiceStrict(effective_config);
289 if (service_or.ok()) {
290 return std::move(service_or.value());
291 }
292
293 std::cerr << "⚠️ " << service_or.status().message() << std::endl;
294 std::cerr << " Falling back to MockAIService" << std::endl;
295 return FinalizeService(std::make_unique<MockAIService>(), effective_config);
296}
297
298absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
299 const AIServiceConfig& config) {
300 const AIServiceConfig effective_config = NormalizeConfig(config);
301 const std::string provider = effective_config.provider;
302 if (provider.empty() || provider == kProviderAuto) {
303 return absl::InvalidArgumentError(
304 "CreateAIServiceStrict requires an explicit provider (not 'auto')");
305 }
306
307 const std::string oracle_system_instruction =
308 ResolveOracleSystemInstruction(effective_config);
309
310 if (provider == kProviderMock) {
311 return FinalizeService(std::make_unique<MockAIService>(), effective_config);
312 }
313
314 if (provider == kProviderOllama) {
315 OllamaConfig ollama_config;
316 ollama_config.base_url = effective_config.ollama_host;
317 if (!effective_config.model.empty()) {
318 ollama_config.model = effective_config.model;
319 } else if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
320 ollama_config.model = env_model;
321 }
322 if (!oracle_system_instruction.empty()) {
323 ollama_config.system_prompt = oracle_system_instruction;
324 }
325 return FinalizeService(std::make_unique<OllamaAIService>(ollama_config),
326 effective_config);
327 }
328
329 if (provider == kProviderGeminiCli || provider == kProviderLocalGemini) {
330 return FinalizeService(
331 std::make_unique<LocalGeminiCliService>(effective_config.model.empty()
332 ? "gemini-2.5-flash"
333 : effective_config.model),
334 effective_config);
335 }
336
337#ifdef YAZE_WITH_JSON
338 if (provider == kProviderGemini) {
339 if (effective_config.gemini_api_key.empty()) {
340 return absl::FailedPreconditionError(
341 "Gemini API key not provided. Set --gemini_api_key or "
342 "GEMINI_API_KEY.");
343 }
344 GeminiConfig gemini_config(effective_config.gemini_api_key);
345 if (!effective_config.model.empty()) {
346 gemini_config.model = effective_config.model;
347 }
348 if (!oracle_system_instruction.empty()) {
349 gemini_config.system_instruction = oracle_system_instruction;
350 }
351 gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
352 gemini_config.use_function_calling =
353 absl::GetFlag(FLAGS_use_function_calling);
354 gemini_config.verbose = effective_config.verbose;
355 return FinalizeService(std::make_unique<GeminiAIService>(gemini_config),
356 effective_config);
357 }
358 if (provider == kProviderAnthropic) {
359 if (effective_config.anthropic_api_key.empty()) {
360 return absl::FailedPreconditionError(
361 "Anthropic API key not provided. Set --anthropic_api_key or "
362 "ANTHROPIC_API_KEY.");
363 }
364 AnthropicConfig anthropic_config(effective_config.anthropic_api_key);
365 if (!effective_config.model.empty()) {
366 anthropic_config.model = effective_config.model;
367 }
368 if (!oracle_system_instruction.empty()) {
369 anthropic_config.system_instruction = oracle_system_instruction;
370 }
371 anthropic_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
372 anthropic_config.use_function_calling =
373 absl::GetFlag(FLAGS_use_function_calling);
374 anthropic_config.verbose = effective_config.verbose;
375 return FinalizeService(
376 std::make_unique<AnthropicAIService>(anthropic_config),
377 effective_config);
378 }
379 if (provider == kProviderOpenAi) {
380 const bool is_local_server =
381 effective_config.openai_base_url != kDefaultOpenAiBaseUrl;
382 if (effective_config.openai_api_key.empty() && !is_local_server) {
383 return absl::FailedPreconditionError(
384 "OpenAI API key not provided. Set OPENAI_API_KEY.\n"
385 "For LMStudio, use --openai_base_url=http://localhost:1234");
386 }
387 OpenAIConfig openai_config(effective_config.openai_api_key);
388 openai_config.base_url = effective_config.openai_base_url;
389 if (!effective_config.model.empty()) {
390 openai_config.model = effective_config.model;
391 }
392 if (!oracle_system_instruction.empty()) {
393 openai_config.system_instruction = oracle_system_instruction;
394 }
395 openai_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
396 openai_config.use_function_calling =
397 absl::GetFlag(FLAGS_use_function_calling);
398 openai_config.verbose = effective_config.verbose;
399 return FinalizeService(std::make_unique<OpenAIAIService>(openai_config),
400 effective_config);
401 }
402#else
403 if (provider == kProviderGemini || provider == kProviderAnthropic) {
404 return absl::FailedPreconditionError(
405 "AI support not available: rebuild with YAZE_WITH_JSON=ON");
406 }
407#endif
408
409 return absl::InvalidArgumentError(
410 absl::StrFormat("Unknown AI provider: %s", config.provider));
411}
412
413} // namespace cli
414} // namespace yaze
auto filename() const
Definition rom.h:145
static absl::StatusOr< std::filesystem::path > FindAsset(const std::string &relative_path)
Find an asset file in multiple standard locations.
yaze::cli::AIServiceConfig NormalizeConfig(yaze::cli::AIServiceConfig config)
std::unique_ptr< yaze::cli::AIService > FinalizeService(std::unique_ptr< yaze::cli::AIService > service, const yaze::cli::AIServiceConfig &config)
bool HasOllamaHint(const yaze::cli::AIServiceConfig &config)
std::string NormalizeProviderAlias(std::string provider)
void ApplyEnvironmentFallbacks(yaze::cli::AIServiceConfig &config)
bool IsLikelyOracleRomPath(absl::string_view rom_path)
std::string ResolveOracleSystemInstruction(const yaze::cli::AIServiceConfig &config)
bool HasOpenAiEndpointHint(const yaze::cli::AIServiceConfig &config)
std::string ReadAssetFile(absl::string_view relative_path)
constexpr char kProviderGemini[]
Definition provider_ids.h:9
constexpr char kProviderGpt[]
constexpr char kProviderOpus[]
constexpr char kProviderGoogle[]
constexpr char kProviderCustomOpenAi[]
std::unique_ptr< AIService > CreateAIService()
AgentPromptProfile DetectPromptProfile(const AIServiceConfig &config)
constexpr char kProviderAnthropic[]
constexpr char kProviderGeminiCli[]
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
constexpr char kProviderAuto[]
Definition provider_ids.h:6
constexpr char kProviderChatGpt[]
AIServiceConfig BuildAIServiceConfigFromFlags()
std::string NormalizeOpenAiBaseUrl(std::string base)
constexpr char kProviderGoogleGemini[]
constexpr char kProviderMock[]
Definition provider_ids.h:7
constexpr char kProviderClaude[]
constexpr char kProviderLocalGemini[]
constexpr char kProviderAnthropicClaude[]
constexpr char kProviderOpenAiCompatible[]
std::vector< AIServiceConfig > DiscoverModelRegistryConfigs(const AIServiceConfig &base_config)
constexpr char kProviderLmStudioDashed[]
constexpr char kProviderOpenAi[]
constexpr char kProviderOllama[]
Definition provider_ids.h:8
constexpr char kProviderSonnet[]
constexpr char kProviderLmStudio[]
ABSL_DECLARE_FLAG(std::string, ai_provider)