yaze 0.3.2
Link to the Past ROM Editor
 
Loading...
Searching...
No Matches
agent_editor_models.cc
Go to the documentation of this file.
2
3#include <algorithm>
4#include <cstdlib>
5#include <filesystem>
6#include <memory>
7#include <optional>
8#include <unordered_set>
9
10#include "absl/strings/ascii.h"
11#include "absl/strings/match.h"
12#include "absl/time/clock.h"
13#include "absl/time/time.h"
23#ifndef __EMSCRIPTEN__
24#include "httplib.h"
25#endif
26#include "rom/rom.h"
27#include "util/platform_paths.h"
28
29namespace yaze {
30namespace editor {
31
32namespace {
33
// Default OpenAI endpoint; used elsewhere to detect whether a custom
// base URL has been configured.
constexpr char kDefaultOpenAiBaseUrl[] = "https://api.openai.com";
35
36std::filesystem::path ExpandUserPath(const std::string& input) {
37 if (input.empty()) {
38 return {};
39 }
40 if (input.front() != '~') {
41 return std::filesystem::path(input);
42 }
43 const auto home_dir = util::PlatformPaths::GetHomeDirectory();
44 if (home_dir.empty() || home_dir == ".") {
45 return std::filesystem::path(input);
46 }
47 if (input.size() == 1) {
48 return home_dir;
49 }
50 if (input[1] == '/' || input[1] == '\\') {
51 return home_dir / input.substr(2);
52 }
53 return home_dir / input.substr(1);
54}
55
56bool HasModelExtension(const std::filesystem::path& path) {
57 const std::string ext = absl::AsciiStrToLower(path.extension().string());
58 return ext == ".gguf" || ext == ".ggml" || ext == ".bin" ||
59 ext == ".safetensors";
60}
61
// Hard cap on how many model names a scan may accumulate; bounds memory and
// UI list size when a configured path contains a huge tree. (Previously a
// bare magic number.)
constexpr std::size_t kMaxCollectedModelNames = 512;

// Appends `name` to `output` if it is non-empty and not already present in
// `seen`. No-ops on null `output`/`seen`, empty names, or once the cap
// above has been reached.
void AddUniqueModelName(const std::string& name,
                        std::vector<std::string>* output,
                        std::unordered_set<std::string>* seen) {
  if (!output || !seen || name.empty()) {
    return;
  }
  if (output->size() >= kMaxCollectedModelNames) {
    return;
  }
  // insert().second is true only for names not seen before.
  if (seen->insert(name).second) {
    output->push_back(name);
  }
}
75
// True when `path` points at an Ollama data directory, i.e. a directory
// literally named "models" whose parent directory is ".ollama".
bool IsOllamaModelsPath(const std::filesystem::path& path) {
  return path.filename() == "models" &&
         path.parent_path().filename() == ".ollama";
}
82
// Collects installed Ollama model names ("model" or "model:tag") by walking
// the manifest tree under <models_root>/manifests/registry.ollama.ai/library.
// Appends unique names to `output` (deduplicated via `seen`); stops once
// `output` holds 512 entries.
void CollectOllamaManifestModels(const std::filesystem::path& models_root,
                                 std::vector<std::string>* output,
                                 std::unordered_set<std::string>* seen) {
  if (!output || !seen) {
    return;
  }
  std::error_code ec;
  const auto library_path =
      models_root / "manifests" / "registry.ollama.ai" / "library";
  if (!std::filesystem::exists(library_path, ec)) {
    return;
  }
  std::filesystem::directory_options options =
      std::filesystem::directory_options::skip_permission_denied;
  // Non-throwing iteration: every filesystem call takes an error_code so a
  // problematic entry is skipped instead of throwing.
  for (std::filesystem::recursive_directory_iterator
           it(library_path, options, ec),
       end;
       it != end; it.increment(ec)) {
    if (ec) {
      ec.clear();
      continue;
    }
    if (!it->is_regular_file(ec)) {
      continue;
    }
    // Manifest layout is library/<model>/<tag components...>; the relative
    // path encodes the model name and tag.
    const auto rel = it->path().lexically_relative(library_path);
    if (rel.empty()) {
      continue;
    }
    std::vector<std::string> parts;
    for (const auto& part : rel) {
      if (!part.empty()) {
        parts.push_back(part.string());
      }
    }
    if (parts.empty()) {
      continue;
    }
    std::string model = parts.front();
    std::string tag;
    // Join any remaining path components with '/' to form the tag.
    for (size_t i = 1; i < parts.size(); ++i) {
      if (!tag.empty()) {
        tag += "/";
      }
      tag += parts[i];
    }
    const std::string name = tag.empty() ? model : model + ":" + tag;
    AddUniqueModelName(name, output, seen);
    if (output->size() >= 512) {
      return;  // Hard cap reached; stop scanning.
    }
  }
}
136
// Recursively scans `base_path` (up to 4 directory levels deep) for files
// with known model extensions and appends their relative, extension-less
// paths to `output` (deduplicated via `seen`, capped at 512 entries).
void CollectModelFiles(const std::filesystem::path& base_path,
                       std::vector<std::string>* output,
                       std::unordered_set<std::string>* seen) {
  if (!output || !seen) {
    return;
  }
  std::error_code ec;
  if (!std::filesystem::exists(base_path, ec)) {
    return;
  }
  std::filesystem::directory_options options =
      std::filesystem::directory_options::skip_permission_denied;
  constexpr int kMaxDepth = 4;
  for (std::filesystem::recursive_directory_iterator it(base_path, options, ec),
       end;
       it != end; it.increment(ec)) {
    if (ec) {
      ec.clear();
      continue;
    }
    if (it->is_directory(ec)) {
      // Stop descending once the depth limit is reached.
      if (it.depth() >= kMaxDepth) {
        it.disable_recursion_pending();
      }
      continue;
    }
    if (!it->is_regular_file(ec)) {
      continue;
    }
    if (!HasModelExtension(it->path())) {
      continue;
    }
    std::filesystem::path rel = it->path().lexically_relative(base_path);
    if (rel.empty()) {
      rel = it->path().filename();
    }
    rel.replace_extension();  // Model name is the relative path sans extension.
    AddUniqueModelName(rel.generic_string(), output, seen);
    if (output->size() >= 512) {
      return;  // Hard cap reached; stop scanning.
    }
  }
}
180
181std::vector<std::string> CollectLocalModelNames(
182 const UserSettings::Preferences* prefs) {
183 std::vector<std::string> results;
184 if (!prefs) {
185 return results;
186 }
187 std::unordered_set<std::string> seen;
188 for (const auto& raw_path : prefs->ai_model_paths) {
189 auto expanded = ExpandUserPath(raw_path);
190 if (expanded.empty()) {
191 continue;
192 }
193 std::error_code ec;
194 if (!std::filesystem::exists(expanded, ec)) {
195 continue;
196 }
197 if (std::filesystem::is_regular_file(expanded, ec)) {
198 std::filesystem::path rel = expanded.filename();
199 rel.replace_extension();
200 AddUniqueModelName(rel.string(), &results, &seen);
201 continue;
202 }
203 if (!std::filesystem::is_directory(expanded, ec)) {
204 continue;
205 }
206 if (IsOllamaModelsPath(expanded)) {
207 CollectOllamaManifestModels(expanded, &results, &seen);
208 continue;
209 }
210 CollectModelFiles(expanded, &results, &seen);
211 }
212 std::sort(results.begin(), results.end());
213 return results;
214}
215
// True when `needle` occurs anywhere inside `haystack` (an empty needle
// always matches).
bool ContainsText(const std::string& haystack, const std::string& needle) {
  const std::string::size_type pos = haystack.find(needle);
  return pos != std::string::npos;
}
219
// True when `text` begins with `prefix` (an empty prefix always matches).
bool StartsWithText(const std::string& text, const std::string& prefix) {
  if (prefix.size() > text.size()) {
    return false;
  }
  return text.compare(0, prefix.size(), prefix) == 0;
}
223
224bool IsTailscaleEndpoint(const std::string& base_url) {
225 if (base_url.empty()) {
226 return false;
227 }
228 std::string lower = absl::AsciiStrToLower(base_url);
229 return ContainsText(lower, ".ts.net") || ContainsText(lower, "tailscale");
230}
231
// Heuristic: returns true when `base_url` looks like a local / LAN / VPN
// endpoint that is reasonable to probe without credentials, or when the
// caller explicitly allows insecure endpoints.
bool IsLocalOrTrustedEndpoint(const std::string& base_url,
                              bool allow_insecure) {
  if (allow_insecure) {
    return true;
  }
  if (IsTailscaleEndpoint(base_url)) {
    return true;
  }
  if (base_url.empty()) {
    return false;
  }
  std::string lower = absl::AsciiStrToLower(base_url);
  // NOTE(review): these substring checks are deliberately loose and can
  // over-match (e.g. "192.168." anywhere in the URL, not just the host).
  // Also, the StartsWithText(lower, "10.") test never matches URLs carrying
  // a scheme such as "http://10.x.x.x" — confirm whether bare-host inputs
  // are the expected form here. "100.64." presumably targets the CGNAT
  // range used by Tailscale — verify.
  return ContainsText(lower, "localhost") || ContainsText(lower, "127.0.0.1") ||
         ContainsText(lower, "0.0.0.0") || ContainsText(lower, "::1") ||
         ContainsText(lower, "192.168.") || StartsWithText(lower, "10.") ||
         ContainsText(lower, "100.64.");
}
249
#ifndef __EMSCRIPTEN__
// Sends a short, bounded GET to `base_url` + `path` to test reachability.
// httplib timeouts take (seconds, microseconds) — i.e. ~200-250 ms each —
// so a dead host fails fast. Any HTTP status below 500 counts as reachable.
bool ProbeHttpEndpoint(const std::string& base_url, const char* path) {
  if (base_url.empty()) {
    return false;
  }
  httplib::Client client(base_url);
  client.set_connection_timeout(0, 200000);
  client.set_read_timeout(0, 250000);
  client.set_write_timeout(0, 250000);
  client.set_follow_location(true);
  auto response = client.Get(path);
  if (!response) {
    return false;  // Transport-level failure (no HTTP response at all).
  }
  return response->status > 0 && response->status < 500;
}

// Ollama liveness check; presumably /api/version is served by every Ollama
// install — confirm against the Ollama API docs.
bool ProbeOllamaHost(const std::string& base_url) {
  return ProbeHttpEndpoint(base_url, "/api/version");
}

// OpenAI-compatible server check via the standard /v1/models listing route.
bool ProbeOpenAICompatible(const std::string& base_url) {
  return ProbeHttpEndpoint(base_url, "/v1/models");
}
#else
// Web (Emscripten) build: no outbound HTTP probing is available.
bool ProbeOllamaHost(const std::string&) {
  return false;
}

bool ProbeOpenAICompatible(const std::string&) {
  return false;
}
#endif
283
284} // namespace
285
// Applies host/profile/API-key defaults from saved user settings to the
// current bot profile. With force == false, existing selections are left
// alone. (Per the class header this is ApplyUserSettingsDefaults(bool force).)
// NOTE(review): the signature line and several interior lines were lost in
// extraction; surviving code is kept verbatim and gaps are flagged inline.
  auto* settings = dependencies_.user_settings;
  if (!settings) {
    return;
  }
  const auto& prefs = settings->prefs();
  // Nothing to apply when no hosts or profiles are configured.
  if (prefs.ai_hosts.empty() && prefs.ai_profiles.empty()) {
    return;
  }
  bool applied = false;
  if (!force) {
    // Respect an already-selected host.
    if (!current_profile_.host_id.empty()) {
      return;
    }
    // NOTE(review): the condition guarding this second early return is
    // missing from the extracted listing.
      return;
    }
  }
  if (!prefs.ai_hosts.empty()) {
    // Fall back to the first host when no active id is recorded.
    const std::string& active_id = prefs.active_ai_host_id.empty()
                                       ? prefs.ai_hosts.front().id
                                       : prefs.active_ai_host_id;
    if (!active_id.empty()) {
      for (const auto& host : prefs.ai_hosts) {
        if (host.id == active_id) {
          // NOTE(review): the statement that applies `host` to the profile
          // is missing from the extracted listing.
          applied = true;
          break;
        }
      }
    }
  }
  if (!prefs.ai_profiles.empty()) {
    // Select the named active profile, falling back to the first one.
    const UserSettings::Preferences::AiModelProfile* active_profile = nullptr;
    if (!prefs.active_ai_profile.empty()) {
      for (const auto& profile : prefs.ai_profiles) {
        if (profile.name == prefs.active_ai_profile) {
          active_profile = &profile;
          break;
        }
      }
    }
    if (!active_profile) {
      active_profile = &prefs.ai_profiles.front();
    }
    if (active_profile && (force || current_profile_.model.empty())) {
      if (!active_profile->model.empty()) {
        current_profile_.model = active_profile->model;
        current_profile_.temperature = active_profile->temperature;
        current_profile_.top_p = active_profile->top_p;
        applied = true;
      }
    }
  }
  // Saved API keys fill in only when the profile has none yet.
  if (current_profile_.openai_api_key.empty() &&
      !prefs.openai_api_key.empty()) {
    current_profile_.openai_api_key = prefs.openai_api_key;
    applied = true;
  }
  if (current_profile_.gemini_api_key.empty() &&
      !prefs.gemini_api_key.empty()) {
    current_profile_.gemini_api_key = prefs.gemini_api_key;
    applied = true;
  }
  // NOTE(review): the `if (current_profile_.anthropic_api_key.empty() &&`
  // line opening this block is missing from the extracted listing.
      !prefs.anthropic_api_key.empty()) {
    current_profile_.anthropic_api_key = prefs.anthropic_api_key;
    applied = true;
  }
  if (applied) {
    // NOTE(review): the body of this block (lines 357-358) is missing from
    // the extracted listing.
  }
}
361
// Wires the agent editor to its collaborators (toast manager, proposal
// drawer, ROM) and seeds provider defaults from environment variables
// (OPENAI_*, OLLAMA_*, GEMINI_*, ANTHROPIC_*).
// NOTE(review): the signature line (per the header:
// InitializeWithDependencies(ToastManager*, ProposalDrawer*, Rom*)) and a
// number of statement lines were lost in extraction; gaps flagged inline.
                                             ProposalDrawer* proposal_drawer,
                                             Rom* rom) {
  toast_manager_ = toast_manager;
  proposal_drawer_ = proposal_drawer;
  rom_ = rom;

  bool profile_updated = false;
  // Reads an environment variable, returning "" when unset.
  auto env_value = [](const char* key) -> std::string {
    const char* value = std::getenv(key);
    return value ? std::string(value) : std::string();
  };

  std::string env_openai_base = env_value("OPENAI_BASE_URL");
  if (env_openai_base.empty()) {
    env_openai_base = env_value("OPENAI_API_BASE");  // Alternate variable name.
  }
  std::string env_openai_model = env_value("OPENAI_MODEL");
  std::string env_ollama_host = env_value("OLLAMA_HOST");
  std::string env_ollama_model = env_value("OLLAMA_MODEL");
  std::string env_gemini_model = env_value("GEMINI_MODEL");
  std::string env_anthropic_model = env_value("ANTHROPIC_MODEL");

  if (!env_ollama_host.empty() &&
      current_profile_.ollama_host != env_ollama_host) {
    current_profile_.ollama_host = env_ollama_host;
    current_config_.ollama_host = env_ollama_host;
    profile_updated = true;
  }
  if (!env_openai_base.empty()) {
    std::string normalized_base = cli::NormalizeOpenAiBaseUrl(env_openai_base);
    // Presumably: only override when unset or still at the default endpoint.
    if (current_profile_.openai_base_url.empty() ||
        // NOTE(review): the comparison line is missing from the extracted
        // listing.
        "https://api.openai.com") {
      current_profile_.openai_base_url = normalized_base;
      current_config_.openai_base_url = normalized_base;
      profile_updated = true;
    }
  }

  // API keys found in the environment are applied unconditionally.
  if (const char* gemini_key = std::getenv("GEMINI_API_KEY")) {
    current_profile_.gemini_api_key = gemini_key;
    current_config_.gemini_api_key = gemini_key;
    profile_updated = true;
  }

  if (const char* anthropic_key = std::getenv("ANTHROPIC_API_KEY")) {
    current_profile_.anthropic_api_key = anthropic_key;
    current_config_.anthropic_api_key = anthropic_key;
    profile_updated = true;
  }

  if (const char* openai_key = std::getenv("OPENAI_API_KEY")) {
    current_profile_.openai_api_key = openai_key;
    current_config_.openai_api_key = openai_key;
    profile_updated = true;
  }

  // NOTE(review): the expressions computing these two flags are partially
  // missing from the extracted listing.
  const bool has_openai_endpoint_hint =
      kDefaultOpenAiBaseUrl;
  const bool provider_is_default =

  // Provider/model selection when none is configured, in priority order:
  // Gemini key, custom OpenAI endpoint, Anthropic key, OpenAI key, Ollama
  // env hints. NOTE(review): several assignment lines inside the branches
  // below are missing from the extracted listing.
  if (provider_is_default) {
    if (!current_profile_.gemini_api_key.empty()) {
      if (current_profile_.model.empty()) {
        env_gemini_model.empty() ? "gemini-2.5-flash" : env_gemini_model;
      }
      profile_updated = true;
    } else if (has_openai_endpoint_hint) {
      if (current_profile_.model.empty() && !env_openai_model.empty()) {
        current_profile_.model = env_openai_model;
      }
      profile_updated = true;
    } else if (!current_profile_.anthropic_api_key.empty()) {
      if (current_profile_.model.empty()) {
        current_profile_.model = env_anthropic_model.empty()
                                     ? "claude-3-5-sonnet-20241022"
                                     : env_anthropic_model;
      }
      profile_updated = true;
    } else if (!current_profile_.openai_api_key.empty()) {
      if (current_profile_.model.empty()) {
        if (!env_openai_model.empty()) {
          current_profile_.model = env_openai_model;
        } else if (!current_profile_.openai_api_key.empty()) {
          current_profile_.model = "gpt-4o-mini";
        }
      }
      profile_updated = true;
    } else if (!env_ollama_host.empty() || !env_ollama_model.empty()) {
      if (current_profile_.model.empty() && !env_ollama_model.empty()) {
        current_profile_.model = env_ollama_model;
      }
      profile_updated = true;
    }
  }

  // Provider-specific model fallbacks from the environment.
  // NOTE(review): each `if (` line opening the four blocks below is missing
  // from the extracted listing.
      current_profile_.model.empty() && !env_ollama_model.empty()) {
    current_profile_.model = env_ollama_model;
    current_config_.model = env_ollama_model;
    profile_updated = true;
  }
      current_profile_.model.empty() && !env_openai_model.empty()) {
    current_profile_.model = env_openai_model;
    current_config_.model = env_openai_model;
    profile_updated = true;
  }
      current_profile_.model.empty() && !env_anthropic_model.empty()) {
    current_profile_.model = env_anthropic_model;
    current_config_.model = env_anthropic_model;
    profile_updated = true;
  }
      current_profile_.model.empty() && !env_gemini_model.empty()) {
    current_profile_.model = env_gemini_model;
    current_config_.model = env_gemini_model;
    profile_updated = true;
  }
  if (profile_updated) {
    // NOTE(review): this block's body is missing from the extracted listing.
  }

  // NOTE(review): the condition opening this block is missing from the
  // extracted listing.
    profile_updated = true;
  }

  if (agent_chat_) {
    agent_chat_->Initialize(toast_manager, proposal_drawer);
    if (rom) {
      agent_chat_->SetRomContext(rom);
    }
  }

  // NOTE(review): lines 517-518 are missing from the extracted listing.

#ifdef YAZE_WITH_GRPC
  // Bridge test-harness telemetry into the chat UI when gRPC support is on.
  if (agent_chat_) {
    harness_telemetry_bridge_.SetAgentChat(agent_chat_.get());
    test::TestManager::Get().SetHarnessListener(&harness_telemetry_bridge_);
  }
#endif

}
529
// Updates the active ROM pointer and forwards it to the chat subsystem.
// NOTE(review): the function signature line (presumably SetRomContext(Rom*))
// was lost in extraction.
  rom_ = rom;
  if (agent_chat_) {
    agent_chat_->SetRomContext(rom);
  }
}
536
// Refreshes the cached list of available AI models. Skips work while a
// refresh is in flight, and throttles to one refresh per 15 seconds unless
// `force` is set. Local model paths are rescanned at most every 30 seconds
// or when the configured path list changes.
// NOTE(review): the function signature line and the `if (...) {` opening
// the user-settings branch below were lost in extraction.
  if (!context_) {
    return;
  }

  auto& model_cache = context_->model_cache();
  if (model_cache.loading) {
    return;  // A refresh is already in progress.
  }
  if (!force && model_cache.last_refresh != absl::InfinitePast()) {
    const absl::Duration since_refresh = absl::Now() - model_cache.last_refresh;
    if (since_refresh < absl::Seconds(15)) {
      return;  // Throttle: last refresh is recent enough.
    }
  }

  model_cache.loading = true;
  model_cache.auto_refresh_requested = true;
  model_cache.available_models.clear();
  model_cache.model_names.clear();
  // NOTE(review): the guard opening this branch (presumably checking
  // dependencies_.user_settings) is missing from the extracted listing.
    const auto& prefs = dependencies_.user_settings->prefs();
    bool needs_local_refresh = force;
    if (!needs_local_refresh) {
      if (prefs.ai_model_paths != last_local_model_paths_) {
        needs_local_refresh = true;  // Configured paths changed.
      } else if (last_local_model_scan_ == absl::InfinitePast() ||
                 (absl::Now() - last_local_model_scan_) > absl::Seconds(30)) {
        needs_local_refresh = true;  // Periodic rescan.
      }
    }
    if (needs_local_refresh) {
      model_cache.local_model_names = CollectLocalModelNames(&prefs);
      last_local_model_paths_ = prefs.ai_model_paths;
      last_local_model_scan_ = absl::Now();
    }
  } else {
    model_cache.local_model_names.clear();
  }

  const auto& config = context_->agent_config();
  // Build a key describing the provider configuration; the model service is
  // recreated only when this key changes.
  ModelServiceKey next_key;
  next_key.provider =
      config.ai_provider.empty() ? cli::kProviderMock : config.ai_provider;
  next_key.model = config.ai_model;
  next_key.ollama_host = config.ollama_host;
  next_key.gemini_api_key = config.gemini_api_key;
  next_key.anthropic_api_key = config.anthropic_api_key;
  next_key.openai_api_key = config.openai_api_key;
  next_key.openai_base_url =
      cli::NormalizeOpenAiBaseUrl(config.openai_base_url);
  next_key.verbose = config.verbose;

  auto same_key = [](const ModelServiceKey& a, const ModelServiceKey& b) {
    return a.provider == b.provider && a.model == b.model &&
           a.ollama_host == b.ollama_host &&
           a.gemini_api_key == b.gemini_api_key &&
           a.anthropic_api_key == b.anthropic_api_key &&
           a.openai_api_key == b.openai_api_key &&
           a.openai_base_url == b.openai_base_url && a.verbose == b.verbose;
  };

  // Mock provider: expose only locally-scanned model names; no service.
  if (next_key.provider == cli::kProviderMock) {
    model_cache.loading = false;
    model_cache.model_names = model_cache.local_model_names;
    model_cache.last_refresh = absl::Now();
    return;
  }

  if (!model_service_ || !same_key(next_key, last_model_service_key_)) {
    cli::AIServiceConfig service_config;
    service_config.provider = next_key.provider;
    service_config.model = next_key.model;
    service_config.ollama_host = next_key.ollama_host;
    service_config.gemini_api_key = next_key.gemini_api_key;
    service_config.anthropic_api_key = next_key.anthropic_api_key;
    service_config.openai_api_key = next_key.openai_api_key;
    service_config.openai_base_url = next_key.openai_base_url;
    service_config.verbose = next_key.verbose;
    service_config.rom_context = rom_;
    if (rom_ != nullptr) {
      service_config.rom_path_hint = rom_->filename();
    }

    auto service_or = cli::CreateAIServiceStrict(service_config);
    if (!service_or.ok()) {
      // Creation failed: fall back to local names and surface a warning.
      model_service_.reset();
      model_cache.loading = false;
      model_cache.model_names = model_cache.local_model_names;
      model_cache.last_refresh = absl::Now();
      if (toast_manager_) {
        toast_manager_->Show(std::string(service_or.status().message()),
                             ToastType::kWarning, 2.0f);
      }
      return;
    }
    model_service_ = std::move(service_or.value());
    last_model_service_key_ = next_key;
  }

  auto models_or = model_service_->ListAvailableModels();
  if (!models_or.ok()) {
    // Listing failed: fall back to local names and surface a warning.
    model_cache.loading = false;
    model_cache.model_names = model_cache.local_model_names;
    model_cache.last_refresh = absl::Now();
    if (toast_manager_) {
      toast_manager_->Show(std::string(models_or.status().message()),
                           ToastType::kWarning, 2.0f);
    }
    return;
  }

  model_cache.available_models = models_or.value();
  std::unordered_set<std::string> seen;
  for (const auto& info : model_cache.available_models) {
    if (!info.name.empty()) {
      AddUniqueModelName(info.name, &model_cache.model_names, &seen);
    }
  }
  std::sort(model_cache.model_names.begin(), model_cache.model_names.end());
  // No model selected yet: prefer the first entry matching the active
  // provider, otherwise the first known name.
  if (context_->agent_config().ai_model.empty()) {
    auto& ctx_config = context_->agent_config();
    std::string selected;
    for (const auto& info : model_cache.available_models) {
      if (ctx_config.ai_provider.empty() ||
          info.provider == ctx_config.ai_provider) {
        selected = info.name;
        break;
      }
    }
    if (selected.empty() && !model_cache.model_names.empty()) {
      selected = model_cache.model_names.front();
    }
    if (!selected.empty()) {
      ctx_config.ai_model = selected;
      internal::CopyStringToBuffer(ctx_config.ai_model,
                                   ctx_config.model_buffer);
    }
  }
  model_cache.last_refresh = absl::Now();
  model_cache.loading = false;
}
679
// Applies a saved model preset to the agent configuration: provider, model,
// and host (routed to the Ollama host or the OpenAI base URL depending on
// the selected provider), then mirrors the values into the ImGui buffers.
// NOTE(review): the function signature line and the statement preceding the
// closing brace were lost in extraction.
  if (!context_) {
    return;
  }

  auto& config = context_->agent_config();
  if (!preset.provider.empty()) {
    config.ai_provider = preset.provider;
  }
  if (!preset.model.empty()) {
    config.ai_model = preset.model;
  }
  if (!preset.host.empty()) {
    // The preset's host field means different things per provider.
    if (config.ai_provider == cli::kProviderOllama) {
      config.ollama_host = preset.host;
    } else if (config.ai_provider == cli::kProviderOpenAi) {
      config.openai_base_url = cli::NormalizeOpenAiBaseUrl(preset.host);
    }
  }

  // Stamp the matching stored preset's last_used time — presumably for
  // most-recently-used ordering elsewhere; confirm.
  for (auto& entry : config.model_presets) {
    if (entry.name == preset.name) {
      entry.last_used = absl::Now();
      break;
    }
  }

  // Keep the ImGui text buffers in sync with the applied values.
  internal::CopyStringToBuffer(config.ai_provider, config.provider_buffer);
  internal::CopyStringToBuffer(config.ai_model, config.model_buffer);
  internal::CopyStringToBuffer(config.ollama_host, config.ollama_host_buffer);
  internal::CopyStringToBuffer(config.openai_base_url,
                               config.openai_base_url_buffer);

}
715
// One-shot auto-selection of a configured AI host: pass 1 probes endpoints
// that look local/trusted; pass 2 accepts remaining hosts that have an API
// key. Returns true when a host was selected.
// NOTE(review): the function signature, part of one early-return condition,
// and the interior of the select_host lambda were lost in extraction.
  if (auto_probe_done_) {
    return false;  // Only ever probe once per session.
  }
  auto_probe_done_ = true;

  auto* settings = dependencies_.user_settings;
  if (!settings) {
    return false;
  }
  const auto& prefs = settings->prefs();
  if (prefs.ai_hosts.empty()) {
    return false;
  }
  // Respect an existing selection.
  if (!current_profile_.host_id.empty() ||
      // NOTE(review): the second half of this condition is missing from the
      // extracted listing.
    return false;
  }

  // Resolves a host's API key from preferences when not stored inline.
  auto build_host = [&](const UserSettings::Preferences::AiHost& host) {
    auto resolved = host;
    if (resolved.api_key.empty()) {
      resolved.api_key = internal::ResolveHostApiKey(&prefs, host);
    }
    return resolved;
  };

  // Adopts `host` as the active selection.
  // NOTE(review): the statements applying the host (and the body of the
  // context_ branch) are missing from the extracted listing.
  auto select_host = [&](const UserSettings::Preferences::AiHost& host) {
    if (context_) {
    }
    return true;
  };

  // Tries a single host. With probe_only_local == true, only endpoints that
  // look local/trusted are probed; remote keyed hosts wait for pass 2.
  auto try_host = [&](const UserSettings::Preferences::AiHost& host,
                      bool probe_only_local) {
    std::string api_type =
        host.api_type.empty() ? cli::kProviderOpenAi : host.api_type;
    if (api_type == cli::kProviderLmStudio) {
      api_type = cli::kProviderOpenAi;  // LM Studio is OpenAI-compatible.
    }
    auto resolved = build_host(host);
    const bool has_key = !resolved.api_key.empty();

    if (api_type == cli::kProviderOllama) {
      if (resolved.base_url.empty()) {
        return false;
      }
      if (probe_only_local && !IsLocalOrTrustedEndpoint(
                                  resolved.base_url, resolved.allow_insecure)) {
        return false;
      }
      if (!ProbeOllamaHost(resolved.base_url)) {
        return false;
      }
      return select_host(resolved);
    }

    if (api_type == cli::kProviderOpenAi) {
      if (resolved.base_url.empty()) {
        return false;
      }
      const bool trusted =
          IsLocalOrTrustedEndpoint(resolved.base_url, resolved.allow_insecure);
      if (probe_only_local && !trusted) {
        return false;
      }
      // Trusted endpoints are actively probed; untrusted ones are accepted
      // in pass 2 only when a key is present.
      if (trusted && ProbeOpenAICompatible(resolved.base_url)) {
        return select_host(resolved);
      }
      if (!probe_only_local && has_key) {
        return select_host(resolved);
      }
      return false;
    }

    if (api_type == cli::kProviderGemini ||
        api_type == cli::kProviderAnthropic) {
      // Cloud-only providers: no probing, just require a key.
      return has_key ? select_host(resolved) : false;
    }

    return false;
  };

  // Candidate order: the active host (if configured) first, then the rest
  // in stored order.
  std::vector<const UserSettings::Preferences::AiHost*> candidates;
  if (!prefs.active_ai_host_id.empty()) {
    for (const auto& host : prefs.ai_hosts) {
      if (host.id == prefs.active_ai_host_id) {
        candidates.push_back(&host);
        break;
      }
    }
  }
  for (const auto& host : prefs.ai_hosts) {
    if (!candidates.empty() && candidates.front()->id == host.id) {
      continue;  // Already queued as the active host.
    }
    candidates.push_back(&host);
  }

  // Pass 1: local/trusted endpoints only.
  for (const auto* host : candidates) {
    if (try_host(*host, true)) {
      return true;
    }
  }
  // Pass 2: everything else (e.g. keyed remote hosts).
  for (const auto* host : candidates) {
    if (try_host(*host, false)) {
      return true;
    }
  }

  return false;
}
833
// Applies an AgentConfig: stores it in current_config_, reconfigures the
// chat agent's AI provider, and propagates iteration/retry/verbosity
// settings into the agent service.
// NOTE(review): the ApplyConfig signature line, the preceding
// GetCurrentConfig lines, and the NormalizeOpenAiBaseUrl argument line were
// lost in extraction; surviving code is kept verbatim.

  current_config_ = config;

  if (agent_chat_) {
    auto* service = agent_chat_->GetAgentService();
    if (service) {
      cli::AIServiceConfig provider_config;
      provider_config.provider =
          config.provider.empty() ? cli::kProviderAuto : config.provider;
      provider_config.model = config.model;
      provider_config.ollama_host = config.ollama_host;
      provider_config.gemini_api_key = config.gemini_api_key;
      provider_config.anthropic_api_key = config.anthropic_api_key;
      provider_config.openai_api_key = config.openai_api_key;
      // NOTE(review): the right-hand side of this assignment (presumably a
      // NormalizeOpenAiBaseUrl call) is missing from the extracted listing.
      provider_config.openai_base_url =
      provider_config.verbose = config.verbose;
      provider_config.rom_context = rom_;
      if (rom_ != nullptr) {
        provider_config.rom_path_hint = rom_->filename();
      }

      auto status = service->ConfigureProvider(provider_config);
      if (!status.ok() && toast_manager_) {
        toast_manager_->Show(std::string(status.message()), ToastType::kError);
      }

      // Mirror limits/verbosity into the agent service's own config.
      auto agent_cfg = service->GetConfig();
      agent_cfg.max_tool_iterations = config.max_tool_iterations;
      agent_cfg.max_retry_attempts = config.max_retry_attempts;
      agent_cfg.verbose = config.verbose;
      agent_cfg.show_reasoning = config.show_reasoning;
      service->SetConfig(agent_cfg);
    }
  }
}
874
875} // namespace editor
876} // namespace yaze
The Rom class is used to load, save, and modify Rom data. This is a generic SNES ROM container and do...
Definition rom.h:28
auto filename() const
Definition rom.h:145
std::unique_ptr< cli::AIService > model_service_
void ApplyConfig(const AgentConfig &config)
void InitializeWithDependencies(ToastManager *toast_manager, ProposalDrawer *proposal_drawer, Rom *rom)
void ApplyModelPreset(const ModelPreset &preset)
ProposalDrawer * proposal_drawer_
ModelServiceKey last_model_service_key_
AgentUIContext * context_
ToastManager * toast_manager_
AgentConfig GetCurrentConfig() const
std::vector< std::string > last_local_model_paths_
void ApplyConfigFromContext(const AgentConfigState &config)
std::unique_ptr< AgentChat > agent_chat_
void ApplyUserSettingsDefaults(bool force=false)
AgentConfigState & agent_config()
Rom * rom() const
Definition editor.h:306
EditorDependencies dependencies_
Definition editor.h:316
ImGui drawer for displaying and managing agent proposals.
void Show(const std::string &message, ToastType type=ToastType::kInfo, float ttl_seconds=3.0f)
static TestManager & Get()
static std::filesystem::path GetHomeDirectory()
Get the user's home directory in a cross-platform way.
constexpr char kProviderGemini[]
Definition provider_ids.h:9
constexpr char kProviderAnthropic[]
absl::StatusOr< std::unique_ptr< AIService > > CreateAIServiceStrict(const AIServiceConfig &config)
constexpr char kProviderAuto[]
Definition provider_ids.h:6
std::string NormalizeOpenAiBaseUrl(std::string base)
constexpr char kProviderMock[]
Definition provider_ids.h:7
constexpr char kProviderOpenAi[]
constexpr char kProviderOllama[]
Definition provider_ids.h:8
constexpr char kProviderLmStudio[]
bool IsOllamaModelsPath(const std::filesystem::path &path)
void CollectOllamaManifestModels(const std::filesystem::path &models_root, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
bool HasModelExtension(const std::filesystem::path &path)
bool IsLocalOrTrustedEndpoint(const std::string &base_url, bool allow_insecure)
void CollectModelFiles(const std::filesystem::path &base_path, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
bool ContainsText(const std::string &haystack, const std::string &needle)
bool StartsWithText(const std::string &text, const std::string &prefix)
std::vector< std::string > CollectLocalModelNames(const UserSettings::Preferences *prefs)
void AddUniqueModelName(const std::string &name, std::vector< std::string > *output, std::unordered_set< std::string > *seen)
bool ProbeHttpEndpoint(const std::string &base_url, const char *path)
std::string ResolveHostApiKey(const UserSettings::Preferences *prefs, const UserSettings::Preferences::AiHost &host)
void CopyStringToBuffer(const std::string &src, char(&dest)[N])
void ApplyHostPresetToProfile(AgentEditor::BotProfile *profile, const UserSettings::Preferences::AiHost &host, const UserSettings::Preferences *prefs)
Model preset for quick switching.
std::vector< std::string > ai_model_paths