feat: enhance agent configuration and chat history management
- Introduced AgentConfigSnapshot structure to encapsulate agent configuration settings, including model metadata, tool preferences, and automation options.
- Updated AgentChatHistoryCodec to support serialization and deserialization of agent configuration, warnings, and model metadata.
- Enhanced AgentChatHistoryPopup with provider filtering and message pinning for an improved user experience.
- Added new methods for managing agent settings and builder workflows, facilitating better integration of agent configurations into the chat interface.
- Documented the new Agent Builder workflow in the README for clarity on usage and features.
@@ -22,6 +22,7 @@ The main manager class that coordinates all agent-related functionality:
- Mode switching between local and network collaboration
- ROM context management for agent queries
- Integration with toast notifications and proposal drawer
- Agent Builder workspace for persona, tool-stack, automation, and validation planning

#### AgentChatWidget (`agent_chat_widget.h/cc`)
ImGui-based chat interface for interacting with AI agents:
@@ -37,6 +38,11 @@ ImGui-based chat interface for interacting with AI agents:
- JSON response formatting
- Table data visualization
- Proposal metadata display
- Provider/model telemetry badges with latency + tool counts
- Built-in Ollama model roster with favorites, filtering, and chain modes
- Model Deck with persistent presets (host/model/tags) synced via chat history
- Persona summary + automation hooks surfaced directly in the chat controls
- Tool configuration matrix (resources/dungeon/overworld/dialogue/etc.) akin to OpenWebUI

#### AgentChatHistoryCodec (`agent_chat_history_codec.h/cc`)
Serialization/deserialization for chat history:
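Pieced together from the `Save()` implementation later in this commit, a version-4 history file has roughly the shape sketched below. Per-message text/sender fields and the `multimodal` block are elided because their keys fall outside this diff, and all concrete values are illustrative:

```
{
  "version": 4,
  "messages": [
    {
      "warnings": ["example warning surfaced by the agent"],
      "model_metadata": {
        "provider": "ollama",
        "model": "qwen2.5-coder:7b",
        "latency_seconds": 1.8,
        "tool_iterations": 2,
        "tool_names": ["resource-list", "dungeon-describe"],
        "parameters": {"temperature": "0.25"}
      }
    }
  ],
  "agent_config": {
    "provider": "ollama",
    "model": "qwen2.5-coder:7b",
    "ollama_host": "http://localhost:11434",
    "show_reasoning": true,
    "temperature": 0.25,
    "top_p": 0.95,
    "max_output_tokens": 2048,
    "chain_mode": 0,
    "favorite_models": ["qwen2.5-coder:7b"],
    "persona_notes": "",
    "goals": [],
    "tools": {"resources": true, "dungeon": true, "overworld": true},
    "model_presets": [
      {"name": "local-coder", "model": "qwen2.5-coder:7b",
       "host": "http://localhost:11434", "tags": ["ollama"], "pinned": true}
    ]
  }
}
```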
@@ -137,6 +143,18 @@ network_coordinator->SendProposal(username, proposal_json);
network_coordinator->SendAIQuery(username, "What enemies are in room 5?");
```

### Agent Builder Workflow

The `Agent Builder` tab inside AgentEditor walks you through five phases:

1. **Persona & Goals** – capture the agent’s tone, guardrails, and explicit objectives.
2. **Tool Stack** – toggle dispatcher categories (resources, dungeon, overworld, dialogue, GUI, music, sprite, emulator) and sync the plan to the chat widget.
3. **Automation Hooks** – configure automatic harness execution, ROM syncing, and proposal focus behaviour for full E2E runs.
4. **Validation** – document success criteria and testing notes.
5. **E2E Checklist** – track readiness (automation toggles, persona, ROM sync) before triggering full end-to-end harness runs. Builder stages can be exported/imported as JSON blueprints (`~/.yaze/agent/blueprints/*.json`) for reuse across projects; a sketch of such a blueprint follows below.

Builder plans can be applied directly to `AgentChatWidget::AgentConfigState` so that UI and CLI automation stay in sync.
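The blueprint schema itself is not part of this commit (only the storage path is), so the following is a hypothetical sketch: the tool categories and automation flags mirror names the widget actually uses (`auto_run_plan`, `auto_sync_rom`, `auto_focus_proposals`), while the remaining keys are invented for illustration.

```
{
  "persona": {
    "notes": "Terse dungeon-editing copilot; never touches the ROM without a proposal.",
    "goals": ["Summarize room state before edits", "Prefer dispatcher tools over raw reads"]
  },
  "tools": {
    "resources": true, "dungeon": true, "overworld": false, "dialogue": true,
    "messages": true, "gui": false, "music": false, "sprite": true, "emulator": false
  },
  "automation": {
    "auto_run_plan": true,
    "auto_sync_rom": true,
    "auto_focus_proposals": false
  },
  "validation": "Harness plan passes; all proposals reviewed before apply.",
  "e2e_ready": false
}
```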
## File Structure

```
@@ -256,6 +274,7 @@ Server health and metrics:
4. **Session replay** - Record and playback editing sessions
5. **Agent memory** - Persistent context across sessions
6. **Real-time cursor tracking** - See where collaborators are working
7. **Blueprint templates** - Share agent personas/tool stacks between teams

## Server Protocol

@@ -187,6 +187,38 @@ absl::StatusOr<AgentChatHistoryCodec::Snapshot> AgentChatHistoryCodec::Load(
    if (item.contains("proposal")) {
      message.proposal = ParseProposal(item["proposal"]);
    }
    if (item.contains("warnings") && item["warnings"].is_array()) {
      for (const auto& warning : item["warnings"]) {
        if (warning.is_string()) {
          message.warnings.push_back(warning.get<std::string>());
        }
      }
    }
    if (item.contains("model_metadata") &&
        item["model_metadata"].is_object()) {
      const auto& meta_json = item["model_metadata"];
      cli::agent::ChatMessage::ModelMetadata meta;
      meta.provider = meta_json.value("provider", "");
      meta.model = meta_json.value("model", "");
      meta.latency_seconds = meta_json.value("latency_seconds", 0.0);
      meta.tool_iterations = meta_json.value("tool_iterations", 0);
      if (meta_json.contains("tool_names") && meta_json["tool_names"].is_array()) {
        for (const auto& name : meta_json["tool_names"]) {
          if (name.is_string()) {
            meta.tool_names.push_back(name.get<std::string>());
          }
        }
      }
      if (meta_json.contains("parameters") &&
          meta_json["parameters"].is_object()) {
        for (const auto& [key, value] : meta_json["parameters"].items()) {
          if (value.is_string()) {
            meta.parameters[key] = value.get<std::string>();
          }
        }
      }
      message.model_metadata = meta;
    }

    snapshot.history.push_back(std::move(message));
  }
@@ -237,6 +269,80 @@ absl::StatusOr<AgentChatHistoryCodec::Snapshot> AgentChatHistoryCodec::Load(
    }
  }

  if (json.contains("agent_config") && json["agent_config"].is_object()) {
    const auto& config_json = json["agent_config"];
    AgentConfigSnapshot config;
    config.provider = config_json.value("provider", "");
    config.model = config_json.value("model", "");
    config.ollama_host = config_json.value("ollama_host", "http://localhost:11434");
    config.gemini_api_key = config_json.value("gemini_api_key", "");
    config.verbose = config_json.value("verbose", false);
    config.show_reasoning = config_json.value("show_reasoning", true);
    config.max_tool_iterations = config_json.value("max_tool_iterations", 4);
    config.max_retry_attempts = config_json.value("max_retry_attempts", 3);
    config.temperature = config_json.value("temperature", 0.25f);
    config.top_p = config_json.value("top_p", 0.95f);
    config.max_output_tokens = config_json.value("max_output_tokens", 2048);
    config.stream_responses = config_json.value("stream_responses", false);
    config.chain_mode = config_json.value("chain_mode", 0);
    if (config_json.contains("favorite_models") &&
        config_json["favorite_models"].is_array()) {
      for (const auto& fav : config_json["favorite_models"]) {
        if (fav.is_string()) {
          config.favorite_models.push_back(fav.get<std::string>());
        }
      }
    }
    if (config_json.contains("model_chain") &&
        config_json["model_chain"].is_array()) {
      for (const auto& chain : config_json["model_chain"]) {
        if (chain.is_string()) {
          config.model_chain.push_back(chain.get<std::string>());
        }
      }
    }
    if (config_json.contains("goals") && config_json["goals"].is_array()) {
      for (const auto& goal : config_json["goals"]) {
        if (goal.is_string()) {
          config.goals.push_back(goal.get<std::string>());
        }
      }
    }
    if (config_json.contains("model_presets") &&
        config_json["model_presets"].is_array()) {
      for (const auto& preset_json : config_json["model_presets"]) {
        if (!preset_json.is_object()) continue;
        AgentConfigSnapshot::ModelPreset preset;
        preset.name = preset_json.value("name", "");
        preset.model = preset_json.value("model", "");
        preset.host = preset_json.value("host", "");
        preset.pinned = preset_json.value("pinned", false);
        if (preset_json.contains("tags") && preset_json["tags"].is_array()) {
          for (const auto& tag : preset_json["tags"]) {
            if (tag.is_string()) {
              preset.tags.push_back(tag.get<std::string>());
            }
          }
        }
        config.model_presets.push_back(std::move(preset));
      }
    }
    if (config_json.contains("tools") && config_json["tools"].is_object()) {
      const auto& tools_json = config_json["tools"];
      config.tools.resources = tools_json.value("resources", true);
      config.tools.dungeon = tools_json.value("dungeon", true);
      config.tools.overworld = tools_json.value("overworld", true);
      config.tools.dialogue = tools_json.value("dialogue", true);
      config.tools.messages = tools_json.value("messages", true);
      config.tools.gui = tools_json.value("gui", true);
      config.tools.music = tools_json.value("music", true);
      config.tools.sprite = tools_json.value("sprite", true);
      config.tools.emulator = tools_json.value("emulator", true);
    }
    config.persona_notes = config_json.value("persona_notes", "");
    snapshot.agent_config = config;
  }

  return snapshot;
#else
  (void)path;
@@ -249,7 +355,7 @@ absl::Status AgentChatHistoryCodec::Save(
    const std::filesystem::path& path, const Snapshot& snapshot) {
#if defined(YAZE_WITH_JSON)
  Json json;
-  json["version"] = 3;
+  json["version"] = 4;
  json["messages"] = Json::array();

  for (const auto& message : snapshot.history) {
@@ -286,6 +392,24 @@ absl::Status AgentChatHistoryCodec::Save(
    if (message.proposal.has_value()) {
      entry["proposal"] = SerializeProposal(*message.proposal);
    }
    if (!message.warnings.empty()) {
      entry["warnings"] = message.warnings;
    }
    if (message.model_metadata.has_value()) {
      const auto& meta = *message.model_metadata;
      Json meta_json;
      meta_json["provider"] = meta.provider;
      meta_json["model"] = meta.model;
      meta_json["latency_seconds"] = meta.latency_seconds;
      meta_json["tool_iterations"] = meta.tool_iterations;
      meta_json["tool_names"] = meta.tool_names;
      Json params_json;
      for (const auto& [key, value] : meta.parameters) {
        params_json[key] = value;
      }
      meta_json["parameters"] = std::move(params_json);
      entry["model_metadata"] = std::move(meta_json);
    }

    json["messages"].push_back(std::move(entry));
  }
@@ -317,6 +441,54 @@ absl::Status AgentChatHistoryCodec::Save(
  }
  json["multimodal"] = std::move(multimodal_json);

  if (snapshot.agent_config.has_value()) {
    const auto& config = *snapshot.agent_config;
    Json config_json;
    config_json["provider"] = config.provider;
    config_json["model"] = config.model;
    config_json["ollama_host"] = config.ollama_host;
    config_json["gemini_api_key"] = config.gemini_api_key;
    config_json["verbose"] = config.verbose;
    config_json["show_reasoning"] = config.show_reasoning;
    config_json["max_tool_iterations"] = config.max_tool_iterations;
    config_json["max_retry_attempts"] = config.max_retry_attempts;
    config_json["temperature"] = config.temperature;
    config_json["top_p"] = config.top_p;
    config_json["max_output_tokens"] = config.max_output_tokens;
    config_json["stream_responses"] = config.stream_responses;
    config_json["chain_mode"] = config.chain_mode;
    config_json["favorite_models"] = config.favorite_models;
    config_json["model_chain"] = config.model_chain;
    config_json["persona_notes"] = config.persona_notes;
    config_json["goals"] = config.goals;

    Json tools_json;
    tools_json["resources"] = config.tools.resources;
    tools_json["dungeon"] = config.tools.dungeon;
    tools_json["overworld"] = config.tools.overworld;
    tools_json["dialogue"] = config.tools.dialogue;
    tools_json["messages"] = config.tools.messages;
    tools_json["gui"] = config.tools.gui;
    tools_json["music"] = config.tools.music;
    tools_json["sprite"] = config.tools.sprite;
    tools_json["emulator"] = config.tools.emulator;
    config_json["tools"] = std::move(tools_json);

    Json presets_json = Json::array();
    for (const auto& preset : config.model_presets) {
      Json preset_json;
      preset_json["name"] = preset.name;
      preset_json["model"] = preset.model;
      preset_json["host"] = preset.host;
      preset_json["tags"] = preset.tags;
      preset_json["pinned"] = preset.pinned;
      presets_json.push_back(std::move(preset_json));
    }
    config_json["model_presets"] = std::move(presets_json);

    json["agent_config"] = std::move(config_json);
  }

  std::error_code ec;
  auto directory = path.parent_path();
  if (!directory.empty()) {
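Taken together, `Save()` and `Load()` round-trip the widget state. A minimal sketch of that flow, condensed from the `PersistHistory()`/`EnsureHistoryLoaded()` code shown later in this commit (member access and error handling are simplified here; `chat_history` is a stand-in for the widget's message list):

```
// Sketch of the round-trip the widget performs internally.
AgentChatHistoryCodec::Snapshot snapshot;
snapshot.history = chat_history;                    // current messages
snapshot.agent_config = BuildHistoryAgentConfig();  // provider/tools/presets
absl::Status status = AgentChatHistoryCodec::Save(history_path_, snapshot);

auto loaded = AgentChatHistoryCodec::Load(history_path_);
if (loaded.ok() && loaded->agent_config.has_value()) {
  ApplyHistoryAgentConfig(*loaded->agent_config);   // restore the config
}
```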
@@ -34,10 +34,52 @@ class AgentChatHistoryCodec {
    absl::Time last_updated = absl::InfinitePast();
  };

  struct AgentConfigSnapshot {
    struct ToolFlags {
      bool resources = true;
      bool dungeon = true;
      bool overworld = true;
      bool dialogue = true;
      bool messages = true;
      bool gui = true;
      bool music = true;
      bool sprite = true;
      bool emulator = true;
    };
    struct ModelPreset {
      std::string name;
      std::string model;
      std::string host;
      std::vector<std::string> tags;
      bool pinned = false;
    };

    std::string provider;
    std::string model;
    std::string ollama_host;
    std::string gemini_api_key;
    bool verbose = false;
    bool show_reasoning = true;
    int max_tool_iterations = 4;
    int max_retry_attempts = 3;
    float temperature = 0.25f;
    float top_p = 0.95f;
    int max_output_tokens = 2048;
    bool stream_responses = false;
    int chain_mode = 0;
    std::vector<std::string> favorite_models;
    std::vector<std::string> model_chain;
    std::vector<ModelPreset> model_presets;
    std::string persona_notes;
    std::vector<std::string> goals;
    ToolFlags tools;
  };

  struct Snapshot {
    std::vector<cli::agent::ChatMessage> history;
    CollaborationState collaboration;
    MultimodalState multimodal;
    std::optional<AgentConfigSnapshot> agent_config;
  };

  // Returns true when the codec can actually serialize / deserialize history.
@@ -1,8 +1,13 @@
#include "app/editor/agent/agent_chat_history_popup.h"

#include <cstring>
#include <set>
#include <string>

#include "absl/strings/ascii.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_join.h"
#include "absl/strings/match.h"
#include "absl/time/time.h"
#include "app/editor/agent/agent_ui_theme.h"
#include "app/editor/system/toast_manager.h"
@@ -14,8 +19,25 @@
namespace yaze {
namespace editor {

namespace {

std::string BuildProviderLabel(
    const std::optional<cli::agent::ChatMessage::ModelMetadata>& meta) {
  if (!meta.has_value()) {
    return "";
  }
  if (meta->model.empty()) {
    return meta->provider;
  }
  return absl::StrFormat("%s · %s", meta->provider, meta->model);
}

}  // namespace

AgentChatHistoryPopup::AgentChatHistoryPopup() {
  std::memset(input_buffer_, 0, sizeof(input_buffer_));
  std::memset(search_buffer_, 0, sizeof(search_buffer_));
  provider_filters_.push_back("All providers");
}

void AgentChatHistoryPopup::Draw() {
@@ -129,12 +151,10 @@ void AgentChatHistoryPopup::DrawMessageList() {
    // Skip internal messages
    if (msg.is_internal) continue;

-    // Apply filter
-    if (message_filter_ == MessageFilter::kUserOnly &&
-        msg.sender != cli::agent::ChatMessage::Sender::kUser) continue;
-    if (message_filter_ == MessageFilter::kAgentOnly &&
-        msg.sender != cli::agent::ChatMessage::Sender::kAgent) continue;
+    if (!MessagePassesFilters(msg, i)) {
+      continue;
+    }

    DrawMessage(msg, i);
  }
}
@@ -157,6 +177,33 @@ void AgentChatHistoryPopup::DrawMessage(const cli::agent::ChatMessage& msg, int
  ImGui::SameLine();
  ImGui::TextColored(ImVec4(0.5f, 0.5f, 0.5f, 1.0f),
      "[%s]", absl::FormatTime("%H:%M:%S", msg.timestamp, absl::LocalTimeZone()).c_str());
  if (msg.model_metadata.has_value()) {
    const auto& meta = *msg.model_metadata;
    ImGui::SameLine();
    ImGui::TextColored(ImVec4(0.3f, 0.8f, 1.0f, 1.0f), "[%s • %s]",
                       meta.provider.c_str(), meta.model.c_str());
  }

  bool is_pinned = pinned_messages_.find(index) != pinned_messages_.end();
  float pin_target =
      ImGui::GetCursorPosX() + ImGui::GetContentRegionAvail().x - 24.0f;
  if (pin_target > ImGui::GetCursorPosX()) {
    ImGui::SameLine(pin_target);
  } else {
    ImGui::SameLine();
  }
  if (is_pinned) {
    ImGui::PushStyleColor(ImGuiCol_Button, ImVec4(0.2f, 0.5f, 0.3f, 0.8f));
  }
  if (ImGui::SmallButton(ICON_MD_PUSH_PIN)) {
    TogglePin(index);
  }
  if (is_pinned) {
    ImGui::PopStyleColor();
  }
  if (ImGui::IsItemHovered()) {
    ImGui::SetTooltip(is_pinned ? "Unpin message" : "Pin message");
  }

  // Message content with terminal styling
  ImGui::Indent(15.0f);
@@ -184,6 +231,21 @@ void AgentChatHistoryPopup::DrawMessage(const cli::agent::ChatMessage& msg, int
    ImGui::TextColored(ImVec4(0.2f, proposal_pulse, 0.4f, 1.0f),
        " %s Proposal: [%s]", ICON_MD_PREVIEW, msg.proposal->id.c_str());
  }

  if (msg.model_metadata.has_value()) {
    const auto& meta = *msg.model_metadata;
    ImGui::TextDisabled("  Latency: %.2fs | Tools: %d",
                        meta.latency_seconds, meta.tool_iterations);
    if (!meta.tool_names.empty()) {
      ImGui::TextDisabled("  Tool calls: %s",
                          absl::StrJoin(meta.tool_names, ", ").c_str());
    }
  }

  for (const auto& warning : msg.warnings) {
    ImGui::TextColored(ImVec4(0.95f, 0.6f, 0.2f, 1.0f), "  %s %s",
                       ICON_MD_WARNING, warning.c_str());
  }

  ImGui::Unindent(15.0f);
  ImGui::Spacing();
@@ -253,6 +315,49 @@ void AgentChatHistoryPopup::DrawHeader() {

  ImGui::SameLine();
  ImGui::TextDisabled("[v0.4.x]");

  ImGui::Spacing();
  ImGui::SetNextItemWidth(ImGui::GetContentRegionAvail().x * 0.55f);
  if (ImGui::InputTextWithHint("##history_search", ICON_MD_SEARCH " Search...",
                               search_buffer_, sizeof(search_buffer_))) {
    needs_scroll_ = true;
  }

  if (provider_filters_.empty()) {
    provider_filters_.push_back("All providers");
    provider_filter_index_ = 0;
  }

  ImGui::SameLine();
  ImGui::SetNextItemWidth(150.0f);
  const char* provider_preview =
      provider_filters_[std::min<int>(provider_filter_index_,
                                      static_cast<int>(provider_filters_.size() - 1))]
          .c_str();
  if (ImGui::BeginCombo("##provider_filter", provider_preview)) {
    for (int i = 0; i < static_cast<int>(provider_filters_.size()); ++i) {
      bool selected = (provider_filter_index_ == i);
      if (ImGui::Selectable(provider_filters_[i].c_str(), selected)) {
        provider_filter_index_ = i;
        needs_scroll_ = true;
      }
      if (selected) {
        ImGui::SetItemDefaultFocus();
      }
    }
    ImGui::EndCombo();
  }
  if (ImGui::IsItemHovered()) {
    ImGui::SetTooltip("Filter messages by provider/model metadata");
  }

  ImGui::SameLine();
  if (ImGui::Checkbox(ICON_MD_PUSH_PIN "##pin_filter", &show_pinned_only_)) {
    needs_scroll_ = true;
  }
  if (ImGui::IsItemHovered()) {
    ImGui::SetTooltip("Show pinned messages only");
  }

  // Buttons properly spaced from right edge
  ImGui::SameLine(ImGui::GetCursorPosX() + ImGui::GetContentRegionAvail().x - 75.0f);
@@ -297,13 +402,13 @@ void AgentChatHistoryPopup::DrawHeader() {

  // Message count with retro styling
  int visible_count = 0;
-  for (const auto& msg : messages_) {
-    if (msg.is_internal) continue;
-    if (message_filter_ == MessageFilter::kUserOnly &&
-        msg.sender != cli::agent::ChatMessage::Sender::kUser) continue;
-    if (message_filter_ == MessageFilter::kAgentOnly &&
-        msg.sender != cli::agent::ChatMessage::Sender::kAgent) continue;
-    visible_count++;
+  for (int i = 0; i < static_cast<int>(messages_.size()); ++i) {
+    if (messages_[i].is_internal) {
+      continue;
+    }
+    if (MessagePassesFilters(messages_[i], i)) {
+      ++visible_count;
+    }
  }

  ImGui::Spacing();
@@ -385,6 +490,79 @@ void AgentChatHistoryPopup::DrawQuickActions() {
  }
}

bool AgentChatHistoryPopup::MessagePassesFilters(
    const cli::agent::ChatMessage& msg, int index) const {
  if (message_filter_ == MessageFilter::kUserOnly &&
      msg.sender != cli::agent::ChatMessage::Sender::kUser) {
    return false;
  }
  if (message_filter_ == MessageFilter::kAgentOnly &&
      msg.sender != cli::agent::ChatMessage::Sender::kAgent) {
    return false;
  }
  if (show_pinned_only_ &&
      pinned_messages_.find(index) == pinned_messages_.end()) {
    return false;
  }
  if (provider_filter_index_ > 0 &&
      provider_filter_index_ < static_cast<int>(provider_filters_.size())) {
    std::string label = BuildProviderLabel(msg.model_metadata);
    if (label != provider_filters_[provider_filter_index_]) {
      return false;
    }
  }
  if (search_buffer_[0] != '\0') {
    std::string needle = absl::AsciiStrToLower(std::string(search_buffer_));
    auto contains = [&](const std::string& value) {
      return absl::StrContains(absl::AsciiStrToLower(value), needle);
    };
    bool matched = contains(msg.message);
    if (!matched && msg.json_pretty.has_value()) {
      matched = contains(*msg.json_pretty);
    }
    if (!matched && msg.proposal.has_value()) {
      matched = contains(msg.proposal->id);
    }
    if (!matched) {
      for (const auto& warning : msg.warnings) {
        if (contains(warning)) {
          matched = true;
          break;
        }
      }
    }
    if (!matched) {
      return false;
    }
  }
  return true;
}

void AgentChatHistoryPopup::RefreshProviderFilters() {
  std::set<std::string> unique_labels;
  for (const auto& msg : messages_) {
    std::string label = BuildProviderLabel(msg.model_metadata);
    if (!label.empty()) {
      unique_labels.insert(label);
    }
  }
  provider_filters_.clear();
  provider_filters_.push_back("All providers");
  provider_filters_.insert(provider_filters_.end(), unique_labels.begin(),
                           unique_labels.end());
  if (provider_filter_index_ >= static_cast<int>(provider_filters_.size())) {
    provider_filter_index_ = 0;
  }
}

void AgentChatHistoryPopup::TogglePin(int index) {
  if (pinned_messages_.find(index) != pinned_messages_.end()) {
    pinned_messages_.erase(index);
  } else {
    pinned_messages_.insert(index);
  }
}

void AgentChatHistoryPopup::DrawInputSection() {
  ImGui::Separator();
  ImGui::Spacing();
@@ -440,6 +618,15 @@ void AgentChatHistoryPopup::UpdateHistory(const std::vector<cli::agent::ChatMess
  int old_size = messages_.size();

  messages_ = history;

  std::unordered_set<int> updated_pins;
  for (int pin : pinned_messages_) {
    if (pin < static_cast<int>(messages_.size())) {
      updated_pins.insert(pin);
    }
  }
  pinned_messages_.swap(updated_pins);
  RefreshProviderFilters();

  // Auto-scroll if new messages arrived
  if (auto_scroll_ && messages_.size() > old_size) {
@@ -460,6 +647,10 @@ void AgentChatHistoryPopup::NotifyNewMessage() {

void AgentChatHistoryPopup::ClearHistory() {
  messages_.clear();
  pinned_messages_.clear();
  provider_filters_.clear();
  provider_filters_.push_back("All providers");
  provider_filter_index_ = 0;

  if (toast_manager_) {
    toast_manager_->Show("Chat history popup cleared", ToastType::kInfo, 2.0f);
@@ -1,7 +1,9 @@
#ifndef YAZE_APP_EDITOR_AGENT_AGENT_CHAT_HISTORY_POPUP_H
#define YAZE_APP_EDITOR_AGENT_AGENT_CHAT_HISTORY_POPUP_H

#include <functional>
#include <string>
#include <unordered_set>
#include <vector>

#include "cli/service/agent/conversational_agent_service.h"
@@ -68,12 +70,16 @@ class AgentChatHistoryPopup {
    capture_snapshot_callback_ = std::move(callback);
  }

- private:
+ private:
  void DrawHeader();
-  void DrawQuickActions();
-  void DrawInputSection();
  void DrawMessageList();
  void DrawMessage(const cli::agent::ChatMessage& msg, int index);
+  void DrawInputSection();
+  void DrawQuickActions();
+  bool MessagePassesFilters(const cli::agent::ChatMessage& msg,
+                            int index) const;
+  void RefreshProviderFilters();
+  void TogglePin(int index);

  void SendMessage(const std::string& message);
  void ClearHistory();
@@ -92,6 +98,7 @@ class AgentChatHistoryPopup {

  // Input state
  char input_buffer_[512] = {};
  char search_buffer_[160] = {};
  bool focus_input_ = false;

  // UI state
@@ -107,6 +114,10 @@
    kAgentOnly
  };
  MessageFilter message_filter_ = MessageFilter::kAll;
  std::vector<std::string> provider_filters_;
  int provider_filter_index_ = 0;
  bool show_pinned_only_ = false;
  std::unordered_set<int> pinned_messages_;

  // Visual state
  float header_pulse_ = 0.0f;

@@ -13,6 +13,7 @@
#include <vector>

#include "absl/status/status.h"
#include "absl/strings/ascii.h"
#include "absl/strings/match.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_join.h"
@@ -26,6 +27,8 @@
#include "app/editor/system/toast_manager.h"
#include "app/gui/core/icons.h"
#include "app/rom.h"
#include "cli/service/ai/ollama_ai_service.h"
#include "cli/service/ai/service_factory.h"
#include "imgui/imgui.h"
#include "util/file_util.h"
#include "util/platform_paths.h"
@@ -109,6 +112,36 @@ void RenderTable(const ChatMessage::TableData& table_data) {
  }
}

std::string FormatByteSize(uint64_t bytes) {
  static const char* kUnits[] = {"B", "KB", "MB", "GB", "TB"};
  double size = static_cast<double>(bytes);
  int unit = 0;
  while (size >= 1024.0 && unit < 4) {
    size /= 1024.0;
    ++unit;
  }
  return absl::StrFormat("%.1f %s", size, kUnits[unit]);
}

std::string FormatRelativeTime(absl::Time timestamp) {
  if (timestamp == absl::InfinitePast()) {
    return "—";
  }
  absl::Duration delta = absl::Now() - timestamp;
  if (delta < absl::Seconds(60)) {
    return "just now";
  }
  if (delta < absl::Minutes(60)) {
    return absl::StrFormat("%dm ago",
                           static_cast<int>(delta / absl::Minutes(1)));
  }
  if (delta < absl::Hours(24)) {
    return absl::StrFormat("%dh ago",
                           static_cast<int>(delta / absl::Hours(1)));
  }
  return absl::FormatTime("%b %d", timestamp, absl::LocalTimeZone());
}

}  // namespace

namespace yaze {
@@ -291,6 +324,10 @@ void AgentChatWidget::EnsureHistoryLoaded() {
  multimodal_state_.last_capture_path = snapshot.multimodal.last_capture_path;
  multimodal_state_.status_message = snapshot.multimodal.status_message;
  multimodal_state_.last_updated = snapshot.multimodal.last_updated;

  if (snapshot.agent_config.has_value() && persist_agent_config_with_history_) {
    ApplyHistoryAgentConfig(*snapshot.agent_config);
  }
}

void AgentChatWidget::PersistHistory() {
@@ -323,6 +360,10 @@ void AgentChatWidget::PersistHistory() {
  snapshot.multimodal.status_message = multimodal_state_.status_message;
  snapshot.multimodal.last_updated = multimodal_state_.last_updated;

  if (persist_agent_config_with_history_) {
    snapshot.agent_config = BuildHistoryAgentConfig();
  }

  absl::Status status = AgentChatHistoryCodec::Save(history_path_, snapshot);
  if (!status.ok()) {
    if (status.code() == absl::StatusCode::kUnimplemented) {
@@ -418,6 +459,7 @@ void AgentChatWidget::HandleAgentResponse(
  }
  last_proposal_count_ = std::max(last_proposal_count_, total);

  MarkPresetUsage(agent_config_.ai_model);
  // Sync history to popup after response
  SyncHistoryToPopup();
}
@@ -1068,6 +1110,7 @@ void AgentChatWidget::Draw() {
    ImGui::EndTable();
  }

  RenderPersonaSummary();
  RenderAutomationPanel();
  RenderCollaborationPanel();
  RenderRomSyncPanel();
@@ -1617,6 +1660,14 @@ void AgentChatWidget::RenderAutomationPanel() {
    ImGui::SetTooltip("Auto-refresh interval");
  }

  ImGui::Spacing();
  ImGui::Separator();
  ImGui::TextDisabled("Automation Hooks");
  ImGui::Checkbox("Auto-run harness plan", &automation_state_.auto_run_plan);
  ImGui::Checkbox("Auto-sync ROM context", &automation_state_.auto_sync_rom);
  ImGui::Checkbox("Auto-focus proposal drawer",
                  &automation_state_.auto_focus_proposals);

  // === RECENT AUTOMATION ACTIONS WITH SCROLLING ===
  ImGui::Spacing();
  ImGui::Separator();
@@ -1858,6 +1909,76 @@ void AgentChatWidget::PollSharedHistory() {
  }
}

cli::AIServiceConfig AgentChatWidget::BuildAIServiceConfig() const {
  cli::AIServiceConfig cfg;
  cfg.provider =
      agent_config_.ai_provider.empty() ? "auto" : agent_config_.ai_provider;
  cfg.model = agent_config_.ai_model;
  cfg.ollama_host = agent_config_.ollama_host;
  cfg.gemini_api_key = agent_config_.gemini_api_key;
  cfg.verbose = agent_config_.verbose;
  return cfg;
}

void AgentChatWidget::ApplyToolPreferences() {
  cli::agent::ToolDispatcher::ToolPreferences prefs;
  prefs.resources = agent_config_.tool_config.resources;
  prefs.dungeon = agent_config_.tool_config.dungeon;
  prefs.overworld = agent_config_.tool_config.overworld;
  prefs.dialogue = agent_config_.tool_config.dialogue;
  prefs.messages = agent_config_.tool_config.messages;
  prefs.gui = agent_config_.tool_config.gui;
  prefs.music = agent_config_.tool_config.music;
  prefs.sprite = agent_config_.tool_config.sprite;
  prefs.emulator = agent_config_.tool_config.emulator;
  agent_service_.SetToolPreferences(prefs);
}

void AgentChatWidget::RefreshOllamaModels() {
#if defined(YAZE_WITH_JSON)
  ollama_models_loading_ = true;
  cli::OllamaConfig config;
  config.base_url = agent_config_.ollama_host;
  if (!agent_config_.ai_model.empty()) {
    config.model = agent_config_.ai_model;
  }
  cli::OllamaAIService ollama_service(config);
  auto models_or = ollama_service.ListAvailableModels();
  ollama_models_loading_ = false;
  if (!models_or.ok()) {
    if (toast_manager_) {
      toast_manager_->Show(
          absl::StrFormat("Model refresh failed: %s",
                          models_or.status().message()),
          ToastType::kWarning, 4.0f);
    }
    return;
  }

  ollama_model_info_cache_ = *models_or;
  std::sort(ollama_model_info_cache_.begin(), ollama_model_info_cache_.end(),
            [](const cli::OllamaAIService::ModelInfo& lhs,
               const cli::OllamaAIService::ModelInfo& rhs) {
              return lhs.name < rhs.name;
            });
  ollama_model_cache_.clear();
  for (const auto& info : ollama_model_info_cache_) {
    ollama_model_cache_.push_back(info.name);
  }
  last_model_refresh_ = absl::Now();
  if (toast_manager_) {
    toast_manager_->Show(
        absl::StrFormat("Loaded %zu local models", ollama_model_cache_.size()),
        ToastType::kSuccess, 2.0f);
  }
#else
  if (toast_manager_) {
    toast_manager_->Show("Model discovery requires JSON-enabled build",
                         ToastType::kWarning, 3.5f);
  }
#endif
}

void AgentChatWidget::UpdateAgentConfig(const AgentConfigState& config) {
  agent_config_ = config;

@@ -1870,77 +1991,657 @@ void AgentChatWidget::UpdateAgentConfig(const AgentConfigState& config) {
|
||||
|
||||
agent_service_.SetConfig(service_config);
|
||||
|
||||
auto provider_config = BuildAIServiceConfig();
|
||||
absl::Status status = agent_service_.ConfigureProvider(provider_config);
|
||||
if (!status.ok()) {
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show("Agent configuration updated", ToastType::kSuccess,
|
||||
2.5f);
|
||||
toast_manager_->Show(
|
||||
absl::StrFormat("Provider init failed: %s", status.message()),
|
||||
ToastType::kError, 4.0f);
|
||||
}
|
||||
} else {
|
||||
ApplyToolPreferences();
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show("Agent configuration applied",
|
||||
ToastType::kSuccess, 2.0f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderAgentConfigPanel() {
|
||||
const auto& theme = AgentUI::GetTheme();
|
||||
|
||||
// Dense header (no collapsing)
|
||||
ImGui::PushStyleColor(ImGuiCol_ChildBg, theme.panel_bg_color);
|
||||
ImGui::BeginChild("AgentConfig", ImVec2(0, 140), true); // Reduced from 350
|
||||
AgentUI::RenderSectionHeader(ICON_MD_SETTINGS, "Config", theme.command_text_color);
|
||||
|
||||
ImGui::BeginChild("AgentConfig", ImVec2(0, 190), true);
|
||||
AgentUI::RenderSectionHeader(ICON_MD_SETTINGS, "Agent Builder",
|
||||
theme.command_text_color);
|
||||
|
||||
// Compact provider selection
|
||||
int provider_idx = 0;
|
||||
if (agent_config_.ai_provider == "ollama")
|
||||
provider_idx = 1;
|
||||
else if (agent_config_.ai_provider == "gemini")
|
||||
provider_idx = 2;
|
||||
if (ImGui::BeginTabBar("AgentConfigTabs",
|
||||
ImGuiTabBarFlags_NoCloseWithMiddleMouseButton)) {
|
||||
if (ImGui::BeginTabItem(ICON_MD_SMART_TOY " Models")) {
|
||||
RenderModelConfigControls();
|
||||
ImGui::Separator();
|
||||
RenderModelDeck();
|
||||
ImGui::EndTabItem();
|
||||
}
|
||||
if (ImGui::BeginTabItem(ICON_MD_TUNE " Parameters")) {
|
||||
RenderParameterControls();
|
||||
ImGui::EndTabItem();
|
||||
}
|
||||
if (ImGui::BeginTabItem(ICON_MD_CONSTRUCTION " Tools")) {
|
||||
RenderToolingControls();
|
||||
ImGui::EndTabItem();
|
||||
}
|
||||
}
|
||||
ImGui::EndTabBar();
|
||||
|
||||
if (ImGui::RadioButton("Mock", &provider_idx, 0)) {
|
||||
agent_config_.ai_provider = "mock";
|
||||
std::snprintf(agent_config_.provider_buffer,
|
||||
sizeof(agent_config_.provider_buffer), "mock");
|
||||
ImGui::Spacing();
|
||||
if (ImGui::Checkbox("Sync agent config with chat history",
|
||||
&persist_agent_config_with_history_)) {
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show(
|
||||
persist_agent_config_with_history_
|
||||
? "Chat histories now capture provider + tool settings"
|
||||
: "Chat histories will no longer overwrite provider settings",
|
||||
ToastType::kInfo, 3.0f);
|
||||
}
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::RadioButton("Ollama", &provider_idx, 1)) {
|
||||
agent_config_.ai_provider = "ollama";
|
||||
std::snprintf(agent_config_.provider_buffer,
|
||||
sizeof(agent_config_.provider_buffer), "ollama");
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::RadioButton("Gemini", &provider_idx, 2)) {
|
||||
agent_config_.ai_provider = "gemini";
|
||||
std::snprintf(agent_config_.provider_buffer,
|
||||
sizeof(agent_config_.provider_buffer), "gemini");
|
||||
if (ImGui::IsItemHovered()) {
|
||||
ImGui::SetTooltip(
|
||||
"When enabled, provider, model, presets, and tool toggles reload with "
|
||||
"each chat history file.");
|
||||
}
|
||||
|
||||
// Dense provider settings
|
||||
if (agent_config_.ai_provider == "ollama") {
|
||||
ImGui::InputText("##ollama_model", agent_config_.model_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.model_buffer));
|
||||
ImGui::InputText("##ollama_host", agent_config_.ollama_host_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.ollama_host_buffer));
|
||||
} else if (agent_config_.ai_provider == "gemini") {
|
||||
ImGui::InputText("##gemini_model", agent_config_.model_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.model_buffer));
|
||||
ImGui::InputText("##gemini_key", agent_config_.gemini_key_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.gemini_key_buffer),
|
||||
ImGuiInputTextFlags_Password);
|
||||
}
|
||||
|
||||
ImGui::Separator();
|
||||
ImGui::Checkbox("Verbose", &agent_config_.verbose);
|
||||
ImGui::SameLine();
|
||||
ImGui::Checkbox("Reasoning", &agent_config_.show_reasoning);
|
||||
ImGui::SetNextItemWidth(-1);
|
||||
ImGui::SliderInt("##max_iter", &agent_config_.max_tool_iterations, 1, 10,
|
||||
"Iter: %d");
|
||||
|
||||
if (ImGui::Button(ICON_MD_CHECK " Apply", ImVec2(-1, 0))) {
|
||||
agent_config_.ai_model = agent_config_.model_buffer;
|
||||
agent_config_.ollama_host = agent_config_.ollama_host_buffer;
|
||||
agent_config_.gemini_api_key = agent_config_.gemini_key_buffer;
|
||||
ImGui::Spacing();
|
||||
if (ImGui::Button(ICON_MD_CLOUD_SYNC " Apply Provider Settings",
|
||||
ImVec2(-1, 0))) {
|
||||
UpdateAgentConfig(agent_config_);
|
||||
}
|
||||
|
||||
ImGui::EndChild();
|
||||
ImGui::PopStyleColor(); // Pop the ChildBg color from line 1609
|
||||
ImGui::PopStyleColor();
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderModelConfigControls() {
|
||||
auto provider_button = [&](const char* label, const char* value,
|
||||
const ImVec4& color) {
|
||||
bool active = agent_config_.ai_provider == value;
|
||||
if (active) {
|
||||
ImGui::PushStyleColor(ImGuiCol_Button, color);
|
||||
}
|
||||
if (ImGui::Button(label, ImVec2(90, 28))) {
|
||||
agent_config_.ai_provider = value;
|
||||
std::snprintf(agent_config_.provider_buffer,
|
||||
sizeof(agent_config_.provider_buffer), "%s", value);
|
||||
}
|
||||
if (active) {
|
||||
ImGui::PopStyleColor();
|
||||
}
|
||||
ImGui::SameLine();
|
||||
};
|
||||
|
||||
const auto& theme = AgentUI::GetTheme();
|
||||
provider_button(ICON_MD_SETTINGS " Mock", "mock", theme.provider_mock);
|
||||
provider_button(ICON_MD_CLOUD " Ollama", "ollama", theme.provider_ollama);
|
||||
provider_button(ICON_MD_SMART_TOY " Gemini", "gemini", theme.provider_gemini);
|
||||
ImGui::NewLine();
|
||||
ImGui::NewLine();
|
||||
|
||||
if (agent_config_.ai_provider == "ollama") {
|
||||
if (ImGui::InputTextWithHint("##ollama_host", "http://localhost:11434",
|
||||
agent_config_.ollama_host_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.ollama_host_buffer))) {
|
||||
agent_config_.ollama_host = agent_config_.ollama_host_buffer;
|
||||
}
|
||||
if (ImGui::InputTextWithHint("##ollama_model", "qwen2.5-coder:7b",
|
||||
agent_config_.model_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.model_buffer))) {
|
||||
agent_config_.ai_model = agent_config_.model_buffer;
|
||||
}
|
||||
|
||||
ImGui::SetNextItemWidth(ImGui::GetContentRegionAvail().x - 60.0f);
|
||||
ImGui::InputTextWithHint("##model_search", "Search local models...",
|
||||
model_search_buffer_,
|
||||
IM_ARRAYSIZE(model_search_buffer_));
|
||||
ImGui::SameLine();
|
||||
if (ImGui::Button(ollama_models_loading_ ? ICON_MD_SYNC
|
||||
: ICON_MD_REFRESH)) {
|
||||
RefreshOllamaModels();
|
||||
}
|
||||
|
||||
ImGui::PushStyleColor(ImGuiCol_ChildBg, ImVec4(0.1f, 0.1f, 0.14f, 0.9f));
|
||||
ImGui::BeginChild("OllamaModelList", ImVec2(0, 140), true);
|
||||
std::string filter = absl::AsciiStrToLower(model_search_buffer_);
|
||||
const bool has_metadata = !ollama_model_info_cache_.empty();
|
||||
if (ollama_model_cache_.empty() && ollama_model_info_cache_.empty()) {
|
||||
ImGui::TextDisabled("No cached models. Refresh to discover local models.");
|
||||
} else if (has_metadata) {
|
||||
for (const auto& info : ollama_model_info_cache_) {
|
||||
std::string lower = absl::AsciiStrToLower(info.name);
|
||||
if (!filter.empty() && lower.find(filter) == std::string::npos) {
|
||||
std::string param = absl::AsciiStrToLower(info.parameter_size);
|
||||
if (param.find(filter) == std::string::npos) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
bool is_selected = agent_config_.ai_model == info.name;
|
||||
if (ImGui::Selectable(info.name.c_str(), is_selected)) {
|
||||
agent_config_.ai_model = info.name;
|
||||
std::snprintf(agent_config_.model_buffer,
|
||||
sizeof(agent_config_.model_buffer), "%s",
|
||||
info.name.c_str());
|
||||
}
|
||||
|
||||
ImGui::SameLine();
|
||||
bool is_favorite = std::find(agent_config_.favorite_models.begin(),
|
||||
agent_config_.favorite_models.end(),
|
||||
info.name) !=
|
||||
agent_config_.favorite_models.end();
|
||||
std::string fav_id = absl::StrFormat("Fav##%s", info.name);
|
||||
if (ImGui::SmallButton(is_favorite ? ICON_MD_STAR : ICON_MD_STAR_BORDER)) {
|
||||
if (is_favorite) {
|
||||
agent_config_.favorite_models.erase(std::remove(
|
||||
agent_config_.favorite_models.begin(),
|
||||
agent_config_.favorite_models.end(), info.name),
|
||||
agent_config_.favorite_models.end());
|
||||
agent_config_.model_chain.erase(std::remove(
|
||||
agent_config_.model_chain.begin(), agent_config_.model_chain.end(),
|
||||
info.name),
|
||||
agent_config_.model_chain.end());
|
||||
} else {
|
||||
agent_config_.favorite_models.push_back(info.name);
|
||||
}
|
||||
}
|
||||
if (ImGui::IsItemHovered()) {
|
||||
ImGui::SetTooltip(is_favorite ? "Remove from favorites"
|
||||
: "Favorite model");
|
||||
}
|
||||
|
||||
ImGui::SameLine();
|
||||
std::string preset_id = absl::StrFormat("Preset##%s", info.name);
|
||||
if (ImGui::SmallButton(ICON_MD_NOTE_ADD)) {
|
||||
AgentConfigState::ModelPreset preset;
|
||||
preset.name = info.name;
|
||||
preset.model = info.name;
|
||||
preset.host = agent_config_.ollama_host;
|
||||
preset.tags = {"ollama"};
|
||||
preset.last_used = absl::Now();
|
||||
agent_config_.model_presets.push_back(std::move(preset));
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show("Preset captured from Ollama roster",
|
||||
ToastType::kSuccess, 2.0f);
|
||||
}
|
||||
}
|
||||
if (ImGui::IsItemHovered()) {
|
||||
ImGui::SetTooltip("Capture preset from this model");
|
||||
}
|
||||
|
||||
std::string size_label =
|
||||
info.parameter_size.empty()
|
||||
? FormatByteSize(info.size_bytes)
|
||||
: info.parameter_size;
|
||||
ImGui::TextDisabled("%s • %s", size_label.c_str(),
|
||||
info.quantization_level.c_str());
|
||||
if (!info.family.empty()) {
|
||||
ImGui::TextDisabled("Family: %s", info.family.c_str());
|
||||
}
|
||||
if (info.modified_at != absl::InfinitePast()) {
|
||||
ImGui::TextDisabled("Updated %s",
|
||||
FormatRelativeTime(info.modified_at).c_str());
|
||||
}
|
||||
ImGui::Separator();
|
||||
}
|
||||
} else {
|
||||
for (const auto& model_name : ollama_model_cache_) {
|
||||
std::string lower = absl::AsciiStrToLower(model_name);
|
||||
if (!filter.empty() && lower.find(filter) == std::string::npos) {
|
||||
continue;
|
||||
}
|
||||
|
||||
bool is_selected = agent_config_.ai_model == model_name;
|
||||
if (ImGui::Selectable(model_name.c_str(), is_selected)) {
|
||||
agent_config_.ai_model = model_name;
|
||||
std::snprintf(agent_config_.model_buffer,
|
||||
sizeof(agent_config_.model_buffer), "%s",
|
||||
model_name.c_str());
|
||||
}
|
||||
ImGui::SameLine();
|
||||
bool is_favorite = std::find(agent_config_.favorite_models.begin(),
|
||||
agent_config_.favorite_models.end(),
|
||||
model_name) !=
|
||||
agent_config_.favorite_models.end();
|
||||
if (ImGui::SmallButton(is_favorite ? ICON_MD_STAR : ICON_MD_STAR_BORDER)) {
|
||||
if (is_favorite) {
|
||||
agent_config_.favorite_models.erase(std::remove(
|
||||
agent_config_.favorite_models.begin(),
|
||||
agent_config_.favorite_models.end(), model_name),
|
||||
agent_config_.favorite_models.end());
|
||||
agent_config_.model_chain.erase(std::remove(
|
||||
agent_config_.model_chain.begin(), agent_config_.model_chain.end(),
|
||||
model_name),
|
||||
agent_config_.model_chain.end());
|
||||
} else {
|
||||
agent_config_.favorite_models.push_back(model_name);
|
||||
}
|
||||
}
|
||||
ImGui::Separator();
|
||||
}
|
||||
}
|
||||
ImGui::EndChild();
|
||||
ImGui::PopStyleColor();
|
||||
|
||||
if (last_model_refresh_ != absl::InfinitePast()) {
|
||||
double seconds =
|
||||
absl::ToDoubleSeconds(absl::Now() - last_model_refresh_);
|
||||
ImGui::TextDisabled("Last refresh %.0fs ago", seconds);
|
||||
} else {
|
||||
ImGui::TextDisabled("Models not refreshed yet");
|
||||
}
|
||||
|
||||
RenderChainModeControls();
|
||||
} else if (agent_config_.ai_provider == "gemini") {
|
||||
if (ImGui::InputTextWithHint("##gemini_model", "gemini-2.5-flash",
|
||||
agent_config_.model_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.model_buffer))) {
|
||||
agent_config_.ai_model = agent_config_.model_buffer;
|
||||
}
|
||||
if (ImGui::InputTextWithHint("##gemini_key", "API key...",
|
||||
agent_config_.gemini_key_buffer,
|
||||
IM_ARRAYSIZE(agent_config_.gemini_key_buffer),
|
||||
ImGuiInputTextFlags_Password)) {
|
||||
agent_config_.gemini_api_key = agent_config_.gemini_key_buffer;
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_SYNC " Env")) {
|
||||
const char* env_key = std::getenv("GEMINI_API_KEY");
|
||||
if (env_key) {
|
||||
std::snprintf(agent_config_.gemini_key_buffer,
|
||||
sizeof(agent_config_.gemini_key_buffer), "%s", env_key);
|
||||
agent_config_.gemini_api_key = env_key;
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show("Loaded GEMINI_API_KEY from environment",
|
||||
ToastType::kInfo, 2.0f);
|
||||
}
|
||||
} else if (toast_manager_) {
|
||||
toast_manager_->Show("GEMINI_API_KEY not set", ToastType::kWarning,
|
||||
2.0f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!agent_config_.favorite_models.empty()) {
|
||||
ImGui::Separator();
|
||||
ImGui::TextColored(ImVec4(1.0f, 0.843f, 0.0f, 1.0f), ICON_MD_STAR " Favorites");
|
||||
for (size_t i = 0; i < agent_config_.favorite_models.size(); ++i) {
|
||||
auto& favorite = agent_config_.favorite_models[i];
|
||||
ImGui::PushID(static_cast<int>(i));
|
||||
bool active = agent_config_.ai_model == favorite;
|
||||
if (ImGui::Selectable(favorite.c_str(), active)) {
|
||||
agent_config_.ai_model = favorite;
|
||||
std::snprintf(agent_config_.model_buffer,
|
||||
sizeof(agent_config_.model_buffer), "%s",
|
||||
favorite.c_str());
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_CLOSE)) {
|
||||
agent_config_.model_chain.erase(std::remove(
|
||||
agent_config_.model_chain.begin(), agent_config_.model_chain.end(),
|
||||
favorite),
|
||||
agent_config_.model_chain.end());
|
||||
agent_config_.favorite_models.erase(
|
||||
agent_config_.favorite_models.begin() + i);
|
||||
ImGui::PopID();
|
||||
break;
|
||||
}
|
||||
ImGui::PopID();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderModelDeck() {
|
||||
ImGui::TextDisabled("Model Deck");
|
||||
if (agent_config_.model_presets.empty()) {
|
||||
ImGui::TextWrapped(
|
||||
"Capture a preset to quickly swap between hosts/models with consistent "
|
||||
"tool stacks.");
|
||||
}
|
||||
ImGui::InputTextWithHint("##new_preset_name", "Preset name...",
|
||||
new_preset_name_, IM_ARRAYSIZE(new_preset_name_));
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_NOTE_ADD " Capture Current")) {
|
||||
AgentConfigState::ModelPreset preset;
|
||||
preset.name = new_preset_name_[0] ? std::string(new_preset_name_)
|
||||
: agent_config_.ai_model;
|
||||
preset.model = agent_config_.ai_model;
|
||||
preset.host = agent_config_.ollama_host;
|
||||
preset.tags = {"current"};
|
||||
preset.last_used = absl::Now();
|
||||
agent_config_.model_presets.push_back(std::move(preset));
|
||||
new_preset_name_[0] = '\0';
|
||||
if (toast_manager_) {
|
||||
toast_manager_->Show("Captured chat preset", ToastType::kSuccess, 2.0f);
|
||||
}
|
||||
}
|
||||
|
||||
ImGui::PushStyleColor(ImGuiCol_ChildBg, ImVec4(0.09f, 0.09f, 0.11f, 0.9f));
|
||||
ImGui::BeginChild("PresetList", ImVec2(0, 110), true);
|
||||
if (agent_config_.model_presets.empty()) {
|
||||
ImGui::TextDisabled("No presets yet");
|
||||
} else {
|
||||
for (int i = 0; i < static_cast<int>(agent_config_.model_presets.size());
|
||||
++i) {
|
||||
auto& preset = agent_config_.model_presets[i];
|
||||
ImGui::PushID(i);
|
||||
bool selected = active_model_preset_index_ == i;
|
||||
if (ImGui::Selectable(preset.name.c_str(), selected)) {
|
||||
active_model_preset_index_ = i;
|
||||
ApplyModelPreset(preset);
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_PLAY_ARROW "##apply")) {
|
||||
active_model_preset_index_ = i;
|
||||
ApplyModelPreset(preset);
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(preset.pinned ? ICON_MD_STAR : ICON_MD_STAR_BORDER)) {
|
||||
preset.pinned = !preset.pinned;
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_DELETE)) {
|
||||
agent_config_.model_presets.erase(
|
||||
agent_config_.model_presets.begin() + i);
|
||||
if (active_model_preset_index_ == i) {
|
||||
active_model_preset_index_ = -1;
|
||||
}
|
||||
ImGui::PopID();
|
||||
break;
|
||||
}
|
||||
if (!preset.host.empty()) {
|
||||
ImGui::TextDisabled("%s", preset.host.c_str());
|
||||
}
|
||||
if (!preset.tags.empty()) {
|
||||
ImGui::TextDisabled("Tags: %s",
|
||||
absl::StrJoin(preset.tags, ", ").c_str());
|
||||
}
|
||||
if (preset.last_used != absl::InfinitePast()) {
|
||||
ImGui::TextDisabled("Last used %s",
|
||||
FormatRelativeTime(preset.last_used).c_str());
|
||||
}
|
||||
ImGui::Separator();
|
||||
ImGui::PopID();
|
||||
}
|
||||
}
|
||||
ImGui::EndChild();
|
||||
ImGui::PopStyleColor();
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderParameterControls() {
|
||||
ImGui::SliderFloat("Temperature", &agent_config_.temperature, 0.0f, 1.5f);
|
||||
ImGui::SliderFloat("Top P", &agent_config_.top_p, 0.0f, 1.0f);
|
||||
ImGui::SliderInt("Max Output Tokens", &agent_config_.max_output_tokens, 256,
|
||||
8192);
|
||||
ImGui::SliderInt("Max Tool Iterations", &agent_config_.max_tool_iterations, 1,
|
||||
10);
|
||||
ImGui::SliderInt("Max Retry Attempts", &agent_config_.max_retry_attempts, 0,
|
||||
5);
|
||||
ImGui::Checkbox("Stream responses", &agent_config_.stream_responses);
|
||||
ImGui::SameLine();
|
||||
ImGui::Checkbox("Show reasoning", &agent_config_.show_reasoning);
|
||||
ImGui::SameLine();
|
||||
ImGui::Checkbox("Verbose logs", &agent_config_.verbose);
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderToolingControls() {
|
||||
struct ToolToggleEntry {
|
||||
const char* label;
|
||||
bool* flag;
|
||||
const char* hint;
|
||||
} entries[] = {
|
||||
{"Resources", &agent_config_.tool_config.resources,
|
||||
"resource-list/search"},
|
||||
{"Dungeon", &agent_config_.tool_config.dungeon,
|
||||
"Room + sprite inspection"},
|
||||
{"Overworld", &agent_config_.tool_config.overworld,
|
||||
"Map + entrance analysis"},
|
||||
{"Dialogue", &agent_config_.tool_config.dialogue,
|
||||
"Dialogue list/search"},
|
||||
{"Messages", &agent_config_.tool_config.messages,
|
||||
"Message table + ROM text"},
|
||||
{"GUI Automation", &agent_config_.tool_config.gui, "GUI automation tools"},
|
||||
{"Music", &agent_config_.tool_config.music, "Music info & tracks"},
|
||||
{"Sprite", &agent_config_.tool_config.sprite, "Sprite palette/properties"},
|
||||
{"Emulator", &agent_config_.tool_config.emulator, "Emulator controls"}};
|
||||
|
||||
int columns = 2;
|
||||
ImGui::Columns(columns, nullptr, false);
|
||||
for (size_t i = 0; i < std::size(entries); ++i) {
|
||||
if (ImGui::Checkbox(entries[i].label, entries[i].flag) &&
|
||||
auto_apply_agent_config_) {
|
||||
ApplyToolPreferences();
|
||||
}
|
||||
if (ImGui::IsItemHovered() && entries[i].hint) {
|
||||
ImGui::SetTooltip("%s", entries[i].hint);
|
||||
}
|
||||
ImGui::NextColumn();
|
||||
}
|
||||
ImGui::Columns(1);
|
||||
ImGui::Separator();
|
||||
ImGui::Checkbox("Auto-apply", &auto_apply_agent_config_);
|
||||
}
|
||||
|
||||
void AgentChatWidget::RenderPersonaSummary() {
|
||||
if (!persona_profile_.active || persona_profile_.notes.empty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
AgentUI::PushPanelStyle();
|
||||
if (ImGui::BeginChild("PersonaSummaryPanel", ImVec2(0, 110), true)) {
|
||||
ImVec4 accent = ImVec4(0.6f, 0.8f, 0.4f, 1.0f);
|
||||
if (persona_highlight_active_) {
|
||||
float pulse = 0.5f + 0.5f * std::sin(ImGui::GetTime() * 2.5f);
|
||||
accent.x *= 0.7f + 0.3f * pulse;
|
||||
accent.y *= 0.7f + 0.3f * pulse;
|
||||
}
|
||||
ImGui::TextColored(accent, "%s Active Persona", ICON_MD_PERSON);
|
||||
if (ImGui::IsItemHovered()) {
|
||||
ImGui::SetTooltip("Applied from Agent Builder");
|
||||
}
|
||||
ImGui::SameLine();
|
||||
if (ImGui::SmallButton(ICON_MD_CLOSE "##persona_clear")) {
|
||||
persona_profile_.active = false;
|
||||
persona_highlight_active_ = false;
|
||||
}
|
||||
ImGui::TextWrapped("%s", persona_profile_.notes.c_str());
|
||||
if (!persona_profile_.goals.empty()) {
|
||||
ImGui::TextDisabled("Goals");
|
||||
for (const auto& goal : persona_profile_.goals) {
|
||||
ImGui::BulletText("%s", goal.c_str());
|
||||
}
|
||||
}
|
||||
ImGui::TextDisabled("Applied %s",
|
||||
FormatRelativeTime(persona_profile_.applied_at).c_str());
|
||||
}
|
||||
ImGui::EndChild();
|
||||
AgentUI::PopPanelStyle();
|
||||
persona_highlight_active_ = false;
|
||||
}
|
||||
|
||||
void AgentChatWidget::ApplyModelPreset(
|
||||
const AgentConfigState::ModelPreset& preset) {
|
||||
agent_config_.ai_provider = "ollama";
|
||||
agent_config_.ollama_host = preset.host.empty() ? agent_config_.ollama_host
|
||||
: preset.host;
|
||||
agent_config_.ai_model = preset.model;
|
||||
std::snprintf(agent_config_.model_buffer, sizeof(agent_config_.model_buffer),
|
||||
"%s", agent_config_.ai_model.c_str());
|
||||
std::snprintf(agent_config_.ollama_host_buffer,
|
||||
sizeof(agent_config_.ollama_host_buffer), "%s",
|
||||
agent_config_.ollama_host.c_str());
|
||||
MarkPresetUsage(preset.name.empty() ? preset.model : preset.name);
|
||||
UpdateAgentConfig(agent_config_);
|
||||
}
|
||||
|
||||
void AgentChatWidget::ApplyBuilderPersona(
|
||||
const std::string& persona_notes,
|
||||
const std::vector<std::string>& goals) {
|
||||
persona_profile_.notes = persona_notes;
|
||||
persona_profile_.goals = goals;
|
||||
persona_profile_.applied_at = absl::Now();
|
||||
persona_profile_.active = !persona_profile_.notes.empty();
|
||||
persona_highlight_active_ = persona_profile_.active;
|
||||
}
|
||||
|
||||
void AgentChatWidget::ApplyAutomationPlan(bool auto_run_tests,
|
||||
bool auto_sync_rom,
|
||||
bool auto_focus_proposals) {
|
||||
automation_state_.auto_run_plan = auto_run_tests;
|
||||
automation_state_.auto_sync_rom = auto_sync_rom;
|
||||
automation_state_.auto_focus_proposals = auto_focus_proposals;
|
||||
}

AgentChatHistoryCodec::AgentConfigSnapshot
AgentChatWidget::BuildHistoryAgentConfig() const {
  AgentChatHistoryCodec::AgentConfigSnapshot snapshot;
  snapshot.provider = agent_config_.ai_provider;
  snapshot.model = agent_config_.ai_model;
  snapshot.ollama_host = agent_config_.ollama_host;
  snapshot.gemini_api_key = agent_config_.gemini_api_key;
  snapshot.verbose = agent_config_.verbose;
  snapshot.show_reasoning = agent_config_.show_reasoning;
  snapshot.max_tool_iterations = agent_config_.max_tool_iterations;
  snapshot.max_retry_attempts = agent_config_.max_retry_attempts;
  snapshot.temperature = agent_config_.temperature;
  snapshot.top_p = agent_config_.top_p;
  snapshot.max_output_tokens = agent_config_.max_output_tokens;
  snapshot.stream_responses = agent_config_.stream_responses;
  snapshot.chain_mode = static_cast<int>(agent_config_.chain_mode);
  snapshot.favorite_models = agent_config_.favorite_models;
  snapshot.model_chain = agent_config_.model_chain;
  snapshot.persona_notes = persona_profile_.notes;
  snapshot.goals = persona_profile_.goals;
  snapshot.tools.resources = agent_config_.tool_config.resources;
  snapshot.tools.dungeon = agent_config_.tool_config.dungeon;
  snapshot.tools.overworld = agent_config_.tool_config.overworld;
  snapshot.tools.dialogue = agent_config_.tool_config.dialogue;
  snapshot.tools.messages = agent_config_.tool_config.messages;
  snapshot.tools.gui = agent_config_.tool_config.gui;
  snapshot.tools.music = agent_config_.tool_config.music;
  snapshot.tools.sprite = agent_config_.tool_config.sprite;
  snapshot.tools.emulator = agent_config_.tool_config.emulator;
  for (const auto& preset : agent_config_.model_presets) {
    AgentChatHistoryCodec::AgentConfigSnapshot::ModelPreset stored;
    stored.name = preset.name;
    stored.model = preset.model;
    stored.host = preset.host;
    stored.tags = preset.tags;
    stored.pinned = preset.pinned;
    snapshot.model_presets.push_back(std::move(stored));
  }
  return snapshot;
}
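// Round-trip sketch (illustrative check, not a test shipped in this change):
//
//   auto snapshot = widget.BuildHistoryAgentConfig();
//   widget.ApplyHistoryAgentConfig(snapshot);
//   // agent_config_ is restored field-for-field; the only value that changes
//   // is persona_profile_.applied_at, which Apply re-stamps with absl::Now().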

void AgentChatWidget::ApplyHistoryAgentConfig(
    const AgentChatHistoryCodec::AgentConfigSnapshot& snapshot) {
  agent_config_.ai_provider = snapshot.provider;
  agent_config_.ai_model = snapshot.model;
  agent_config_.ollama_host = snapshot.ollama_host;
  agent_config_.gemini_api_key = snapshot.gemini_api_key;
  agent_config_.verbose = snapshot.verbose;
  agent_config_.show_reasoning = snapshot.show_reasoning;
  agent_config_.max_tool_iterations = snapshot.max_tool_iterations;
  agent_config_.max_retry_attempts = snapshot.max_retry_attempts;
  agent_config_.temperature = snapshot.temperature;
  agent_config_.top_p = snapshot.top_p;
  agent_config_.max_output_tokens = snapshot.max_output_tokens;
  agent_config_.stream_responses = snapshot.stream_responses;
  agent_config_.chain_mode = static_cast<AgentConfigState::ChainMode>(
      std::clamp(snapshot.chain_mode, 0, 2));
  agent_config_.favorite_models = snapshot.favorite_models;
  agent_config_.model_chain = snapshot.model_chain;
  agent_config_.tool_config.resources = snapshot.tools.resources;
  agent_config_.tool_config.dungeon = snapshot.tools.dungeon;
  agent_config_.tool_config.overworld = snapshot.tools.overworld;
  agent_config_.tool_config.dialogue = snapshot.tools.dialogue;
  agent_config_.tool_config.messages = snapshot.tools.messages;
  agent_config_.tool_config.gui = snapshot.tools.gui;
  agent_config_.tool_config.music = snapshot.tools.music;
  agent_config_.tool_config.sprite = snapshot.tools.sprite;
  agent_config_.tool_config.emulator = snapshot.tools.emulator;
  agent_config_.model_presets.clear();
  for (const auto& stored : snapshot.model_presets) {
    AgentConfigState::ModelPreset preset;
    preset.name = stored.name;
    preset.model = stored.model;
    preset.host = stored.host;
    preset.tags = stored.tags;
    preset.pinned = stored.pinned;
    agent_config_.model_presets.push_back(std::move(preset));
  }
  persona_profile_.notes = snapshot.persona_notes;
  persona_profile_.goals = snapshot.goals;
  persona_profile_.active = !persona_profile_.notes.empty();
  persona_profile_.applied_at = absl::Now();
  persona_highlight_active_ = persona_profile_.active;

  std::snprintf(agent_config_.model_buffer, sizeof(agent_config_.model_buffer),
                "%s", agent_config_.ai_model.c_str());
  std::snprintf(agent_config_.ollama_host_buffer,
                sizeof(agent_config_.ollama_host_buffer),
                "%s", agent_config_.ollama_host.c_str());
  std::snprintf(agent_config_.gemini_key_buffer,
                sizeof(agent_config_.gemini_key_buffer),
                "%s", agent_config_.gemini_api_key.c_str());

  UpdateAgentConfig(agent_config_);
}
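// The trailing std::snprintf calls re-sync the fixed-size ImGui input buffers
// after the programmatic restore; without them the provider/model/host text
// fields would keep showing stale values. chain_mode is clamped to [0, 2] so a
// hand-edited or older history file cannot yield an out-of-range ChainMode.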

void AgentChatWidget::MarkPresetUsage(const std::string& model_name) {
  if (model_name.empty()) {
    return;
  }
  for (auto& preset : agent_config_.model_presets) {
    if (preset.name == model_name || preset.model == model_name) {
      preset.last_used = absl::Now();
      return;
    }
  }
}

void AgentChatWidget::RenderChainModeControls() {
  const char* labels[] = {"Disabled", "Round Robin", "Consensus"};
  int mode = static_cast<int>(agent_config_.chain_mode);
  if (ImGui::Combo("Chain Mode", &mode, labels, IM_ARRAYSIZE(labels))) {
    agent_config_.chain_mode =
        static_cast<AgentConfigState::ChainMode>(mode);
  }

  if (agent_config_.chain_mode == AgentConfigState::ChainMode::kDisabled) {
    return;
  }

  ImGui::TextDisabled("Model Chain");
  if (agent_config_.favorite_models.empty()) {
    ImGui::Text("Add favorites to build a chain.");
    return;
  }

  for (const auto& favorite : agent_config_.favorite_models) {
    bool selected = std::find(agent_config_.model_chain.begin(),
                              agent_config_.model_chain.end(),
                              favorite) != agent_config_.model_chain.end();
    if (ImGui::Selectable(favorite.c_str(), selected)) {
      if (selected) {
        agent_config_.model_chain.erase(std::remove(
            agent_config_.model_chain.begin(), agent_config_.model_chain.end(),
            favorite),
            agent_config_.model_chain.end());
      } else {
        agent_config_.model_chain.push_back(favorite);
      }
    }
  }
  ImGui::TextDisabled("Chain length: %zu", agent_config_.model_chain.size());
}
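// Selection model: each favorite acts as a toggle against model_chain, so the
// chain preserves click order. Worked example (hypothetical model names):
//
//   favorites = {"a", "b", "c"}; clicks b, c, b  =>  model_chain = {"c"}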

void AgentChatWidget::RenderZ3EDCommandPanel() {
@@ -2768,6 +3469,29 @@ void AgentChatWidget::LoadAgentSettingsFromProject(
  agent_config_.max_tool_iterations =
      project.agent_settings.max_tool_iterations;
  agent_config_.max_retry_attempts = project.agent_settings.max_retry_attempts;
  agent_config_.temperature = project.agent_settings.temperature;
  agent_config_.top_p = project.agent_settings.top_p;
  agent_config_.max_output_tokens = project.agent_settings.max_output_tokens;
  agent_config_.stream_responses = project.agent_settings.stream_responses;
  agent_config_.favorite_models = project.agent_settings.favorite_models;
  agent_config_.model_chain = project.agent_settings.model_chain;
  agent_config_.chain_mode = static_cast<AgentConfigState::ChainMode>(
      std::clamp(project.agent_settings.chain_mode, 0, 2));
  agent_config_.tool_config.resources =
      project.agent_settings.enable_tool_resources;
  agent_config_.tool_config.dungeon =
      project.agent_settings.enable_tool_dungeon;
  agent_config_.tool_config.overworld =
      project.agent_settings.enable_tool_overworld;
  agent_config_.tool_config.dialogue =
      project.agent_settings.enable_tool_dialogue;
  agent_config_.tool_config.messages =
      project.agent_settings.enable_tool_messages;
  agent_config_.tool_config.gui = project.agent_settings.enable_tool_gui;
  agent_config_.tool_config.music = project.agent_settings.enable_tool_music;
  agent_config_.tool_config.sprite = project.agent_settings.enable_tool_sprite;
  agent_config_.tool_config.emulator =
      project.agent_settings.enable_tool_emulator;

  // Copy to buffer for ImGui
  strncpy(agent_config_.provider_buffer, agent_config_.ai_provider.c_str(),
@@ -2827,6 +3551,29 @@ void AgentChatWidget::SaveAgentSettingsToProject(project::YazeProject& project)
  project.agent_settings.max_tool_iterations =
      agent_config_.max_tool_iterations;
  project.agent_settings.max_retry_attempts = agent_config_.max_retry_attempts;
  project.agent_settings.temperature = agent_config_.temperature;
  project.agent_settings.top_p = agent_config_.top_p;
  project.agent_settings.max_output_tokens = agent_config_.max_output_tokens;
  project.agent_settings.stream_responses = agent_config_.stream_responses;
  project.agent_settings.favorite_models = agent_config_.favorite_models;
  project.agent_settings.model_chain = agent_config_.model_chain;
  project.agent_settings.chain_mode =
      static_cast<int>(agent_config_.chain_mode);
  project.agent_settings.enable_tool_resources =
      agent_config_.tool_config.resources;
  project.agent_settings.enable_tool_dungeon =
      agent_config_.tool_config.dungeon;
  project.agent_settings.enable_tool_overworld =
      agent_config_.tool_config.overworld;
  project.agent_settings.enable_tool_dialogue =
      agent_config_.tool_config.dialogue;
  project.agent_settings.enable_tool_messages =
      agent_config_.tool_config.messages;
  project.agent_settings.enable_tool_gui = agent_config_.tool_config.gui;
  project.agent_settings.enable_tool_music = agent_config_.tool_config.music;
  project.agent_settings.enable_tool_sprite = agent_config_.tool_config.sprite;
  project.agent_settings.enable_tool_emulator =
      agent_config_.tool_config.emulator;

  // Check if a custom system prompt is loaded
  for (const auto& tab : open_files_) {

@@ -10,7 +10,9 @@
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "app/editor/agent/agent_chat_history_codec.h"
#include "app/gui/widgets/text_editor.h"
#include "cli/service/ai/ollama_ai_service.h"
#include "cli/service/agent/conversational_agent_service.h"
#include "cli/service/agent/advanced_routing.h"
#include "cli/service/agent/agent_pretraining.h"
@@ -21,6 +23,10 @@ namespace yaze {

class Rom;

namespace cli {
struct AIServiceConfig;
}

namespace editor {

class ProposalDrawer;

@@ -122,6 +128,10 @@ class AgentChatWidget {

  void SetMultimodalCallbacks(const MultimodalCallbacks& callbacks);
  void SetAutomationCallbacks(const AutomationCallbacks& callbacks);
  void ApplyBuilderPersona(const std::string& persona_notes,
                           const std::vector<std::string>& goals);
  void ApplyAutomationPlan(bool auto_run_tests, bool auto_sync_rom,
                           bool auto_focus_proposals);

  void UpdateHarnessTelemetry(const AutomationTelemetry& telemetry);
  void SetLastPlanSummary(const std::string& summary);
@@ -141,8 +151,6 @@ class AgentChatWidget {

  bool* active() { return &active_; }
  bool is_active() const { return active_; }
  void set_active(bool active) { active_ = active; }

 public:
  enum class CollaborationMode {
    kLocal = 0,   // Filesystem-based collaboration
    kNetwork = 1  // WebSocket-based collaboration
@@ -205,6 +213,9 @@ public:
    int connection_attempts = 0;
    absl::Time last_connection_attempt = absl::InfinitePast();
    std::string grpc_server_address = "localhost:50052";
    bool auto_run_plan = false;
    bool auto_sync_rom = true;
    bool auto_focus_proposals = true;
  };

  // Agent Configuration State
@@ -217,6 +228,38 @@ public:
    bool show_reasoning = true;
    int max_tool_iterations = 4;
    int max_retry_attempts = 3;
    float temperature = 0.25f;
    float top_p = 0.95f;
    int max_output_tokens = 2048;
    bool stream_responses = false;
    std::vector<std::string> favorite_models;
    std::vector<std::string> model_chain;
    enum class ChainMode {
      kDisabled = 0,
      kRoundRobin = 1,
      kConsensus = 2,
    };
    ChainMode chain_mode = ChainMode::kDisabled;
    struct ModelPreset {
      std::string name;
      std::string model;
      std::string host;
      std::vector<std::string> tags;
      bool pinned = false;
      absl::Time last_used = absl::InfinitePast();
    };
    std::vector<ModelPreset> model_presets;
    struct ToolConfig {
      bool resources = true;
      bool dungeon = true;
      bool overworld = true;
      bool dialogue = true;
      bool messages = true;
      bool gui = true;
      bool music = true;
      bool sprite = true;
      bool emulator = true;
    } tool_config;
    char provider_buffer[32] = "mock";
    char model_buffer[128] = {};
    char ollama_host_buffer[256] = "http://localhost:11434";
@@ -289,6 +332,12 @@ public:
  void RenderHarnessPanel();
  void RenderSystemPromptEditor();
  void RenderFileEditorTabs();
  void RenderModelConfigControls();
  void RenderModelDeck();
  void RenderParameterControls();
  void RenderToolingControls();
  void RenderChainModeControls();
  void RenderPersonaSummary();
  void RefreshCollaboration();
  void ApplyCollaborationSession(
      const CollaborationCallbacks::SessionContext& context,
@@ -298,6 +347,14 @@ public:
  void HandleRomSyncReceived(const std::string& diff_data, const std::string& rom_hash);
  void HandleSnapshotReceived(const std::string& snapshot_data, const std::string& snapshot_type);
  void HandleProposalReceived(const std::string& proposal_data);
  void RefreshOllamaModels();
  cli::AIServiceConfig BuildAIServiceConfig() const;
  void ApplyToolPreferences();
  void ApplyHistoryAgentConfig(
      const AgentChatHistoryCodec::AgentConfigSnapshot& snapshot);
  AgentChatHistoryCodec::AgentConfigSnapshot BuildHistoryAgentConfig() const;
  void MarkPresetUsage(const std::string& model_name);
  void ApplyModelPreset(const AgentConfigState::ModelPreset& preset);

  // History synchronization
  void SyncHistoryToPopup();
@@ -358,6 +415,14 @@ public:
  AgentConfigState agent_config_;
  RomSyncState rom_sync_state_;
  Z3EDCommandState z3ed_command_state_;
  bool persist_agent_config_with_history_ = true;
  struct PersonaProfile {
    std::string notes;
    std::vector<std::string> goals;
    absl::Time applied_at = absl::InfinitePast();
    bool active = false;
  } persona_profile_;
  bool persona_highlight_active_ = false;

  // Callbacks
  CollaborationCallbacks collaboration_callbacks_;
@@ -399,6 +464,18 @@ public:
  };
  std::vector<FileEditorTab> open_files_;
  int active_file_tab_ = -1;

  // Model roster cache
  std::vector<cli::OllamaAIService::ModelInfo> ollama_model_info_cache_;
  std::vector<std::string> ollama_model_cache_;
  absl::Time last_model_refresh_ = absl::InfinitePast();
  bool ollama_models_loading_ = false;
  char model_search_buffer_[64] = {};
  char new_preset_name_[64] = {};
  int active_model_preset_index_ = -1;
  bool show_model_manager_popup_ = false;
  bool show_tool_manager_popup_ = false;
  bool auto_apply_agent_config_ = false;
};

}  // namespace editor

@@ -1,5 +1,6 @@
#include "app/editor/agent/agent_editor.h"

#include <algorithm>
#include <filesystem>
#include <fstream>
#include <memory>
@@ -13,6 +14,7 @@
#include "app/editor/system/proposal_drawer.h"
#include "app/editor/system/toast_manager.h"
#include "app/gui/core/icons.h"
#include "imgui/misc/cpp/imgui_stdlib.h"
#include "app/rom.h"
#include "util/file_util.h"
#include "util/platform_paths.h"
@@ -62,6 +64,15 @@ AgentEditor::AgentEditor() {

  // Ensure profiles directory exists
  EnsureProfilesDirectory();

  builder_state_.stages = {
      {"Persona", "Define persona and goals", false},
      {"Tool Stack", "Select the agent's tools", false},
      {"Automation", "Configure automation hooks", false},
      {"Validation", "Describe E2E validation", false},
      {"E2E Checklist", "Track readiness for end-to-end runs", false}};
  builder_state_.persona_notes =
      "Describe the persona, tone, and constraints for this agent.";
}

AgentEditor::~AgentEditor() = default;
@@ -304,6 +315,11 @@ void AgentEditor::DrawDashboard() {
    ImGui::EndTabItem();
  }

  if (ImGui::BeginTabItem(ICON_MD_AUTO_FIX_HIGH " Agent Builder")) {
    DrawAgentBuilderPanel();
    ImGui::EndTabItem();
  }

  ImGui::EndTabBar();
}

@@ -1106,6 +1122,297 @@ void AgentEditor::DrawNewPromptCreator() {
                     "edit existing prompts.");
}

void AgentEditor::DrawAgentBuilderPanel() {
  if (!chat_widget_) {
    ImGui::TextDisabled("Chat widget not initialized.");
    return;
  }

  ImGui::BeginChild("AgentBuilderPanel", ImVec2(0, 0), false);
  ImGui::Columns(2, nullptr, false);
  ImGui::TextColored(ImVec4(0.8f, 0.8f, 1.0f, 1.0f), "Stages");
  ImGui::Separator();

  for (size_t i = 0; i < builder_state_.stages.size(); ++i) {
    auto& stage = builder_state_.stages[i];
    ImGui::PushID(static_cast<int>(i));
    bool selected = builder_state_.active_stage == static_cast<int>(i);
    if (ImGui::Selectable(stage.name.c_str(), selected)) {
      builder_state_.active_stage = static_cast<int>(i);
    }
    ImGui::SameLine(ImGui::GetContentRegionAvail().x - 24.0f);
    ImGui::Checkbox("##stage_done", &stage.completed);
    ImGui::PopID();
  }

  ImGui::NextColumn();
  ImGui::TextColored(ImVec4(0.9f, 0.9f, 0.6f, 1.0f), "Stage Details");
  ImGui::Separator();

  int stage_index = std::clamp(builder_state_.active_stage, 0,
                               static_cast<int>(builder_state_.stages.size()) -
                                   1);
  int completed_stages = 0;
  for (const auto& stage : builder_state_.stages) {
    if (stage.completed) {
      ++completed_stages;
    }
  }
  switch (stage_index) {
    case 0: {
      static std::string new_goal;
      ImGui::Text("Persona + Goals");
      ImGui::InputTextMultiline("##persona_notes",
                                &builder_state_.persona_notes,
                                ImVec2(-1, 120));
      ImGui::Spacing();
      ImGui::TextDisabled("Add Goal");
      ImGui::InputTextWithHint("##goal_input", "e.g. Document dungeon plan",
                               &new_goal);
      ImGui::SameLine();
      if (ImGui::Button(ICON_MD_ADD) && !new_goal.empty()) {
        builder_state_.goals.push_back(new_goal);
        new_goal.clear();
      }
      for (size_t i = 0; i < builder_state_.goals.size(); ++i) {
        ImGui::BulletText("%s", builder_state_.goals[i].c_str());
        ImGui::SameLine();
        ImGui::PushID(static_cast<int>(i));
        if (ImGui::SmallButton(ICON_MD_CLOSE)) {
          builder_state_.goals.erase(builder_state_.goals.begin() + i);
          ImGui::PopID();
          break;
        }
        ImGui::PopID();
      }
      break;
    }
    case 1: {
      ImGui::Text("Tool Stack");
      auto tool_checkbox = [&](const char* label, bool* value) {
        ImGui::Checkbox(label, value);
      };
      tool_checkbox("Resources", &builder_state_.tools.resources);
      tool_checkbox("Dungeon", &builder_state_.tools.dungeon);
      tool_checkbox("Overworld", &builder_state_.tools.overworld);
      tool_checkbox("Dialogue", &builder_state_.tools.dialogue);
      tool_checkbox("GUI Automation", &builder_state_.tools.gui);
      tool_checkbox("Music", &builder_state_.tools.music);
      tool_checkbox("Sprite", &builder_state_.tools.sprite);
      tool_checkbox("Emulator", &builder_state_.tools.emulator);
      break;
    }
    case 2: {
      ImGui::Text("Automation");
      ImGui::Checkbox("Auto-run harness plan", &builder_state_.auto_run_tests);
      ImGui::Checkbox("Auto-sync ROM context", &builder_state_.auto_sync_rom);
      ImGui::Checkbox("Auto-focus proposal drawer",
                      &builder_state_.auto_focus_proposals);
      ImGui::TextWrapped(
          "Enable these options to push harness dashboards/test plans whenever "
          "the builder executes a plan.");
      break;
    }
    case 3: {
      ImGui::Text("Validation Criteria");
      ImGui::InputTextMultiline("##validation_notes",
                                &builder_state_.stages[stage_index].summary,
                                ImVec2(-1, 120));
      break;
    }
    case 4: {
      ImGui::Text("E2E Checklist");
      float progress =
          builder_state_.stages.empty()
              ? 0.0f
              : static_cast<float>(completed_stages) /
                    static_cast<float>(builder_state_.stages.size());
      ImGui::ProgressBar(progress, ImVec2(-1, 0),
                         absl::StrFormat("%d/%zu complete", completed_stages,
                                         builder_state_.stages.size())
                             .c_str());
      ImGui::Checkbox("Ready for automation handoff",
                      &builder_state_.ready_for_e2e);
      ImGui::TextDisabled("Harness auto-run: %s",
                          builder_state_.auto_run_tests ? "ON" : "OFF");
      ImGui::TextDisabled("Auto-sync ROM: %s",
                          builder_state_.auto_sync_rom ? "ON" : "OFF");
      ImGui::TextDisabled("Auto-focus proposals: %s",
                          builder_state_.auto_focus_proposals ? "ON" : "OFF");
      break;
    }
  }

  ImGui::Columns(1);
  ImGui::Separator();

  float completion_ratio =
      builder_state_.stages.empty()
          ? 0.0f
          : static_cast<float>(completed_stages) /
                static_cast<float>(builder_state_.stages.size());
  ImGui::TextDisabled("Overall Progress");
  ImGui::ProgressBar(completion_ratio, ImVec2(-1, 0));
  ImGui::TextDisabled("E2E Ready: %s",
                      builder_state_.ready_for_e2e ? "Yes" : "No");

  if (ImGui::Button(ICON_MD_LINK " Apply to Chat")) {
    auto config = chat_widget_->GetAgentConfig();
    config.tool_config.resources = builder_state_.tools.resources;
    config.tool_config.dungeon = builder_state_.tools.dungeon;
    config.tool_config.overworld = builder_state_.tools.overworld;
    config.tool_config.dialogue = builder_state_.tools.dialogue;
    config.tool_config.gui = builder_state_.tools.gui;
    config.tool_config.music = builder_state_.tools.music;
    config.tool_config.sprite = builder_state_.tools.sprite;
    config.tool_config.emulator = builder_state_.tools.emulator;
    chat_widget_->UpdateAgentConfig(config);
    chat_widget_->ApplyBuilderPersona(builder_state_.persona_notes,
                                      builder_state_.goals);
    chat_widget_->ApplyAutomationPlan(builder_state_.auto_run_tests,
                                      builder_state_.auto_sync_rom,
                                      builder_state_.auto_focus_proposals);
    if (toast_manager_) {
      toast_manager_->Show("Builder tool plan synced to chat",
                           ToastType::kSuccess, 2.0f);
    }
  }
  ImGui::SameLine();

  ImGui::InputTextWithHint("##blueprint_path", "Path to blueprint...",
                           &builder_state_.blueprint_path);
  std::filesystem::path blueprint_path =
      builder_state_.blueprint_path.empty()
          ? (std::filesystem::temp_directory_path() / "agent_builder.json")
          : std::filesystem::path(builder_state_.blueprint_path);

  if (ImGui::Button(ICON_MD_SAVE " Save Blueprint")) {
    auto status = SaveBuilderBlueprint(blueprint_path);
    if (toast_manager_) {
      if (status.ok()) {
        toast_manager_->Show("Builder blueprint saved", ToastType::kSuccess,
                             2.0f);
      } else {
        toast_manager_->Show(std::string(status.message()),
                             ToastType::kError, 3.5f);
      }
    }
  }
  ImGui::SameLine();
  if (ImGui::Button(ICON_MD_FOLDER_OPEN " Load Blueprint")) {
    auto status = LoadBuilderBlueprint(blueprint_path);
    if (toast_manager_) {
      if (status.ok()) {
        toast_manager_->Show("Builder blueprint loaded", ToastType::kSuccess,
                             2.0f);
      } else {
        toast_manager_->Show(std::string(status.message()),
                             ToastType::kError, 3.5f);
      }
    }
  }

  ImGui::EndChild();
}

absl::Status AgentEditor::SaveBuilderBlueprint(
    const std::filesystem::path& path) {
#if defined(YAZE_WITH_JSON)
  nlohmann::json json;
  json["persona_notes"] = builder_state_.persona_notes;
  json["goals"] = builder_state_.goals;
  json["auto_run_tests"] = builder_state_.auto_run_tests;
  json["auto_sync_rom"] = builder_state_.auto_sync_rom;
  json["auto_focus_proposals"] = builder_state_.auto_focus_proposals;
  json["ready_for_e2e"] = builder_state_.ready_for_e2e;
  json["tools"] = {
      {"resources", builder_state_.tools.resources},
      {"dungeon", builder_state_.tools.dungeon},
      {"overworld", builder_state_.tools.overworld},
      {"dialogue", builder_state_.tools.dialogue},
      {"gui", builder_state_.tools.gui},
      {"music", builder_state_.tools.music},
      {"sprite", builder_state_.tools.sprite},
      {"emulator", builder_state_.tools.emulator},
  };
  json["stages"] = nlohmann::json::array();
  for (const auto& stage : builder_state_.stages) {
    json["stages"].push_back(
        {{"name", stage.name}, {"summary", stage.summary},
         {"completed", stage.completed}});
  }

  std::error_code ec;
  std::filesystem::create_directories(path.parent_path(), ec);
  std::ofstream file(path);
  if (!file.is_open()) {
    return absl::InternalError(
        absl::StrFormat("Failed to open blueprint: %s", path.string()));
  }
  file << json.dump(2);
  builder_state_.blueprint_path = path.string();
  return absl::OkStatus();
#else
  (void)path;
  return absl::UnimplementedError("Blueprint export requires JSON support");
#endif
}
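// Example of the blueprint JSON this emits (values illustrative):
//
//   {
//     "persona_notes": "Terse dungeon-editing copilot.",
//     "goals": ["Document dungeon plan"],
//     "auto_run_tests": false, "auto_sync_rom": true,
//     "auto_focus_proposals": true, "ready_for_e2e": false,
//     "tools": {"resources": true, "dungeon": true, "overworld": true,
//               "dialogue": true, "gui": false, "music": false,
//               "sprite": false, "emulator": false},
//     "stages": [{"name": "Persona", "summary": "", "completed": true}]
//   }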

absl::Status AgentEditor::LoadBuilderBlueprint(
    const std::filesystem::path& path) {
#if defined(YAZE_WITH_JSON)
  std::ifstream file(path);
  if (!file.is_open()) {
    return absl::NotFoundError(
        absl::StrFormat("Blueprint not found: %s", path.string()));
  }

  nlohmann::json json;
  file >> json;

  builder_state_.persona_notes = json.value("persona_notes", "");
  builder_state_.goals.clear();
  if (json.contains("goals") && json["goals"].is_array()) {
    for (const auto& goal : json["goals"]) {
      if (goal.is_string()) {
        builder_state_.goals.push_back(goal.get<std::string>());
      }
    }
  }
  if (json.contains("tools") && json["tools"].is_object()) {
    auto tools = json["tools"];
    builder_state_.tools.resources = tools.value("resources", true);
    builder_state_.tools.dungeon = tools.value("dungeon", true);
    builder_state_.tools.overworld = tools.value("overworld", true);
    builder_state_.tools.dialogue = tools.value("dialogue", true);
    builder_state_.tools.gui = tools.value("gui", false);
    builder_state_.tools.music = tools.value("music", false);
    builder_state_.tools.sprite = tools.value("sprite", false);
    builder_state_.tools.emulator = tools.value("emulator", false);
  }
  builder_state_.auto_run_tests = json.value("auto_run_tests", false);
  builder_state_.auto_sync_rom = json.value("auto_sync_rom", true);
  builder_state_.auto_focus_proposals =
      json.value("auto_focus_proposals", true);
  builder_state_.ready_for_e2e = json.value("ready_for_e2e", false);
  if (json.contains("stages") && json["stages"].is_array()) {
    builder_state_.stages.clear();
    for (const auto& stage : json["stages"]) {
      AgentBuilderState::Stage builder_stage;
      builder_stage.name = stage.value("name", std::string{});
      builder_stage.summary = stage.value("summary", std::string{});
      builder_stage.completed = stage.value("completed", false);
      builder_state_.stages.push_back(builder_stage);
    }
  }
  builder_state_.blueprint_path = path.string();
  return absl::OkStatus();
#else
  (void)path;
  return absl::UnimplementedError("Blueprint import requires JSON support");
#endif
}
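// Loading is deliberately lenient: every json.value() falls back to the same
// defaults as AgentBuilderState (gui/music/sprite/emulator tools off), so a
// blueprint written by an older build simply leaves newer fields at their
// defaults instead of failing.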

// Bot Profile Management Implementation
absl::Status AgentEditor::SaveBotProfile(const BotProfile& profile) {
#if defined(YAZE_WITH_JSON)

@@ -102,6 +102,33 @@ class AgentEditor : public Editor {
    bool show_reasoning = true;
    int max_tool_iterations = 4;
  };

  struct AgentBuilderState {
    struct Stage {
      std::string name;
      std::string summary;
      bool completed = false;
    };
    std::vector<Stage> stages;
    int active_stage = 0;
    std::vector<std::string> goals;
    std::string persona_notes;
    struct ToolPlan {
      bool resources = true;
      bool dungeon = true;
      bool overworld = true;
      bool dialogue = true;
      bool gui = false;
      bool music = false;
      bool sprite = false;
      bool emulator = false;
    } tools;
    bool auto_run_tests = false;
    bool auto_sync_rom = true;
    bool auto_focus_proposals = true;
    std::string blueprint_path;
    bool ready_for_e2e = false;
  };

  // Retro hacker animation state
  float pulse_animation_ = 0.0f;
@@ -190,6 +217,7 @@ class AgentEditor : public Editor {
  void DrawAdvancedMetricsPanel();
  void DrawCommonTilesEditor();
  void DrawNewPromptCreator();
  void DrawAgentBuilderPanel();

  // Setup callbacks
  void SetupChatWidgetCallbacks();
@@ -200,6 +228,8 @@ class AgentEditor : public Editor {
  absl::Status EnsureProfilesDirectory();
  std::string ProfileToJson(const BotProfile& profile) const;
  absl::StatusOr<BotProfile> JsonToProfile(const std::string& json) const;
  absl::Status SaveBuilderBlueprint(const std::filesystem::path& path);
  absl::Status LoadBuilderBlueprint(const std::filesystem::path& path);

  // Internal state
  std::unique_ptr<AgentChatWidget> chat_widget_;  // Owned by AgentEditor
@@ -218,6 +248,7 @@ class AgentEditor : public Editor {
  // Bot Profile System
  BotProfile current_profile_;
  std::vector<BotProfile> loaded_profiles_;
  AgentBuilderState builder_state_;

  // System Prompt Editor
  std::unique_ptr<TextEditor> prompt_editor_;

@@ -12,6 +12,7 @@
#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_join.h"
@@ -181,7 +182,9 @@ ChatMessage CreateMessage(ChatMessage::Sender sender, const std::string& content
}  // namespace

ConversationalAgentService::ConversationalAgentService() {
  provider_config_.provider = "auto";
  ai_service_ = CreateAIService();
  tool_dispatcher_.SetToolPreferences(tool_preferences_);

#ifdef Z3ED_AI
  // Initialize advanced features
@@ -201,7 +204,9 @@ ConversationalAgentService::ConversationalAgentService() {

ConversationalAgentService::ConversationalAgentService(const AgentConfig& config)
    : config_(config) {
  provider_config_.provider = "auto";
  ai_service_ = CreateAIService();
  tool_dispatcher_.SetToolPreferences(tool_preferences_);

#ifdef Z3ED_AI
  // Initialize advanced features
@@ -280,6 +285,7 @@ absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
  const int max_iterations = config_.max_tool_iterations;
  bool waiting_for_text_response = false;
  absl::Time turn_start = absl::Now();
  std::vector<std::string> executed_tools;

  if (config_.verbose) {
    util::PrintInfo(absl::StrCat("Starting agent loop (max ", max_iterations, " iterations)"));
@@ -348,7 +354,7 @@ absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(

      util::PrintToolCall(tool_call.tool_name, args_str);

      auto tool_result_or = tool_dispatcher_.Dispatch(tool_call);
      if (!tool_result_or.ok()) {
        util::PrintError(absl::StrCat(
            "Tool execution failed: ", tool_result_or.status().message()));
@@ -381,6 +387,7 @@ absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
        history_.push_back(tool_result_msg);
      }
      executed_tool = true;
      executed_tools.push_back(tool_call.tool_name);
    }

    if (executed_tool) {
@@ -500,6 +507,23 @@ absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
      ++metrics_.turns_completed;
      metrics_.total_latency += absl::Now() - turn_start;
      chat_response.metrics = BuildMetricsSnapshot();
      if (!agent_response.warnings.empty()) {
        chat_response.warnings = agent_response.warnings;
      }
      ChatMessage::ModelMetadata meta;
      meta.provider = !agent_response.provider.empty()
                          ? agent_response.provider
                          : provider_config_.provider;
      meta.model = !agent_response.model.empty() ? agent_response.model
                                                 : provider_config_.model;
      meta.latency_seconds =
          agent_response.latency_seconds > 0.0
              ? agent_response.latency_seconds
              : absl::ToDoubleSeconds(absl::Now() - turn_start);
      meta.tool_iterations = metrics_.tool_calls;
      meta.tool_names = executed_tools;
      meta.parameters = agent_response.parameters;
      chat_response.model_metadata = meta;
      history_.push_back(chat_response);
      TrimHistoryIfNeeded();
      return chat_response;
@@ -509,6 +533,27 @@ absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
      "Agent did not produce a response after executing tools.");
}

absl::Status ConversationalAgentService::ConfigureProvider(
    const AIServiceConfig& config) {
  auto service_or = CreateAIServiceStrict(config);
  if (!service_or.ok()) {
    return service_or.status();
  }

  ai_service_ = std::move(service_or.value());
  provider_config_ = config;
  if (rom_context_) {
    ai_service_->SetRomContext(rom_context_);
  }
  return absl::OkStatus();
}
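// Usage sketch (values hypothetical; on failure the existing ai_service_ is
// left untouched, which is the point of routing through CreateAIServiceStrict):
//
//   AIServiceConfig cfg;
//   cfg.provider = "ollama";                     // explicit, never "auto"
//   cfg.ollama_host = "http://localhost:11434";
//   cfg.model = "llama3.2";                      // hypothetical model tag
//   if (auto st = service.ConfigureProvider(cfg); !st.ok()) {
//     // report st; the previous provider keeps serving requests
//   }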

void ConversationalAgentService::SetToolPreferences(
    const ToolDispatcher::ToolPreferences& prefs) {
  tool_preferences_ = prefs;
  tool_dispatcher_.SetToolPreferences(tool_preferences_);
}

const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
  return history_;
}

@@ -2,6 +2,7 @@
#define YAZE_SRC_CLI_SERVICE_AGENT_CONVERSATIONAL_AGENT_SERVICE_H_

#include <filesystem>
#include <map>
#include <optional>
#include <string>
#include <vector>
@@ -10,6 +11,7 @@
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/service_factory.h"
#include "cli/service/agent/proposal_executor.h"
#include "cli/service/agent/tool_dispatcher.h"
// Advanced features (only available when Z3ED_AI=ON)
@@ -50,6 +52,16 @@ struct ChatMessage {
  std::optional<std::string> json_pretty;
  std::optional<TableData> table_data;
  bool is_internal = false;  // True for tool results and other messages not meant for user display
  std::vector<std::string> warnings;
  struct ModelMetadata {
    std::string provider;
    std::string model;
    double latency_seconds = 0.0;
    int tool_iterations = 0;
    std::vector<std::string> tool_names;
    std::map<std::string, std::string> parameters;
  };
  std::optional<ModelMetadata> model_metadata;
  struct SessionMetrics {
    int turn_index = 0;
    int total_user_messages = 0;
@@ -102,6 +114,9 @@ class ConversationalAgentService {
  // Configuration
  void SetConfig(const AgentConfig& config) { config_ = config; }
  const AgentConfig& GetConfig() const { return config_; }
  absl::Status ConfigureProvider(const AIServiceConfig& config);
  const AIServiceConfig& provider_config() const { return provider_config_; }
  void SetToolPreferences(const ToolDispatcher::ToolPreferences& prefs);

  ChatMessage::SessionMetrics GetMetrics() const;

@@ -145,6 +160,8 @@ class ConversationalAgentService {
  std::vector<ChatMessage> history_;
  std::unique_ptr<AIService> ai_service_;
  ToolDispatcher tool_dispatcher_;
  ToolDispatcher::ToolPreferences tool_preferences_;
  AIServiceConfig provider_config_;
  Rom* rom_context_ = nullptr;
  AgentConfig config_;
  InternalMetrics metrics_;

@@ -229,6 +229,75 @@ std::vector<std::string> ConvertArgsToVector(

}  // namespace

bool ToolDispatcher::IsToolEnabled(ToolCallType type) const {
  switch (type) {
    case ToolCallType::kResourceList:
    case ToolCallType::kResourceSearch:
      return preferences_.resources;

    case ToolCallType::kDungeonListSprites:
    case ToolCallType::kDungeonDescribeRoom:
    case ToolCallType::kDungeonExportRoom:
    case ToolCallType::kDungeonListObjects:
    case ToolCallType::kDungeonGetRoomTiles:
    case ToolCallType::kDungeonSetRoomProperty:
      return preferences_.dungeon;

    case ToolCallType::kOverworldFindTile:
    case ToolCallType::kOverworldDescribeMap:
    case ToolCallType::kOverworldListWarps:
    case ToolCallType::kOverworldListSprites:
    case ToolCallType::kOverworldGetEntrance:
    case ToolCallType::kOverworldTileStats:
      return preferences_.overworld;

    case ToolCallType::kMessageList:
    case ToolCallType::kMessageRead:
    case ToolCallType::kMessageSearch:
      return preferences_.messages;

    case ToolCallType::kDialogueList:
    case ToolCallType::kDialogueRead:
    case ToolCallType::kDialogueSearch:
      return preferences_.dialogue;

    case ToolCallType::kGuiPlaceTile:
    case ToolCallType::kGuiClick:
    case ToolCallType::kGuiDiscover:
    case ToolCallType::kGuiScreenshot:
      return preferences_.gui;

    case ToolCallType::kMusicList:
    case ToolCallType::kMusicInfo:
    case ToolCallType::kMusicTracks:
      return preferences_.music;

    case ToolCallType::kSpriteList:
    case ToolCallType::kSpriteProperties:
    case ToolCallType::kSpritePalette:
      return preferences_.sprite;

#ifdef YAZE_WITH_GRPC
    case ToolCallType::kEmulatorStep:
    case ToolCallType::kEmulatorRun:
    case ToolCallType::kEmulatorPause:
    case ToolCallType::kEmulatorReset:
    case ToolCallType::kEmulatorGetState:
    case ToolCallType::kEmulatorSetBreakpoint:
    case ToolCallType::kEmulatorClearBreakpoint:
    case ToolCallType::kEmulatorListBreakpoints:
    case ToolCallType::kEmulatorReadMemory:
    case ToolCallType::kEmulatorWriteMemory:
    case ToolCallType::kEmulatorGetRegisters:
    case ToolCallType::kEmulatorGetMetrics:
      return preferences_.emulator;
#endif

    default:
      return true;
  }
}
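// Gating sketch: with preferences_.dungeon == false, a dispatch such as
//
//   ToolCall call;
//   call.tool_name = "dungeon-describe-room";  // hypothetical tool name
//   auto result = dispatcher.Dispatch(call);
//
// resolves to a kDungeon* type and returns absl::FailedPreconditionError
// before any command handler is constructed.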

absl::StatusOr<std::string> ToolDispatcher::Dispatch(const ToolCall& call) {
  // Determine tool call type
  ToolCallType type = GetToolCallType(call.tool_name);
@@ -238,6 +307,12 @@ absl::StatusOr<std::string> ToolDispatcher::Dispatch(const ToolCall& call) {
        absl::StrCat("Unknown tool: ", call.tool_name));
  }

  if (!IsToolEnabled(type)) {
    return absl::FailedPreconditionError(
        absl::StrCat("Tool '", call.tool_name,
                     "' disabled by current agent configuration"));
  }

  // Create the appropriate command handler
  auto handler = CreateHandler(type);
  if (!handler) {

@@ -67,15 +67,36 @@ enum class ToolCallType {

class ToolDispatcher {
 public:
  struct ToolPreferences {
    bool resources = true;
    bool dungeon = true;
    bool overworld = true;
    bool messages = true;
    bool dialogue = true;
    bool gui = true;
    bool music = true;
    bool sprite = true;
#ifdef YAZE_WITH_GRPC
    bool emulator = true;
#else
    bool emulator = false;
#endif
  };

  ToolDispatcher() = default;

  // Execute a tool call and return the result as a string.
  absl::StatusOr<std::string> Dispatch(const ToolCall& tool_call);
  // Provide a ROM context for tool calls that require ROM access.
  void SetRomContext(Rom* rom) { rom_context_ = rom; }
  void SetToolPreferences(const ToolPreferences& prefs) { preferences_ = prefs; }
  const ToolPreferences& preferences() const { return preferences_; }

 private:
  bool IsToolEnabled(ToolCallType type) const;

  Rom* rom_context_ = nullptr;
  ToolPreferences preferences_;
};

}  // namespace agent

@@ -89,6 +89,10 @@ std::string ExtractKeyword(const std::string& normalized_prompt) {
absl::StatusOr<AgentResponse> MockAIService::GenerateResponse(
    const std::string& prompt) {
  AgentResponse response;
  response.provider = "mock";
  response.model = "mock";
  response.parameters["mode"] = "scripted";
  response.parameters["temperature"] = "0.0";
  const std::string normalized = absl::AsciiStrToLower(prompt);

  if (normalized.empty()) {
@@ -187,6 +191,10 @@ absl::StatusOr<AgentResponse> MockAIService::GenerateResponse(
      absl::StrContains(it->message, "\"id\"") ||
      absl::StrContains(it->message, "\n{"))) {
    AgentResponse response;
    response.provider = "mock";
    response.model = "mock";
    response.parameters["mode"] = "scripted";
    response.parameters["temperature"] = "0.0";
    response.text_response =
        "Here's what I found:\n" + it->message +
        "\nLet me know if you'd like to make a change.";

@@ -27,6 +27,17 @@ struct AgentResponse {

  // The AI's explanation of its thought process.
  std::string reasoning;

  // Provider + model metadata so the UI can show badges / filters.
  std::string provider;
  std::string model;

  // Basic timing + parameter telemetry.
  double latency_seconds = 0.0;
  std::map<std::string, std::string> parameters;

  // Optional warnings surfaced by the backend (e.g. truncated context).
  std::vector<std::string> warnings;
};

}  // namespace cli

@@ -11,6 +11,8 @@
#include "absl/strings/str_cat.h"
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "util/platform_paths.h"

#ifdef YAZE_WITH_JSON
@@ -296,6 +298,8 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
    return absl::FailedPreconditionError("Gemini API key not configured");
  }

  absl::Time request_start = absl::Now();

  try {
    if (config_.verbose) {
      std::cerr << "[DEBUG] Using curl for HTTPS request" << std::endl;
@@ -448,7 +452,23 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
    if (config_.verbose) {
      std::cerr << "[DEBUG] Parsing response..." << std::endl;
    }
    return ParseGeminiResponse(response_str);
    auto parsed_or = ParseGeminiResponse(response_str);
    if (!parsed_or.ok()) {
      return parsed_or.status();
    }
    AgentResponse agent_response = std::move(parsed_or.value());
    agent_response.provider = "gemini";
    agent_response.model = config_.model;
    agent_response.latency_seconds =
        absl::ToDoubleSeconds(absl::Now() - request_start);
    agent_response.parameters["prompt_version"] = config_.prompt_version;
    agent_response.parameters["temperature"] =
        absl::StrFormat("%.2f", config_.temperature);
    agent_response.parameters["max_output_tokens"] =
        absl::StrFormat("%d", config_.max_output_tokens);
    agent_response.parameters["function_calling"] =
        function_calling_enabled_ ? "true" : "false";
    return agent_response;

  } catch (const std::exception& e) {
    if (config_.verbose) {

@@ -5,6 +5,8 @@

#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "cli/service/agent/conversational_agent_service.h"

#ifdef YAZE_WITH_JSON
@@ -101,7 +103,7 @@ absl::Status OllamaAIService::CheckAvailability() {
#endif
}
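// Abridged shape of the Ollama /api/tags payload the parser below consumes
// (field values illustrative):
//
//   {"models": [{
//     "name": "llama3.2:3b", "digest": "abc123...", "size": 2019393189,
//     "modified_at": "2024-09-25T18:00:00.000000000Z",
//     "details": {"family": "llama", "parameter_size": "3.2B",
//                 "quantization_level": "Q4_K_M"}
//   }]}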

absl::StatusOr<std::vector<std::string>> OllamaAIService::ListAvailableModels() {
absl::StatusOr<std::vector<OllamaAIService::ModelInfo>> OllamaAIService::ListAvailableModels() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError("Requires httplib and JSON support");
#else
@@ -117,16 +119,42 @@ absl::StatusOr<std::vector<std::string>> OllamaAIService::ListAvailableModels()
    }

    nlohmann::json models_json = nlohmann::json::parse(res->body);
    std::vector<std::string> models;
    std::vector<ModelInfo> models;

    if (models_json.contains("models") && models_json["models"].is_array()) {
      for (const auto& model : models_json["models"]) {
        if (model.contains("name")) {
          models.push_back(model["name"].get<std::string>());
        ModelInfo info;
        if (model.contains("name") && model["name"].is_string()) {
          info.name = model["name"].get<std::string>();
        }
        if (model.contains("digest") && model["digest"].is_string()) {
          info.digest = model["digest"].get<std::string>();
        }
        if (model.contains("size")) {
          if (model["size"].is_string()) {
            info.size_bytes = std::strtoull(model["size"].get<std::string>().c_str(), nullptr, 10);
          } else if (model["size"].is_number_unsigned()) {
            info.size_bytes = model["size"].get<uint64_t>();
          }
        }
        if (model.contains("modified_at") && model["modified_at"].is_string()) {
          absl::Time parsed_time;
          if (absl::ParseTime(absl::RFC3339_full,
                              model["modified_at"].get<std::string>(),
                              &parsed_time, nullptr)) {
            info.modified_at = parsed_time;
          }
        }
        if (model.contains("details") && model["details"].is_object()) {
          const auto& details = model["details"];
          info.parameter_size = details.value("parameter_size", "");
          info.quantization_level = details.value("quantization_level", "");
          info.family = details.value("family", "");
        }
        models.push_back(std::move(info));
      }
    }

    return models;
  } catch (const std::exception& e) {
    return absl::InternalError(absl::StrCat(
@@ -168,29 +196,62 @@ absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
      "Ollama service requires httplib and JSON support. "
      "Install vcpkg dependencies or use bundled libraries.");
#else
  // TODO: Implement history-aware prompting.
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }
  std::string prompt = prompt_builder_.BuildPromptFromHistory(history);

  // Build request payload
  nlohmann::json request_body = {
      {"model", config_.model},
      {"system", config_.system_prompt},
      {"prompt", prompt},
      {"stream", false},
      {"options",
       {{"temperature", config_.temperature},
        {"num_predict", config_.max_tokens}}},
      {"format", "json"}  // Force JSON output
  };
  nlohmann::json messages = nlohmann::json::array();
  for (const auto& chat_msg : history) {
    if (chat_msg.is_internal) {
      continue;
    }
    nlohmann::json entry;
    entry["role"] =
        chat_msg.sender == agent::ChatMessage::Sender::kUser ? "user"
                                                             : "assistant";
    entry["content"] = chat_msg.message;
    messages.push_back(std::move(entry));
  }

  if (messages.empty()) {
    return absl::InvalidArgumentError(
        "History does not contain any user/assistant messages.");
  }

  std::string fallback_prompt =
      prompt_builder_.BuildPromptFromHistory(history);

  nlohmann::json request_body;
  request_body["model"] = config_.model;
  request_body["system"] = config_.system_prompt;
  request_body["stream"] = config_.stream;
  request_body["format"] = "json";

  if (config_.use_chat_completions) {
    request_body["messages"] = messages;
  } else {
    request_body["prompt"] = fallback_prompt;
  }

  nlohmann::json options = {
      {"temperature", config_.temperature},
      {"top_p", config_.top_p},
      {"top_k", config_.top_k},
      {"num_predict", config_.max_tokens},
      {"num_ctx", config_.num_ctx}};
  request_body["options"] = options;

  AgentResponse agent_response;
  agent_response.provider = "ollama";

  try {
    httplib::Client cli(config_.base_url);
    cli.set_read_timeout(60);  // Longer timeout for inference

    auto res = cli.Post("/api/generate", request_body.dump(), "application/json");

    const char* endpoint = config_.use_chat_completions ? "/api/chat"
                                                        : "/api/generate";
    absl::Time request_start = absl::Now();
    auto res = cli.Post(endpoint, request_body.dump(), "application/json");

    if (!res) {
      return absl::UnavailableError(
@@ -243,16 +304,34 @@ absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
      try {
        response_json = nlohmann::json::parse(json_only);
      } catch (const nlohmann::json::exception&) {
        return absl::InvalidArgumentError(
            "LLM did not return valid JSON. Response:\n" + llm_output);
        agent_response.warnings.push_back(
            "LLM response was not valid JSON; returning raw text.");
        agent_response.text_response = llm_output;
        return agent_response;
      }
    } else {
      return absl::InvalidArgumentError(
          "LLM did not return a JSON object. Response:\n" + llm_output);
      agent_response.warnings.push_back(
          "LLM response did not contain a JSON object; returning raw text.");
      agent_response.text_response = llm_output;
      return agent_response;
    }
  }

  AgentResponse agent_response;
  agent_response.model =
      ollama_wrapper.value("model", config_.model);
  agent_response.latency_seconds =
      absl::ToDoubleSeconds(absl::Now() - request_start);
  agent_response.parameters["temperature"] =
      absl::StrFormat("%.2f", config_.temperature);
  agent_response.parameters["top_p"] =
      absl::StrFormat("%.2f", config_.top_p);
  agent_response.parameters["top_k"] =
      absl::StrFormat("%d", config_.top_k);
  agent_response.parameters["num_predict"] =
      absl::StrFormat("%d", config_.max_tokens);
  agent_response.parameters["num_ctx"] =
      absl::StrFormat("%d", config_.num_ctx);
  agent_response.parameters["endpoint"] = endpoint;
  if (response_json.contains("text_response") &&
      response_json["text_response"].is_string()) {
    agent_response.text_response =

@@ -1,11 +1,13 @@
#ifndef YAZE_SRC_CLI_OLLAMA_AI_SERVICE_H_
#define YAZE_SRC_CLI_OLLAMA_AI_SERVICE_H_

#include <cstdint>
#include <string>
#include <vector>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/prompt_builder.h"

@@ -20,12 +22,28 @@ struct OllamaConfig {
  int max_tokens = 2048;               // Sufficient for command lists
  std::string system_prompt;           // Injected from resource catalogue
  bool use_enhanced_prompting = true;  // Enable few-shot examples
  float top_p = 0.92f;
  int top_k = 40;
  int num_ctx = 4096;
  bool stream = false;
  bool use_chat_completions = true;
  std::vector<std::string> favorite_models;
};

class OllamaAIService : public AIService {
 public:
  explicit OllamaAIService(const OllamaConfig& config);

  struct ModelInfo {
    std::string name;
    std::string digest;
    std::string family;
    std::string parameter_size;
    std::string quantization_level;
    uint64_t size_bytes = 0;
    absl::Time modified_at = absl::InfinitePast();
  };

  void SetRomContext(Rom* rom) override;

  // Generate z3ed commands from natural language prompt
@@ -38,7 +56,7 @@ class OllamaAIService : public AIService {
  absl::Status CheckAvailability();

  // List available models on Ollama server
  absl::StatusOr<std::vector<std::string>> ListAvailableModels();
  absl::StatusOr<std::vector<ModelInfo>> ListAvailableModels();

 private:
  OllamaConfig config_;
|
||||
@@ -6,6 +6,7 @@
|
||||
#include "absl/flags/declare.h"
|
||||
#include "absl/flags/flag.h"
|
||||
#include "absl/strings/ascii.h"
|
||||
#include "absl/strings/str_format.h"
|
||||
#include "cli/service/ai/ai_service.h"
|
||||
#include "cli/service/ai/ollama_ai_service.h"
|
||||
|
||||
@@ -45,40 +46,67 @@ std::unique_ptr<AIService> CreateAIService() {
|
||||
}
|
||||
|
||||
std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
|
||||
std::string provider = config.provider;
|
||||
|
||||
// Auto-detection: try gemini → ollama → mock
|
||||
if (provider == "auto") {
|
||||
// Try Gemini first if API key is available
|
||||
AIServiceConfig effective_config = config;
|
||||
if (effective_config.provider.empty()) {
|
||||
effective_config.provider = "auto";
|
||||
}
|
||||
|
||||
if (effective_config.provider == "auto") {
|
||||
#ifdef YAZE_WITH_JSON
|
||||
if (!config.gemini_api_key.empty()) {
|
||||
if (!effective_config.gemini_api_key.empty()) {
|
||||
std::cout << "🤖 Auto-detecting AI provider...\n";
|
||||
std::cout << " Found Gemini API key, using Gemini\n";
|
||||
provider = "gemini";
|
||||
effective_config.provider = "gemini";
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
// Try Ollama next
|
||||
OllamaConfig test_config;
|
||||
test_config.base_url = config.ollama_host;
|
||||
auto test_service = std::make_unique<OllamaAIService>(test_config);
|
||||
if (test_service->CheckAvailability().ok()) {
|
||||
test_config.base_url = effective_config.ollama_host;
|
||||
if (!effective_config.model.empty()) {
|
||||
test_config.model = effective_config.model;
|
||||
}
|
||||
auto tester = std::make_unique<OllamaAIService>(test_config);
|
||||
if (tester->CheckAvailability().ok()) {
|
||||
std::cout << "🤖 Auto-detecting AI provider...\n";
|
||||
std::cout << " Ollama available, using Ollama\n";
|
||||
provider = "ollama";
|
||||
effective_config.provider = "ollama";
|
||||
if (effective_config.model.empty()) {
|
||||
effective_config.model = test_config.model;
|
||||
}
|
||||
} else {
|
||||
std::cout << "🤖 No AI provider configured, using MockAIService\n";
|
||||
std::cout << " Tip: Set GEMINI_API_KEY or start Ollama for real AI\n";
|
||||
provider = "mock";
|
||||
effective_config.provider = "mock";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (provider != "mock") {
|
||||
std::cout << "🤖 AI Provider: " << provider << "\n";
|
||||
|
||||
if (effective_config.provider != "mock") {
|
||||
std::cout << "🤖 AI Provider: " << effective_config.provider << "\n";
|
||||
}
|
||||
|
||||
// Ollama provider
|
||||
|
||||
auto service_or = CreateAIServiceStrict(effective_config);
|
||||
if (service_or.ok()) {
|
||||
return std::move(service_or.value());
|
||||
}
|
||||
|
||||
std::cerr << "⚠️ " << service_or.status().message() << std::endl;
|
||||
std::cerr << " Falling back to MockAIService" << std::endl;
|
||||
return std::make_unique<MockAIService>();
|
||||
}
|
||||
|
||||
absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
    const AIServiceConfig& config) {
  std::string provider = absl::AsciiStrToLower(config.provider);
  if (provider.empty() || provider == "auto") {
    return absl::InvalidArgumentError(
        "CreateAIServiceStrict requires an explicit provider (not 'auto')");
  }

  if (provider == "mock") {
    return std::make_unique<MockAIService>();
  }

  if (provider == "ollama") {
    OllamaConfig ollama_config;
    ollama_config.base_url = config.ollama_host;
@@ -87,28 +115,19 @@ std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
    }

    auto service = std::make_unique<OllamaAIService>(ollama_config);

    // Health check
    if (auto status = service->CheckAvailability(); !status.ok()) {
      std::cerr << "⚠️ Ollama unavailable: " << status.message() << std::endl;
      std::cerr << " Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
    auto status = service->CheckAvailability();
    if (!status.ok()) {
      return status;
    }

    std::cout << " Using model: " << ollama_config.model << std::endl;
    return std::unique_ptr<AIService>(std::move(service));
    return service;
  }

  // Gemini provider
#ifdef YAZE_WITH_JSON
  if (provider == "gemini") {
    if (config.gemini_api_key.empty()) {
      std::cerr << "⚠️ Gemini API key not provided" << std::endl;
      std::cerr << " Use --gemini_api_key=<key> or GEMINI_API_KEY environment variable" << std::endl;
      std::cerr << " Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
      return absl::FailedPreconditionError(
          "Gemini API key not provided. Set --gemini_api_key or GEMINI_API_KEY.");
    }

    GeminiConfig gemini_config(config.gemini_api_key);
    if (!config.model.empty()) {
      gemini_config.model = config.model;
@@ -116,37 +135,17 @@ std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
    gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
    gemini_config.use_function_calling = absl::GetFlag(FLAGS_use_function_calling);
    gemini_config.verbose = config.verbose;

    std::cout << " Model: " << gemini_config.model << std::endl;
    if (config.verbose) {
      std::cerr << " Prompt: " << gemini_config.prompt_version << std::endl;
    }

    auto service = std::make_unique<GeminiAIService>(gemini_config);
    // Health check - DISABLED due to SSL issues
    // if (auto status = service->CheckAvailability(); !status.ok()) {
    //   std::cerr << "⚠️ Gemini unavailable: " << status.message() << std::endl;
    //   std::cerr << " Falling back to MockAIService" << std::endl;
    //   return std::make_unique<MockAIService>();
    // }

    if (config.verbose) {
      std::cerr << "[DEBUG] Gemini service ready" << std::endl;
    }
    return service;
    return std::make_unique<GeminiAIService>(gemini_config);
  }
#else
  if (provider == "gemini") {
    std::cerr << "⚠️ Gemini support not available: rebuild with YAZE_WITH_JSON=ON" << std::endl;
    std::cerr << " Falling back to MockAIService" << std::endl;
    return absl::FailedPreconditionError(
        "Gemini support not available: rebuild with YAZE_WITH_JSON=ON");
  }
#endif

  // Default: Mock service
  if (provider == "mock") {
    std::cout << " Using MockAIService (no real AI)\n";
  }
  return std::make_unique<MockAIService>();
  return absl::InvalidArgumentError(
      absl::StrFormat("Unknown AI provider: %s", config.provider));
}

} // namespace cli
@@ -4,6 +4,7 @@
#include <memory>
#include <string>

#include "absl/status/statusor.h"
#include "cli/service/ai/ai_service.h"

namespace yaze {
@@ -22,6 +23,8 @@ std::unique_ptr<AIService> CreateAIService();

// Create AI service with explicit configuration
std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config);
absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
    const AIServiceConfig& config);

} // namespace cli
} // namespace yaze
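
`CreateAIServiceStrict` is the error-surfacing counterpart: callers that must not silently degrade to the mock can branch on the returned `absl::StatusOr`. A sketch — the include path and Ollama host below are assumptions, not confirmed by this diff:

```
#include <iostream>
#include <memory>
#include <utility>

#include "cli/service/ai/ai_service.h"  // assumed location of the factory declarations

int RunAgent() {
  yaze::cli::AIServiceConfig config;
  config.provider = "ollama";                     // explicit; "auto" is rejected
  config.ollama_host = "http://localhost:11434";  // assumed default host

  auto service_or = yaze::cli::CreateAIServiceStrict(config);
  if (!service_or.ok()) {
    std::cerr << service_or.status().message() << "\n";
    return 1;
  }
  std::unique_ptr<yaze::cli::AIService> service = std::move(service_or.value());
  // ... use service ...
  return 0;
}
```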
@@ -239,6 +239,23 @@ absl::Status YazeProject::LoadFromYazeFormat(const std::string& project_path) {
      else if (key == "verbose") agent_settings.verbose = ParseBool(value);
      else if (key == "max_tool_iterations") agent_settings.max_tool_iterations = std::stoi(value);
      else if (key == "max_retry_attempts") agent_settings.max_retry_attempts = std::stoi(value);
      else if (key == "temperature") agent_settings.temperature = ParseFloat(value);
      else if (key == "top_p") agent_settings.top_p = ParseFloat(value);
      else if (key == "max_output_tokens") agent_settings.max_output_tokens = std::stoi(value);
      else if (key == "stream_responses") agent_settings.stream_responses = ParseBool(value);
      else if (key == "favorite_models") agent_settings.favorite_models = ParseStringList(value);
      else if (key == "model_chain") agent_settings.model_chain = ParseStringList(value);
      else if (key == "chain_mode") agent_settings.chain_mode = std::stoi(value);
      else if (key == "enable_tool_resources") agent_settings.enable_tool_resources = ParseBool(value);
      else if (key == "enable_tool_dungeon") agent_settings.enable_tool_dungeon = ParseBool(value);
      else if (key == "enable_tool_overworld") agent_settings.enable_tool_overworld = ParseBool(value);
      else if (key == "enable_tool_messages") agent_settings.enable_tool_messages = ParseBool(value);
      else if (key == "enable_tool_dialogue") agent_settings.enable_tool_dialogue = ParseBool(value);
      else if (key == "enable_tool_gui") agent_settings.enable_tool_gui = ParseBool(value);
      else if (key == "enable_tool_music") agent_settings.enable_tool_music = ParseBool(value);
      else if (key == "enable_tool_sprite") agent_settings.enable_tool_sprite = ParseBool(value);
      else if (key == "enable_tool_emulator") agent_settings.enable_tool_emulator = ParseBool(value);
      else if (key == "builder_blueprint_path") agent_settings.builder_blueprint_path = value;
    }
    else if (current_section == "build") {
      if (key == "build_script") build_script = value;
@@ -345,6 +362,23 @@ absl::Status YazeProject::SaveToYazeFormat() {
  file << "verbose=" << (agent_settings.verbose ? "true" : "false") << "\n";
  file << "max_tool_iterations=" << agent_settings.max_tool_iterations << "\n";
  file << "max_retry_attempts=" << agent_settings.max_retry_attempts << "\n\n";
  file << "temperature=" << agent_settings.temperature << "\n";
  file << "top_p=" << agent_settings.top_p << "\n";
  file << "max_output_tokens=" << agent_settings.max_output_tokens << "\n";
  file << "stream_responses=" << (agent_settings.stream_responses ? "true" : "false") << "\n";
  file << "favorite_models=" << absl::StrJoin(agent_settings.favorite_models, ",") << "\n";
  file << "model_chain=" << absl::StrJoin(agent_settings.model_chain, ",") << "\n";
  file << "chain_mode=" << agent_settings.chain_mode << "\n";
  file << "enable_tool_resources=" << (agent_settings.enable_tool_resources ? "true" : "false") << "\n";
  file << "enable_tool_dungeon=" << (agent_settings.enable_tool_dungeon ? "true" : "false") << "\n";
  file << "enable_tool_overworld=" << (agent_settings.enable_tool_overworld ? "true" : "false") << "\n";
  file << "enable_tool_messages=" << (agent_settings.enable_tool_messages ? "true" : "false") << "\n";
  file << "enable_tool_dialogue=" << (agent_settings.enable_tool_dialogue ? "true" : "false") << "\n";
  file << "enable_tool_gui=" << (agent_settings.enable_tool_gui ? "true" : "false") << "\n";
  file << "enable_tool_music=" << (agent_settings.enable_tool_music ? "true" : "false") << "\n";
  file << "enable_tool_sprite=" << (agent_settings.enable_tool_sprite ? "true" : "false") << "\n";
  file << "enable_tool_emulator=" << (agent_settings.enable_tool_emulator ? "true" : "false") << "\n";
  file << "builder_blueprint_path=" << agent_settings.builder_blueprint_path << "\n\n";

  // Custom keybindings section
  if (!workspace_settings.custom_keybindings.empty()) {
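
For reference, the agent block of the saved `.yaze` file should come out roughly as below. This is a sketch assembled from the struct defaults shown later in this diff; the list values and blueprint path are illustrative only. Note the stray blank line after `max_retry_attempts`: the writer still emits `"\n\n"` there even though more keys now follow, which an INI-style parser tolerates but which was presumably meant to close the section:

```
verbose=false
max_tool_iterations=4
max_retry_attempts=3

temperature=0.25
top_p=0.95
max_output_tokens=2048
stream_responses=false
favorite_models=llama3:8b,mistral:7b
model_chain=
chain_mode=0
enable_tool_resources=true
enable_tool_dungeon=true
enable_tool_overworld=true
enable_tool_messages=true
enable_tool_dialogue=true
enable_tool_gui=true
enable_tool_music=true
enable_tool_sprite=true
enable_tool_emulator=true
builder_blueprint_path=~/.yaze/agent/blueprints/default.json
```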
@@ -992,6 +1026,57 @@ absl::Status YazeProject::LoadFromJsonFormat(const std::string& project_path) {
    workspace_settings.autosave_interval_secs = ws["auto_save_interval"].get<float>();
  }

  if (proj.contains("agent_settings") && proj["agent_settings"].is_object()) {
    auto& agent = proj["agent_settings"];
    agent_settings.ai_provider = agent.value("ai_provider", agent_settings.ai_provider);
    agent_settings.ai_model = agent.value("ai_model", agent_settings.ai_model);
    agent_settings.ollama_host = agent.value("ollama_host", agent_settings.ollama_host);
    agent_settings.gemini_api_key = agent.value("gemini_api_key", agent_settings.gemini_api_key);
    agent_settings.use_custom_prompt = agent.value("use_custom_prompt", agent_settings.use_custom_prompt);
    agent_settings.custom_system_prompt = agent.value("custom_system_prompt", agent_settings.custom_system_prompt);
    agent_settings.show_reasoning = agent.value("show_reasoning", agent_settings.show_reasoning);
    agent_settings.verbose = agent.value("verbose", agent_settings.verbose);
    agent_settings.max_tool_iterations = agent.value("max_tool_iterations", agent_settings.max_tool_iterations);
    agent_settings.max_retry_attempts = agent.value("max_retry_attempts", agent_settings.max_retry_attempts);
    agent_settings.temperature = agent.value("temperature", agent_settings.temperature);
    agent_settings.top_p = agent.value("top_p", agent_settings.top_p);
    agent_settings.max_output_tokens = agent.value("max_output_tokens", agent_settings.max_output_tokens);
    agent_settings.stream_responses = agent.value("stream_responses", agent_settings.stream_responses);
    if (agent.contains("favorite_models") && agent["favorite_models"].is_array()) {
      agent_settings.favorite_models.clear();
      for (const auto& model : agent["favorite_models"]) {
        if (model.is_string()) agent_settings.favorite_models.push_back(model.get<std::string>());
      }
    }
    if (agent.contains("model_chain") && agent["model_chain"].is_array()) {
      agent_settings.model_chain.clear();
      for (const auto& model : agent["model_chain"]) {
        if (model.is_string()) agent_settings.model_chain.push_back(model.get<std::string>());
      }
    }
    agent_settings.chain_mode = agent.value("chain_mode", agent_settings.chain_mode);
    agent_settings.enable_tool_resources =
        agent.value("enable_tool_resources", agent_settings.enable_tool_resources);
    agent_settings.enable_tool_dungeon =
        agent.value("enable_tool_dungeon", agent_settings.enable_tool_dungeon);
    agent_settings.enable_tool_overworld =
        agent.value("enable_tool_overworld", agent_settings.enable_tool_overworld);
    agent_settings.enable_tool_messages =
        agent.value("enable_tool_messages", agent_settings.enable_tool_messages);
    agent_settings.enable_tool_dialogue =
        agent.value("enable_tool_dialogue", agent_settings.enable_tool_dialogue);
    agent_settings.enable_tool_gui =
        agent.value("enable_tool_gui", agent_settings.enable_tool_gui);
    agent_settings.enable_tool_music =
        agent.value("enable_tool_music", agent_settings.enable_tool_music);
    agent_settings.enable_tool_sprite =
        agent.value("enable_tool_sprite", agent_settings.enable_tool_sprite);
    agent_settings.enable_tool_emulator =
        agent.value("enable_tool_emulator", agent_settings.enable_tool_emulator);
    agent_settings.builder_blueprint_path =
        agent.value("builder_blueprint_path", agent_settings.builder_blueprint_path);
  }

  // Build settings
  if (proj.contains("build_script")) build_script = proj["build_script"].get<std::string>();
  if (proj.contains("output_folder")) output_folder = proj["output_folder"].get<std::string>();
@@ -1039,6 +1124,35 @@ absl::Status YazeProject::SaveToJsonFormat() {
  proj["workspace_settings"]["auto_save_enabled"] = workspace_settings.autosave_enabled;
  proj["workspace_settings"]["auto_save_interval"] = workspace_settings.autosave_interval_secs;

  auto& agent = proj["agent_settings"];
  agent["ai_provider"] = agent_settings.ai_provider;
  agent["ai_model"] = agent_settings.ai_model;
  agent["ollama_host"] = agent_settings.ollama_host;
  agent["gemini_api_key"] = agent_settings.gemini_api_key;
  agent["use_custom_prompt"] = agent_settings.use_custom_prompt;
  agent["custom_system_prompt"] = agent_settings.custom_system_prompt;
  agent["show_reasoning"] = agent_settings.show_reasoning;
  agent["verbose"] = agent_settings.verbose;
  agent["max_tool_iterations"] = agent_settings.max_tool_iterations;
  agent["max_retry_attempts"] = agent_settings.max_retry_attempts;
  agent["temperature"] = agent_settings.temperature;
  agent["top_p"] = agent_settings.top_p;
  agent["max_output_tokens"] = agent_settings.max_output_tokens;
  agent["stream_responses"] = agent_settings.stream_responses;
  agent["favorite_models"] = agent_settings.favorite_models;
  agent["model_chain"] = agent_settings.model_chain;
  agent["chain_mode"] = agent_settings.chain_mode;
  agent["enable_tool_resources"] = agent_settings.enable_tool_resources;
  agent["enable_tool_dungeon"] = agent_settings.enable_tool_dungeon;
  agent["enable_tool_overworld"] = agent_settings.enable_tool_overworld;
  agent["enable_tool_messages"] = agent_settings.enable_tool_messages;
  agent["enable_tool_dialogue"] = agent_settings.enable_tool_dialogue;
  agent["enable_tool_gui"] = agent_settings.enable_tool_gui;
  agent["enable_tool_music"] = agent_settings.enable_tool_music;
  agent["enable_tool_sprite"] = agent_settings.enable_tool_sprite;
  agent["enable_tool_emulator"] = agent_settings.enable_tool_emulator;
  agent["builder_blueprint_path"] = agent_settings.builder_blueprint_path;

  // Build settings
  proj["build_script"] = build_script;
  proj["git_repository"] = git_repository;
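
The matching `agent_settings` object in the JSON project file should look roughly like this sketch. Values reflect the struct defaults in the next hunk; the provider and host strings are illustrative, and keys whose defaults are not visible in this diff are omitted:

```
"agent_settings": {
  "ai_provider": "ollama",
  "ai_model": "",
  "ollama_host": "http://localhost:11434",
  "verbose": false,
  "max_tool_iterations": 4,
  "max_retry_attempts": 3,
  "temperature": 0.25,
  "top_p": 0.95,
  "max_output_tokens": 2048,
  "stream_responses": false,
  "favorite_models": [],
  "model_chain": [],
  "chain_mode": 0,
  "enable_tool_resources": true,
  "enable_tool_emulator": true,
  "builder_blueprint_path": ""
}
```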
@@ -123,6 +123,23 @@ struct YazeProject {
    bool verbose = false;
    int max_tool_iterations = 4;
    int max_retry_attempts = 3;
    float temperature = 0.25f;
    float top_p = 0.95f;
    int max_output_tokens = 2048;
    bool stream_responses = false;
    std::vector<std::string> favorite_models;
    std::vector<std::string> model_chain;
    int chain_mode = 0;
    bool enable_tool_resources = true;
    bool enable_tool_dungeon = true;
    bool enable_tool_overworld = true;
    bool enable_tool_messages = true;
    bool enable_tool_dialogue = true;
    bool enable_tool_gui = true;
    bool enable_tool_music = true;
    bool enable_tool_sprite = true;
    bool enable_tool_emulator = true;
    std::string builder_blueprint_path;  // Saved agent builder configuration
  } agent_settings;

  // ZScream compatibility (for importing existing projects)
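
These per-category booleans back the tool configuration matrix in the chat widget, so a dispatcher can consult them before exposing a tool category to the agent. A hypothetical helper — the category names mirror the keys above, but this exact function is not part of the diff:

```
#include <string_view>

// Hypothetical: map a dispatcher category name onto the saved toggles.
bool IsToolCategoryEnabled(const YazeProject& project, std::string_view category) {
  const auto& s = project.agent_settings;
  if (category == "resources") return s.enable_tool_resources;
  if (category == "dungeon") return s.enable_tool_dungeon;
  if (category == "overworld") return s.enable_tool_overworld;
  if (category == "messages") return s.enable_tool_messages;
  if (category == "dialogue") return s.enable_tool_dialogue;
  if (category == "gui") return s.enable_tool_gui;
  if (category == "music") return s.enable_tool_music;
  if (category == "sprite") return s.enable_tool_sprite;
  if (category == "emulator") return s.enable_tool_emulator;
  return false;  // unknown categories stay disabled
}
```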