Integrate AI Agent Services and Chat Interface
- Added AI agent services, including `ConversationalAgentService`, so users can interact with the system through a chat interface.
- Implemented `ChatTUI` for a terminal-based chat experience, letting users send messages to and receive responses from the AI agent.
- Updated `EditorManager` with options to show the agent chat widget and the performance dashboard.
- Extended the CMake configuration to build the new AI service and chat interface sources.

This commit significantly expands the functionality of the z3ed system, paving the way for a more interactive and user-friendly ROM hacking experience.
@@ -93,6 +93,7 @@ target_include_directories(
  ${CMAKE_SOURCE_DIR}/incl/
  ${CMAKE_SOURCE_DIR}/src/
  ${CMAKE_SOURCE_DIR}/src/lib/imgui_test_engine
  ${CMAKE_SOURCE_DIR}/third_party/httplib
  ${SDL2_INCLUDE_DIR}
  ${CMAKE_CURRENT_BINARY_DIR}
  ${PROJECT_BINARY_DIR}
@@ -273,6 +274,19 @@ if(YAZE_WITH_GRPC)
    ${CMAKE_SOURCE_DIR}/src/app/core/testing/test_script_parser.cc
    ${CMAKE_SOURCE_DIR}/src/app/core/testing/test_script_parser.h)

  # Add AI agent sources
  target_sources(yaze PRIVATE
    ${CMAKE_SOURCE_DIR}/src/cli/service/agent/conversational_agent_service.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/ai/service_factory.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/ai/ai_service.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/ai/ollama_ai_service.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/ai/gemini_ai_service.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/ai/prompt_builder.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/planning/tile16_proposal_generator.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/resources/resource_context_builder.cc
    ${CMAKE_SOURCE_DIR}/src/cli/service/resources/resource_catalog.cc
  )

  # Link gRPC libraries
  target_link_libraries(yaze PRIVATE
    grpc++

@@ -280,4 +294,5 @@ if(YAZE_WITH_GRPC)
    libprotobuf)

  message(STATUS "✓ gRPC ImGuiTestHarness integrated")
  message(STATUS "✓ AI Agent services integrated into yaze GUI")
endif()
@@ -37,3 +37,7 @@ set(
  app/test/unit_test_suite.h
  app/editor/system/proposal_drawer.cc
)

if(YAZE_WITH_GRPC)
  list(APPEND YAZE_APP_EDITOR_SRC app/editor/system/agent_chat_widget.cc)
endif()
@@ -8,7 +8,6 @@
#include "absl/strings/match.h"
#include "absl/strings/str_cat.h"
#include "app/core/features.h"
#include "app/gfx/performance_profiler.h"
#include "app/core/platform/file_dialog.h"
#include "app/core/project.h"
#include "app/editor/code/assembly_editor.h"

@@ -21,6 +20,7 @@
#include "app/editor/sprite/sprite_editor.h"
#include "app/emu/emulator.h"
#include "app/gfx/arena.h"
#include "app/gfx/performance_profiler.h"
#include "app/gui/background_renderer.h"
#include "app/gui/icons.h"
#include "app/gui/input.h"

@@ -38,11 +38,16 @@
#ifdef YAZE_ENABLE_GTEST
#include "app/test/unit_test_suite.h"
#endif
#include "app/editor/system/settings_editor.h"
#include "app/editor/system/toast_manager.h"
#include "app/emu/emulator.h"
#include "app/gfx/performance_dashboard.h"
#include "editor/editor.h"
#include "imgui/imgui.h"
#include "imgui/misc/cpp/imgui_stdlib.h"
#include "util/log.h"
#include "util/macro.h"
#include "yaze_config.h"

namespace yaze {
namespace editor {
@@ -706,6 +711,10 @@ void EditorManager::Initialize(const std::string& filename) {
      // Agent Proposals
      {absl::StrCat(ICON_MD_PREVIEW, " Agent Proposals"), "",
       [&]() { proposal_drawer_.Toggle(); }},
#ifdef YAZE_WITH_GRPC
      {absl::StrCat(ICON_MD_CHAT, " Agent Chat"), "",
       [&]() { show_agent_chat_widget_ = !show_agent_chat_widget_; }},
#endif

      {gui::kSeparator, "", nullptr, []() { return true; }},
@@ -915,6 +924,19 @@ absl::Status EditorManager::Update() {
      }
    }
  }

  if (show_performance_dashboard_) {
    gfx::PerformanceDashboard::Get().Render();
  }
  if (show_proposal_drawer_) {
    proposal_drawer_.Draw();
  }
#ifdef YAZE_WITH_GRPC
  if (show_agent_chat_widget_) {
    agent_chat_widget_.Draw();
  }
#endif

  return absl::OkStatus();
}
@@ -21,6 +21,9 @@
#include "app/editor/sprite/sprite_editor.h"
#include "app/editor/system/popup_manager.h"
#include "app/editor/system/proposal_drawer.h"
#ifdef YAZE_WITH_GRPC
#include "app/editor/system/agent_chat_widget.h"
#endif
#include "app/editor/system/settings_editor.h"
#include "app/editor/system/toast_manager.h"
#include "app/emu/emulator.h"
@@ -174,11 +177,17 @@ class EditorManager {
  // Testing interface
  bool show_test_dashboard_ = false;
  bool show_performance_dashboard_ = false;

  // Agent proposal drawer
  ProposalDrawer proposal_drawer_;
  bool show_proposal_drawer_ = false;

#ifdef YAZE_WITH_GRPC
  // Agent chat widget
  AgentChatWidget agent_chat_widget_;
  bool show_agent_chat_widget_ = false;
#endif

  std::string version_ = "";
  std::string settings_filename_ = "settings.ini";
  float font_global_scale_ = 1.0f;
src/app/editor/system/agent_chat_widget.cc (new file, 42 lines)
@@ -0,0 +1,42 @@
#include "app/editor/system/agent_chat_widget.h"

#include <cstring>  // memset, strlen

#include "imgui.h"

namespace yaze {
namespace editor {

AgentChatWidget::AgentChatWidget() {
  title_ = "Agent Chat";
  memset(input_buffer_, 0, sizeof(input_buffer_));
}

void AgentChatWidget::Draw() {
  if (!active_) {
    return;
  }

  ImGui::Begin(title_.c_str(), &active_);

  // Display message history
  ImGui::BeginChild("History", ImVec2(0, -ImGui::GetFrameHeightWithSpacing()));
  for (const auto& msg : agent_service_.GetHistory()) {
    std::string prefix =
        msg.sender == cli::agent::ChatMessage::Sender::kUser ? "You: " : "Agent: ";
    // Pass the text through "%s" so messages containing '%' are not
    // interpreted as format specifiers.
    ImGui::TextWrapped("%s", (prefix + msg.message).c_str());
  }
  ImGui::EndChild();

  // Display input text box
  if (ImGui::InputText("Input", input_buffer_, sizeof(input_buffer_),
                       ImGuiInputTextFlags_EnterReturnsTrue)) {
    if (strlen(input_buffer_) > 0) {
      (void)agent_service_.SendMessage(input_buffer_);
      memset(input_buffer_, 0, sizeof(input_buffer_));
    }
    ImGui::SetKeyboardFocusHere(-1);  // Refocus input
  }

  ImGui::End();
}

}  // namespace editor
}  // namespace yaze
src/app/editor/system/agent_chat_widget.h (new file, 30 lines)
@@ -0,0 +1,30 @@
#ifndef YAZE_SRC_APP_EDITOR_SYSTEM_AGENT_CHAT_WIDGET_H_
#define YAZE_SRC_APP_EDITOR_SYSTEM_AGENT_CHAT_WIDGET_H_

#include <string>

#include "cli/service/agent/conversational_agent_service.h"

namespace yaze {
namespace editor {

class AgentChatWidget {
 public:
  AgentChatWidget();

  void Draw();

  bool* active() { return &active_; }
  void set_active(bool active) { active_ = active; }

 private:
  cli::agent::ConversationalAgentService agent_service_;
  char input_buffer_[1024];
  bool active_ = false;
  std::string title_;
};

}  // namespace editor
}  // namespace yaze

#endif  // YAZE_SRC_APP_EDITOR_SYSTEM_AGENT_CHAT_WIDGET_H_
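For orientation, a minimal sketch of how the widget is meant to sit inside an ImGui frame. The `RenderFrame` host function is hypothetical; only `set_active()` and `Draw()` come from this commit, and in yaze itself the equivalent wiring lives in `EditorManager::Update()` above:

```cpp
#include "app/editor/system/agent_chat_widget.h"

// Hypothetical host function standing in for yaze's real frame loop.
void RenderFrame(yaze::editor::AgentChatWidget& chat) {
  // The platform backend has already called ImGui::NewFrame().
  chat.set_active(true);  // e.g. toggled from the "Agent Chat" menu entry
  chat.Draw();            // no-op while inactive; otherwise draws the chat window
  // ImGui::Render() and the backend's present call follow.
}
```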
@@ -65,6 +65,9 @@ absl::Status Agent::Run(const std::vector<std::string>& arg_vec) {
  if (subcommand == "dungeon-list-sprites") {
    return agent::HandleDungeonListSpritesCommand(subcommand_args);
  }
  if (subcommand == "chat") {
    return agent::HandleChatCommand();
  }

  return absl::InvalidArgumentError(std::string(agent::kUsage));
}
@@ -28,6 +28,7 @@ absl::Status HandleDescribeCommand(const std::vector<std::string>& arg_vec);
absl::Status HandleResourceListCommand(const std::vector<std::string>& arg_vec);
absl::Status HandleDungeonListSpritesCommand(
    const std::vector<std::string>& arg_vec);
absl::Status HandleChatCommand();

}  // namespace agent
}  // namespace cli
@@ -22,11 +22,13 @@
#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/gemini_ai_service.h"
#include "cli/service/ai/ollama_ai_service.h"
#include "cli/service/ai/service_factory.h"
#include "cli/service/planning/proposal_registry.h"
#include "cli/service/planning/tile16_proposal_generator.h"
#include "cli/service/resources/resource_catalog.h"
#include "cli/service/resources/resource_context_builder.h"
#include "cli/service/rom/rom_sandbox_manager.h"
#include "cli/tui/chat_tui.h"
#include "cli/z3ed.h"
#include "util/macro.h"
@@ -38,67 +40,6 @@ namespace agent {

namespace {

// Helper: Select AI service based on environment variables
std::unique_ptr<AIService> CreateAIService() {
  // Priority: Ollama (local) > Gemini (remote) > Mock (testing)

  const char* provider_env = std::getenv("YAZE_AI_PROVIDER");
  const char* gemini_key = std::getenv("GEMINI_API_KEY");
  const char* ollama_model = std::getenv("OLLAMA_MODEL");
  const char* gemini_model = std::getenv("GEMINI_MODEL");

  // Explicit provider selection
  if (provider_env && std::string(provider_env) == "ollama") {
    OllamaConfig config;

    // Allow model override via env
    if (ollama_model && std::strlen(ollama_model) > 0) {
      config.model = ollama_model;
    }

    auto service = std::make_unique<OllamaAIService>(config);

    // Health check
    if (auto status = service->CheckAvailability(); !status.ok()) {
      std::cerr << "⚠️  Ollama unavailable: " << status.message() << std::endl;
      std::cerr << "   Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
    }

    std::cout << "🤖 Using Ollama AI with model: " << config.model << std::endl;
    return service;
  }

  // Gemini if API key provided
  if (gemini_key && std::strlen(gemini_key) > 0) {
    GeminiConfig config(gemini_key);

    // Allow model override via env
    if (gemini_model && std::strlen(gemini_model) > 0) {
      config.model = gemini_model;
    }

    auto service = std::make_unique<GeminiAIService>(config);

    // Health check
    if (auto status = service->CheckAvailability(); !status.ok()) {
      std::cerr << "⚠️  Gemini unavailable: " << status.message() << std::endl;
      std::cerr << "   Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
    }

    std::cout << "🤖 Using Gemini AI with model: " << config.model << std::endl;
    return service;
  }

  // Default: Mock service for testing
  std::cout << "🤖 Using MockAIService (no LLM configured)" << std::endl;
  std::cout
      << "   Tip: Set YAZE_AI_PROVIDER=ollama or GEMINI_API_KEY to enable LLM"
      << std::endl;
  return std::make_unique<MockAIService>();
}

struct DescribeOptions {
  std::optional<std::string> resource;
  std::string format = "json";
@@ -199,11 +140,11 @@ absl::Status HandleRunCommand(const std::vector<std::string>& arg_vec,

  // 2. Get commands from the AI service
  auto ai_service = CreateAIService();  // Use service factory
  auto commands_or = ai_service->GetCommands(prompt);
  if (!commands_or.ok()) {
    return commands_or.status();
  auto response_or = ai_service->GenerateResponse(prompt);
  if (!response_or.ok()) {
    return response_or.status();
  }
  std::vector<std::string> commands = commands_or.value();
  std::vector<std::string> commands = response_or.value().commands;

  // 3. Generate a structured proposal from the commands
  Tile16ProposalGenerator generator;
@@ -268,11 +209,11 @@ absl::Status HandlePlanCommand(const std::vector<std::string>& arg_vec) {
  std::string prompt = arg_vec[1];

  auto ai_service = CreateAIService();  // Use service factory
  auto commands_or = ai_service->GetCommands(prompt);
  if (!commands_or.ok()) {
    return commands_or.status();
  auto response_or = ai_service->GenerateResponse(prompt);
  if (!response_or.ok()) {
    return response_or.status();
  }
  std::vector<std::string> commands = commands_or.value();
  std::vector<std::string> commands = response_or.value().commands;

  // Create a proposal from the commands
  Tile16ProposalGenerator generator;
@@ -683,6 +624,12 @@ absl::Status HandleDungeonListSpritesCommand(
  return absl::OkStatus();
}

absl::Status HandleChatCommand() {
  tui::ChatTUI chat_tui;
  chat_tui.Run();
  return absl::OkStatus();
}

absl::Status HandleAcceptCommand(const std::vector<std::string>& arg_vec,
                                 Rom& rom) {
  if (arg_vec.empty() || arg_vec[0] != "--proposal-id") {
src/cli/service/agent/conversational_agent_service.cc (new file, 52 lines)
@@ -0,0 +1,52 @@
#include "cli/service/agent/conversational_agent_service.h"

#include "absl/strings/str_cat.h"
#include "absl/strings/str_join.h"
#include "absl/time/clock.h"
#include "cli/service/ai/service_factory.h"

namespace yaze {
namespace cli {
namespace agent {

ConversationalAgentService::ConversationalAgentService() {
  ai_service_ = CreateAIService();
}

absl::StatusOr<ChatMessage> ConversationalAgentService::SendMessage(
    const std::string& message) {
  // 1. Add user message to history.
  history_.push_back({ChatMessage::Sender::kUser, message, absl::Now()});

  // 2. Get response from the AI service using the full history.
  auto response_or = ai_service_->GenerateResponse(history_);
  if (!response_or.ok()) {
    return absl::InternalError(absl::StrCat("Failed to get AI response: ",
                                            response_or.status().message()));
  }

  const auto& agent_response = response_or.value();

  // For now, combine text and commands for display.
  // In the future, the TUI/GUI will handle these differently.
  std::string response_text = agent_response.text_response;
  if (!agent_response.commands.empty()) {
    response_text +=
        "\n\nCommands:\n" + absl::StrJoin(agent_response.commands, "\n");
  }

  ChatMessage chat_response = {ChatMessage::Sender::kAgent, response_text,
                               absl::Now()};

  // 3. Add agent response to history.
  history_.push_back(chat_response);

  return chat_response;
}

const std::vector<ChatMessage>& ConversationalAgentService::GetHistory() const {
  return history_;
}

}  // namespace agent
}  // namespace cli
}  // namespace yaze
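A short usage sketch of the service on its own, outside any UI; the `main()` scaffold is illustrative and not part of this commit:

```cpp
#include <iostream>

#include "cli/service/agent/conversational_agent_service.h"

int main() {
  yaze::cli::agent::ConversationalAgentService service;

  // Each SendMessage() call appends to the internal history, so follow-up
  // messages are answered with the whole conversation as context.
  auto reply_or = service.SendMessage("Place a tree");
  if (!reply_or.ok()) {
    std::cerr << reply_or.status() << std::endl;
    return 1;
  }
  std::cout << "Agent: " << reply_or->message << std::endl;
  return 0;
}
```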
src/cli/service/agent/conversational_agent_service.h (new file, 40 lines)
@@ -0,0 +1,40 @@
#ifndef YAZE_SRC_CLI_SERVICE_AGENT_CONVERSATIONAL_AGENT_SERVICE_H_
#define YAZE_SRC_CLI_SERVICE_AGENT_CONVERSATIONAL_AGENT_SERVICE_H_

#include <memory>
#include <string>
#include <vector>

#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "cli/service/ai/ai_service.h"

namespace yaze {
namespace cli {
namespace agent {

struct ChatMessage {
  enum class Sender { kUser, kAgent };
  Sender sender;
  std::string message;
  absl::Time timestamp;
};

class ConversationalAgentService {
 public:
  ConversationalAgentService();

  // Send a message from the user and get the agent's response.
  absl::StatusOr<ChatMessage> SendMessage(const std::string& message);

  // Get the full chat history.
  const std::vector<ChatMessage>& GetHistory() const;

 private:
  std::vector<ChatMessage> history_;
  std::unique_ptr<AIService> ai_service_;
};

}  // namespace agent
}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_SERVICE_AGENT_CONVERSATIONAL_AGENT_SERVICE_H_
@@ -1,28 +1,28 @@
#include "cli/service/ai/ai_service.h"
#include "cli/service/agent/conversational_agent_service.h"

namespace yaze {
namespace cli {

absl::StatusOr<std::vector<std::string>> MockAIService::GetCommands(
absl::StatusOr<AgentResponse> MockAIService::GenerateResponse(
    const std::string& prompt) {
  // NOTE: These commands use positional arguments (not --flags) because
  // the command handlers haven't been updated to parse flags yet.
  // TODO: Update handlers to use absl::flags parsing

  if (prompt == "Make all the soldiers in Hyrule Castle wear red armor.") {
    // Simplified command sequence - just export then import
    // (In reality, you'd modify the palette file between export and import)
    return std::vector<std::string>{
        "palette export sprites_aux1 4 soldier_palette.col"
        // Would normally modify soldier_palette.col here to change colors
        // Then import it back
    };
  } else if (prompt == "Place a tree") {
    // Example: Place a tree on the light world map
    // Command format: map_id x y tile_id (hex)
    return std::vector<std::string>{"overworld set-tile 0 10 20 0x02E"};
  AgentResponse response;
  if (prompt == "Place a tree") {
    response.text_response = "Sure, I can do that. Here is the command:";
    response.commands.push_back("overworld set-tile 0 10 20 0x02E");
    response.reasoning = "The user asked to place a tree, so I generated the appropriate `set-tile` command.";
  } else {
    response.text_response = "I'm sorry, I don't understand that prompt. Try 'Place a tree'.";
  }
  return absl::UnimplementedError("Prompt not supported by mock AI service. Try: 'Make all the soldiers in Hyrule Castle wear red armor.' or 'Place a tree'");
  return response;
}

absl::StatusOr<AgentResponse> MockAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }
  return GenerateResponse(history.back().message);
}

}  // namespace cli
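The mock's new behaviour can be pinned down in a test; a sketch assuming GoogleTest (no such test is part of this commit):

```cpp
#include "cli/service/ai/ai_service.h"
#include "gtest/gtest.h"

TEST(MockAIServiceTest, PlaceATreeYieldsOneCommand) {
  yaze::cli::MockAIService mock;

  auto response_or = mock.GenerateResponse("Place a tree");
  ASSERT_TRUE(response_or.ok());
  ASSERT_EQ(response_or->commands.size(), 1u);
  EXPECT_EQ(response_or->commands[0], "overworld set-tile 0 10 20 0x02E");

  // Unknown prompts now return a polite text_response instead of an error.
  auto fallback_or = mock.GenerateResponse("Repaint the moon");
  ASSERT_TRUE(fallback_or.ok());
  EXPECT_TRUE(fallback_or->commands.empty());
}
```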
@@ -1,28 +1,42 @@
#ifndef YAZE_SRC_CLI_AI_SERVICE_H_
#define YAZE_SRC_CLI_AI_SERVICE_H_
#ifndef YAZE_SRC_CLI_SERVICE_AI_AI_SERVICE_H_
#define YAZE_SRC_CLI_SERVICE_AI_AI_SERVICE_H_

#include <memory>
#include <string>
#include <vector>

#include "absl/status/statusor.h"
#include "cli/service/ai/common.h"

namespace yaze {
namespace cli {

namespace agent {
struct ChatMessage;
}

// Abstract interface for AI services
class AIService {
 public:
  virtual ~AIService() = default;
  virtual absl::StatusOr<std::vector<std::string>> GetCommands(

  // Generate a response from a single prompt.
  virtual absl::StatusOr<AgentResponse> GenerateResponse(
      const std::string& prompt) = 0;

  // Generate a response from a conversation history.
  virtual absl::StatusOr<AgentResponse> GenerateResponse(
      const std::vector<agent::ChatMessage>& history) = 0;
};

// Mock implementation for testing
class MockAIService : public AIService {
 public:
  absl::StatusOr<std::vector<std::string>> GetCommands(
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::string& prompt) override;
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::vector<agent::ChatMessage>& history) override;
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_AI_SERVICE_H_
#endif  // YAZE_SRC_CLI_SERVICE_AI_AI_SERVICE_H_
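Implementing the updated interface only requires the two `GenerateResponse` overloads. A minimal, purely hypothetical provider for illustration:

```cpp
#include "cli/service/agent/conversational_agent_service.h"  // full ChatMessage type
#include "cli/service/ai/ai_service.h"

namespace yaze {
namespace cli {

// Hypothetical provider, not part of this commit: echoes the prompt back.
class EchoAIService : public AIService {
 public:
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::string& prompt) override {
    AgentResponse response;
    response.text_response = "You said: " + prompt;
    return response;
  }

  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::vector<agent::ChatMessage>& history) override {
    if (history.empty()) {
      return absl::InvalidArgumentError("History cannot be empty.");
    }
    // A real provider would flatten the whole history into its prompt.
    return GenerateResponse(history.back().message);
  }
};

}  // namespace cli
}  // namespace yaze
```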
src/cli/service/ai/common.h (new file, 25 lines)
@@ -0,0 +1,25 @@
#ifndef YAZE_SRC_CLI_SERVICE_AI_COMMON_H_
#define YAZE_SRC_CLI_SERVICE_AI_COMMON_H_

#include <string>
#include <vector>

namespace yaze {
namespace cli {

// A structured response from an AI service.
struct AgentResponse {
  // A natural language response to the user.
  std::string text_response;

  // A list of z3ed commands to be executed.
  std::vector<std::string> commands;

  // The AI's explanation of its thought process.
  std::string reasoning;
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_SERVICE_AI_COMMON_H_
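The struct mirrors the JSON object the prompt constraints (see `PromptBuilder::BuildConstraintsSection` below) instruct the model to emit. A representative payload, shown as a C++ raw string:

```cpp
// Example of the wire format an AgentResponse is parsed from; the values
// are illustrative, taken from the MockAIService "Place a tree" case.
constexpr const char* kExampleAgentResponseJson = R"json({
  "text_response": "Sure, I can do that. Here is the command:",
  "commands": ["overworld set-tile 0 10 20 0x02E"],
  "reasoning": "The user asked to place a tree, so I generated a set-tile command."
})json";
```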
@@ -1,4 +1,5 @@
#include "cli/service/ai/gemini_ai_service.h"
#include "cli/service/agent/conversational_agent_service.h"

#include <cstdlib>
#include <iostream>
@@ -88,12 +89,24 @@ absl::Status GeminiAIService::CheckAvailability() {
#endif
}

absl::StatusOr<std::vector<std::string>> GeminiAIService::GetCommands(
absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
    const std::string& prompt) {
  return GenerateResponse({{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
}

absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "Gemini AI service requires JSON support. Build with -DYAZE_WITH_JSON=ON");
#else
  // TODO: Implement history-aware prompting.
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }

  std::string prompt = prompt_builder_.BuildPromptFromHistory(history);

  // Validate configuration
  if (auto status = CheckAvailability(); !status.ok()) {
    return status;
@@ -142,10 +155,10 @@ absl::StatusOr<std::vector<std::string>> GeminiAIService::GetCommands(
#endif
}

absl::StatusOr<std::vector<std::string>> GeminiAIService::ParseGeminiResponse(
absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
    const std::string& response_body) {
#ifdef YAZE_WITH_JSON
  std::vector<std::string> commands;
  AgentResponse agent_response;

  try {
    nlohmann::json response_json = nlohmann::json::parse(response_body);
@@ -181,24 +194,33 @@ absl::StatusOr<std::vector<std::string>> GeminiAIService::ParseGeminiResponse(
    }
    text_content = std::string(absl::StripAsciiWhitespace(text_content));

    // Parse as JSON array
    // Parse as JSON object
    try {
      nlohmann::json commands_array = nlohmann::json::parse(text_content);

      if (commands_array.is_array()) {
        for (const auto& cmd : commands_array) {
      nlohmann::json response_json = nlohmann::json::parse(text_content);
      if (response_json.contains("text_response") &&
          response_json["text_response"].is_string()) {
        agent_response.text_response =
            response_json["text_response"].get<std::string>();
      }
      if (response_json.contains("reasoning") &&
          response_json["reasoning"].is_string()) {
        agent_response.reasoning =
            response_json["reasoning"].get<std::string>();
      }
      if (response_json.contains("commands") &&
          response_json["commands"].is_array()) {
        for (const auto& cmd : response_json["commands"]) {
          if (cmd.is_string()) {
            std::string command = cmd.get<std::string>();
            // Remove "z3ed " prefix if LLM included it
            if (absl::StartsWith(command, "z3ed ")) {
              command = command.substr(5);
            }
            commands.push_back(command);
            agent_response.commands.push_back(command);
          }
        }
      }
    } catch (const nlohmann::json::exception& inner_e) {
      // Fallback: Try to extract commands line by line
      // If parsing the full object fails, fallback to just commands
      std::vector<std::string> lines = absl::StrSplit(text_content, '\n');
      for (const auto& line : lines) {
        std::string trimmed = std::string(absl::StripAsciiWhitespace(line));
@@ -211,7 +233,7 @@ absl::StatusOr<std::vector<std::string>> GeminiAIService::ParseGeminiResponse(
        if (absl::StartsWith(trimmed, "z3ed ")) {
          trimmed = trimmed.substr(5);
        }
        commands.push_back(trimmed);
        agent_response.commands.push_back(trimmed);
      }
    }
  }
@@ -222,13 +244,13 @@ absl::StatusOr<std::vector<std::string>> GeminiAIService::ParseGeminiResponse(
        absl::StrCat("❌ Failed to parse Gemini response: ", e.what()));
  }

  if (commands.empty()) {
  if (agent_response.commands.empty()) {
    return absl::InternalError(
        "❌ No valid commands extracted from Gemini response\n"
        "   Raw response: " + response_body);
  }

  return commands;
  return agent_response;
#else
  return absl::UnimplementedError("JSON support required");
#endif
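For orientation: the parser first unwraps the Gemini REST envelope to reach `text_content`, then parses that text itself as the `AgentResponse` JSON object. The envelope typically has the shape below (abridged from the public generateContent API shape; the exact fields are an assumption, as the unwrapping code is not shown in this diff):

```cpp
// Outer envelope: the model's JSON reply travels as an escaped string
// inside candidates[0].content.parts[0].text.
constexpr const char* kGeminiEnvelopeExample = R"json({
  "candidates": [{
    "content": {
      "parts": [{
        "text": "{ \"text_response\": \"...\", \"commands\": [\"...\"], \"reasoning\": \"...\" }"
      }]
    }
  }]
})json";
```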
@@ -29,15 +29,17 @@ class GeminiAIService : public AIService {
  explicit GeminiAIService(const GeminiConfig& config);

  // Primary interface
  absl::StatusOr<std::vector<std::string>> GetCommands(
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::string& prompt) override;
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::vector<agent::ChatMessage>& history) override;

  // Health check
  absl::Status CheckAvailability();

 private:
  std::string BuildSystemInstruction();
  absl::StatusOr<std::vector<std::string>> ParseGeminiResponse(
  absl::StatusOr<AgentResponse> ParseGeminiResponse(
      const std::string& response_body);

  GeminiConfig config_;
@@ -4,6 +4,7 @@

#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "cli/service/agent/conversational_agent_service.h"

// Check if we have httplib available (from vcpkg or bundled)
#if __has_include("httplib.h")
@@ -164,24 +165,34 @@ absl::StatusOr<std::string> OllamaAIService::ParseOllamaResponse(
#endif
}

absl::StatusOr<std::vector<std::string>> OllamaAIService::GetCommands(
absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
    const std::string& prompt) {
  return GenerateResponse({{{agent::ChatMessage::Sender::kUser, prompt, absl::Now()}}});
}

absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
    const std::vector<agent::ChatMessage>& history) {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
  return absl::UnimplementedError(
      "Ollama service requires httplib and JSON support. "
      "Install vcpkg dependencies or use bundled libraries.");
#else

  // TODO: Implement history-aware prompting.
  if (history.empty()) {
    return absl::InvalidArgumentError("History cannot be empty.");
  }
  std::string prompt = prompt_builder_.BuildPromptFromHistory(history);

  // Build request payload
  nlohmann::json request_body = {
      {"model", config_.model},
      {"prompt", config_.system_prompt + "\n\nUSER REQUEST: " + prompt},
      {"stream", false},
      {"options", {
          {"temperature", config_.temperature},
          {"num_predict", config_.max_tokens}
      }},
      {"format", "json"}  // Force JSON output
      {"model", config_.model},
      {"system", config_.system_prompt},
      {"prompt", prompt},
      {"stream", false},
      {"options",
       {{"temperature", config_.temperature},
        {"num_predict", config_.max_tokens}}},
      {"format", "json"}  // Force JSON output
  };

  try {
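The rewritten payload moves the system prompt into Ollama's dedicated `system` field instead of concatenating it into `prompt`, and sends the flattened conversation as the prompt. Serialized, the request now looks roughly like this (angle-bracketed values are placeholders for the config fields, not literal output):

```cpp
constexpr const char* kExampleOllamaRequest = R"json({
  "model": "<config_.model>",
  "system": "<config_.system_prompt>",
  "prompt": "<output of PromptBuilder::BuildPromptFromHistory>",
  "stream": false,
  "options": { "temperature": <config_.temperature>, "num_predict": <config_.max_tokens> },
  "format": "json"
})json";
```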
@@ -203,58 +214,52 @@ absl::StatusOr<std::vector<std::string>> OllamaAIService::GetCommands(
    }

    // Parse response to extract generated text
    auto generated_text_or = ParseOllamaResponse(res->body);
    if (!generated_text_or.ok()) {
      return generated_text_or.status();
    }
    std::string generated_text = generated_text_or.value();

    // Parse the command array from generated text
    nlohmann::json commands_json;
    nlohmann::json response_json;
    try {
      commands_json = nlohmann::json::parse(generated_text);
      response_json = nlohmann::json::parse(res->body);
    } catch (const nlohmann::json::exception& e) {
      // Sometimes the LLM includes extra text - try to extract JSON array
      size_t start = generated_text.find('[');
      size_t end = generated_text.rfind(']');
      // Sometimes the LLM includes extra text - try to extract JSON object
      size_t start = res->body.find('{');
      size_t end = res->body.rfind('}');

      if (start != std::string::npos && end != std::string::npos && end > start) {
        std::string json_only = generated_text.substr(start, end - start + 1);
        std::string json_only = res->body.substr(start, end - start + 1);
        try {
          commands_json = nlohmann::json::parse(json_only);
          response_json = nlohmann::json::parse(json_only);
        } catch (const nlohmann::json::exception&) {
          return absl::InvalidArgumentError(
              "LLM did not return valid JSON. Response:\n" + generated_text);
              "LLM did not return valid JSON. Response:\n" + res->body);
        }
      } else {
        return absl::InvalidArgumentError(
            "LLM did not return a JSON array. Response:\n" + generated_text);
            "LLM did not return a JSON object. Response:\n" + res->body);
      }
    }

    if (!commands_json.is_array()) {
      return absl::InvalidArgumentError(
          "LLM did not return a JSON array. Response:\n" + generated_text);
    AgentResponse agent_response;
    if (response_json.contains("text_response") &&
        response_json["text_response"].is_string()) {
      agent_response.text_response =
          response_json["text_response"].get<std::string>();
    }

    std::vector<std::string> commands;
    for (const auto& cmd : commands_json) {
      if (cmd.is_string()) {
        commands.push_back(cmd.get<std::string>());
    if (response_json.contains("reasoning") &&
        response_json["reasoning"].is_string()) {
      agent_response.reasoning = response_json["reasoning"].get<std::string>();
    }
    if (response_json.contains("commands") &&
        response_json["commands"].is_array()) {
      for (const auto& cmd : response_json["commands"]) {
        if (cmd.is_string()) {
          agent_response.commands.push_back(cmd.get<std::string>());
        }
      }
    }

    if (commands.empty()) {
      return absl::InvalidArgumentError(
          "LLM returned empty command list. Prompt may be unclear.\n"
          "Try rephrasing your request to be more specific.");
    }

    return commands;

    return agent_response;

  } catch (const std::exception& e) {
    return absl::InternalError(absl::StrCat(
        "Ollama request failed: ", e.what()));
    return absl::InternalError(
        absl::StrCat("Ollama request failed: ", e.what()));
  }
#endif
}
@@ -27,8 +27,10 @@ class OllamaAIService : public AIService {
  explicit OllamaAIService(const OllamaConfig& config);

  // Generate z3ed commands from natural language prompt
  absl::StatusOr<std::vector<std::string>> GetCommands(
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::string& prompt) override;
  absl::StatusOr<AgentResponse> GenerateResponse(
      const std::vector<agent::ChatMessage>& history) override;

  // Health check: verify Ollama server is running and model is available
  absl::Status CheckAvailability();
@@ -1,4 +1,5 @@
#include "cli/service/ai/prompt_builder.h"
#include "cli/service/agent/conversational_agent_service.h"

#include <fstream>
#include <sstream>
@@ -21,86 +22,84 @@ void PromptBuilder::LoadDefaultExamples() {
  // Single tile placement
  examples_.push_back({
      "Place a tree at position 10, 20 on the Light World map",
      {
          "overworld set-tile --map 0 --x 10 --y 20 --tile 0x02E"
      },
      "Single tile16 placement. Tree tile ID is 0x02E in vanilla ALTTP"
  });
      "Okay, I can place that tree for you. Here is the command:",
      {"overworld set-tile --map 0 --x 10 --y 20 --tile 0x02E"},
      "Single tile16 placement. Tree tile ID is 0x02E in vanilla ALTTP"});

  // Area/region editing
  examples_.push_back({
      "Create a 3x3 water pond at coordinates 15, 10",
      {
          "overworld set-tile --map 0 --x 15 --y 10 --tile 0x14C",
          "overworld set-tile --map 0 --x 16 --y 10 --tile 0x14D",
          "overworld set-tile --map 0 --x 17 --y 10 --tile 0x14C",
          "overworld set-tile --map 0 --x 15 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 16 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 17 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 15 --y 12 --tile 0x14E",
          "overworld set-tile --map 0 --x 16 --y 12 --tile 0x14E",
          "overworld set-tile --map 0 --x 17 --y 12 --tile 0x14E"
      },
      "Water areas use different edge tiles: 0x14C (top), 0x14D (middle), 0x14E (bottom)"
  });
      "Creating a 3x3 pond requires nine `set-tile` commands. Here they are:",
      {"overworld set-tile --map 0 --x 15 --y 10 --tile 0x14C",
       "overworld set-tile --map 0 --x 16 --y 10 --tile 0x14D",
       "overworld set-tile --map 0 --x 17 --y 10 --tile 0x14C",
       "overworld set-tile --map 0 --x 15 --y 11 --tile 0x14D",
       "overworld set-tile --map 0 --x 16 --y 11 --tile 0x14D",
       "overworld set-tile --map 0 --x 17 --y 11 --tile 0x14D",
       "overworld set-tile --map 0 --x 15 --y 12 --tile 0x14E",
       "overworld set-tile --map 0 --x 16 --y 12 --tile 0x14E",
       "overworld set-tile --map 0 --x 17 --y 12 --tile 0x14E"},
      "Water areas use different edge tiles: 0x14C (top), 0x14D (middle), "
      "0x14E (bottom)"});

  // Path/line creation
  examples_.push_back({
      "Add a dirt path from position 5,5 to 5,15",
      {
          "overworld set-tile --map 0 --x 5 --y 5 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 6 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 7 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 8 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 9 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 10 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 11 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 12 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 13 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 14 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 15 --tile 0x022"
      },
      "Linear paths are created by placing tiles sequentially. Dirt tile is 0x022"
  });

  examples_.push_back(
      {"Add a dirt path from position 5,5 to 5,15",
       "I will generate a `set-tile` command for each point along the path.",
       {"overworld set-tile --map 0 --x 5 --y 5 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 6 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 7 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 8 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 9 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 10 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 11 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 12 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 13 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 14 --tile 0x022",
        "overworld set-tile --map 0 --x 5 --y 15 --tile 0x022"},
       "Linear paths are created by placing tiles sequentially. Dirt tile is "
       "0x022"});

  // Forest/tree grouping
  examples_.push_back({
      "Plant a row of trees horizontally at y=8 from x=20 to x=25",
      {
          "overworld set-tile --map 0 --x 20 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 21 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 22 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 23 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 24 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 25 --y 8 --tile 0x02E"
      },
      "Tree rows create natural barriers and visual boundaries"
  });

  examples_.push_back(
      {"Plant a row of trees horizontally at y=8 from x=20 to x=25",
       "Here are the commands to plant that row of trees:",
       {"overworld set-tile --map 0 --x 20 --y 8 --tile 0x02E",
        "overworld set-tile --map 0 --x 21 --y 8 --tile 0x02E",
        "overworld set-tile --map 0 --x 22 --y 8 --tile 0x02E",
        "overworld set-tile --map 0 --x 23 --y 8 --tile 0x02E",
        "overworld set-tile --map 0 --x 24 --y 8 --tile 0x02E",
        "overworld set-tile --map 0 --x 25 --y 8 --tile 0x02E"},
       "Tree rows create natural barriers and visual boundaries"});

  // ==========================================================================
  // DUNGEON EDITING - Label-Aware Operations
  // ==========================================================================

  // Sprite placement (label-aware)
  examples_.push_back({
      "Add 3 soldiers to the Eastern Palace entrance room",
      {
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 5 --y 3",
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 10 --y 3",
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 7 --y 8"
      },
      "Dungeon ID 0x02 is Eastern Palace. Sprite 0x41 is soldier. Spread placement for balance"
  });

  examples_.push_back(
      {"Add 3 soldiers to the Eastern Palace entrance room",
       "I've identified the dungeon and sprite IDs from your project's "
       "labels. Here are the commands:",
       {"dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 5 --y "
        "3",
        "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 10 "
        "--y 3",
        "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 7 --y "
        "8"},
       "Dungeon ID 0x02 is Eastern Palace. Sprite 0x41 is soldier. Spread "
       "placement for balance"});

  // Object placement
  examples_.push_back({
      "Place a chest in the Hyrule Castle treasure room",
      {
          "dungeon add-chest --dungeon 0x00 --room 0x60 --x 7 --y 5 --item 0x12 --big false"
      },
      "Dungeon 0x00 is Hyrule Castle. Item 0x12 is a small key. Position centered in room"
  });

  examples_.push_back(
      {"Place a chest in the Hyrule Castle treasure room",
       "Certainly. I will place a chest containing a small key in the center of "
       "the room.",
       {"dungeon add-chest --dungeon 0x00 --room 0x60 --x 7 --y 5 --item 0x12 "
        "--big false"},
       "Dungeon 0x00 is Hyrule Castle. Item 0x12 is a small key. Position "
       "centered in room"});

  // ==========================================================================
  // COMMON TILE16 REFERENCE (for AI knowledge)
  // ==========================================================================
@@ -118,13 +117,11 @@ void PromptBuilder::LoadDefaultExamples() {
  // Shallow Water: 0x150

  // Validation example (still useful)
  examples_.push_back({
      "Check if my overworld changes are valid",
      {
          "rom validate"
      },
      "Validation ensures ROM integrity after tile modifications"
  });
  examples_.push_back(
      {"Check if my overworld changes are valid",
       "Yes, I can validate the ROM for you.",
       {"rom validate"},
       "Validation ensures ROM integrity after tile modifications"});
}

absl::Status PromptBuilder::LoadResourceCatalogue(const std::string& yaml_path) {
@@ -198,16 +195,19 @@ std::string PromptBuilder::BuildFewShotExamplesSection() {
  for (const auto& example : examples_) {
    oss << "**User Request:** \"" << example.user_prompt << "\"\n";
    oss << "**Commands:**\n";
    oss << "```json\n[";
    oss << "```json\n{";
    oss << "  \"text_response\": \"" << example.text_response << "\",\n";
    oss << "  \"commands\": [";

    std::vector<std::string> quoted_cmds;
    for (const auto& cmd : example.expected_commands) {
      quoted_cmds.push_back("\"" + cmd + "\"");
    }
    oss << absl::StrJoin(quoted_cmds, ", ");

    oss << "]\n```\n";
    oss << "*Explanation:* " << example.explanation << "\n\n";
    oss << "],\n";
    oss << "  \"reasoning\": \"" << example.explanation << "\"\n";
    oss << "}\n```\n\n";
  }

  return oss.str();
@@ -217,11 +217,15 @@ std::string PromptBuilder::BuildConstraintsSection() {
  return R"(
# Critical Constraints

1. **Output Format:** You MUST respond with ONLY a JSON array of strings
   - Each string is a complete z3ed command
   - NO explanatory text before or after
   - NO markdown code blocks (```json)
   - NO "z3ed" prefix in commands
1. **Output Format:** You MUST respond with ONLY a JSON object with the following structure:
   {
     "text_response": "Your natural language reply to the user.",
     "commands": ["command1", "command2"],
     "reasoning": "Your thought process."
   }
   - `text_response` is for conversational replies.
   - `commands` is for executable z3ed commands. It can be an empty array.
   - NO explanatory text before or after the JSON object.

2. **Command Syntax:** Follow the exact syntax shown in examples
   - Use correct flag names (--group, --id, --to, --from, etc.)
@@ -332,6 +336,24 @@ std::string PromptBuilder::BuildContextualPrompt(
  return oss.str();
}

std::string PromptBuilder::BuildPromptFromHistory(
    const std::vector<agent::ChatMessage>& history) {
  std::ostringstream oss;
  oss << "This is a conversation between a user and an expert ROM hacking "
         "assistant.\n\n";

  for (const auto& msg : history) {
    if (msg.sender == agent::ChatMessage::Sender::kUser) {
      oss << "User: " << msg.message << "\n";
    } else {
      oss << "Agent: " << msg.message << "\n";
    }
  }
  oss << "\nBased on this conversation, provide a response in the required JSON "
         "format.";
  return oss.str();
}

void PromptBuilder::AddFewShotExample(const FewShotExample& example) {
  examples_.push_back(example);
}
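For a two-message history the flattened prompt therefore comes out as:

```cpp
// Exact output of BuildPromptFromHistory for a short example history.
constexpr const char* kFlattenedPrompt =
    "This is a conversation between a user and an expert ROM hacking "
    "assistant.\n\n"
    "User: Place a tree\n"
    "Agent: Sure, I can do that. Here is the command:\n"
    "\nBased on this conversation, provide a response in the required JSON "
    "format.";
```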
@@ -6,15 +6,21 @@
#include <map>

#include "absl/status/statusor.h"
#include "cli/service/ai/common.h"
#include "cli/service/resources/resource_context_builder.h"
#include "app/rom.h"

namespace yaze {
namespace cli {

namespace agent {
struct ChatMessage;
}

// Few-shot example for prompt engineering
struct FewShotExample {
  std::string user_prompt;
  std::string text_response;
  std::vector<std::string> expected_commands;
  std::string explanation;  // Why these commands work
};
@@ -47,6 +53,10 @@ class PromptBuilder {
  std::string BuildContextualPrompt(
      const std::string& user_prompt,
      const RomContext& context);

  // Build a full prompt from a conversation history
  std::string BuildPromptFromHistory(
      const std::vector<agent::ChatMessage>& history);

  // Add custom few-shot examples
  void AddFewShotExample(const FewShotExample& example);
src/cli/service/ai/service_factory.cc (new file, 72 lines)
@@ -0,0 +1,72 @@
#include "cli/service/ai/service_factory.h"

#include <cstdlib>   // std::getenv
#include <cstring>   // std::strlen
#include <iostream>

#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/gemini_ai_service.h"
#include "cli/service/ai/ollama_ai_service.h"

namespace yaze {
namespace cli {

std::unique_ptr<AIService> CreateAIService() {
  // Priority: Ollama (local) > Gemini (remote) > Mock (testing)
  const char* provider_env = std::getenv("YAZE_AI_PROVIDER");
  const char* gemini_key = std::getenv("GEMINI_API_KEY");
  const char* ollama_model = std::getenv("OLLAMA_MODEL");
  const char* gemini_model = std::getenv("GEMINI_MODEL");

  // Explicit provider selection
  if (provider_env && std::string(provider_env) == "ollama") {
    OllamaConfig config;

    // Allow model override via env
    if (ollama_model && std::strlen(ollama_model) > 0) {
      config.model = ollama_model;
    }

    auto service = std::make_unique<OllamaAIService>(config);

    // Health check
    if (auto status = service->CheckAvailability(); !status.ok()) {
      std::cerr << "⚠️  Ollama unavailable: " << status.message() << std::endl;
      std::cerr << "   Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
    }

    std::cout << "🤖 Using Ollama AI with model: " << config.model << std::endl;
    return service;
  }

  // Gemini if API key provided
  if (gemini_key && std::strlen(gemini_key) > 0) {
    GeminiConfig config(gemini_key);

    // Allow model override via env
    if (gemini_model && std::strlen(gemini_model) > 0) {
      config.model = gemini_model;
    }

    auto service = std::make_unique<GeminiAIService>(config);

    // Health check
    if (auto status = service->CheckAvailability(); !status.ok()) {
      std::cerr << "⚠️  Gemini unavailable: " << status.message() << std::endl;
      std::cerr << "   Falling back to MockAIService" << std::endl;
      return std::make_unique<MockAIService>();
    }

    std::cout << "🤖 Using Gemini AI with model: " << config.model << std::endl;
    return service;
  }

  // Default: Mock service for testing
  std::cout << "🤖 Using MockAIService (no LLM configured)" << std::endl;
  std::cout
      << "   Tip: Set YAZE_AI_PROVIDER=ollama or GEMINI_API_KEY to enable LLM"
      << std::endl;
  return std::make_unique<MockAIService>();
}

}  // namespace cli
}  // namespace yaze
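Callers never pick a provider directly; a usage sketch (the `main()` scaffold and environment values are illustrative):

```cpp
#include <iostream>

#include "cli/service/ai/service_factory.h"

int main() {
  // With YAZE_AI_PROVIDER=ollama exported, this returns an OllamaAIService
  // when the local server passes its health check; otherwise it falls back
  // to MockAIService.
  auto service = yaze::cli::CreateAIService();

  auto response_or = service->GenerateResponse("Place a tree");
  if (response_or.ok()) {
    std::cout << response_or->text_response << std::endl;
  }
  return 0;
}
```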
src/cli/service/ai/service_factory.h (new file, 16 lines)
@@ -0,0 +1,16 @@
#ifndef YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
#define YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_

#include <memory>

#include "cli/service/ai/ai_service.h"

namespace yaze {
namespace cli {

// Helper: Select AI service based on environment variables
std::unique_ptr<AIService> CreateAIService();

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
src/cli/tui/chat_tui.cc (new file, 57 lines)
@@ -0,0 +1,57 @@
#include "cli/tui/chat_tui.h"

#include <vector>

#include "ftxui/component/captured_mouse.hpp"
#include "ftxui/component/component.hpp"
#include "ftxui/component/component_base.hpp"
#include "ftxui/component/screen_interactive.hpp"
#include "ftxui/dom/elements.hpp"

namespace yaze {
namespace cli {
namespace tui {

using namespace ftxui;

ChatTUI::ChatTUI() = default;

void ChatTUI::Run() {
  auto input = Input(&input_message_, "Enter your message...");
  auto button = Button("Send", [this] { OnSubmit(); });

  auto layout = Container::Vertical({
      input,
      button,
  });

  auto renderer = Renderer(layout, [this] {
    std::vector<Element> messages;
    for (const auto& msg : agent_service_.GetHistory()) {
      std::string prefix =
          msg.sender == agent::ChatMessage::Sender::kUser ? "You: " : "Agent: ";
      messages.push_back(text(prefix + msg.message));
    }

    return vbox({
               vbox(messages) | flex,
               separator(),
               hbox(text(" > "), text(input_message_)),
           }) |
           border;
  });

  screen_.Loop(renderer);
}

void ChatTUI::OnSubmit() {
  if (input_message_.empty()) {
    return;
  }

  (void)agent_service_.SendMessage(input_message_);
  input_message_.clear();
}

}  // namespace tui
}  // namespace cli
}  // namespace yaze
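As written, submitting goes through the Send button; Enter inside the `Input` component is consumed by FTXUI rather than triggering `OnSubmit()`. If Enter-to-send is wanted, one option is FTXUI's `InputOption::on_enter` hook — a sketch, assuming the bundled FTXUI version exposes it:

```cpp
// Variant of the Input construction in Run() above; everything else unchanged.
ftxui::InputOption option;
option.on_enter = [this] { OnSubmit(); };  // inside ChatTUI::Run(), so `this` is valid
auto input = ftxui::Input(&input_message_, "Enter your message...", option);
```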
src/cli/tui/chat_tui.h (new file, 30 lines)
@@ -0,0 +1,30 @@
#ifndef YAZE_SRC_CLI_TUI_CHAT_TUI_H_
#define YAZE_SRC_CLI_TUI_CHAT_TUI_H_

#include "ftxui/component/component.hpp"
#include "ftxui/component/screen_interactive.hpp"
#include "cli/service/agent/conversational_agent_service.h"

namespace yaze {
namespace cli {
namespace tui {

class ChatTUI {
 public:
  ChatTUI();
  void Run();

 private:
  void Render();
  void OnSubmit();

  ftxui::ScreenInteractive screen_ = ftxui::ScreenInteractive::Fullscreen();
  std::string input_message_;
  agent::ConversationalAgentService agent_service_;
};

}  // namespace tui
}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_TUI_CHAT_TUI_H_
@@ -38,6 +38,7 @@ add_executable(
  cli/tui/asar_patch.cc
  cli/tui/palette_editor.cc
  cli/tui/command_palette.cc
  cli/tui/chat_tui.cc
  cli/modern_cli.cc
  cli/handlers/command_palette.cc
  cli/handlers/project.cc

@@ -66,6 +67,10 @@ add_executable(
  cli/service/planning/tile16_proposal_generator.cc
  cli/service/resources/resource_context_builder.h
  cli/service/resources/resource_context_builder.cc
  cli/service/agent/conversational_agent_service.h
  cli/service/agent/conversational_agent_service.cc
  cli/service/ai/service_factory.h
  cli/service/ai/service_factory.cc
  app/rom.cc
  app/core/project.cc
  app/core/asar_wrapper.cc