Refactor CLI Service Structure and Enhance AI Integration
- Restructured CLI service source files, moving them into dedicated directories for better organization and maintainability.
- Introduced new AI service components, including `AIService`, `MockAIService`, and `GeminiAIService`, to support natural-language command generation.
- Implemented `PolicyEvaluator` and `ProposalRegistry` for proposal management and policy enforcement in AI workflows.
- Updated CMake configurations to reflect the new file paths and ensure the restructured components link correctly.
- Extended the test suite with test-workflow generation, improving the robustness of automated testing.

This commit advances the z3ed architecture, laying the groundwork for more sophisticated AI-driven features and a more streamlined development process.
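As orientation for reviewers, here is a minimal sketch of how the new services are meant to be consumed: callers depend on the abstract `AIService` and pick a concrete backend at startup. The `MakeService()` factory and its env-var fallback are illustrative, not part of this diff; only the classes and the `GEMINI_API_KEY` variable come from the commit.

// Illustrative wiring only - MakeService() is hypothetical; the classes and
// the GEMINI_API_KEY variable are the ones introduced in this commit.
#include <cstdlib>
#include <memory>

#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/gemini_ai_service.h"

std::unique_ptr<yaze::cli::AIService> MakeService() {
  // Prefer the Gemini backend when an API key is configured...
  if (const char* key = std::getenv("GEMINI_API_KEY")) {
    return std::make_unique<yaze::cli::GeminiAIService>(
        yaze::cli::GeminiConfig(key));
  }
  // ...otherwise fall back to the deterministic mock for offline use.
  return std::make_unique<yaze::cli::MockAIService>();
}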
src/cli/service/ai/ai_service.cc (new file, 30 lines)
@@ -0,0 +1,30 @@
#include "cli/service/ai/ai_service.h"

namespace yaze {
namespace cli {

absl::StatusOr<std::vector<std::string>> MockAIService::GetCommands(
    const std::string& prompt) {
  // NOTE: These commands use positional arguments (not --flags) because
  // the command handlers haven't been updated to parse flags yet.
  // TODO: Update handlers to use absl::flags parsing

  if (prompt == "Make all the soldiers in Hyrule Castle wear red armor.") {
    // Simplified command sequence - just export then import
    // (In reality, you'd modify the palette file between export and import)
    return std::vector<std::string>{
        "palette export sprites_aux1 4 soldier_palette.col"
        // Would normally modify soldier_palette.col here to change colors
        // Then import it back
    };
  } else if (prompt == "Place a tree") {
    // Example: Place a tree on the light world map
    // Command format: map_id x y tile_id (hex)
    return std::vector<std::string>{"overworld set-tile 0 10 20 0x02E"};
  }
  return absl::UnimplementedError(
      "Prompt not supported by mock AI service. Try: 'Make all the soldiers "
      "in Hyrule Castle wear red armor.' or 'Place a tree'");
}

}  // namespace cli
}  // namespace yaze
src/cli/service/ai/ai_service.h (new file, 28 lines)
@@ -0,0 +1,28 @@
#ifndef YAZE_SRC_CLI_AI_SERVICE_H_
#define YAZE_SRC_CLI_AI_SERVICE_H_

#include <string>
#include <vector>

#include "absl/status/statusor.h"

namespace yaze {
namespace cli {

class AIService {
 public:
  virtual ~AIService() = default;
  virtual absl::StatusOr<std::vector<std::string>> GetCommands(
      const std::string& prompt) = 0;
};

class MockAIService : public AIService {
 public:
  absl::StatusOr<std::vector<std::string>> GetCommands(
      const std::string& prompt) override;
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_AI_SERVICE_H_
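A quick usage sketch of the interface above (the `main` wrapper is illustrative): callers hold an `AIService` and receive either a vector of z3ed commands or a status explaining the failure.

#include <iostream>

#include "cli/service/ai/ai_service.h"

int main() {
  yaze::cli::MockAIService service;
  auto commands_or = service.GetCommands("Place a tree");
  if (!commands_or.ok()) {
    std::cerr << commands_or.status() << "\n";
    return 1;
  }
  for (const auto& cmd : commands_or.value()) {
    std::cout << cmd << "\n";  // "overworld set-tile 0 10 20 0x02E"
  }
  return 0;
}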
src/cli/service/ai/gemini_ai_service.cc (new file, 238 lines)
@@ -0,0 +1,238 @@
#include "cli/service/ai/gemini_ai_service.h"

#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

#include "absl/strings/match.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"

#ifdef YAZE_WITH_JSON
#include "httplib.h"
#include "nlohmann/json.hpp"
#endif

namespace yaze {
namespace cli {

GeminiAIService::GeminiAIService(const GeminiConfig& config)
    : config_(config) {
  // Load command documentation into prompt builder
  prompt_builder_.LoadResourceCatalogue("");  // TODO: Pass actual yaml path when available

  if (config_.system_instruction.empty()) {
    // Use enhanced prompting by default
    if (config_.use_enhanced_prompting) {
      config_.system_instruction =
          prompt_builder_.BuildSystemInstructionWithExamples();
    } else {
      config_.system_instruction = BuildSystemInstruction();
    }
  }
}

std::string GeminiAIService::BuildSystemInstruction() {
  // Fallback prompt if enhanced prompting is disabled.
  // Use PromptBuilder's basic system instruction.
  return prompt_builder_.BuildSystemInstruction();
}

absl::Status GeminiAIService::CheckAvailability() {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "Gemini AI service requires JSON support. Build with -DYAZE_WITH_JSON=ON");
#else
  if (config_.api_key.empty()) {
    return absl::FailedPreconditionError(
        "❌ Gemini API key not configured\n"
        "   Set GEMINI_API_KEY environment variable\n"
        "   Get your API key at: https://makersuite.google.com/app/apikey");
  }

  // Test API connectivity with a simple request
  httplib::Client cli("https://generativelanguage.googleapis.com");
  cli.set_connection_timeout(5, 0);  // 5 second timeout

  std::string test_endpoint = "/v1beta/models/" + config_.model;
  httplib::Headers headers = {
      {"x-goog-api-key", config_.api_key},
  };

  auto res = cli.Get(test_endpoint.c_str(), headers);

  if (!res) {
    return absl::UnavailableError(
        "❌ Cannot reach Gemini API\n"
        "   Check your internet connection");
  }

  if (res->status == 401 || res->status == 403) {
    return absl::PermissionDeniedError(
        "❌ Invalid Gemini API key\n"
        "   Verify your key at: https://makersuite.google.com/app/apikey");
  }

  if (res->status == 404) {
    return absl::NotFoundError(
        absl::StrCat("❌ Model '", config_.model, "' not found\n",
                     "   Try: gemini-2.5-flash or gemini-1.5-pro"));
  }

  if (res->status != 200) {
    return absl::InternalError(
        absl::StrCat("❌ Gemini API error: ", res->status, "\n   ", res->body));
  }

  return absl::OkStatus();
#endif
}

absl::StatusOr<std::vector<std::string>> GeminiAIService::GetCommands(
    const std::string& prompt) {
#ifndef YAZE_WITH_JSON
  return absl::UnimplementedError(
      "Gemini AI service requires JSON support. Build with -DYAZE_WITH_JSON=ON");
#else
  // Validate configuration
  if (auto status = CheckAvailability(); !status.ok()) {
    return status;
  }

  httplib::Client cli("https://generativelanguage.googleapis.com");
  cli.set_connection_timeout(30, 0);  // 30 seconds for generation

  // Build request with proper Gemini API v1beta format
  nlohmann::json request_body = {
      {"system_instruction", {
          {"parts", {
              {"text", config_.system_instruction}
          }}
      }},
      {"contents", {{
          {"parts", {{
              {"text", prompt}
          }}}
      }}},
      {"generationConfig", {
          {"temperature", config_.temperature},
          {"maxOutputTokens", config_.max_output_tokens},
          {"responseMimeType", "application/json"}
      }}
  };

  httplib::Headers headers = {
      {"Content-Type", "application/json"},
      {"x-goog-api-key", config_.api_key},
  };

  std::string endpoint = "/v1beta/models/" + config_.model + ":generateContent";
  auto res = cli.Post(endpoint.c_str(), headers, request_body.dump(),
                      "application/json");

  if (!res) {
    return absl::InternalError("❌ Failed to connect to Gemini API");
  }

  if (res->status != 200) {
    return absl::InternalError(
        absl::StrCat("❌ Gemini API error: ", res->status, "\n   ", res->body));
  }

  return ParseGeminiResponse(res->body);
#endif
}

absl::StatusOr<std::vector<std::string>> GeminiAIService::ParseGeminiResponse(
    const std::string& response_body) {
#ifdef YAZE_WITH_JSON
  std::vector<std::string> commands;

  try {
    nlohmann::json response_json = nlohmann::json::parse(response_body);

    // Navigate Gemini's response structure
    if (!response_json.contains("candidates") ||
        response_json["candidates"].empty()) {
      return absl::InternalError("❌ No candidates in Gemini response");
    }

    for (const auto& candidate : response_json["candidates"]) {
      if (!candidate.contains("content") ||
          !candidate["content"].contains("parts")) {
        continue;
      }

      for (const auto& part : candidate["content"]["parts"]) {
        if (!part.contains("text")) {
          continue;
        }

        std::string text_content = part["text"].get<std::string>();

        // Strip markdown code blocks if present (```json ... ```)
        text_content = std::string(absl::StripAsciiWhitespace(text_content));
        if (absl::StartsWith(text_content, "```json")) {
          text_content = text_content.substr(7);  // Remove ```json
        } else if (absl::StartsWith(text_content, "```")) {
          text_content = text_content.substr(3);  // Remove ```
        }
        if (absl::EndsWith(text_content, "```")) {
          text_content = text_content.substr(0, text_content.length() - 3);
        }
        text_content = std::string(absl::StripAsciiWhitespace(text_content));

        // Parse as JSON array
        try {
          nlohmann::json commands_array = nlohmann::json::parse(text_content);

          if (commands_array.is_array()) {
            for (const auto& cmd : commands_array) {
              if (cmd.is_string()) {
                std::string command = cmd.get<std::string>();
                // Remove "z3ed " prefix if LLM included it
                if (absl::StartsWith(command, "z3ed ")) {
                  command = command.substr(5);
                }
                commands.push_back(command);
              }
            }
          }
        } catch (const nlohmann::json::exception& inner_e) {
          // Fallback: Try to extract commands line by line
          std::vector<std::string> lines = absl::StrSplit(text_content, '\n');
          for (const auto& line : lines) {
            std::string trimmed = std::string(absl::StripAsciiWhitespace(line));
            if (!trimmed.empty() &&
                (absl::StartsWith(trimmed, "z3ed ") ||
                 absl::StartsWith(trimmed, "palette ") ||
                 absl::StartsWith(trimmed, "overworld ") ||
                 absl::StartsWith(trimmed, "sprite ") ||
                 absl::StartsWith(trimmed, "dungeon "))) {
              if (absl::StartsWith(trimmed, "z3ed ")) {
                trimmed = trimmed.substr(5);
              }
              commands.push_back(trimmed);
            }
          }
        }
      }
    }
  } catch (const nlohmann::json::exception& e) {
    return absl::InternalError(
        absl::StrCat("❌ Failed to parse Gemini response: ", e.what()));
  }

  if (commands.empty()) {
    return absl::InternalError(
        "❌ No valid commands extracted from Gemini response\n"
        "   Raw response: " + response_body);
  }

  return commands;
#else
  return absl::UnimplementedError("JSON support required");
#endif
}

}  // namespace cli
}  // namespace yaze
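For orientation, the `nlohmann::json` initializer in `GetCommands` above is intended to produce the standard v1beta `generateContent` payload, roughly the shape sketched below (values abbreviated; the exact nesting depends on how nlohmann interprets the braced initializers, so treat this as an approximation rather than a guaranteed serialization):

// Approximate serialized form of request_body.dump():
//
// {
//   "system_instruction": { "parts": [ { "text": "<system instruction>" } ] },
//   "contents": [ { "parts": [ { "text": "<user prompt>" } ] } ],
//   "generationConfig": {
//     "temperature": 0.7,
//     "maxOutputTokens": 2048,
//     "responseMimeType": "application/json"
//   }
// }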
src/cli/service/ai/gemini_ai_service.h (new file, 50 lines)
@@ -0,0 +1,50 @@
#ifndef YAZE_SRC_CLI_GEMINI_AI_SERVICE_H_
#define YAZE_SRC_CLI_GEMINI_AI_SERVICE_H_

#include <string>
#include <vector>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/prompt_builder.h"

namespace yaze {
namespace cli {

struct GeminiConfig {
  std::string api_key;
  std::string model = "gemini-2.5-flash";  // Default to flash model
  float temperature = 0.7f;
  int max_output_tokens = 2048;
  std::string system_instruction;
  bool use_enhanced_prompting = true;  // Enable few-shot examples

  GeminiConfig() = default;
  explicit GeminiConfig(const std::string& key) : api_key(key) {}
};

class GeminiAIService : public AIService {
 public:
  explicit GeminiAIService(const GeminiConfig& config);

  // Primary interface
  absl::StatusOr<std::vector<std::string>> GetCommands(
      const std::string& prompt) override;

  // Health check
  absl::Status CheckAvailability();

 private:
  std::string BuildSystemInstruction();
  absl::StatusOr<std::vector<std::string>> ParseGeminiResponse(
      const std::string& response_body);

  GeminiConfig config_;
  PromptBuilder prompt_builder_;
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_GEMINI_AI_SERVICE_H_
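A hedged end-to-end sketch against the header above; the `main` wrapper and prompt text are illustrative, while `GeminiConfig`, `CheckAvailability`, and `GetCommands` are the APIs from this commit.

#include <cstdlib>
#include <iostream>

#include "cli/service/ai/gemini_ai_service.h"

int main() {
  const char* key = std::getenv("GEMINI_API_KEY");
  yaze::cli::GeminiConfig config(key ? key : "");
  config.temperature = 0.2f;  // tighter sampling for command generation

  yaze::cli::GeminiAIService gemini(config);
  if (auto status = gemini.CheckAvailability(); !status.ok()) {
    std::cerr << status << "\n";  // prints the actionable setup hints
    return 1;
  }

  auto commands_or = gemini.GetCommands("Plant a row of trees at y=8");
  if (commands_or.ok()) {
    for (const auto& cmd : *commands_or) std::cout << cmd << "\n";
  }
  return 0;
}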
src/cli/service/ai/ollama_ai_service.cc (new file, 263 lines)
@@ -0,0 +1,263 @@
#include "cli/service/ai/ollama_ai_service.h"

#include <cstdlib>

#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"

// Check if we have httplib available (from vcpkg or bundled)
#if __has_include("httplib.h")
#define YAZE_HAS_HTTPLIB 1
#include "httplib.h"
#elif __has_include("incl/httplib.h")
#define YAZE_HAS_HTTPLIB 1
#include "incl/httplib.h"
#else
#define YAZE_HAS_HTTPLIB 0
#endif

// Check if we have JSON library available
#if __has_include("third_party/json/src/json.hpp")
#define YAZE_HAS_JSON 1
#include "third_party/json/src/json.hpp"
#elif __has_include("json.hpp")
#define YAZE_HAS_JSON 1
#include "json.hpp"
#else
#define YAZE_HAS_JSON 0
#endif

namespace yaze {
namespace cli {

OllamaAIService::OllamaAIService(const OllamaConfig& config) : config_(config) {
  // Load command documentation into prompt builder
  prompt_builder_.LoadResourceCatalogue("");  // TODO: Pass actual yaml path when available

  if (config_.system_prompt.empty()) {
    // Use enhanced prompting by default
    if (config_.use_enhanced_prompting) {
      config_.system_prompt =
          prompt_builder_.BuildSystemInstructionWithExamples();
    } else {
      config_.system_prompt = BuildSystemPrompt();
    }
  }
}

std::string OllamaAIService::BuildSystemPrompt() {
  // Fallback prompt if enhanced prompting is disabled.
  // Use PromptBuilder's basic system instruction.
  return prompt_builder_.BuildSystemInstruction();
}

absl::Status OllamaAIService::CheckAvailability() {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
  return absl::UnimplementedError(
      "Ollama service requires httplib and JSON support. "
      "Install vcpkg dependencies or use bundled libraries.");
#else
  try {
    httplib::Client cli(config_.base_url);
    cli.set_connection_timeout(5);  // 5 second timeout

    auto res = cli.Get("/api/tags");
    if (!res) {
      return absl::UnavailableError(absl::StrFormat(
          "Cannot connect to Ollama server at %s.\n"
          "Make sure Ollama is installed and running:\n"
          "  1. Install: brew install ollama (macOS) or https://ollama.com/download\n"
          "  2. Start: ollama serve\n"
          "  3. Verify: curl http://localhost:11434/api/tags",
          config_.base_url));
    }

    if (res->status != 200) {
      return absl::InternalError(absl::StrFormat(
          "Ollama server error: HTTP %d\nResponse: %s",
          res->status, res->body));
    }

    // Check if requested model is available
    nlohmann::json models_json = nlohmann::json::parse(res->body);
    bool model_found = false;

    if (models_json.contains("models") && models_json["models"].is_array()) {
      for (const auto& model : models_json["models"]) {
        if (model.contains("name")) {
          std::string model_name = model["name"].get<std::string>();
          if (model_name.find(config_.model) != std::string::npos) {
            model_found = true;
            break;
          }
        }
      }
    }

    if (!model_found) {
      return absl::NotFoundError(absl::StrFormat(
          "Model '%s' not found on Ollama server.\n"
          "Pull it with: ollama pull %s\n"
          "Available models: ollama list",
          config_.model, config_.model));
    }

    return absl::OkStatus();
  } catch (const std::exception& e) {
    return absl::InternalError(absl::StrCat(
        "Ollama health check failed: ", e.what()));
  }
#endif
}

absl::StatusOr<std::vector<std::string>> OllamaAIService::ListAvailableModels() {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
  return absl::UnimplementedError("Requires httplib and JSON support");
#else
  try {
    httplib::Client cli(config_.base_url);
    cli.set_connection_timeout(5);

    auto res = cli.Get("/api/tags");

    if (!res || res->status != 200) {
      return absl::UnavailableError(
          "Cannot list Ollama models. Is the server running?");
    }

    nlohmann::json models_json = nlohmann::json::parse(res->body);
    std::vector<std::string> models;

    if (models_json.contains("models") && models_json["models"].is_array()) {
      for (const auto& model : models_json["models"]) {
        if (model.contains("name")) {
          models.push_back(model["name"].get<std::string>());
        }
      }
    }

    return models;
  } catch (const std::exception& e) {
    return absl::InternalError(absl::StrCat(
        "Failed to list models: ", e.what()));
  }
#endif
}

absl::StatusOr<std::string> OllamaAIService::ParseOllamaResponse(
    const std::string& json_response) {
#if !YAZE_HAS_JSON
  return absl::UnimplementedError("Requires JSON support");
#else
  try {
    nlohmann::json response_json = nlohmann::json::parse(json_response);

    if (!response_json.contains("response")) {
      return absl::InvalidArgumentError(
          "Ollama response missing 'response' field");
    }

    return response_json["response"].get<std::string>();
  } catch (const nlohmann::json::exception& e) {
    return absl::InternalError(absl::StrCat(
        "Failed to parse Ollama response: ", e.what()));
  }
#endif
}

absl::StatusOr<std::vector<std::string>> OllamaAIService::GetCommands(
    const std::string& prompt) {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
  return absl::UnimplementedError(
      "Ollama service requires httplib and JSON support. "
      "Install vcpkg dependencies or use bundled libraries.");
#else
  // Build request payload
  nlohmann::json request_body = {
      {"model", config_.model},
      {"prompt", config_.system_prompt + "\n\nUSER REQUEST: " + prompt},
      {"stream", false},
      {"options", {
          {"temperature", config_.temperature},
          {"num_predict", config_.max_tokens}
      }},
      {"format", "json"}  // Force JSON output
  };

  try {
    httplib::Client cli(config_.base_url);
    cli.set_read_timeout(60);  // Longer timeout for inference

    auto res = cli.Post("/api/generate", request_body.dump(), "application/json");

    if (!res) {
      return absl::UnavailableError(
          "Failed to connect to Ollama. Is 'ollama serve' running?\n"
          "Start with: ollama serve");
    }

    if (res->status != 200) {
      return absl::InternalError(absl::StrFormat(
          "Ollama API error: HTTP %d\nResponse: %s",
          res->status, res->body));
    }

    // Parse response to extract generated text
    auto generated_text_or = ParseOllamaResponse(res->body);
    if (!generated_text_or.ok()) {
      return generated_text_or.status();
    }
    std::string generated_text = generated_text_or.value();

    // Parse the command array from generated text
    nlohmann::json commands_json;
    try {
      commands_json = nlohmann::json::parse(generated_text);
    } catch (const nlohmann::json::exception& e) {
      // Sometimes the LLM includes extra text - try to extract JSON array
      size_t start = generated_text.find('[');
      size_t end = generated_text.rfind(']');

      if (start != std::string::npos && end != std::string::npos && end > start) {
        std::string json_only = generated_text.substr(start, end - start + 1);
        try {
          commands_json = nlohmann::json::parse(json_only);
        } catch (const nlohmann::json::exception&) {
          return absl::InvalidArgumentError(
              "LLM did not return valid JSON. Response:\n" + generated_text);
        }
      } else {
        return absl::InvalidArgumentError(
            "LLM did not return a JSON array. Response:\n" + generated_text);
      }
    }

    if (!commands_json.is_array()) {
      return absl::InvalidArgumentError(
          "LLM did not return a JSON array. Response:\n" + generated_text);
    }

    std::vector<std::string> commands;
    for (const auto& cmd : commands_json) {
      if (cmd.is_string()) {
        commands.push_back(cmd.get<std::string>());
      }
    }

    if (commands.empty()) {
      return absl::InvalidArgumentError(
          "LLM returned empty command list. Prompt may be unclear.\n"
          "Try rephrasing your request to be more specific.");
    }

    return commands;

  } catch (const std::exception& e) {
    return absl::InternalError(absl::StrCat(
        "Ollama request failed: ", e.what()));
  }
#endif
}

}  // namespace cli
}  // namespace yaze
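A local-inference sketch mirroring the Gemini example; everything here uses APIs from this commit, and it assumes `ollama serve` is already running with the default config.

#include <iostream>

#include "cli/service/ai/ollama_ai_service.h"

int main() {
  yaze::cli::OllamaConfig config;  // localhost:11434, qwen2.5-coder:7b
  yaze::cli::OllamaAIService ollama(config);

  if (auto status = ollama.CheckAvailability(); !status.ok()) {
    std::cerr << status << "\n";  // includes "ollama pull ..." guidance
    return 1;
  }

  if (auto models_or = ollama.ListAvailableModels(); models_or.ok()) {
    for (const auto& name : *models_or) std::cout << "model: " << name << "\n";
  }

  auto commands_or = ollama.GetCommands("Create a 3x3 water pond at 15, 10");
  if (commands_or.ok()) {
    for (const auto& cmd : *commands_or) std::cout << cmd << "\n";
  }
  return 0;
}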
src/cli/service/ai/ollama_ai_service.h (new file, 53 lines)
@@ -0,0 +1,53 @@
#ifndef YAZE_SRC_CLI_OLLAMA_AI_SERVICE_H_
#define YAZE_SRC_CLI_OLLAMA_AI_SERVICE_H_

#include <string>
#include <vector>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "cli/service/ai/ai_service.h"
#include "cli/service/ai/prompt_builder.h"

namespace yaze {
namespace cli {

// Ollama configuration for local LLM inference
struct OllamaConfig {
  std::string base_url = "http://localhost:11434";  // Default Ollama endpoint
  std::string model = "qwen2.5-coder:7b";  // Recommended for code generation
  float temperature = 0.1f;                // Low temp for deterministic commands
  int max_tokens = 2048;                   // Sufficient for command lists
  std::string system_prompt;               // Injected from resource catalogue
  bool use_enhanced_prompting = true;      // Enable few-shot examples
};

class OllamaAIService : public AIService {
 public:
  explicit OllamaAIService(const OllamaConfig& config);

  // Generate z3ed commands from natural language prompt
  absl::StatusOr<std::vector<std::string>> GetCommands(
      const std::string& prompt) override;

  // Health check: verify Ollama server is running and model is available
  absl::Status CheckAvailability();

  // List available models on Ollama server
  absl::StatusOr<std::vector<std::string>> ListAvailableModels();

 private:
  OllamaConfig config_;
  PromptBuilder prompt_builder_;

  // Build system prompt from resource catalogue
  std::string BuildSystemPrompt();

  // Parse JSON response from Ollama API
  absl::StatusOr<std::string> ParseOllamaResponse(const std::string& json_response);
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_SRC_CLI_OLLAMA_AI_SERVICE_H_
src/cli/service/ai/prompt_builder.cc (new file, 364 lines)
@@ -0,0 +1,364 @@
#include "cli/service/ai/prompt_builder.h"

#include <fstream>
#include <sstream>

#include "absl/strings/str_cat.h"
#include "absl/strings/str_join.h"

namespace yaze {
namespace cli {

PromptBuilder::PromptBuilder() {
  LoadDefaultExamples();
}

void PromptBuilder::LoadDefaultExamples() {
  // ==========================================================================
  // OVERWORLD TILE16 EDITING - Primary Focus
  // ==========================================================================

  // Single tile placement
  examples_.push_back({
      "Place a tree at position 10, 20 on the Light World map",
      {
          "overworld set-tile --map 0 --x 10 --y 20 --tile 0x02E"
      },
      "Single tile16 placement. Tree tile ID is 0x02E in vanilla ALTTP"
  });

  // Area/region editing
  examples_.push_back({
      "Create a 3x3 water pond at coordinates 15, 10",
      {
          "overworld set-tile --map 0 --x 15 --y 10 --tile 0x14C",
          "overworld set-tile --map 0 --x 16 --y 10 --tile 0x14D",
          "overworld set-tile --map 0 --x 17 --y 10 --tile 0x14C",
          "overworld set-tile --map 0 --x 15 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 16 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 17 --y 11 --tile 0x14D",
          "overworld set-tile --map 0 --x 15 --y 12 --tile 0x14E",
          "overworld set-tile --map 0 --x 16 --y 12 --tile 0x14E",
          "overworld set-tile --map 0 --x 17 --y 12 --tile 0x14E"
      },
      "Water areas use different edge tiles: 0x14C (top), 0x14D (middle), 0x14E (bottom)"
  });

  // Path/line creation
  examples_.push_back({
      "Add a dirt path from position 5,5 to 5,15",
      {
          "overworld set-tile --map 0 --x 5 --y 5 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 6 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 7 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 8 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 9 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 10 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 11 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 12 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 13 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 14 --tile 0x022",
          "overworld set-tile --map 0 --x 5 --y 15 --tile 0x022"
      },
      "Linear paths are created by placing tiles sequentially. Dirt tile is 0x022"
  });

  // Forest/tree grouping
  examples_.push_back({
      "Plant a row of trees horizontally at y=8 from x=20 to x=25",
      {
          "overworld set-tile --map 0 --x 20 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 21 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 22 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 23 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 24 --y 8 --tile 0x02E",
          "overworld set-tile --map 0 --x 25 --y 8 --tile 0x02E"
      },
      "Tree rows create natural barriers and visual boundaries"
  });

  // ==========================================================================
  // DUNGEON EDITING - Label-Aware Operations
  // ==========================================================================

  // Sprite placement (label-aware)
  examples_.push_back({
      "Add 3 soldiers to the Eastern Palace entrance room",
      {
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 5 --y 3",
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 10 --y 3",
          "dungeon add-sprite --dungeon 0x02 --room 0x00 --sprite 0x41 --x 7 --y 8"
      },
      "Dungeon ID 0x02 is Eastern Palace. Sprite 0x41 is soldier. Spread placement for balance"
  });

  // Object placement
  examples_.push_back({
      "Place a chest in the Hyrule Castle treasure room",
      {
          "dungeon add-chest --dungeon 0x00 --room 0x60 --x 7 --y 5 --item 0x12 --big false"
      },
      "Dungeon 0x00 is Hyrule Castle. Item 0x12 is a small key. Position centered in room"
  });

  // ==========================================================================
  // COMMON TILE16 REFERENCE (for AI knowledge)
  // ==========================================================================
  // Grass: 0x020
  // Dirt: 0x022
  // Tree: 0x02E
  // Water (top): 0x14C
  // Water (middle): 0x14D
  // Water (bottom): 0x14E
  // Bush: 0x003
  // Rock: 0x004
  // Flower: 0x021
  // Sand: 0x023
  // Deep Water: 0x14F
  // Shallow Water: 0x150

  // Validation example (still useful)
  examples_.push_back({
      "Check if my overworld changes are valid",
      {
          "rom validate"
      },
      "Validation ensures ROM integrity after tile modifications"
  });
}

absl::Status PromptBuilder::LoadResourceCatalogue(const std::string& yaml_path) {
  // TODO: Parse z3ed-resources.yaml when available
  // For now, use hardcoded command reference

  command_docs_["palette export"] =
      "Export palette data to JSON file\n"
      "  --group <group>  Palette group (overworld, dungeon, sprite)\n"
      "  --id <id>        Palette ID (0-based index)\n"
      "  --to <file>      Output JSON file path";

  command_docs_["palette import"] =
      "Import palette data from JSON file\n"
      "  --group <group>  Palette group (overworld, dungeon, sprite)\n"
      "  --id <id>        Palette ID (0-based index)\n"
      "  --from <file>    Input JSON file path";

  command_docs_["palette set-color"] =
      "Modify a color in palette JSON file\n"
      "  --file <file>    Palette JSON file to modify\n"
      "  --index <index>  Color index (0-15 per palette)\n"
      "  --color <hex>    New color in hex (0xRRGGBB format)";

  command_docs_["overworld set-tile"] =
      "Place a tile in the overworld\n"
      "  --map <id>    Map ID (0-based)\n"
      "  --x <x>       X coordinate (0-63)\n"
      "  --y <y>       Y coordinate (0-63)\n"
      "  --tile <hex>  Tile ID in hex (e.g., 0x02E for tree)";

  command_docs_["sprite set-position"] =
      "Move a sprite to new position\n"
      "  --id <id>  Sprite ID\n"
      "  --x <x>    X coordinate\n"
      "  --y <y>    Y coordinate";

  command_docs_["dungeon set-room-tile"] =
      "Place a tile in dungeon room\n"
      "  --room <id>   Room ID\n"
      "  --x <x>       X coordinate\n"
      "  --y <y>       Y coordinate\n"
      "  --tile <hex>  Tile ID";

  command_docs_["rom validate"] =
      "Validate ROM integrity and structure";

  catalogue_loaded_ = true;
  return absl::OkStatus();
}

std::string PromptBuilder::BuildCommandReference() {
  std::ostringstream oss;

  oss << "# Available z3ed Commands\n\n";

  for (const auto& [cmd, docs] : command_docs_) {
    oss << "## " << cmd << "\n";
    oss << docs << "\n\n";
  }

  return oss.str();
}

std::string PromptBuilder::BuildFewShotExamplesSection() {
  std::ostringstream oss;

  oss << "# Example Command Sequences\n\n";
  oss << "Here are proven examples of how to accomplish common tasks:\n\n";

  for (const auto& example : examples_) {
    oss << "**User Request:** \"" << example.user_prompt << "\"\n";
    oss << "**Commands:**\n";
    oss << "```json\n[";

    std::vector<std::string> quoted_cmds;
    for (const auto& cmd : example.expected_commands) {
      quoted_cmds.push_back("\"" + cmd + "\"");
    }
    oss << absl::StrJoin(quoted_cmds, ", ");

    oss << "]\n```\n";
    oss << "*Explanation:* " << example.explanation << "\n\n";
  }

  return oss.str();
}

std::string PromptBuilder::BuildConstraintsSection() {
  return R"(
# Critical Constraints

1. **Output Format:** You MUST respond with ONLY a JSON array of strings
   - Each string is a complete z3ed command
   - NO explanatory text before or after
   - NO markdown code blocks (```json)
   - NO "z3ed" prefix in commands

2. **Command Syntax:** Follow the exact syntax shown in examples
   - Use correct flag names (--group, --id, --to, --from, etc.)
   - Use hex format for colors (0xRRGGBB) and tile IDs (0xNNN)
   - Coordinates are 0-based indices

3. **Common Patterns:**
   - Palette modifications: export → set-color → import
   - Multiple tile placement: multiple overworld set-tile commands
   - Validation: single rom validate command

4. **Tile IDs Reference (ALTTP):**
   - Tree: 0x02E
   - House (2x2): 0x0C0, 0x0C1, 0x0D0, 0x0D1
   - Water: 0x038
   - Grass: 0x000

5. **Error Prevention:**
   - Always export before modifying palettes
   - Use temporary file names (temp_*.json) for intermediate files
   - Validate coordinates are within bounds
)";
}

std::string PromptBuilder::BuildContextSection(const RomContext& context) {
  std::ostringstream oss;

  oss << "# Current ROM Context\n\n";

  // Use ResourceContextBuilder if a ROM is available
  if (rom_ && rom_->is_loaded()) {
    if (!resource_context_builder_) {
      resource_context_builder_ = std::make_unique<ResourceContextBuilder>(rom_);
    }
    auto resource_context_or = resource_context_builder_->BuildResourceContext();
    if (resource_context_or.ok()) {
      oss << resource_context_or.value();
    }
  }

  if (context.rom_loaded) {
    oss << "- **ROM Loaded:** Yes (" << context.rom_path << ")\n";
  } else {
    oss << "- **ROM Loaded:** No\n";
  }

  if (!context.current_editor.empty()) {
    oss << "- **Active Editor:** " << context.current_editor << "\n";
  }

  if (!context.editor_state.empty()) {
    oss << "- **Editor State:**\n";
    for (const auto& [key, value] : context.editor_state) {
      oss << "  - " << key << ": " << value << "\n";
    }
  }

  oss << "\n";
  return oss.str();
}

std::string PromptBuilder::BuildSystemInstruction() {
  std::ostringstream oss;

  oss << "You are an expert ROM hacking assistant for The Legend of Zelda: "
      << "A Link to the Past (ALTTP).\n\n";

  oss << "Your task is to generate a sequence of z3ed CLI commands to achieve "
      << "the user's request.\n\n";

  if (catalogue_loaded_) {
    oss << BuildCommandReference();
  }

  oss << BuildConstraintsSection();

  oss << "\n**Response Format:**\n";
  oss << "```json\n";
  oss << "[\"command1 --flag value\", \"command2 --flag value\"]\n";
  oss << "```\n";

  return oss.str();
}

std::string PromptBuilder::BuildSystemInstructionWithExamples() {
  std::ostringstream oss;

  oss << BuildSystemInstruction();
  oss << "\n---\n\n";
  oss << BuildFewShotExamplesSection();

  return oss.str();
}

std::string PromptBuilder::BuildContextualPrompt(
    const std::string& user_prompt,
    const RomContext& context) {
  std::ostringstream oss;

  if (context.rom_loaded || !context.current_editor.empty()) {
    oss << BuildContextSection(context);
    oss << "---\n\n";
  }

  oss << "**User Request:** " << user_prompt << "\n\n";
  oss << "Generate the appropriate z3ed commands as a JSON array.";

  return oss.str();
}

void PromptBuilder::AddFewShotExample(const FewShotExample& example) {
  examples_.push_back(example);
}

std::vector<FewShotExample> PromptBuilder::GetExamplesForCategory(
    const std::string& category) {
  std::vector<FewShotExample> result;

  for (const auto& example : examples_) {
    // Simple category matching based on keywords
    if (category == "palette" &&
        (example.user_prompt.find("palette") != std::string::npos ||
         example.user_prompt.find("color") != std::string::npos)) {
      result.push_back(example);
    } else if (category == "overworld" &&
               (example.user_prompt.find("place") != std::string::npos ||
                example.user_prompt.find("tree") != std::string::npos ||
                example.user_prompt.find("house") != std::string::npos)) {
      result.push_back(example);
    } else if (category == "validation" &&
               example.user_prompt.find("validate") != std::string::npos) {
      result.push_back(example);
    }
  }

  return result;
}

}  // namespace cli
}  // namespace yaze
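To make the two halves of the prompt concrete, here is an illustrative assembly using the implementation above (the ROM path and request text are made up; all methods and the `RomContext` fields come from this commit):

#include <iostream>

#include "cli/service/ai/prompt_builder.h"

int main() {
  yaze::cli::PromptBuilder builder;
  builder.LoadResourceCatalogue("");  // hardcoded docs until the yaml exists

  yaze::cli::RomContext context;
  context.rom_loaded = true;
  context.rom_path = "zelda3.sfc";  // hypothetical path
  context.current_editor = "overworld";

  // System half: command reference + constraints + few-shot examples.
  std::string system = builder.BuildSystemInstructionWithExamples();
  // User half: ROM context section followed by the request itself.
  std::string user = builder.BuildContextualPrompt(
      "Add a dirt path from 5,5 to 5,15", context);

  std::cout << system << "\n---\n" << user << "\n";
  return 0;
}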
src/cli/service/ai/prompt_builder.h (new file, 80 lines)
@@ -0,0 +1,80 @@
#ifndef YAZE_CLI_SERVICE_PROMPT_BUILDER_H_
#define YAZE_CLI_SERVICE_PROMPT_BUILDER_H_

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "absl/status/statusor.h"
#include "app/rom.h"
#include "cli/service/resources/resource_context_builder.h"

namespace yaze {
namespace cli {

// Few-shot example for prompt engineering
struct FewShotExample {
  std::string user_prompt;
  std::vector<std::string> expected_commands;
  std::string explanation;  // Why these commands work
};

// ROM context information to inject into prompts
struct RomContext {
  std::string rom_path;
  bool rom_loaded = false;
  std::string current_editor;  // "overworld", "dungeon", "sprite", etc.
  std::map<std::string, std::string> editor_state;  // Context-specific state
};

// Builds sophisticated prompts for LLM services
class PromptBuilder {
 public:
  PromptBuilder();

  void SetRom(Rom* rom) { rom_ = rom; }

  // Load z3ed command documentation from resources
  absl::Status LoadResourceCatalogue(const std::string& yaml_path);

  // Build system instruction with full command reference
  std::string BuildSystemInstruction();

  // Build system instruction with few-shot examples
  std::string BuildSystemInstructionWithExamples();

  // Build user prompt with ROM context
  std::string BuildContextualPrompt(
      const std::string& user_prompt,
      const RomContext& context);

  // Add custom few-shot examples
  void AddFewShotExample(const FewShotExample& example);

  // Get few-shot examples for specific category
  std::vector<FewShotExample> GetExamplesForCategory(
      const std::string& category);

  // Set verbosity level (0=minimal, 1=standard, 2=verbose)
  void SetVerbosity(int level) { verbosity_ = level; }

 private:
  std::string BuildCommandReference();
  std::string BuildFewShotExamplesSection();
  std::string BuildContextSection(const RomContext& context);
  std::string BuildConstraintsSection();

  void LoadDefaultExamples();

  Rom* rom_ = nullptr;
  std::unique_ptr<ResourceContextBuilder> resource_context_builder_;
  std::map<std::string, std::string> command_docs_;  // Command name -> docs
  std::vector<FewShotExample> examples_;
  int verbosity_ = 1;
  bool catalogue_loaded_ = false;
};

}  // namespace cli
}  // namespace yaze

#endif  // YAZE_CLI_SERVICE_PROMPT_BUILDER_H_