feat: Enhance AI service with verbose logging and command help

- Added a `--verbose` flag to enable detailed debug output for the Gemini AI service.
- Updated `GeminiAIService` constructor to log initialization details when verbose mode is enabled.
- Modified `CreateAIService` to pass the verbose flag to the Gemini configuration.
- Enhanced command help in `ModernCLI` to categorize commands and provide detailed descriptions.
- Refactored `HandleSimpleChatCommand` to accept a pointer to `Rom` instead of a reference.
- Updated `ShowCategoryHelp` to display command categories and examples.
- Improved error handling and logging in `GeminiAIService` for better debugging.
Author: scawful
Date: 2025-10-04 04:28:44 -04:00
Parent: 173685a928
Commit: 8deb2656d5
8 changed files with 374 additions and 120 deletions
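
A minimal sketch of how the new `verbose` flag is expected to flow from the CLI configuration into the Gemini service. The struct and field names (`AIServiceConfig`, `provider`, `gemini_api_key`, `verbose`) and the `CreateAIService` factory appear in the diff below; the include path, namespace qualification, and `main` wrapper here are illustrative assumptions, not code from this commit.

```cpp
// Illustrative sketch only -- not part of this commit.
#include <cstdlib>
#include <memory>

#include "cli/service/ai_service.h"  // AIServiceConfig, CreateAIService (assumed path)

int main() {
  yaze::cli::AIServiceConfig config;  // namespace assumed from surrounding code
  config.provider = "gemini";
  const char* key = std::getenv("GEMINI_API_KEY");
  config.gemini_api_key = key ? key : "";
  config.verbose = true;  // set when --verbose / -v is passed on the command line

  // CreateAIService copies config.verbose into GeminiConfig::verbose, so the
  // GeminiAIService constructor emits its "[DEBUG] ..." lines to stderr.
  std::unique_ptr<yaze::cli::AIService> service = yaze::cli::CreateAIService(config);
  return service ? 0 : 1;
}
```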


@@ -4,8 +4,12 @@
#include <string>
#include <vector>
#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "absl/status/status.h"
ABSL_DECLARE_FLAG(bool, quiet);
namespace yaze {
namespace cli {
namespace agent {
@@ -133,7 +137,7 @@ absl::Status Agent::Run(const std::vector<std::string>& arg_vec) {
return agent::HandleChatCommand(rom_);
}
if (subcommand == "simple-chat") {
return agent::HandleSimpleChatCommand(subcommand_args, rom_, absl::GetFlag(FLAGS_quiet));
return agent::HandleSimpleChatCommand(subcommand_args, &rom_, absl::GetFlag(FLAGS_quiet));
}
return absl::InvalidArgumentError(std::string(agent::kUsage));


@@ -619,10 +619,10 @@ absl::Status HandleChatCommand(Rom& rom) {
}
absl::Status HandleSimpleChatCommand(const std::vector<std::string>& arg_vec,
Rom& rom, bool quiet) {
RETURN_IF_ERROR(EnsureRomLoaded(rom, "agent simple-chat"));
Rom* rom, bool quiet) {
RETURN_IF_ERROR(EnsureRomLoaded(*rom, "agent simple-chat"));
auto _ = TryLoadProjectAndLabels(rom);
auto _ = TryLoadProjectAndLabels(*rom);
std::optional<std::string> batch_file;
std::optional<std::string> single_message;
@@ -646,8 +646,7 @@ absl::Status HandleSimpleChatCommand(const std::vector<std::string>& arg_vec,
SimpleChatSession session;
session.SetConfig(config);
session.SetRomContext(&rom);
session.SetQuietMode(quiet);
session.SetRomContext(rom);
if (batch_file.has_value()) {
std::ifstream file(*batch_file);


@@ -289,37 +289,243 @@ void ModernCLI::SetupCommands() {
void ModernCLI::ShowHelp() {
std::cout << GetColoredLogo() << std::endl;
std::cout << std::endl;
std::cout << "USAGE:" << std::endl;
std::cout << " z3ed [--tui] <resource> <action> [arguments]" << std::endl;
std::cout << "\033[1m\033[36mUSAGE:\033[0m" << std::endl;
std::cout << " z3ed [options] <command> [arguments]" << std::endl;
std::cout << std::endl;
std::cout << "GLOBAL FLAGS:" << std::endl;
std::cout << " --tui Launch Text User Interface" << std::endl;
std::cout << " --rom=<file> Specify ROM file to use" << std::endl;
std::cout << "\033[1m\033[36mGLOBAL OPTIONS:\033[0m" << std::endl;
std::cout << " --tui Launch interactive Text User Interface" << std::endl;
std::cout << " --rom=<file> Specify ROM file path" << std::endl;
std::cout << " --verbose, -v Show detailed debug output" << std::endl;
std::cout << " --version Show version information" << std::endl;
std::cout << " --help Show this help message" << std::endl;
std::cout << " --help, -h Show this help message" << std::endl;
std::cout << std::endl;
std::cout << "COMMANDS:" << std::endl;
for (const auto& [name, info] : commands_) {
std::cout << absl::StrFormat(" %-25s %s", name, info.description) << std::endl;
}
// Categorize commands
std::cout << "\033[1m\033[36mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1m🤖 AI Agent\033[0m" << std::endl;
std::cout << " agent simple-chat Natural language ROM queries" << std::endl;
std::cout << " agent test-conversation Interactive testing mode" << std::endl;
std::cout << " \033[90m→ z3ed help agent\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "EXAMPLES:" << std::endl;
std::cout << " z3ed --tui # Launch TUI" << std::endl;
std::cout << " z3ed patch apply-asar patch.asm --rom=zelda3.sfc # Apply Asar patch" << std::endl;
std::cout << " z3ed patch apply-bps changes.bps --rom=zelda3.sfc # Apply BPS patch" << std::endl;
std::cout << " z3ed patch extract-symbols patch.asm # Extract symbols" << std::endl;
std::cout << " z3ed rom info --rom=zelda3.sfc # Show ROM info" << std::endl;
std::cout << " \033[1m🔧 ROM Patching\033[0m" << std::endl;
std::cout << " patch apply-asar Apply Asar 65816 assembly patch" << std::endl;
std::cout << " patch apply-bps Apply BPS binary patch" << std::endl;
std::cout << " patch extract-symbols Extract symbols from assembly" << std::endl;
std::cout << " \033[90m→ z3ed help patch\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "For more information on a specific command:" << std::endl;
std::cout << " z3ed help <resource> <action>" << std::endl;
std::cout << " \033[1m📦 ROM Operations\033[0m" << std::endl;
std::cout << " rom info Display ROM information" << std::endl;
std::cout << " rom diff Compare two ROM files" << std::endl;
std::cout << " rom generate-golden Create golden test file" << std::endl;
std::cout << " \033[90m→ z3ed help rom\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1m🗺 Overworld\033[0m" << std::endl;
std::cout << " overworld get-tile Get tile at coordinates" << std::endl;
std::cout << " overworld set-tile Place tile at coordinates" << std::endl;
std::cout << " overworld find-tile Search for tile occurrences" << std::endl;
std::cout << " overworld describe-map Show map metadata" << std::endl;
std::cout << " overworld list-warps List entrances and exits" << std::endl;
std::cout << " \033[90m→ z3ed help overworld\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1m🏰 Dungeon\033[0m" << std::endl;
std::cout << " dungeon export Export dungeon data" << std::endl;
std::cout << " dungeon import Import dungeon data" << std::endl;
std::cout << " \033[90m→ z3ed help dungeon\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1m🎨 Graphics\033[0m" << std::endl;
std::cout << " gfx export-sheet Export graphics sheet" << std::endl;
std::cout << " gfx import-sheet Import graphics sheet" << std::endl;
std::cout << " palette export Export palette data" << std::endl;
std::cout << " palette import Import palette data" << std::endl;
std::cout << " \033[90m→ z3ed help gfx, z3ed help palette\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1m\033[36mQUICK START:\033[0m" << std::endl;
std::cout << " z3ed --tui" << std::endl;
std::cout << " z3ed agent simple-chat \"What is room 5?\" --rom=zelda3.sfc" << std::endl;
std::cout << " z3ed patch apply-asar patch.asm --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << "\033[90mFor detailed help: z3ed help <category>\033[0m" << std::endl;
}
void ModernCLI::PrintTopLevelHelp() const {
const_cast<ModernCLI*>(this)->ShowHelp();
}
void ModernCLI::ShowCategoryHelp(const std::string& category) {
std::cout << GetColoredLogo() << std::endl;
std::cout << std::endl;
if (category == "agent") {
std::cout << "\033[1m\033[36m🤖 AI AGENT COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mDESCRIPTION:\033[0m" << std::endl;
std::cout << " Natural language interface for ROM inspection using embedded labels." << std::endl;
std::cout << " Query rooms, sprites, entrances, and game data conversationally." << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1magent simple-chat\033[0m [\"<question>\"]" << std::endl;
std::cout << " Single-shot or interactive chat mode" << std::endl;
std::cout << " Options: --rom=<file>, --verbose" << std::endl;
std::cout << " Examples:" << std::endl;
std::cout << " z3ed agent simple-chat \"What sprites are in room 5?\" --rom=zelda3.sfc" << std::endl;
std::cout << " echo \"List all dungeons\" | z3ed agent simple-chat --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1magent test-conversation\033[0m" << std::endl;
std::cout << " Interactive testing mode with full context" << std::endl;
std::cout << " Options: --rom=<file>, --verbose, --file=<json>" << std::endl;
std::cout << std::endl;
std::cout << " \033[1magent chat\033[0m \"<prompt>\"" << std::endl;
std::cout << " Advanced multi-turn conversation mode" << std::endl;
std::cout << " Options: --host=<host>, --port=<port>" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mTIPS:\033[0m" << std::endl;
std::cout << " • Use --verbose to see detailed API calls and responses" << std::endl;
std::cout << " • Set GEMINI_API_KEY environment variable for Gemini" << std::endl;
std::cout << " • Use --ai_provider=gemini or --ai_provider=ollama" << std::endl;
std::cout << std::endl;
} else if (category == "patch") {
std::cout << "\033[1m\033[36m🔧 ROM PATCHING COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mDESCRIPTION:\033[0m" << std::endl;
std::cout << " Apply patches and extract symbols from assembly files." << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mpatch apply-asar\033[0m <patch.asm>" << std::endl;
std::cout << " Apply Asar 65816 assembly patch to ROM" << std::endl;
std::cout << " Options: --rom=<file>, --output=<file>" << std::endl;
std::cout << " Example: z3ed patch apply-asar custom.asm --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mpatch apply-bps\033[0m <patch.bps>" << std::endl;
std::cout << " Apply BPS binary patch to ROM" << std::endl;
std::cout << " Options: --rom=<file>, --output=<file>" << std::endl;
std::cout << " Example: z3ed patch apply-bps hack.bps --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mpatch extract-symbols\033[0m <patch.asm>" << std::endl;
std::cout << " Extract symbol table from assembly file" << std::endl;
std::cout << " Example: z3ed patch extract-symbols code.asm" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mRELATED:\033[0m" << std::endl;
std::cout << " z3ed help rom ROM operations and validation" << std::endl;
std::cout << std::endl;
} else if (category == "rom") {
std::cout << "\033[1m\033[36m📦 ROM OPERATIONS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mrom info\033[0m" << std::endl;
std::cout << " Display ROM header and metadata" << std::endl;
std::cout << " Example: z3ed rom info --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mrom diff\033[0m" << std::endl;
std::cout << " Compare two ROM files byte-by-byte" << std::endl;
std::cout << " Example: z3ed rom diff --src=original.sfc --modified=hacked.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mrom generate-golden\033[0m" << std::endl;
std::cout << " Create golden test reference file" << std::endl;
std::cout << " Example: z3ed rom generate-golden --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mrom validate\033[0m" << std::endl;
std::cout << " Validate ROM checksum and structure" << std::endl;
std::cout << " Example: z3ed rom validate --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
} else if (category == "overworld") {
std::cout << "\033[1m\033[36m🗺 OVERWORLD COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mDESCRIPTION:\033[0m" << std::endl;
std::cout << " Inspect and modify overworld map data, tiles, and warps." << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1moverworld get-tile\033[0m" << std::endl;
std::cout << " Get tile ID at specific coordinates" << std::endl;
std::cout << " Example: z3ed overworld get-tile --x=10 --y=20 --map=0 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1moverworld set-tile\033[0m" << std::endl;
std::cout << " Place tile at coordinates" << std::endl;
std::cout << " Example: z3ed overworld set-tile --x=10 --y=20 --tile=0x42 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1moverworld find-tile\033[0m" << std::endl;
std::cout << " Search for all occurrences of a tile" << std::endl;
std::cout << " Example: z3ed overworld find-tile --tile=0x42 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1moverworld describe-map\033[0m" << std::endl;
std::cout << " Show map metadata and properties" << std::endl;
std::cout << " Example: z3ed overworld describe-map --map=0 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1moverworld list-warps\033[0m" << std::endl;
std::cout << " List all entrances and exits" << std::endl;
std::cout << " Example: z3ed overworld list-warps --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
} else if (category == "dungeon") {
std::cout << "\033[1m\033[36m🏰 DUNGEON COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mdungeon export\033[0m" << std::endl;
std::cout << " Export dungeon room data to JSON" << std::endl;
std::cout << " Example: z3ed dungeon export --room=5 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mdungeon import\033[0m" << std::endl;
std::cout << " Import dungeon data from JSON" << std::endl;
std::cout << " Example: z3ed dungeon import --file=room5.json --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
} else if (category == "gfx" || category == "graphics") {
std::cout << "\033[1m\033[36m🎨 GRAPHICS COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mgfx export-sheet\033[0m" << std::endl;
std::cout << " Export graphics sheet to PNG" << std::endl;
std::cout << " Example: z3ed gfx export-sheet --sheet=0 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mgfx import-sheet\033[0m" << std::endl;
std::cout << " Import graphics from PNG" << std::endl;
std::cout << " Example: z3ed gfx import-sheet --file=custom.png --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mRELATED:\033[0m" << std::endl;
std::cout << " z3ed help palette Palette manipulation commands" << std::endl;
std::cout << std::endl;
} else if (category == "palette") {
std::cout << "\033[1m\033[36m🎨 PALETTE COMMANDS\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "\033[1mCOMMANDS:\033[0m" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mpalette export\033[0m" << std::endl;
std::cout << " Export palette data" << std::endl;
std::cout << " Example: z3ed palette export --palette=0 --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
std::cout << " \033[1mpalette import\033[0m" << std::endl;
std::cout << " Import palette data" << std::endl;
std::cout << " Example: z3ed palette import --file=colors.pal --rom=zelda3.sfc" << std::endl;
std::cout << std::endl;
} else {
std::cout << "\033[1m\033[31mUnknown category: " << category << "\033[0m" << std::endl;
std::cout << std::endl;
std::cout << "Available categories: agent, patch, rom, overworld, dungeon, gfx, palette" << std::endl;
std::cout << std::endl;
std::cout << "Use 'z3ed --help' to see all commands." << std::endl;
}
}
absl::Status ModernCLI::Run(int argc, char* argv[]) {
if (argc < 2) {
ShowHelp();
@@ -332,6 +538,16 @@ absl::Status ModernCLI::Run(int argc, char* argv[]) {
args.emplace_back(argv[i]);
}
// Handle "help <category>" command
if (args.size() >= 1 && args[0] == "help") {
if (args.size() == 1) {
ShowHelp();
return absl::OkStatus();
}
ShowCategoryHelp(args[1]);
return absl::OkStatus();
}
const CommandInfo* command_info = nullptr;
size_t consumed_tokens = 0;


@@ -31,6 +31,7 @@ class ModernCLI {
private:
void SetupCommands();
void ShowHelp();
void ShowCategoryHelp(const std::string& category);
// Command Handlers
absl::Status HandleAsarPatchCommand(const std::vector<std::string>& args);


@@ -42,15 +42,20 @@ static void InitializeOpenSSL() {
namespace yaze {
namespace cli {
GeminiAIService::GeminiAIService(const GeminiConfig& config)
: config_(config), function_calling_enabled_(config.use_function_calling) {
std::cerr << "🔧 GeminiAIService constructor: start" << std::endl;
std::cerr << "🔧 Function calling: " << (function_calling_enabled_ ? "enabled" : "disabled (JSON output mode)") << std::endl;
std::cerr << "🔧 Prompt version: " << config_.prompt_version << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Initializing Gemini service..." << std::endl;
std::cerr << "[DEBUG] Function calling: " << (function_calling_enabled_ ? "enabled" : "disabled") << std::endl;
std::cerr << "[DEBUG] Prompt version: " << config_.prompt_version << std::endl;
}
#ifdef CPPHTTPLIB_OPENSSL_SUPPORT
// Initialize OpenSSL for HTTPS support
InitializeOpenSSL();
if (config_.verbose) {
std::cerr << "[DEBUG] OpenSSL initialized for HTTPS" << std::endl;
}
#endif
// Load command documentation into prompt builder with specified version
@@ -62,10 +67,14 @@ GeminiAIService::GeminiAIService(const GeminiConfig& config)
<< status.message() << std::endl;
}
std::cerr << "🔧 GeminiAIService: loaded catalogue" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Loaded prompt catalogue" << std::endl;
}
if (config_.system_instruction.empty()) {
std::cerr << "🔧 GeminiAIService: building system instruction" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Building system instruction..." << std::endl;
}
// Try to load version-specific system prompt file
std::string prompt_file = config_.prompt_version == "v2"
@@ -85,7 +94,9 @@ GeminiAIService::GeminiAIService(const GeminiConfig& config)
std::stringstream buffer;
buffer << file.rdbuf();
config_.system_instruction = buffer.str();
std::cerr << "✓ Loaded prompt from: " << path << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Loaded prompt: " << path << std::endl;
}
loaded = true;
break;
}
@@ -99,10 +110,11 @@ GeminiAIService::GeminiAIService(const GeminiConfig& config)
config_.system_instruction = BuildSystemInstruction();
}
}
std::cerr << "🔧 GeminiAIService: system instruction built" << std::endl;
}
std::cerr << "🔧 GeminiAIService constructor: complete" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Gemini service initialized" << std::endl;
}
}
void GeminiAIService::EnableFunctionCalling(bool enable) {
@@ -186,7 +198,9 @@ absl::Status GeminiAIService::CheckAvailability() {
"Gemini AI service requires JSON support. Build with -DYAZE_WITH_JSON=ON");
#else
try {
std::cerr << "🔧 CheckAvailability: start" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: start" << std::endl;
}
if (config_.api_key.empty()) {
return absl::FailedPreconditionError(
@@ -195,23 +209,33 @@ absl::Status GeminiAIService::CheckAvailability() {
" Get your API key at: https://makersuite.google.com/app/apikey");
}
std::cerr << "🔧 CheckAvailability: creating HTTPS client" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: creating HTTPS client" << std::endl;
}
// Test API connectivity with a simple request
httplib::Client cli("https://generativelanguage.googleapis.com");
std::cerr << "🔧 CheckAvailability: client created" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: client created" << std::endl;
}
cli.set_connection_timeout(5, 0); // 5 seconds timeout
std::cerr << "🔧 CheckAvailability: building endpoint" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: building endpoint" << std::endl;
}
std::string test_endpoint = "/v1beta/models/" + config_.model;
httplib::Headers headers = {
{"x-goog-api-key", config_.api_key},
};
std::cerr << "🔧 CheckAvailability: making request to " << test_endpoint << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: making request to " << test_endpoint << std::endl;
}
auto res = cli.Get(test_endpoint.c_str(), headers);
std::cerr << "🔧 CheckAvailability: got response" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: got response" << std::endl;
}
if (!res) {
return absl::UnavailableError(
@@ -238,10 +262,14 @@ absl::Status GeminiAIService::CheckAvailability() {
return absl::OkStatus();
} catch (const std::exception& e) {
std::cerr << "🔧 CheckAvailability: EXCEPTION: " << e.what() << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: EXCEPTION: " << e.what() << std::endl;
}
return absl::InternalError(absl::StrCat("Exception during availability check: ", e.what()));
} catch (...) {
std::cerr << "🔧 CheckAvailability: UNKNOWN EXCEPTION" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] CheckAvailability: UNKNOWN EXCEPTION" << std::endl;
}
return absl::InternalError("Unknown exception during availability check");
}
#endif
@@ -276,7 +304,9 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
}
try {
std::cerr << "🔧 GenerateResponse: using curl for HTTPS request" << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Using curl for HTTPS request" << std::endl;
}
// Build request with proper Gemini API v1beta format
nlohmann::json request_body = {
@@ -328,17 +358,19 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
"-H 'x-goog-api-key: " + config_.api_key + "' "
"-d @" + temp_file + " 2>&1";
std::cerr << "🔧 Executing curl request..." << std::endl;
if (config_.verbose) {
std::cerr << "[DEBUG] Executing API request..." << std::endl;
}
FILE* pipe = popen(curl_cmd.c_str(), "r");
if (!pipe) {
return absl::InternalError("Failed to execute curl command");
}
std::string response_body;
std::string response_str;
char buffer[4096];
while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
response_body += buffer;
response_str += buffer;
}
int status = pclose(pipe);
@@ -348,25 +380,30 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
return absl::InternalError(absl::StrCat("Curl failed with status ", status));
}
if (response_body.empty()) {
if (response_str.empty()) {
return absl::InternalError("Empty response from Gemini API");
}
// Debug: print response
const char* verbose_env = std::getenv("Z3ED_VERBOSE");
if (verbose_env && std::string(verbose_env) == "1") {
if (config_.verbose) {
std::cout << "\n" << "\033[35m" << "🔍 Raw Gemini API Response:" << "\033[0m" << "\n"
<< "\033[2m" << response_body.substr(0, 500) << "\033[0m" << "\n\n";
<< "\033[2m" << response_str.substr(0, 500) << "\033[0m" << "\n\n";
}
std::cerr << "🔧 Got response, parsing..." << std::endl;
return ParseGeminiResponse(response_body);
if (config_.verbose) {
std::cerr << "[DEBUG] Parsing response..." << std::endl;
}
return ParseGeminiResponse(response_str);
} catch (const std::exception& e) {
std::cerr << "🔧 GenerateResponse: EXCEPTION: " << e.what() << std::endl;
if (config_.verbose) {
std::cerr << "[ERROR] Exception: " << e.what() << std::endl;
}
return absl::InternalError(absl::StrCat("Exception during generation: ", e.what()));
} catch (...) {
std::cerr << "🔧 GenerateResponse: UNKNOWN EXCEPTION" << std::endl;
if (config_.verbose) {
std::cerr << "[ERROR] Unknown exception" << std::endl;
}
return absl::InternalError("Unknown exception during generation");
}
#endif
@@ -374,34 +411,34 @@ absl::StatusOr<AgentResponse> GeminiAIService::GenerateResponse(
absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
const std::string& response_body) {
#ifdef YAZE_WITH_JSON
#ifndef YAZE_WITH_JSON
return absl::UnimplementedError("JSON support required");
#else
AgentResponse agent_response;
try {
nlohmann::json response_json = nlohmann::json::parse(response_body);
// Navigate Gemini's response structure
if (!response_json.contains("candidates") ||
response_json["candidates"].empty()) {
return absl::InternalError("❌ No candidates in Gemini response");
auto response_json = nlohmann::json::parse(response_body, nullptr, false);
if (response_json.is_discarded()) {
return absl::InternalError("❌ Failed to parse Gemini response JSON");
}
// Navigate Gemini's response structure
if (!response_json.contains("candidates") ||
response_json["candidates"].empty()) {
return absl::InternalError("❌ No candidates in Gemini response");
}
for (const auto& candidate : response_json["candidates"]) {
if (!candidate.contains("content") ||
!candidate["content"].contains("parts")) {
continue;
}
for (const auto& candidate : response_json["candidates"]) {
if (!candidate.contains("content") ||
!candidate["content"].contains("parts")) {
continue;
}
for (const auto& part : candidate["content"]["parts"]) {
if (!part.contains("text")) {
continue;
}
for (const auto& part : candidate["content"]["parts"]) {
if (part.contains("text")) {
std::string text_content = part["text"].get<std::string>();
// Debug: Print raw LLM output when verbose mode is enabled
const char* verbose_env = std::getenv("Z3ED_VERBOSE");
if (verbose_env && std::string(verbose_env) == "1") {
if (config_.verbose) {
std::cout << "\n" << "\033[35m" << "🔍 Raw LLM Response:" << "\033[0m" << "\n"
<< "\033[2m" << text_content << "\033[0m" << "\n\n";
}
@@ -418,39 +455,22 @@ absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
}
text_content = std::string(absl::StripAsciiWhitespace(text_content));
// Parse as JSON object
try {
nlohmann::json response_json = nlohmann::json::parse(text_content);
if (response_json.contains("text_response") &&
response_json["text_response"].is_string()) {
// Try to parse as JSON object
auto parsed_text = nlohmann::json::parse(text_content, nullptr, false);
if (!parsed_text.is_discarded()) {
if (parsed_text.contains("text_response") &&
parsed_text["text_response"].is_string()) {
agent_response.text_response =
response_json["text_response"].get<std::string>();
parsed_text["text_response"].get<std::string>();
}
if (response_json.contains("reasoning") &&
response_json["reasoning"].is_string()) {
if (parsed_text.contains("reasoning") &&
parsed_text["reasoning"].is_string()) {
agent_response.reasoning =
response_json["reasoning"].get<std::string>();
parsed_text["reasoning"].get<std::string>();
}
if (response_json.contains("tool_calls") &&
response_json["tool_calls"].is_array()) {
for (const auto& call : response_json["tool_calls"]) {
if (call.contains("tool_name") && call["tool_name"].is_string()) {
ToolCall tool_call;
tool_call.tool_name = call["tool_name"].get<std::string>();
if (call.contains("args") && call["args"].is_object()) {
for (auto& [key, value] : call["args"].items()) {
if (value.is_string()) {
tool_call.args[key] = value.get<std::string>();
}
}
}
agent_response.tool_calls.push_back(tool_call);
}
}
}
if (response_json.contains("commands") &&
response_json["commands"].is_array()) {
for (const auto& cmd : response_json["commands"]) {
if (parsed_text.contains("commands") &&
parsed_text["commands"].is_array()) {
for (const auto& cmd : parsed_text["commands"]) {
if (cmd.is_string()) {
std::string command = cmd.get<std::string>();
if (absl::StartsWith(command, "z3ed ")) {
@@ -460,8 +480,8 @@ absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
}
}
}
} catch (const nlohmann::json::exception& inner_e) {
// If parsing the full object fails, fallback to just commands
} else {
// If parsing the full object fails, fallback to extracting commands from text
std::vector<std::string> lines = absl::StrSplit(text_content, '\n');
for (const auto& line : lines) {
std::string trimmed = std::string(absl::StripAsciiWhitespace(line));
@@ -478,11 +498,24 @@ absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
}
}
}
} else if (part.contains("functionCall")) {
const auto& call = part["functionCall"];
if (call.contains("name") && call["name"].is_string()) {
ToolCall tool_call;
tool_call.tool_name = call["name"].get<std::string>();
if (call.contains("args") && call["args"].is_object()) {
for (auto& [key, value] : call["args"].items()) {
if (value.is_string()) {
tool_call.args[key] = value.get<std::string>();
} else if (value.is_number()) {
tool_call.args[key] = std::to_string(value.get<double>());
}
}
}
agent_response.tool_calls.push_back(tool_call);
}
}
}
} catch (const nlohmann::json::exception& e) {
return absl::InternalError(
absl::StrCat("❌ Failed to parse Gemini response: ", e.what()));
}
if (agent_response.text_response.empty() &&
@@ -495,8 +528,6 @@ absl::StatusOr<AgentResponse> GeminiAIService::ParseGeminiResponse(
}
return agent_response;
#else
return absl::UnimplementedError("JSON support required");
#endif
}


@@ -21,6 +21,7 @@ struct GeminiConfig {
bool use_enhanced_prompting = true; // Enable few-shot examples
bool use_function_calling = false; // Use native Gemini function calling
std::string prompt_version = "default"; // Which prompt file to use (default, v2, etc.)
bool verbose = false; // Enable debug logging
GeminiConfig() = default;
explicit GeminiConfig(const std::string& key) : api_key(key) {}


@@ -71,29 +71,28 @@ std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
// Gemini provider
#ifdef YAZE_WITH_JSON
if (config.provider == "gemini") {
std::cerr << "🔧 Creating Gemini service..." << std::endl;
if (config.gemini_api_key.empty()) {
std::cerr << "⚠️ Gemini API key not provided" << std::endl;
std::cerr << " Use --gemini_api_key=<key> or set GEMINI_API_KEY environment variable" << std::endl;
std::cerr << " Use --gemini_api_key=<key> or GEMINI_API_KEY environment variable" << std::endl;
std::cerr << " Falling back to MockAIService" << std::endl;
return std::make_unique<MockAIService>();
}
std::cerr << "🔧 Building Gemini config..." << std::endl;
GeminiConfig gemini_config(config.gemini_api_key);
if (!config.model.empty()) {
gemini_config.model = config.model;
}
gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
gemini_config.use_function_calling = absl::GetFlag(FLAGS_use_function_calling);
std::cerr << "🔧 Model: " << gemini_config.model << std::endl;
std::cerr << "🔧 Prompt version: " << gemini_config.prompt_version << std::endl;
gemini_config.verbose = config.verbose;
std::cerr << "🤖 AI Provider: gemini" << std::endl;
std::cerr << " Model: " << gemini_config.model << std::endl;
if (config.verbose) {
std::cerr << " Prompt: " << gemini_config.prompt_version << std::endl;
}
std::cerr << "🔧 Creating Gemini service instance..." << std::endl;
auto service = std::make_unique<GeminiAIService>(gemini_config);
std::cerr << "🔧 Skipping availability check (causes segfault with SSL)" << std::endl;
// Health check - DISABLED due to SSL issues
// if (auto status = service->CheckAvailability(); !status.ok()) {
// std::cerr << "⚠️ Gemini unavailable: " << status.message() << std::endl;
@@ -102,7 +101,9 @@ std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
// }
std::cout << " Using model: " << gemini_config.model << std::endl;
std::cerr << "🔧 Gemini service ready" << std::endl;
if (config.verbose) {
std::cerr << "[DEBUG] Gemini service ready" << std::endl;
}
return service;
}
#else


@@ -14,6 +14,7 @@ struct AIServiceConfig {
std::string model; // Provider-specific model name
std::string gemini_api_key; // For Gemini
std::string ollama_host = "http://localhost:11434"; // For Ollama
bool verbose = false; // Enable debug logging
};
// Create AI service using command-line flags