From 2fb96cbbfd91464d135433f6a36eea1b3a05c4b5 Mon Sep 17 00:00:00 2001
From: scawful
Date: Sat, 4 Oct 2025 02:22:02 -0400
Subject: [PATCH] feat: Add AI service configuration flags and update service
 factory for improved flexibility

---
 src/cli/flags.cc                              | 11 +++
 .../agent/conversational_agent_service.cc     |  1 +
 src/cli/service/ai/service_factory.cc         | 93 ++++++++++++-------
 src/cli/service/ai/service_factory.h          | 20 +++-
 4 files changed, 87 insertions(+), 38 deletions(-)

diff --git a/src/cli/flags.cc b/src/cli/flags.cc
index 926500dd..e16c9b9e 100644
--- a/src/cli/flags.cc
+++ b/src/cli/flags.cc
@@ -3,3 +3,14 @@
 #include "absl/flags/flag.h"
 
 ABSL_FLAG(std::string, rom, "", "Path to the ROM file");
+
+// AI Service Configuration Flags
+ABSL_FLAG(std::string, ai_provider, "mock",
+          "AI provider to use: 'mock' (default), 'ollama', or 'gemini'");
+ABSL_FLAG(std::string, ai_model, "",
+          "AI model to use (provider-specific, e.g., 'llama3' for Ollama, "
+          "'gemini-1.5-flash' for Gemini)");
+ABSL_FLAG(std::string, gemini_api_key, "",
+          "Gemini API key (can also use GEMINI_API_KEY environment variable)");
+ABSL_FLAG(std::string, ollama_host, "http://localhost:11434",
+          "Ollama server host URL");
diff --git a/src/cli/service/agent/conversational_agent_service.cc b/src/cli/service/agent/conversational_agent_service.cc
index cd230789..a36a40ec 100644
--- a/src/cli/service/agent/conversational_agent_service.cc
+++ b/src/cli/service/agent/conversational_agent_service.cc
@@ -2,6 +2,7 @@
 
 #include
 #include
+#include
 #include
 #include
 #include
diff --git a/src/cli/service/ai/service_factory.cc b/src/cli/service/ai/service_factory.cc
index cd76a527..17d7a103 100644
--- a/src/cli/service/ai/service_factory.cc
+++ b/src/cli/service/ai/service_factory.cc
@@ -3,6 +3,9 @@
 #include
 #include
 
+#include "absl/flags/declare.h"
+#include "absl/flags/flag.h"
+#include "absl/strings/ascii.h"
 #include "cli/service/ai/ai_service.h"
 #include "cli/service/ai/ollama_ai_service.h"
 
@@ -10,31 +13,47 @@
 #include "cli/service/ai/gemini_ai_service.h"
 #endif
 
+ABSL_DECLARE_FLAG(std::string, ai_provider);
+ABSL_DECLARE_FLAG(std::string, ai_model);
+ABSL_DECLARE_FLAG(std::string, gemini_api_key);
+ABSL_DECLARE_FLAG(std::string, ollama_host);
+
 namespace yaze {
 namespace cli {
 
 std::unique_ptr<AIService> CreateAIService() {
-  // Priority: Ollama (local) > Gemini (remote) > Mock (testing)
-  const char* provider_env = std::getenv("YAZE_AI_PROVIDER");
-  const char* ollama_model = std::getenv("OLLAMA_MODEL");
-  const std::string provider = provider_env ? provider_env : "";
-  const bool gemini_requested = provider == "gemini";
+  // Read configuration from flags
+  AIServiceConfig config;
+  config.provider = absl::AsciiStrToLower(absl::GetFlag(FLAGS_ai_provider));
+  config.model = absl::GetFlag(FLAGS_ai_model);
+  config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
+  config.ollama_host = absl::GetFlag(FLAGS_ollama_host);
+
+  // Fall back to environment variables if flags not set
+  if (config.gemini_api_key.empty()) {
+    const char* env_key = std::getenv("GEMINI_API_KEY");
+    if (env_key) config.gemini_api_key = env_key;
+  }
+  if (config.model.empty()) {
+    const char* env_model = std::getenv("OLLAMA_MODEL");
+    if (env_model) config.model = env_model;
+  }
+
+  return CreateAIService(config);
+}
 
-#ifdef YAZE_WITH_JSON
-  const char* gemini_key = std::getenv("GEMINI_API_KEY");
-  const char* gemini_model = std::getenv("GEMINI_MODEL");
-#endif
-
-  // Explicit provider selection
-  if (provider == "ollama") {
-    OllamaConfig config;
-
-    // Allow model override via env
-    if (ollama_model && std::strlen(ollama_model) > 0) {
-      config.model = ollama_model;
+std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
+  std::cout << "🤖 AI Provider: " << config.provider << "\n";
+
+  // Ollama provider
+  if (config.provider == "ollama") {
+    OllamaConfig ollama_config;
+    ollama_config.base_url = config.ollama_host;
+    if (!config.model.empty()) {
+      ollama_config.model = config.model;
     }
 
-    auto service = std::make_unique<OllamaAIService>(config);
+    auto service = std::make_unique<OllamaAIService>(ollama_config);
 
     // Health check
     if (auto status = service->CheckAvailability(); !status.ok()) {
@@ -43,21 +62,26 @@ std::unique_ptr<AIService> CreateAIService() {
       return std::make_unique<MockAIService>();
     }
 
-    std::cout << "🤖 Using Ollama AI with model: " << config.model << std::endl;
+    std::cout << "   Using model: " << ollama_config.model << std::endl;
     return service;
   }
 
-  // Gemini if API key provided
+  // Gemini provider
 #ifdef YAZE_WITH_JSON
-  if (gemini_key && std::strlen(gemini_key) > 0) {
-    GeminiConfig config(gemini_key);
-
-    // Allow model override via env
-    if (gemini_model && std::strlen(gemini_model) > 0) {
-      config.model = gemini_model;
+  if (config.provider == "gemini") {
+    if (config.gemini_api_key.empty()) {
+      std::cerr << "⚠️ Gemini API key not provided" << std::endl;
+      std::cerr << "   Use --gemini_api_key=<key> or set GEMINI_API_KEY environment variable" << std::endl;
+      std::cerr << "   Falling back to MockAIService" << std::endl;
+      return std::make_unique<MockAIService>();
+    }
+
+    GeminiConfig gemini_config(config.gemini_api_key);
+    if (!config.model.empty()) {
+      gemini_config.model = config.model;
     }
 
-    auto service = std::make_unique<GeminiAIService>(config);
+    auto service = std::make_unique<GeminiAIService>(gemini_config);
 
     // Health check
     if (auto status = service->CheckAvailability(); !status.ok()) {
@@ -66,20 +90,21 @@ std::unique_ptr<AIService> CreateAIService() {
       return std::make_unique<MockAIService>();
     }
 
-    std::cout << "🤖 Using Gemini AI with model: " << config.model << std::endl;
+    std::cout << "   Using model: " << gemini_config.model << std::endl;
     return service;
   }
 #else
-  if (gemini_requested || std::getenv("GEMINI_API_KEY")) {
+  if (config.provider == "gemini") {
     std::cerr << "⚠️ Gemini support not available: rebuild with YAZE_WITH_JSON=ON"
               << std::endl;
+    std::cerr << "   Falling back to MockAIService" << std::endl;
   }
 #endif
 
-  // Default: Mock service for testing
-  std::cout << "🤖 Using MockAIService (no LLM configured)" << std::endl;
-  std::cout
-      << "   Tip: Set YAZE_AI_PROVIDER=ollama or GEMINI_API_KEY to enable LLM"
-      << std::endl;
+  // Default: Mock service
+  if (config.provider != "mock") {
+    std::cout << "   No LLM configured, using MockAIService" << std::endl;
+  }
+  std::cout << "   Tip: Use --ai_provider=ollama or --ai_provider=gemini" << std::endl;
   return std::make_unique<MockAIService>();
 }
diff --git a/src/cli/service/ai/service_factory.h b/src/cli/service/ai/service_factory.h
index a6f4374c..cd94dc07 100644
--- a/src/cli/service/ai/service_factory.h
+++ b/src/cli/service/ai/service_factory.h
@@ -1,16 +1,28 @@
-#ifndef YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
-#define YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#ifndef YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#define YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
 
 #include <memory>
+#include <string>
+
 #include "cli/service/ai/ai_service.h"
 
 namespace yaze {
 namespace cli {
 
-// Helper: Select AI service based on environment variables
+struct AIServiceConfig {
+  std::string provider = "mock";  // "mock", "ollama", or "gemini"
+  std::string model;              // Provider-specific model name
+  std::string gemini_api_key;     // For Gemini
+  std::string ollama_host = "http://localhost:11434";  // For Ollama
+};
+
+// Create AI service using command-line flags
 std::unique_ptr<AIService> CreateAIService();
 
+// Create AI service with explicit configuration
+std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config);
+
 }  // namespace cli
 }  // namespace yaze
 
-#endif  // YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#endif  // YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
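
Usage sketch (reviewer note, not part of the patch): a minimal caller exercising both factory entry points added above. It assumes absl flag parsing in the caller's main(), that AIService and the factory live in the yaze::cli namespace as the include paths suggest, and placeholder values for the model and API key; none of this wiring is taken from the repository.

    #include <memory>

    #include "absl/flags/parse.h"
    #include "cli/service/ai/service_factory.h"

    int main(int argc, char** argv) {
      // Populates --ai_provider, --ai_model, --gemini_api_key, --ollama_host.
      absl::ParseCommandLine(argc, argv);

      // Flag-driven path, e.g. run with: --ai_provider=ollama --ai_model=llama3
      // (GEMINI_API_KEY / OLLAMA_MODEL are honored as environment fallbacks).
      std::unique_ptr<yaze::cli::AIService> from_flags =
          yaze::cli::CreateAIService();

      // Explicit-config path: bypasses flags entirely, convenient for tests.
      yaze::cli::AIServiceConfig config;
      config.provider = "gemini";
      config.model = "gemini-1.5-flash";
      config.gemini_api_key = "example-key";  // placeholder value
      std::unique_ptr<yaze::cli::AIService> from_config =
          yaze::cli::CreateAIService(config);
      return 0;
    }

Either path falls back to MockAIService when the requested provider is unconfigured or fails its health check, so the provider actually used depends on the runtime environment.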