feat: Add AI service configuration flags and update service factory for improved flexibility

Author:  scawful
Date:    2025-10-04 02:22:02 -04:00
Parent:  8ec723adb6
Commit:  2fb96cbbfd

4 changed files: 87 additions, 38 deletions
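With these changes, the provider is presumably selected at invocation time with flags such as `--ai_provider=ollama --ai_model=llama3 --ollama_host=http://localhost:11434`, or `--ai_provider=gemini --gemini_api_key=<key>`; the CLI binary name is not shown in this diff. The GEMINI_API_KEY and OLLAMA_MODEL environment variables remain as fallbacks when the corresponding flags are left empty.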

File 1 of 4: flag definitions (filename not captured)

@@ -3,3 +3,14 @@
#include "absl/flags/flag.h" #include "absl/flags/flag.h"
ABSL_FLAG(std::string, rom, "", "Path to the ROM file"); ABSL_FLAG(std::string, rom, "", "Path to the ROM file");
// AI Service Configuration Flags
ABSL_FLAG(std::string, ai_provider, "mock",
"AI provider to use: 'mock' (default), 'ollama', or 'gemini'");
ABSL_FLAG(std::string, ai_model, "",
"AI model to use (provider-specific, e.g., 'llama3' for Ollama, "
"'gemini-1.5-flash' for Gemini)");
ABSL_FLAG(std::string, gemini_api_key, "",
"Gemini API key (can also use GEMINI_API_KEY environment variable)");
ABSL_FLAG(std::string, ollama_host, "http://localhost:11434",
"Ollama server host URL");

File 2 of 4: include additions (filename not captured)

@@ -2,6 +2,7 @@
 #include <algorithm>
 #include <cctype>
+#include <iostream>
 #include <set>
 #include <string>
 #include <vector>

File 3 of 4: AI service factory implementation (filename not captured)

@@ -3,6 +3,9 @@
 #include <cstring>
 #include <iostream>
 
+#include "absl/flags/declare.h"
+#include "absl/flags/flag.h"
+#include "absl/strings/ascii.h"
 #include "cli/service/ai/ai_service.h"
 #include "cli/service/ai/ollama_ai_service.h"
@@ -10,31 +13,47 @@
#include "cli/service/ai/gemini_ai_service.h" #include "cli/service/ai/gemini_ai_service.h"
#endif #endif
ABSL_DECLARE_FLAG(std::string, ai_provider);
ABSL_DECLARE_FLAG(std::string, ai_model);
ABSL_DECLARE_FLAG(std::string, gemini_api_key);
ABSL_DECLARE_FLAG(std::string, ollama_host);
namespace yaze { namespace yaze {
namespace cli { namespace cli {
std::unique_ptr<AIService> CreateAIService() { std::unique_ptr<AIService> CreateAIService() {
// Priority: Ollama (local) > Gemini (remote) > Mock (testing) // Read configuration from flags
const char* provider_env = std::getenv("YAZE_AI_PROVIDER"); AIServiceConfig config;
const char* ollama_model = std::getenv("OLLAMA_MODEL"); config.provider = absl::AsciiStrToLower(absl::GetFlag(FLAGS_ai_provider));
const std::string provider = provider_env ? provider_env : ""; config.model = absl::GetFlag(FLAGS_ai_model);
const bool gemini_requested = provider == "gemini"; config.gemini_api_key = absl::GetFlag(FLAGS_gemini_api_key);
config.ollama_host = absl::GetFlag(FLAGS_ollama_host);
// Fall back to environment variables if flags not set
if (config.gemini_api_key.empty()) {
const char* env_key = std::getenv("GEMINI_API_KEY");
if (env_key) config.gemini_api_key = env_key;
}
if (config.model.empty()) {
const char* env_model = std::getenv("OLLAMA_MODEL");
if (env_model) config.model = env_model;
}
return CreateAIService(config);
}
#ifdef YAZE_WITH_JSON std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
const char* gemini_key = std::getenv("GEMINI_API_KEY"); std::cout << "🤖 AI Provider: " << config.provider << "\n";
const char* gemini_model = std::getenv("GEMINI_MODEL");
#endif // Ollama provider
if (config.provider == "ollama") {
// Explicit provider selection OllamaConfig ollama_config;
if (provider == "ollama") { ollama_config.base_url = config.ollama_host;
OllamaConfig config; if (!config.model.empty()) {
ollama_config.model = config.model;
// Allow model override via env
if (ollama_model && std::strlen(ollama_model) > 0) {
config.model = ollama_model;
} }
auto service = std::make_unique<OllamaAIService>(config); auto service = std::make_unique<OllamaAIService>(ollama_config);
// Health check // Health check
if (auto status = service->CheckAvailability(); !status.ok()) { if (auto status = service->CheckAvailability(); !status.ok()) {
@@ -43,21 +62,26 @@ std::unique_ptr<AIService> CreateAIService() {
       return std::make_unique<MockAIService>();
     }
 
-    std::cout << "🤖 Using Ollama AI with model: " << config.model << std::endl;
+    std::cout << "   Using model: " << ollama_config.model << std::endl;
     return service;
   }
 
-  // Gemini if API key provided
+  // Gemini provider
 #ifdef YAZE_WITH_JSON
-  if (gemini_key && std::strlen(gemini_key) > 0) {
-    GeminiConfig config(gemini_key);
-
-    // Allow model override via env
-    if (gemini_model && std::strlen(gemini_model) > 0) {
-      config.model = gemini_model;
-    }
-
-    auto service = std::make_unique<GeminiAIService>(config);
+  if (config.provider == "gemini") {
+    if (config.gemini_api_key.empty()) {
+      std::cerr << "⚠️ Gemini API key not provided" << std::endl;
+      std::cerr << "   Use --gemini_api_key=<key> or set GEMINI_API_KEY environment variable" << std::endl;
+      std::cerr << "   Falling back to MockAIService" << std::endl;
+      return std::make_unique<MockAIService>();
+    }
+
+    GeminiConfig gemini_config(config.gemini_api_key);
+    if (!config.model.empty()) {
+      gemini_config.model = config.model;
+    }
+
+    auto service = std::make_unique<GeminiAIService>(gemini_config);
 
     // Health check
     if (auto status = service->CheckAvailability(); !status.ok()) {
@@ -66,20 +90,21 @@ std::unique_ptr<AIService> CreateAIService() {
       return std::make_unique<MockAIService>();
     }
 
-    std::cout << "🤖 Using Gemini AI with model: " << config.model << std::endl;
+    std::cout << "   Using model: " << gemini_config.model << std::endl;
     return service;
   }
 #else
-  if (gemini_requested || std::getenv("GEMINI_API_KEY")) {
+  if (config.provider == "gemini") {
     std::cerr << "⚠️ Gemini support not available: rebuild with YAZE_WITH_JSON=ON" << std::endl;
+    std::cerr << "   Falling back to MockAIService" << std::endl;
   }
 #endif
 
-  // Default: Mock service for testing
-  std::cout << "🤖 Using MockAIService (no LLM configured)" << std::endl;
-  std::cout
-      << "   Tip: Set YAZE_AI_PROVIDER=ollama or GEMINI_API_KEY to enable LLM"
-      << std::endl;
+  // Default: Mock service
+  if (config.provider != "mock") {
+    std::cout << "   No LLM configured, using MockAIService" << std::endl;
+  }
+  std::cout << "   Tip: Use --ai_provider=ollama or --ai_provider=gemini" << std::endl;
   return std::make_unique<MockAIService>();
 }

File 4 of 4: AI service factory header (filename not captured)

@@ -1,16 +1,28 @@
-#ifndef YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
-#define YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#ifndef YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#define YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
 
 #include <memory>
+#include <string>
 
 #include "cli/service/ai/ai_service.h"
 
 namespace yaze {
 namespace cli {
 
-// Helper: Select AI service based on environment variables
+struct AIServiceConfig {
+  std::string provider = "mock";  // "mock", "ollama", or "gemini"
+  std::string model;              // Provider-specific model name
+  std::string gemini_api_key;     // For Gemini
+  std::string ollama_host = "http://localhost:11434";  // For Ollama
+};
+
+// Create AI service using command-line flags
 std::unique_ptr<AIService> CreateAIService();
+
+// Create AI service with explicit configuration
+std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config);
 
 }  // namespace cli
 }  // namespace yaze
 
-#endif  // YAZE_SRC_CLI_SERVICE_AI_SERVICE_FACTORY_H_
+#endif  // YAZE_CLI_SERVICE_AI_SERVICE_FACTORY_H_
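As a usage note, the new overload can also be driven without command-line flags. A minimal sketch follows; it assumes the header above lives at cli/service/ai/service_factory.h (the path is inferred from the include guard and surrounding includes, not confirmed by this diff), and the main() is illustrative rather than part of the commit.

#include <memory>

#include "cli/service/ai/service_factory.h"  // Assumed path for the header shown above.

int main() {
  // Build an explicit configuration instead of reading command-line flags.
  yaze::cli::AIServiceConfig config;
  config.provider = "ollama";                     // "mock", "ollama", or "gemini"
  config.model = "llama3";                        // Provider-specific model name
  config.ollama_host = "http://localhost:11434";  // Matches the struct's default

  // The factory logs its choice and falls back to MockAIService if the
  // requested provider is unavailable or misconfigured.
  std::unique_ptr<yaze::cli::AIService> service =
      yaze::cli::CreateAIService(config);
  return service ? 0 : 1;
}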