feat: Add support for prompt versioning and function calling in Gemini AI service

Author: scawful
Date:   2025-10-04 03:42:22 -04:00
Parent: fe7b9053c7
Commit: 2a6f7d5c15
6 changed files with 294 additions and 8 deletions


@@ -14,3 +14,7 @@ ABSL_FLAG(std::string, gemini_api_key, "",
           "Gemini API key (can also use GEMINI_API_KEY environment variable)");
 ABSL_FLAG(std::string, ollama_host, "http://localhost:11434",
           "Ollama server host URL");
+ABSL_FLAG(std::string, prompt_version, "default",
+          "Prompt version to use: 'default' or 'v2'");
+ABSL_FLAG(bool, use_function_calling, false,
+          "Enable native Gemini function calling (incompatible with JSON output mode)");


@@ -43,16 +43,21 @@ namespace yaze {
 namespace cli {
 
 GeminiAIService::GeminiAIService(const GeminiConfig& config)
-    : config_(config), function_calling_enabled_(false) {  // Disable function calling - use JSON output instead
+    : config_(config), function_calling_enabled_(config.use_function_calling) {
   std::cerr << "🔧 GeminiAIService constructor: start" << std::endl;
+  std::cerr << "🔧 Function calling: " << (function_calling_enabled_ ? "enabled" : "disabled (JSON output mode)") << std::endl;
+  std::cerr << "🔧 Prompt version: " << config_.prompt_version << std::endl;
 
 #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
   // Initialize OpenSSL for HTTPS support
   InitializeOpenSSL();
 #endif
 
-  // Load command documentation into prompt builder
-  if (auto status = prompt_builder_.LoadResourceCatalogue(""); !status.ok()) {
+  // Load command documentation into prompt builder with specified version
+  std::string catalogue_path = config_.prompt_version == "v2"
+      ? "assets/agent/prompt_catalogue_v2.yaml"
+      : "assets/agent/prompt_catalogue.yaml";
+  if (auto status = prompt_builder_.LoadResourceCatalogue(catalogue_path); !status.ok()) {
     std::cerr << "⚠️ Failed to load agent prompt catalogue: "
               << status.message() << std::endl;
   }
@@ -61,11 +66,38 @@ GeminiAIService::GeminiAIService(const GeminiConfig& config)
   if (config_.system_instruction.empty()) {
     std::cerr << "🔧 GeminiAIService: building system instruction" << std::endl;
-    // Use enhanced prompting by default
-    if (config_.use_enhanced_prompting) {
-      config_.system_instruction = prompt_builder_.BuildSystemInstructionWithExamples();
-    } else {
-      config_.system_instruction = BuildSystemInstruction();
+    // Try to load version-specific system prompt file
+    std::string prompt_file = config_.prompt_version == "v2"
+        ? "assets/agent/system_prompt_v2.txt"
+        : "assets/agent/system_prompt.txt";
+    std::vector<std::string> search_paths = {
+        prompt_file,
+        "../" + prompt_file,
+        "../../" + prompt_file
+    };
+    bool loaded = false;
+    for (const auto& path : search_paths) {
+      std::ifstream file(path);
+      if (file.good()) {
+        std::stringstream buffer;
+        buffer << file.rdbuf();
+        config_.system_instruction = buffer.str();
+        std::cerr << "✓ Loaded prompt from: " << path << std::endl;
+        loaded = true;
+        break;
+      }
+    }
+    if (!loaded) {
+      // Fallback to builder
+      if (config_.use_enhanced_prompting) {
+        config_.system_instruction = prompt_builder_.BuildSystemInstructionWithExamples();
+      } else {
+        config_.system_instruction = BuildSystemInstruction();
+      }
     }
     std::cerr << "🔧 GeminiAIService: system instruction built" << std::endl;
   }
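
The three search prefixes ("", "../", "../../") let the same relative asset
path resolve whether the binary runs from the repository root or from a nested
build directory. A minimal sketch of how the search loop could be factored
into a reusable helper (a hypothetical refactor, not part of this commit):

    #include <fstream>
    #include <optional>
    #include <sstream>
    #include <string>
    #include <vector>

    // Return the contents of the first readable file among the candidate
    // paths, or std::nullopt if none can be opened.
    std::optional<std::string> LoadFirstExisting(
        const std::vector<std::string>& paths) {
      for (const auto& path : paths) {
        std::ifstream file(path);
        if (file.good()) {
          std::stringstream buffer;
          buffer << file.rdbuf();  // slurp the whole file
          return buffer.str();
        }
      }
      return std::nullopt;
    }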


@@ -19,6 +19,8 @@ struct GeminiConfig {
   int max_output_tokens = 2048;
   mutable std::string system_instruction;  // Mutable to allow lazy initialization
   bool use_enhanced_prompting = true;  // Enable few-shot examples
+  bool use_function_calling = false;  // Use native Gemini function calling
+  std::string prompt_version = "default";  // Which prompt file to use (default, v2, etc.)
 
   GeminiConfig() = default;
   explicit GeminiConfig(const std::string& key) : api_key(key) {}
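
A minimal sketch of configuring the service with the new fields, assuming the
GeminiConfig and GeminiAIService headers from this commit are included (the
API key is a placeholder):

    GeminiConfig config("YOUR_API_KEY");    // placeholder key
    config.prompt_version = "v2";           // selects the *_v2 prompt assets
    config.use_function_calling = true;     // disables JSON output mode
    GeminiAIService service(config);        // constructor loads prompt files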


@@ -17,6 +17,8 @@ ABSL_DECLARE_FLAG(std::string, ai_provider);
 ABSL_DECLARE_FLAG(std::string, ai_model);
 ABSL_DECLARE_FLAG(std::string, gemini_api_key);
 ABSL_DECLARE_FLAG(std::string, ollama_host);
+ABSL_DECLARE_FLAG(std::string, prompt_version);
+ABSL_DECLARE_FLAG(bool, use_function_calling);
 
 namespace yaze {
 namespace cli {

@@ -83,7 +85,10 @@ std::unique_ptr<AIService> CreateAIService(const AIServiceConfig& config) {
     if (!config.model.empty()) {
       gemini_config.model = config.model;
     }
+    gemini_config.prompt_version = absl::GetFlag(FLAGS_prompt_version);
+    gemini_config.use_function_calling = absl::GetFlag(FLAGS_use_function_calling);
 
     std::cerr << "🔧 Model: " << gemini_config.model << std::endl;
+    std::cerr << "🔧 Prompt version: " << gemini_config.prompt_version << std::endl;
     std::cerr << "🔧 Creating Gemini service instance..." << std::endl;
     auto service = std::make_unique<GeminiAIService>(gemini_config);
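
End to end, the factory reads the flags itself, so callers only populate
AIServiceConfig. A minimal sketch (only the model field appears in this diff;
the model name below is a placeholder):

    AIServiceConfig ai_config;
    ai_config.model = "gemini-2.0-flash";  // placeholder model name
    auto service = CreateAIService(ai_config);
    // service now honors --prompt_version and --use_function_calling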