feat: implement conditional AI runtime features based on build configuration

- Added conditional compilation for AI services and callbacks, keyed off the YAZE_AI_RUNTIME_AVAILABLE flag.
- Implemented stubs for AI service methods that return absl::FailedPreconditionError when the AI runtime is disabled, so missing features fail loudly instead of silently.
- Updated GeminiAIService and OllamaAIService to report clear "runtime is disabled" errors when AI features are not available.
- Introduced a service factory stub that hands back mock AI services when the runtime is disabled, so tests can run without a live backend (the guard pattern is sketched below).
Author: scawful
Date: 2025-11-16 21:54:20 -05:00
Parent: 61c99ecfcd
Commit: 24078301be
6 changed files with 137 additions and 1 deletion
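In miniature, the guard pattern every touched file follows (a sketch only; the concrete classes appear in the diffs below):

#ifdef YAZE_AI_RUNTIME_AVAILABLE
// Real implementation: talks to the model backend.
absl::StatusOr<AgentResponse> GenerateResponse(const std::string& prompt);
#else
// Stub with the same signature, so call sites compile unchanged.
absl::StatusOr<AgentResponse> GenerateResponse(const std::string&) {
  return absl::FailedPreconditionError("AI runtime is disabled in this build");
}
#endif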

View File

@@ -383,6 +383,7 @@ void EditorManager::Initialize(gfx::IRenderer* renderer,
*output_path = result->file_path;
return absl::OkStatus();
};
#ifdef YAZE_AI_RUNTIME_AVAILABLE
multimodal_callbacks.send_to_gemini =
[this](const std::filesystem::path& image_path,
const std::string& prompt) -> absl::Status {
@@ -417,6 +418,14 @@ void EditorManager::Initialize(gfx::IRenderer* renderer,
return absl::OkStatus();
};
#else
multimodal_callbacks.send_to_gemini =
[](const std::filesystem::path&, const std::string&) -> absl::Status {
return absl::FailedPreconditionError(
"Gemini AI runtime is disabled in this build");
};
#endif
agent_editor_.GetChatWidget()->SetMultimodalCallbacks(multimodal_callbacks);
// Set up Z3ED command callbacks for proposal management
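Call sites don't need their own #ifdef; they can branch on the returned status. A minimal handling sketch (assuming image_path and prompt are in scope; the reporting side is an assumption, not part of this diff):

absl::Status status = multimodal_callbacks.send_to_gemini(image_path, prompt);
if (absl::IsFailedPrecondition(status)) {
  // The AI runtime was compiled out; surface the message instead of retrying.
  std::cerr << status.message() << '\n';
}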

View File

@@ -1,5 +1,21 @@
#include "cli/service/agent/proposal_executor.h"
#ifndef YAZE_AI_RUNTIME_AVAILABLE
#include "absl/status/status.h"
namespace yaze::cli::agent {
absl::StatusOr<ProposalCreationResult> CreateProposalFromAgentResponse(
const ProposalCreationRequest&) {
return absl::FailedPreconditionError(
"AI runtime features are disabled in this build");
}
} // namespace yaze::cli::agent
#else // YAZE_AI_RUNTIME_AVAILABLE
#include <filesystem>
#include <sstream>
#include <utility>
@@ -180,3 +196,5 @@ absl::StatusOr<ProposalCreationResult> CreateProposalFromAgentResponse(
} // namespace agent
} // namespace cli
} // namespace yaze
#endif // YAZE_AI_RUNTIME_AVAILABLE
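Callers of the executor see the same absl::StatusOr surface in both configurations; a usage sketch (the request setup is elided):

auto result = yaze::cli::agent::CreateProposalFromAgentResponse(request);
if (!result.ok()) {
  // Always FailedPrecondition in runtime-disabled builds.
  std::cerr << result.status().message() << '\n';
}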

View File

@@ -7,7 +7,10 @@
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "cli/service/ai/ai_service.h"
#ifdef YAZE_AI_RUNTIME_AVAILABLE
#include "cli/service/ai/prompt_builder.h"
#endif
namespace yaze {
namespace cli {
@@ -27,6 +30,8 @@ struct GeminiConfig {
explicit GeminiConfig(const std::string& key) : api_key(key) {}
};
#ifdef YAZE_AI_RUNTIME_AVAILABLE
class GeminiAIService : public AIService {
public:
explicit GeminiAIService(const GeminiConfig& config);
@@ -65,6 +70,37 @@ class GeminiAIService : public AIService {
PromptBuilder prompt_builder_;
};
#else // !YAZE_AI_RUNTIME_AVAILABLE
class GeminiAIService : public AIService {
public:
explicit GeminiAIService(const GeminiConfig&) {}
void SetRomContext(Rom*) override {}
absl::StatusOr<AgentResponse> GenerateResponse(
const std::string& prompt) override {
return absl::FailedPreconditionError(
"Gemini AI runtime is disabled (prompt: " + prompt + ")");
}
absl::StatusOr<AgentResponse> GenerateResponse(
const std::vector<agent::ChatMessage>&) override {
return absl::FailedPreconditionError(
"Gemini AI runtime is disabled");
}
absl::Status CheckAvailability() {
return absl::FailedPreconditionError(
"Gemini AI runtime is disabled");
}
void EnableFunctionCalling(bool) {}
std::vector<std::string> GetAvailableTools() const { return {}; }
absl::StatusOr<AgentResponse> GenerateMultimodalResponse(
const std::string&, const std::string&) {
return absl::FailedPreconditionError(
"Gemini AI runtime is disabled");
}
};
#endif // YAZE_AI_RUNTIME_AVAILABLE
} // namespace cli
} // namespace yaze
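Because the stub mirrors the enabled class's public surface, code like the following compiles in both configurations (a sketch; the error text comes from the stub above):

GeminiConfig config("unused-key");
GeminiAIService gemini(config);
if (absl::Status status = gemini.CheckAvailability(); !status.ok()) {
  // Disabled builds always land here with kFailedPrecondition.
}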

View File

@@ -9,7 +9,10 @@
#include "absl/status/statusor.h"
#include "absl/time/time.h"
#include "cli/service/ai/ai_service.h"
#ifdef YAZE_AI_RUNTIME_AVAILABLE
#include "cli/service/ai/prompt_builder.h"
#endif
namespace yaze {
namespace cli {
@@ -17,7 +20,7 @@ namespace cli {
// Ollama configuration for local LLM inference
struct OllamaConfig {
std::string base_url = "http://localhost:11434"; // Default Ollama endpoint
std::string model = "qwen2.5-coder:7b"; // Recommended for code generation
std::string model = "qwen2.5-coder:0.5b"; // Lightweight default with tool-calling
float temperature = 0.1; // Low temp for deterministic commands
int max_tokens = 2048; // Sufficient for command lists
std::string system_prompt; // Injected from resource catalogue
@@ -30,6 +33,8 @@ struct OllamaConfig {
std::vector<std::string> favorite_models;
};
#ifdef YAZE_AI_RUNTIME_AVAILABLE
class OllamaAIService : public AIService {
public:
explicit OllamaAIService(const OllamaConfig& config);
@@ -69,6 +74,44 @@ class OllamaAIService : public AIService {
absl::StatusOr<std::string> ParseOllamaResponse(const std::string& json_response);
};
#else // !YAZE_AI_RUNTIME_AVAILABLE
class OllamaAIService : public AIService {
public:
struct ModelInfo {
std::string name;
std::string digest;
std::string family;
std::string parameter_size;
std::string quantization_level;
uint64_t size_bytes = 0;
absl::Time modified_at = absl::InfinitePast();
};
explicit OllamaAIService(const OllamaConfig&) {}
void SetRomContext(Rom*) override {}
absl::StatusOr<AgentResponse> GenerateResponse(
const std::string&) override {
return absl::FailedPreconditionError(
"Ollama AI runtime is disabled");
}
absl::StatusOr<AgentResponse> GenerateResponse(
const std::vector<agent::ChatMessage>&) override {
return absl::FailedPreconditionError(
"Ollama AI runtime is disabled");
}
absl::Status CheckAvailability() {
return absl::FailedPreconditionError(
"Ollama AI runtime is disabled");
}
absl::StatusOr<std::vector<ModelInfo>> ListAvailableModels() {
return absl::FailedPreconditionError(
"Ollama AI runtime is disabled");
}
};
#endif // YAZE_AI_RUNTIME_AVAILABLE
} // namespace cli
} // namespace yaze
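The stub keeps a local ModelInfo so code that iterates model listings still compiles; a sketch:

OllamaAIService ollama(OllamaConfig{});
if (auto models = ollama.ListAvailableModels(); models.ok()) {
  for (const auto& model : *models) {
    std::cout << model.name << " (" << model.parameter_size << ")\n";
  }
}  // Disabled builds report FailedPrecondition from ListAvailableModels().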

View File

@@ -1,5 +1,6 @@
#include "cli/service/ai/service_factory.h"
#include <cstdlib>
#include <cstring>
#include <iostream>
@@ -112,6 +113,8 @@ absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
ollama_config.base_url = config.ollama_host;
if (!config.model.empty()) {
ollama_config.model = config.model;
} else if (const char* env_model = std::getenv("OLLAMA_MODEL")) {
ollama_config.model = env_model;
}
auto service = std::make_unique<OllamaAIService>(ollama_config);
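A precedence sketch for model selection after this change, applying to AI-enabled builds (AIServiceConfig's default construction and POSIX setenv are assumptions here):

// Precedence: explicit config.model, then the OLLAMA_MODEL environment
// variable, then OllamaConfig's compiled-in default.
AIServiceConfig config;  // config.model intentionally left empty
setenv("OLLAMA_MODEL", "qwen2.5-coder:7b", /*overwrite=*/1);
auto service = CreateAIServiceStrict(config);
// With neither set, the model falls back to "qwen2.5-coder:0.5b".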

View File

@@ -0,0 +1,27 @@
#ifndef YAZE_AI_RUNTIME_AVAILABLE
#include "cli/service/ai/service_factory.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
namespace yaze::cli {
std::unique_ptr<AIService> CreateAIService() {
return std::make_unique<MockAIService>();
}
std::unique_ptr<AIService> CreateAIService(const AIServiceConfig&) {
return std::make_unique<MockAIService>();
}
absl::StatusOr<std::unique_ptr<AIService>> CreateAIServiceStrict(
const AIServiceConfig&) {
return absl::FailedPreconditionError(
"AI runtime features are disabled in this build");
}
} // namespace yaze::cli
#endif // !YAZE_AI_RUNTIME_AVAILABLE
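With the runtime compiled out, the non-strict factories still hand back a usable object, so agent-pipeline tests keep running; a sketch (the mock's canned behavior is defined elsewhere and assumed here):

auto service = yaze::cli::CreateAIService();  // MockAIService in disabled builds
auto response = service->GenerateResponse("list dungeon sprites");
// Whether response is ok depends on the mock's canned behavior, not a live model.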