Enhance AI Agent Integration and Tool Command Functionality
- Added support for JSON in the CMake configuration for AI integrations.
- Implemented new tool commands: resource-list and dungeon-list-sprites.
- Created ToolDispatcher for managing tool command execution.
- Refactored the CMake structure to include agent sources and improve the build configuration.
- Updated the agent roadmap and README documentation to reflect current status and next steps.
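Two implementation files change below: the new ToolDispatcher and the AI service factory. As the first pair of hunks shows, ToolDispatcher::Dispatch returns an absl::StatusOr<std::string> carrying the tool's captured stdout. A minimal caller might look like the sketch below; the ToolCall type name, its field layout beyond tool_name, and the dispatcher's construction are inferred from the diff, not confirmed API:

// Hypothetical caller sketch; identifiers marked "inferred" are assumptions.
#include <iostream>

#include "cli/service/agent/tool_dispatcher.h"

void RunTool() {
  yaze::cli::agent::ToolDispatcher dispatcher;  // namespace inferred from the diff
  yaze::cli::agent::ToolCall call;              // type name inferred from `tool_call`
  call.tool_name = "resource-list";             // one of the two new tool commands
  absl::StatusOr<std::string> output = dispatcher.Dispatch(call);
  if (!output.ok()) {
    std::cerr << "Tool failed: " << output.status().message() << "\n";
    return;
  }
  std::cout << *output;  // stdout text captured while the handler ran
}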
@@ -1,5 +1,8 @@
 #include "cli/service/agent/tool_dispatcher.h"
 
+#include <iostream>
+#include <sstream>
+
 #include "absl/strings/str_format.h"
 #include "cli/handlers/agent/commands.h"
 
@@ -15,24 +18,29 @@ absl::StatusOr<std::string> ToolDispatcher::Dispatch(
     args.push_back(value);
   }
 
+  // Capture stdout
+  std::stringstream buffer;
+  auto old_cout_buf = std::cout.rdbuf();
+  std::cout.rdbuf(buffer.rdbuf());
+
+  absl::Status status;
   if (tool_call.tool_name == "resource-list") {
-    // Note: This is a simplified approach for now. A more robust solution
-    // would capture stdout instead of relying on the handler to return a string.
-    auto status = HandleResourceListCommand(args);
-    if (!status.ok()) {
-      return status;
-    }
-    return "Successfully listed resources.";
+    status = HandleResourceListCommand(args);
   } else if (tool_call.tool_name == "dungeon-list-sprites") {
-    auto status = HandleDungeonListSpritesCommand(args);
-    if (!status.ok()) {
-      return status;
-    }
-    return "Successfully listed sprites.";
+    status = HandleDungeonListSpritesCommand(args);
+  } else {
+    status = absl::UnimplementedError(
+        absl::StrFormat("Unknown tool: %s", tool_call.tool_name));
   }
 
-  return absl::UnimplementedError(
-      absl::StrFormat("Unknown tool: %s", tool_call.tool_name));
+  // Restore stdout
+  std::cout.rdbuf(old_cout_buf);
+
+  if (!status.ok()) {
+    return status;
+  }
+
+  return buffer.str();
 }
 
 }  // namespace agent
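A caveat on the capture pattern above: the manual rdbuf swap is restored before every return, but an exception thrown inside a handler would skip the restore and leave std::cout redirected. A small RAII wrapper, offered as a sketch rather than part of this commit, makes the restore unconditional:

// Sketch only (not in this commit): exception-safe stdout capture via RAII.
#include <iostream>
#include <sstream>
#include <string>

class ScopedCoutCapture {
 public:
  // Redirect std::cout into an internal buffer; rdbuf(new) returns the old buffer.
  ScopedCoutCapture() : old_buf_(std::cout.rdbuf(buffer_.rdbuf())) {}
  // Restore the original buffer even during stack unwinding.
  ~ScopedCoutCapture() { std::cout.rdbuf(old_buf_); }
  std::string str() const { return buffer_.str(); }

 private:
  std::stringstream buffer_;  // declared first: initialized before old_buf_
  std::streambuf* old_buf_;
};

With this, Dispatch would construct one ScopedCoutCapture before the handler call and return capture.str() at the end, dropping both manual rdbuf calls.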
@@ -1,23 +1,32 @@
 #include "cli/service/ai/service_factory.h"
 
 #include <cstring>
 #include <iostream>
 
 #include "cli/service/ai/ai_service.h"
-#include "cli/service/ai/gemini_ai_service.h"
 #include "cli/service/ai/ollama_ai_service.h"
 
+#ifdef YAZE_WITH_JSON
+#include "cli/service/ai/gemini_ai_service.h"
+#endif
+
 namespace yaze {
 namespace cli {
 
 std::unique_ptr<AIService> CreateAIService() {
   // Priority: Ollama (local) > Gemini (remote) > Mock (testing)
   const char* provider_env = std::getenv("YAZE_AI_PROVIDER");
-  const char* gemini_key = std::getenv("GEMINI_API_KEY");
   const char* ollama_model = std::getenv("OLLAMA_MODEL");
+  const std::string provider = provider_env ? provider_env : "";
+  const bool gemini_requested = provider == "gemini";
 
+#ifdef YAZE_WITH_JSON
+  const char* gemini_key = std::getenv("GEMINI_API_KEY");
+  const char* gemini_model = std::getenv("GEMINI_MODEL");
+#endif
+
   // Explicit provider selection
-  if (provider_env && std::string(provider_env) == "ollama") {
+  if (provider == "ollama") {
     OllamaConfig config;
 
     // Allow model override via env
@@ -39,6 +48,7 @@ std::unique_ptr<AIService> CreateAIService() {
   }
 
   // Gemini if API key provided
+#ifdef YAZE_WITH_JSON
   if (gemini_key && std::strlen(gemini_key) > 0) {
     GeminiConfig config(gemini_key);
 
@@ -59,6 +69,11 @@ std::unique_ptr<AIService> CreateAIService() {
     std::cout << "🤖 Using Gemini AI with model: " << config.model << std::endl;
     return service;
   }
+#else
+  if (gemini_requested || std::getenv("GEMINI_API_KEY")) {
+    std::cerr << "⚠️ Gemini support not available: rebuild with YAZE_WITH_JSON=ON" << std::endl;
+  }
+#endif
 
   // Default: Mock service for testing
   std::cout << "🤖 Using MockAIService (no LLM configured)" << std::endl;
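Provider selection is driven entirely by environment variables, so the factory can be exercised without code changes. A minimal sketch, assuming a POSIX environment for setenv and a placeholder model name (neither is part of the commit):

// Sketch: steering CreateAIService() through environment variables.
#include <cstdlib>

#include "cli/service/ai/service_factory.h"

int main() {
  // Ask for the local Ollama backend explicitly; with no variables set,
  // the factory falls through to Gemini (if built with YAZE_WITH_JSON and
  // keyed via GEMINI_API_KEY) and finally to the mock service.
  setenv("YAZE_AI_PROVIDER", "ollama", /*overwrite=*/1);
  setenv("OLLAMA_MODEL", "llama3", 1);  // hypothetical model name
  auto service = yaze::cli::CreateAIService();
  return service ? 0 : 1;
}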