feat: Update CMake presets and agent configuration for improved build options and interactive chat session experience
@@ -18,7 +18,7 @@
"YAZE_BUILD_APP": "ON",
"YAZE_BUILD_LIB": "ON",
"YAZE_BUILD_EMU": "OFF",
"YAZE_BUILD_Z3ED": "ON",
"YAZE_BUILD_Z3ED": "OFF",
"YAZE_USE_MODULAR_BUILD": "ON"
}
},
@@ -57,11 +57,17 @@
{
"name": "macos-dev",
"displayName": "macOS debug (ARM64)",
"description": "macOS ARM64 development build with ROM testing",
"description": "macOS ARM64 development build with ROM testing (no z3ed AI)",
"inherits": "macos-debug",
"cacheVariables": {
"YAZE_BUILD_APP": "ON",
"YAZE_BUILD_EMU": "OFF",
"YAZE_BUILD_Z3ED": "OFF",
"YAZE_MINIMAL_BUILD": "ON",
"YAZE_BUILD_TESTS": "ON",
"YAZE_ENABLE_ROM_TESTS": "ON",
"YAZE_TEST_ROM_PATH": "${sourceDir}/zelda3.sfc"
"YAZE_TEST_ROM_PATH": "${sourceDir}/zelda3.sfc",
"CMAKE_EXPORT_COMPILE_COMMANDS": "ON"
}
},
{
@@ -73,6 +79,7 @@
"cacheVariables": {
"YAZE_ENABLE_ROM_TESTS": "ON",
"YAZE_TEST_ROM_PATH": "${sourceDir}/zelda3.sfc",
"Z3ED_AI": "ON",
"YAZE_WITH_JSON": "ON",
"YAZE_WITH_GRPC": "ON",
"YAZE_BUILD_Z3ED": "ON",

@@ -133,7 +133,7 @@ if(YAZE_WITH_JSON)

message(STATUS "✓ SSL/HTTPS support enabled for yaze_agent (Gemini + HTTPS)")
else()
message(WARNING "OpenSSL not found - Gemini HTTPS features disabled")
message(WARNING "OpenSSL not found - Gemini HTTPS features disabled (Ollama still works)")
message(STATUS " Install OpenSSL to enable Gemini: brew install openssl (macOS) or apt-get install libssl-dev (Linux)")
endif()
endif()

@@ -4,6 +4,14 @@
#include <iostream>
#include <iomanip>

#ifdef _WIN32
#include <io.h>
#define isatty _isatty
#define fileno _fileno
#else
#include <unistd.h>
#endif

#include "absl/strings/str_format.h"
#include "absl/time/time.h"

@@ -90,15 +98,30 @@ absl::Status SimpleChatSession::SendAndWaitForResponse(
}

absl::Status SimpleChatSession::RunInteractive() {
std::cout << "Z3ED Agent Chat (Simple Mode)\n";
std::cout << "Type 'quit' or 'exit' to end the session.\n";
std::cout << "Type 'reset' to clear conversation history.\n";
std::cout << "----------------------------------------\n\n";
// Check if stdin is a TTY (interactive) or a pipe/file
bool is_interactive = isatty(fileno(stdin));

if (is_interactive) {
std::cout << "Z3ED Agent Chat (Simple Mode)\n";
std::cout << "Type 'quit' or 'exit' to end the session.\n";
std::cout << "Type 'reset' to clear conversation history.\n";
std::cout << "----------------------------------------\n\n";
}

std::string input;
while (true) {
std::cout << "You: ";
std::getline(std::cin, input);
if (is_interactive) {
std::cout << "You: ";
std::cout.flush(); // Ensure prompt is displayed before reading
}

if (!std::getline(std::cin, input)) {
// EOF reached (piped input exhausted or Ctrl+D)
if (is_interactive) {
std::cout << "\n";
}
break;
}

if (input.empty()) continue;
if (input == "quit" || input == "exit") break;

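The chat-session hunk above keys every piece of interactive decoration (the banner, the "You:" prompt, the trailing newline on Ctrl+D) off a single isatty(fileno(stdin)) check, so piped or scripted input gets clean output and the loop terminates on EOF instead of prompting forever. Below is a minimal, self-contained sketch of that pattern; the HandleLine() helper is a hypothetical stand-in for the real agent call and is not part of the commit.

// Minimal sketch of the TTY-gated prompt loop from the hunk above.
// HandleLine() is a hypothetical placeholder for the real agent call.
#include <cstdio>
#include <iostream>
#include <string>

#ifdef _WIN32
#include <io.h>
#define isatty _isatty
#define fileno _fileno
#else
#include <unistd.h>
#endif

static void HandleLine(const std::string& line) {
  std::cout << "Agent: (echo) " << line << "\n";  // placeholder response
}

int main() {
  const bool is_interactive = isatty(fileno(stdin));

  if (is_interactive) {
    std::cout << "Chat demo. Type 'quit' or 'exit' to end.\n";
  }

  std::string input;
  while (true) {
    if (is_interactive) {
      std::cout << "You: " << std::flush;  // prompt only on a real terminal
    }
    if (!std::getline(std::cin, input)) {
      if (is_interactive) std::cout << "\n";  // tidy newline after Ctrl+D
      break;  // EOF: piped input exhausted or Ctrl+D
    }
    if (input.empty()) continue;
    if (input == "quit" || input == "exit") break;
    HandleLine(input);
  }
  return 0;
}

Run from a terminal it prompts as before; fed from a pipe or a file it consumes lines silently and exits at end of input, which is what lets the same session serve both interactive and scripted use.
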
@@ -102,7 +102,7 @@ absl::Status OllamaAIService::CheckAvailability() {
}

absl::StatusOr<std::vector<std::string>> OllamaAIService::ListAvailableModels() {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
#ifndef YAZE_WITH_JSON
return absl::UnimplementedError("Requires httplib and JSON support");
#else
try {
@@ -163,7 +163,7 @@ absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(

absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
const std::vector<agent::ChatMessage>& history) {
#if !YAZE_HAS_HTTPLIB || !YAZE_HAS_JSON
#ifndef YAZE_WITH_JSON
return absl::UnimplementedError(
"Ollama service requires httplib and JSON support. "
"Install vcpkg dependencies or use bundled libraries.");
@@ -204,26 +204,44 @@ absl::StatusOr<AgentResponse> OllamaAIService::GenerateResponse(
res->status, res->body));
}

// Parse response to extract generated text
// Parse Ollama's wrapper JSON
nlohmann::json ollama_wrapper;
try {
ollama_wrapper = nlohmann::json::parse(res->body);
} catch (const nlohmann::json::exception& e) {
return absl::InternalError(absl::StrFormat(
"Failed to parse Ollama response: %s\nBody: %s",
e.what(), res->body));
}

// Extract the LLM's response from Ollama's "response" field
if (!ollama_wrapper.contains("response") || !ollama_wrapper["response"].is_string()) {
return absl::InvalidArgumentError(
"Ollama response missing 'response' field");
}

std::string llm_output = ollama_wrapper["response"].get<std::string>();

// Parse the LLM's JSON response (the agent structure)
nlohmann::json response_json;
try {
response_json = nlohmann::json::parse(res->body);
response_json = nlohmann::json::parse(llm_output);
} catch (const nlohmann::json::exception& e) {
// Sometimes the LLM includes extra text - try to extract JSON object
size_t start = res->body.find('{');
size_t end = res->body.rfind('}');
size_t start = llm_output.find('{');
size_t end = llm_output.rfind('}');

if (start != std::string::npos && end != std::string::npos && end > start) {
std::string json_only = res->body.substr(start, end - start + 1);
std::string json_only = llm_output.substr(start, end - start + 1);
try {
response_json = nlohmann::json::parse(json_only);
} catch (const nlohmann::json::exception&) {
return absl::InvalidArgumentError(
"LLM did not return valid JSON. Response:\n" + res->body);
"LLM did not return valid JSON. Response:\n" + llm_output);
}
} else {
return absl::InvalidArgumentError(
"LLM did not return a JSON object. Response:\n" + res->body);
"LLM did not return a JSON object. Response:\n" + llm_output);
}
}
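
The parsing change is the substantive fix in this hunk: Ollama's HTTP reply is itself JSON, with the model's generated text carried in a top-level "response" field, so the old code that parsed res->body directly was reading the wrapper rather than the model output. The sketch below isolates that two-stage parse (wrapper first, then the model's own JSON, with a brace-window fallback for models that add prose around the object) as a free function. The name ExtractAgentJson() and the exact include paths are illustrative assumptions, not yaze API; the nlohmann/absl calls mirror the ones in the diff.

// Sketch of the two-stage parse shown above: unwrap Ollama's JSON envelope,
// then parse the model's own JSON reply, falling back to the outermost
// '{'..'}' window if the model wrapped it in extra text.
// ExtractAgentJson() is an illustrative helper, not part of the commit.
#include <cstddef>
#include <string>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/str_format.h"
#include "nlohmann/json.hpp"

absl::StatusOr<nlohmann::json> ExtractAgentJson(const std::string& body) {
  nlohmann::json wrapper;
  try {
    wrapper = nlohmann::json::parse(body);  // stage 1: Ollama's wrapper
  } catch (const nlohmann::json::exception& e) {
    return absl::InternalError(
        absl::StrFormat("Failed to parse Ollama response: %s", e.what()));
  }

  if (!wrapper.contains("response") || !wrapper["response"].is_string()) {
    return absl::InvalidArgumentError("Ollama response missing 'response' field");
  }
  const std::string llm_output = wrapper["response"].get<std::string>();

  try {
    return nlohmann::json::parse(llm_output);  // stage 2: the agent structure
  } catch (const nlohmann::json::exception&) {
    // Fallback: take the outermost JSON object if the model added extra text.
    const std::size_t start = llm_output.find('{');
    const std::size_t end = llm_output.rfind('}');
    if (start == std::string::npos || end == std::string::npos || end <= start) {
      return absl::InvalidArgumentError(
          "LLM did not return a JSON object. Response:\n" + llm_output);
    }
    try {
      return nlohmann::json::parse(llm_output.substr(start, end - start + 1));
    } catch (const nlohmann::json::exception&) {
      return absl::InvalidArgumentError(
          "LLM did not return valid JSON. Response:\n" + llm_output);
    }
  }
}

Switching the error messages from res->body to llm_output, as the tail of the hunk does, also means failures quote the model's text rather than the whole HTTP envelope, which is easier to act on.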