feat: enhance CMake configuration and CI workflow for AI features

- Added `YAZE_BUILD_AGENT_UI`, `YAZE_ENABLE_REMOTE_AUTOMATION`, and `YAZE_ENABLE_AI_RUNTIME` options to the CMake presets so these features can be toggled independently.
- Updated the CI workflow to build `z3ed` with the AI runtime, remote automation, and agent UI enabled.
- Switched the default Ollama model in scripts and documentation to `qwen2.5-coder:0.5b`, the lightweight model used in CI.
- Updated `agent_test_suite.sh` to honor an `OLLAMA_MODEL` override and forward it to the CLI via `--ai_model`.
Author: scawful
Date: 2025-11-16 21:36:04 -05:00
parent a5d98ad83c
commit 61c99ecfcd
6 changed files with 33 additions and 8 deletions
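The new options can be exercised locally with the same flags the CI job passes; a minimal sketch, assuming a Ninja generator and a checkout at the repository root (these are the -D flags from the workflow hunk below):

```bash
# Configure and build z3ed locally with the new AI options enabled;
# mirrors the flags the CI workflow passes below.
cmake -B build_test -G Ninja \
  -DCMAKE_BUILD_TYPE=Release \
  -DZ3ED_AI=ON \
  -DYAZE_BUILD_Z3ED=ON \
  -DYAZE_ENABLE_AI_RUNTIME=ON \
  -DYAZE_ENABLE_REMOTE_AUTOMATION=ON \
  -DYAZE_BUILD_AGENT_UI=ON
cmake --build build_test --target z3ed
```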

View File

@@ -266,16 +266,23 @@ jobs:
           cmake -B build_test -G Ninja \
             -DCMAKE_BUILD_TYPE=Release \
             -DZ3ED_AI=ON \
-            -DYAZE_BUILD_Z3ED=ON
+            -DYAZE_BUILD_Z3ED=ON \
+            -DYAZE_ENABLE_AI_RUNTIME=ON \
+            -DYAZE_ENABLE_REMOTE_AUTOMATION=ON \
+            -DYAZE_BUILD_AGENT_UI=ON
           cmake --build build_test --target z3ed
       - name: Start Ollama
+        env:
+          OLLAMA_MODEL: qwen2.5-coder:0.5b
         run: |
           ollama serve &
           sleep 10
-          ollama pull qwen2.5-coder:7b
+          ollama pull "$OLLAMA_MODEL"
       - name: Run Test Suite
+        env:
+          OLLAMA_MODEL: qwen2.5-coder:0.5b
         run: |
           chmod +x ./scripts/agent_test_suite.sh
           ./scripts/agent_test_suite.sh ollama
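The Ollama portion of the job can be replayed locally in the same order; a rough sketch, assuming `ollama` is installed and `z3ed` was built into `build_test` as above:

```bash
# Start the Ollama server, fetch the lightweight CI model, then run the suite.
export OLLAMA_MODEL=qwen2.5-coder:0.5b
ollama serve &
sleep 10                      # give the server a moment to start accepting requests
ollama pull "$OLLAMA_MODEL"
./scripts/agent_test_suite.sh ollama
```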

View File

@@ -139,7 +139,10 @@
         "YAZE_ENABLE_JSON": "ON",
         "YAZE_ENABLE_AI": "OFF",
         "YAZE_ENABLE_ROM_TESTS": "OFF",
-        "YAZE_MINIMAL_BUILD": "OFF"
+        "YAZE_MINIMAL_BUILD": "OFF",
+        "YAZE_BUILD_AGENT_UI": "ON",
+        "YAZE_ENABLE_REMOTE_AUTOMATION": "ON",
+        "YAZE_ENABLE_AI_RUNTIME": "OFF"
       }
     },
     {
@@ -154,7 +157,10 @@
         "YAZE_ENABLE_JSON": "ON",
         "YAZE_ENABLE_AI": "OFF",
         "YAZE_ENABLE_ROM_TESTS": "OFF",
-        "YAZE_MINIMAL_BUILD": "OFF"
+        "YAZE_MINIMAL_BUILD": "OFF",
+        "YAZE_BUILD_AGENT_UI": "ON",
+        "YAZE_ENABLE_REMOTE_AUTOMATION": "ON",
+        "YAZE_ENABLE_AI_RUNTIME": "OFF"
       }
     },
     {
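Both presets leave `YAZE_ENABLE_AI_RUNTIME` at `OFF`; command-line `-D` flags override a preset's `cacheVariables`, so the runtime can be switched on without editing the JSON. A sketch, assuming a configure preset named `mac-ai` exists alongside the build preset referenced in the docs below:

```bash
# Enable the AI runtime on top of the preset defaults for a one-off build.
# (mac-ai as a configure preset name is an assumption, not shown in this diff.)
cmake --preset mac-ai -DYAZE_ENABLE_AI_RUNTIME=ON
cmake --build --preset mac-ai --target z3ed
```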

View File

@@ -80,6 +80,10 @@ if(YAZE_ENABLE_AI)
   add_compile_definitions(Z3ED_AI)
 endif()
 
+if(YAZE_ENABLE_AI_RUNTIME)
+  add_compile_definitions(YAZE_AI_RUNTIME_AVAILABLE)
+endif()
+
 # Print configuration summary
 message(STATUS "=== YAZE Build Configuration ===")
 message(STATUS "GUI Application: ${YAZE_BUILD_GUI}")
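A quick way to confirm the definition actually reaches the compiler is to export compile commands and grep for it; a small sanity-check sketch (the export flag is standard CMake, not part of this change):

```bash
# Re-configure with the runtime on and compile-commands export enabled,
# then count occurrences of the new define in the generated database.
cmake -B build_test -G Ninja \
  -DYAZE_ENABLE_AI_RUNTIME=ON -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
grep -c "YAZE_AI_RUNTIME_AVAILABLE" build_test/compile_commands.json
```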

View File

@@ -53,6 +53,9 @@ The `agent_test_suite.sh` script now defaults to mock ROM mode:
 # Or with Gemini
 ./scripts/agent_test_suite.sh gemini
+
+# Override the Ollama model (CI uses qwen2.5-coder:0.5b)
+OLLAMA_MODEL=qwen2.5-coder:0.5b ./scripts/agent_test_suite.sh ollama
 ```
 
 To use a real ROM instead, edit the script:

View File

@@ -20,7 +20,8 @@ cmake --build --preset mac-ai --target z3ed
 The AI features require at least one provider:
 
 - **Ollama (local)** install via `brew install ollama`, run `ollama serve`, then set
-  `Z3ED_OLLAMA_MODEL=qwen2.5-coder:7b` (or any supported model).
+  `OLLAMA_MODEL=qwen2.5-coder:0.5b` (the lightweight default used in CI) or any other supported
+  model. Pass `--ai_model "$OLLAMA_MODEL"` on the CLI to override per-run.
 - **Gemini (cloud)** export `GEMINI_API_KEY` before launching `z3ed`.
 
 If no provider is configured the CLI still works, but agent subcommands will fall back to manual
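The same provider and model flags used by the test script can be passed to `z3ed` directly; a rough sketch (the query text is illustrative, and the ROM flags the script adds are omitted here):

```bash
# Ollama: target the locally served lightweight model explicitly.
./build_test/bin/z3ed agent simple-chat "summarize the loaded ROM" \
  --ai_provider=ollama --ai_model="qwen2.5-coder:0.5b"

# Gemini: the API key must be exported before launching z3ed.
export GEMINI_API_KEY=your-key-here   # placeholder value
./build_test/bin/z3ed agent simple-chat "summarize the loaded ROM" \
  --ai_provider=gemini
```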

View File

@@ -12,7 +12,7 @@ NC='\033[0m' # No Color
 Z3ED="./build_test/bin/z3ed"
 RESULTS_FILE="/tmp/z3ed_ai_test_results.txt"
 USE_MOCK_ROM=true  # Set to false if you want to test with a real ROM
-OLLAMA_MODEL="${OLLAMA_MODEL:-qwen2.5-coder:latest}"
+OLLAMA_MODEL="${OLLAMA_MODEL:-qwen2.5-coder:0.5b}"
 OLLAMA_PID=""
 
 echo "=========================================="
@@ -124,7 +124,7 @@ if [ -z "$1" ]; then
   echo "Usage: $0 <ollama|gemini|mock>"
   echo ""
   echo "Environment Variables:"
-  echo "  OLLAMA_MODEL   - Ollama model to use (default: qwen2.5-coder:latest)"
+  echo "  OLLAMA_MODEL   - Ollama model to use (default: qwen2.5-coder:0.5b)"
   echo "  GEMINI_API_KEY - Required for Gemini provider"
   echo ""
   echo "Examples:"
@@ -228,7 +228,11 @@ run_test() {
   echo "Query: $query"
   echo ""
 
-  local cmd="$Z3ED agent simple-chat \"$query\" $ROM_FLAGS --ai_provider=$provider $extra_args"
+  local provider_args="$extra_args"
+  if [ "$provider" == "ollama" ]; then
+    provider_args="--ai_model=\"$OLLAMA_MODEL\" $provider_args"
+  fi
+  local cmd="$Z3ED agent simple-chat \"$query\" $ROM_FLAGS --ai_provider=$provider $provider_args"
 
   echo "Running: $cmd"
   echo ""