Enhance testing framework and UI integration for YAZE
- Added a comprehensive testing framework with support for unit, integration, and UI tests, improving overall test coverage and reliability.
- Integrated ImGui Test Engine for UI testing, allowing for real-time feedback and visualization of test results.
- Updated CMake configuration to conditionally include testing components based on build options, enhancing flexibility for developers.
- Introduced a new CLI command for running asset-loading tests on ROMs, providing a straightforward way to validate functionality.
- Enhanced error handling and resource management during testing, ensuring stability and clarity in test execution.
- Improved the user interface with a dedicated test dashboard for monitoring test progress and results, enhancing developer experience.
This commit is contained in:
17
src/app/test/test.cmake
Normal file
17
src/app/test/test.cmake
Normal file
@@ -0,0 +1,17 @@
|
||||
# Testing system components for YAZE

set(YAZE_TEST_CORE_SOURCES
  app/test/test_manager.cc
  app/test/test_manager.h
  app/test/unit_test_suite.h
)

# When testing is enabled, compile the test sources into the main app
# target and expose YAZE_ENABLE_TESTING so test-only code paths compile in.
# (Consolidated from two separate identical if(BUILD_TESTING) guards.)
if(BUILD_TESTING)
  list(APPEND YAZE_APP_SRC ${YAZE_TEST_CORE_SOURCES})
  target_compile_definitions(yaze_lib PRIVATE YAZE_ENABLE_TESTING=1)
endif()
|
||||
408
src/app/test/test_manager.cc
Normal file
408
src/app/test/test_manager.cc
Normal file
@@ -0,0 +1,408 @@
|
||||
#include "app/test/test_manager.h"
|
||||
|
||||
#include "app/gfx/arena.h"
|
||||
#include "imgui/imgui.h"
|
||||
|
||||
#ifdef YAZE_ENABLE_IMGUI_TEST_ENGINE
|
||||
#include "imgui_test_engine/imgui_te_engine.h"
|
||||
#endif
|
||||
|
||||
namespace yaze {
|
||||
namespace test {
|
||||
|
||||
// Utility function implementations
|
||||
// Maps a TestStatus to a short human-readable label; unrecognized
// values fall through to "Unknown".
const char* TestStatusToString(TestStatus status) {
  if (status == TestStatus::kNotRun) {
    return "Not Run";
  }
  if (status == TestStatus::kRunning) {
    return "Running";
  }
  if (status == TestStatus::kPassed) {
    return "Passed";
  }
  if (status == TestStatus::kFailed) {
    return "Failed";
  }
  if (status == TestStatus::kSkipped) {
    return "Skipped";
  }
  return "Unknown";
}
|
||||
|
||||
// Maps a TestCategory to its display name; unrecognized values fall
// through to "Unknown".
const char* TestCategoryToString(TestCategory category) {
  if (category == TestCategory::kUnit) {
    return "Unit";
  }
  if (category == TestCategory::kIntegration) {
    return "Integration";
  }
  if (category == TestCategory::kUI) {
    return "UI";
  }
  if (category == TestCategory::kPerformance) {
    return "Performance";
  }
  if (category == TestCategory::kMemory) {
    return "Memory";
  }
  return "Unknown";
}
|
||||
|
||||
// Returns the dashboard color associated with a test status
// (gray/yellow/green/red/orange); white for unrecognized values.
ImVec4 GetTestStatusColor(TestStatus status) {
  switch (status) {
    case TestStatus::kNotRun:
      return ImVec4(0.6f, 0.6f, 0.6f, 1.0f);  // Gray
    case TestStatus::kRunning:
      return ImVec4(1.0f, 1.0f, 0.0f, 1.0f);  // Yellow
    case TestStatus::kPassed:
      return ImVec4(0.0f, 1.0f, 0.0f, 1.0f);  // Green
    case TestStatus::kFailed:
      return ImVec4(1.0f, 0.0f, 0.0f, 1.0f);  // Red
    case TestStatus::kSkipped:
      return ImVec4(1.0f, 0.5f, 0.0f, 1.0f);  // Orange
    default:
      break;
  }
  return ImVec4(1.0f, 1.0f, 1.0f, 1.0f);  // White fallback
}
|
||||
|
||||
// TestManager implementation
|
||||
// Returns the process-wide TestManager singleton. Meyers-singleton
// pattern: initialization is thread-safe since C++11.
TestManager& TestManager::Get() {
  static TestManager instance;
  return instance;
}
|
||||
|
||||
// Constructs the manager and eagerly sets up the ImGui Test Engine
// integration (a no-op when YAZE_ENABLE_IMGUI_TEST_ENGINE is not defined).
TestManager::TestManager() {
  // Initialize UI test engine
  InitializeUITesting();
}

// NOTE(review): runs the full two-phase UI-testing shutdown. If this
// singleton outlives the ImGui context, StopUITesting() guards on
// ImGui::GetCurrentContext() — confirm against the app's teardown order.
TestManager::~TestManager() {
  ShutdownUITesting();
}
|
||||
|
||||
#ifdef YAZE_ENABLE_IMGUI_TEST_ENGINE
|
||||
void TestManager::InitializeUITesting() {
|
||||
if (!ui_test_engine_) {
|
||||
ui_test_engine_ = ImGuiTestEngine_CreateContext();
|
||||
if (ui_test_engine_) {
|
||||
ImGuiTestEngineIO& test_io = ImGuiTestEngine_GetIO(ui_test_engine_);
|
||||
test_io.ConfigVerboseLevel = ImGuiTestVerboseLevel_Info;
|
||||
test_io.ConfigVerboseLevelOnError = ImGuiTestVerboseLevel_Debug;
|
||||
test_io.ConfigRunSpeed = ImGuiTestRunSpeed_Fast;
|
||||
|
||||
// Start the test engine
|
||||
ImGuiTestEngine_Start(ui_test_engine_, ImGui::GetCurrentContext());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void TestManager::StopUITesting() {
|
||||
if (ui_test_engine_ && ImGui::GetCurrentContext() != nullptr) {
|
||||
ImGuiTestEngine_Stop(ui_test_engine_);
|
||||
}
|
||||
}
|
||||
|
||||
void TestManager::DestroyUITestingContext() {
|
||||
if (ui_test_engine_) {
|
||||
ImGuiTestEngine_DestroyContext(ui_test_engine_);
|
||||
ui_test_engine_ = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void TestManager::ShutdownUITesting() {
|
||||
// Complete shutdown - calls both phases
|
||||
StopUITesting();
|
||||
DestroyUITestingContext();
|
||||
}
|
||||
#endif
|
||||
|
||||
// Runs every enabled test suite in registration order, accumulating
// results into last_results_. Returns the first infrastructure failure
// (individual test failures are recorded in the results, not here).
// Fails with FailedPrecondition when a run is already in progress.
absl::Status TestManager::RunAllTests() {
  if (is_running_) {
    return absl::FailedPreconditionError("Tests are already running");
  }

  is_running_ = true;
  progress_ = 0.0f;
  last_results_.Clear();

  // Execute all enabled test suites.
  for (auto& suite : test_suites_) {
    if (!suite->IsEnabled()) {
      continue;
    }
    current_test_name_ = suite->GetName();
    auto status = ExecuteTestSuite(suite.get());
    if (!status.ok()) {
      // Fix: reset the current-test label too, so no stale "Running: X"
      // state survives an early failure exit.
      is_running_ = false;
      current_test_name_.clear();
      return status;
    }
    UpdateProgress();
  }

  is_running_ = false;
  current_test_name_.clear();
  progress_ = 1.0f;

  return absl::OkStatus();
}
|
||||
|
||||
// Runs every enabled suite whose category matches `category`.
// Same contract as RunAllTests(); fails with FailedPrecondition when a
// run is already in progress.
absl::Status TestManager::RunTestsByCategory(TestCategory category) {
  if (is_running_) {
    return absl::FailedPreconditionError("Tests are already running");
  }

  is_running_ = true;
  progress_ = 0.0f;
  last_results_.Clear();

  // Filter inline instead of building an intermediate vector first —
  // one pass, no extra allocation.
  for (auto& suite : test_suites_) {
    if (!suite->IsEnabled() || suite->GetCategory() != category) {
      continue;
    }
    current_test_name_ = suite->GetName();
    auto status = ExecuteTestSuite(suite.get());
    if (!status.ok()) {
      // Fix: clear the stale "current test" label on early exit.
      is_running_ = false;
      current_test_name_.clear();
      return status;
    }
    UpdateProgress();
  }

  is_running_ = false;
  current_test_name_.clear();
  progress_ = 1.0f;

  return absl::OkStatus();
}
|
||||
|
||||
// Runs a single suite looked up by name. Returns NotFound when no suite
// with that name is registered, FailedPrecondition when a run is already
// in progress, otherwise the suite's own execution status.
absl::Status TestManager::RunTestSuite(const std::string& suite_name) {
  if (is_running_) {
    return absl::FailedPreconditionError("Tests are already running");
  }

  const auto entry = suite_lookup_.find(suite_name);
  if (entry == suite_lookup_.end()) {
    return absl::NotFoundError("Test suite not found: " + suite_name);
  }

  is_running_ = true;
  progress_ = 0.0f;
  last_results_.Clear();
  current_test_name_ = suite_name;

  const absl::Status run_status = ExecuteTestSuite(entry->second);

  // Reset execution state regardless of outcome.
  is_running_ = false;
  current_test_name_.clear();
  progress_ = 1.0f;

  return run_status;
}
|
||||
|
||||
// Registers a suite, taking ownership, and indexes it by display name.
// Fix: re-registering an existing name now replaces the old suite in
// place. Previously the lookup map was overwritten while the old suite
// remained in test_suites_, leaving a stale duplicate that RunAllTests()
// would still execute and GetTestSuiteNames() would list twice.
void TestManager::RegisterTestSuite(std::unique_ptr<TestSuite> suite) {
  if (!suite) {
    return;
  }
  const std::string name = suite->GetName();
  auto found = suite_lookup_.find(name);
  if (found != suite_lookup_.end()) {
    // Replace the previously registered suite with the new one.
    for (auto& slot : test_suites_) {
      if (slot.get() == found->second) {
        slot = std::move(suite);
        found->second = slot.get();
        return;
      }
    }
  }
  suite_lookup_[name] = suite.get();
  test_suites_.push_back(std::move(suite));
}
|
||||
|
||||
// Returns the display names of all registered suites, in registration
// order.
std::vector<std::string> TestManager::GetTestSuiteNames() const {
  std::vector<std::string> suite_names(test_suites_.size());
  size_t index = 0;
  for (const auto& suite : test_suites_) {
    suite_names[index++] = suite->GetName();
  }
  return suite_names;
}
|
||||
|
||||
// Looks up a suite by display name; returns nullptr (non-owning) when
// absent.
TestSuite* TestManager::GetTestSuite(const std::string& name) {
  const auto entry = suite_lookup_.find(name);
  if (entry == suite_lookup_.end()) {
    return nullptr;
  }
  return entry->second;
}
|
||||
|
||||
// Takes one fresh resource snapshot and bounds the history buffer to
// kMaxResourceHistorySize entries.
void TestManager::UpdateResourceStats() {
  CollectResourceStats();
  TrimResourceHistory();
}
|
||||
|
||||
// Runs one suite, bracketing it with resource snapshots so memory/
// performance effects of the run show up in the history. Returns
// InvalidArgument for a null suite, otherwise the suite's status.
absl::Status TestManager::ExecuteTestSuite(TestSuite* suite) {
  if (suite == nullptr) {
    return absl::InvalidArgumentError("Test suite is null");
  }

  CollectResourceStats();  // snapshot before the run

  const absl::Status run_status = suite->RunTests(last_results_);

  CollectResourceStats();  // snapshot after the run

  return run_status;
}
|
||||
|
||||
// Recomputes progress_ as (enabled suites) / (all suites).
// NOTE(review): as written this counts every *enabled* suite as
// completed regardless of whether it has actually run, so progress_
// jumps straight to enabled/total on the first call instead of
// advancing per executed suite — presumably a count of executed suites
// was intended; confirm and track that separately if so.
void TestManager::UpdateProgress() {
  if (test_suites_.empty()) {
    progress_ = 1.0f;
    return;
  }

  size_t completed = 0;
  for (const auto& suite : test_suites_) {
    if (suite->IsEnabled()) {
      completed++;
    }
  }

  progress_ = static_cast<float>(completed) / test_suites_.size();
}
|
||||
|
||||
// Appends one timestamped snapshot of Arena resource counts and the
// current ImGui frame rate to resource_history_. Callers are expected
// to bound the buffer via TrimResourceHistory().
void TestManager::CollectResourceStats() {
  ResourceStats stats;
  stats.timestamp = std::chrono::steady_clock::now();

  // Get Arena statistics
  auto& arena = gfx::Arena::Get();
  stats.texture_count = arena.GetTextureCount();
  stats.surface_count = arena.GetSurfaceCount();

  // Get frame rate from ImGui
  stats.frame_rate = ImGui::GetIO().Framerate;

  // NOTE(review): this "MB" figure is object-count / 1024, not real
  // bytes — it ignores texture/surface dimensions entirely. Treat it as
  // a relative trend indicator only; confirm whether actual byte sizes
  // are available from the Arena.
  stats.memory_usage_mb = (stats.texture_count + stats.surface_count) / 1024; // Rough estimate

  resource_history_.push_back(stats);
}
|
||||
|
||||
void TestManager::TrimResourceHistory() {
|
||||
if (resource_history_.size() > kMaxResourceHistorySize) {
|
||||
resource_history_.erase(
|
||||
resource_history_.begin(),
|
||||
resource_history_.begin() + (resource_history_.size() - kMaxResourceHistorySize));
|
||||
}
|
||||
}
|
||||
|
||||
// Draws the "Test Dashboard" window (menu bar, run controls, results
// summary, filterable result list) and, when enabled, the companion
// "Resource Monitor" window. Must be called once per frame between
// ImGui::NewFrame()/Render(); call order of the ImGui Begin/End pairs
// below is significant.
void TestManager::DrawTestDashboard() {
  // NOTE(review): forcing visibility every frame means the window's
  // close button can never keep the dashboard closed while this is
  // being called — confirm whether the caller should gate on
  // show_dashboard_ instead.
  show_dashboard_ = true; // Enable dashboard visibility

  ImGui::Begin("Test Dashboard", &show_dashboard_, ImGuiWindowFlags_MenuBar);

  // Menu bar: run commands (disabled while a run is in progress) and
  // view toggles. Run statuses are intentionally discarded here; the
  // outcomes surface through last_results_.
  if (ImGui::BeginMenuBar()) {
    if (ImGui::BeginMenu("Run")) {
      if (ImGui::MenuItem("All Tests", nullptr, false, !is_running_)) {
        [[maybe_unused]] auto status = RunAllTests();
      }
      if (ImGui::MenuItem("Unit Tests", nullptr, false, !is_running_)) {
        [[maybe_unused]] auto status = RunTestsByCategory(TestCategory::kUnit);
      }
      if (ImGui::MenuItem("Integration Tests", nullptr, false, !is_running_)) {
        [[maybe_unused]] auto status = RunTestsByCategory(TestCategory::kIntegration);
      }
      if (ImGui::MenuItem("UI Tests", nullptr, false, !is_running_)) {
        [[maybe_unused]] auto status = RunTestsByCategory(TestCategory::kUI);
      }
      ImGui::EndMenu();
    }

    if (ImGui::BeginMenu("View")) {
      ImGui::MenuItem("Resource Monitor", nullptr, &show_resource_monitor_);
      ImGui::EndMenu();
    }

    ImGui::EndMenuBar();
  }

  // Test execution status: progress bar while running, action buttons
  // when idle.
  if (is_running_) {
    ImGui::Text("Running: %s", current_test_name_.c_str());
    ImGui::ProgressBar(progress_, ImVec2(-1, 0), "");
  } else {
    if (ImGui::Button("Run All Tests", ImVec2(120, 0))) {
      [[maybe_unused]] auto status = RunAllTests();
    }
    ImGui::SameLine();
    if (ImGui::Button("Clear Results", ImVec2(120, 0))) {
      ClearResults();
    }
  }

  ImGui::Separator();

  // Test results summary (hidden until at least one test has recorded).
  if (last_results_.total_tests > 0) {
    ImGui::Text("Total Tests: %zu", last_results_.total_tests);
    ImGui::SameLine();
    ImGui::TextColored(GetTestStatusColor(TestStatus::kPassed),
                       "Passed: %zu", last_results_.passed_tests);
    ImGui::SameLine();
    ImGui::TextColored(GetTestStatusColor(TestStatus::kFailed),
                       "Failed: %zu", last_results_.failed_tests);
    ImGui::SameLine();
    ImGui::TextColored(GetTestStatusColor(TestStatus::kSkipped),
                       "Skipped: %zu", last_results_.skipped_tests);

    ImGui::Text("Pass Rate: %.1f%%", last_results_.GetPassRate() * 100.0f);
    ImGui::Text("Total Duration: %lld ms", last_results_.total_duration.count());
  }

  ImGui::Separator();

  // Test filter: substring match against individual test names.
  static char filter_buffer[256] = "";
  if (ImGui::InputText("Filter", filter_buffer, sizeof(filter_buffer))) {
    test_filter_ = std::string(filter_buffer);
  }

  // Test results list, colored by status; failed tests show their error
  // message indented beneath the entry.
  if (ImGui::BeginChild("TestResults", ImVec2(0, 0), true)) {
    for (const auto& result : last_results_.individual_results) {
      if (!test_filter_.empty() &&
          result.name.find(test_filter_) == std::string::npos) {
        continue;
      }

      // Address-based ID keeps entries unique even with duplicate names.
      ImGui::PushID(&result);
      ImGui::TextColored(GetTestStatusColor(result.status),
                         "[%s] %s::%s",
                         TestStatusToString(result.status),
                         result.suite_name.c_str(),
                         result.name.c_str());

      if (result.status == TestStatus::kFailed && !result.error_message.empty()) {
        ImGui::Indent();
        ImGui::TextWrapped("Error: %s", result.error_message.c_str());
        ImGui::Unindent();
      }

      ImGui::PopID();
    }
  }
  ImGui::EndChild();

  ImGui::End();

  // Resource monitor window: latest snapshot plus simple history plots.
  if (show_resource_monitor_) {
    ImGui::Begin("Resource Monitor", &show_resource_monitor_);

    if (!resource_history_.empty()) {
      const auto& latest = resource_history_.back();
      ImGui::Text("Textures: %zu", latest.texture_count);
      ImGui::Text("Surfaces: %zu", latest.surface_count);
      ImGui::Text("Memory: %zu MB", latest.memory_usage_mb);
      ImGui::Text("FPS: %.1f", latest.frame_rate);

      // Simple plot of resource usage over time
      if (resource_history_.size() > 1) {
        std::vector<float> texture_counts;
        std::vector<float> surface_counts;
        texture_counts.reserve(resource_history_.size());
        surface_counts.reserve(resource_history_.size());

        for (const auto& stats : resource_history_) {
          texture_counts.push_back(static_cast<float>(stats.texture_count));
          surface_counts.push_back(static_cast<float>(stats.surface_count));
        }

        ImGui::PlotLines("Textures", texture_counts.data(),
                         static_cast<int>(texture_counts.size()), 0, nullptr,
                         0.0f, FLT_MAX, ImVec2(0, 80));
        ImGui::PlotLines("Surfaces", surface_counts.data(),
                         static_cast<int>(surface_counts.size()), 0, nullptr,
                         0.0f, FLT_MAX, ImVec2(0, 80));
      }
    }

    ImGui::End();
  }
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace yaze
|
||||
213
src/app/test/test_manager.h
Normal file
213
src/app/test/test_manager.h
Normal file
@@ -0,0 +1,213 @@
|
||||
#ifndef YAZE_APP_TEST_TEST_MANAGER_H
|
||||
#define YAZE_APP_TEST_TEST_MANAGER_H
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <unordered_map>
|
||||
#include <chrono>
|
||||
|
||||
#include "absl/status/status.h"
|
||||
#include "imgui/imgui.h"
|
||||
|
||||
#ifdef YAZE_ENABLE_IMGUI_TEST_ENGINE
|
||||
#include "imgui_test_engine/imgui_te_engine.h"
|
||||
#else
|
||||
// Forward declaration when ImGui Test Engine is not available
|
||||
struct ImGuiTestEngine;
|
||||
#endif
|
||||
|
||||
namespace yaze {
|
||||
namespace test {
|
||||
|
||||
// Test execution status
// Lifecycle state of a single test (see GetTestStatusColor for the
// dashboard color mapping).
enum class TestStatus {
  kNotRun,    // Registered but never executed.
  kRunning,   // Currently executing.
  kPassed,
  kFailed,    // error_message on the TestResult carries details.
  kSkipped
};

// Test categories for organization
// Used both for dashboard grouping and for RunTestsByCategory filtering.
enum class TestCategory {
  kUnit,         // Fine-grained logic tests (Google Test backed).
  kIntegration,
  kUI,           // Driven through the ImGui Test Engine.
  kPerformance,
  kMemory        // Resource tracking (e.g. gfx::Arena suites).
};
|
||||
|
||||
// Individual test result
// One record per executed test; aggregated into TestResults.
struct TestResult {
  std::string name;            // Test name (without suite prefix).
  std::string suite_name;      // Display name of the owning suite.
  TestCategory category;
  TestStatus status;
  std::string error_message;   // Populated only on failure.
  std::chrono::milliseconds duration;
  std::chrono::time_point<std::chrono::steady_clock> timestamp;  // Start time.
};
|
||||
|
||||
// Overall test results summary
|
||||
struct TestResults {
|
||||
std::vector<TestResult> individual_results;
|
||||
size_t total_tests = 0;
|
||||
size_t passed_tests = 0;
|
||||
size_t failed_tests = 0;
|
||||
size_t skipped_tests = 0;
|
||||
std::chrono::milliseconds total_duration{0};
|
||||
|
||||
void AddResult(const TestResult& result) {
|
||||
individual_results.push_back(result);
|
||||
total_tests++;
|
||||
switch (result.status) {
|
||||
case TestStatus::kPassed: passed_tests++; break;
|
||||
case TestStatus::kFailed: failed_tests++; break;
|
||||
case TestStatus::kSkipped: skipped_tests++; break;
|
||||
default: break;
|
||||
}
|
||||
total_duration += result.duration;
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
individual_results.clear();
|
||||
total_tests = passed_tests = failed_tests = skipped_tests = 0;
|
||||
total_duration = std::chrono::milliseconds{0};
|
||||
}
|
||||
|
||||
float GetPassRate() const {
|
||||
return total_tests > 0 ? static_cast<float>(passed_tests) / total_tests : 0.0f;
|
||||
}
|
||||
};
|
||||
|
||||
// Base class for test suites
// Interface implemented by every registerable suite. RunTests() appends
// one TestResult per executed test to `results`; its return status
// signals infrastructure-level failures, not individual test failures.
class TestSuite {
 public:
  virtual ~TestSuite() = default;
  virtual std::string GetName() const = 0;           // Display/lookup name.
  virtual TestCategory GetCategory() const = 0;
  virtual absl::Status RunTests(TestResults& results) = 0;
  // Optional ImGui widgets exposing per-suite settings; default no-op.
  virtual void DrawConfiguration() {}
  virtual bool IsEnabled() const { return enabled_; }
  virtual void SetEnabled(bool enabled) { enabled_ = enabled; }

 protected:
  bool enabled_ = true;  // Disabled suites are skipped by TestManager.
};
|
||||
|
||||
// Resource monitoring for performance and memory tests
// One timestamped snapshot collected before/after suite execution.
struct ResourceStats {
  size_t texture_count = 0;   // Live textures tracked by gfx::Arena.
  size_t surface_count = 0;   // Live surfaces tracked by gfx::Arena.
  size_t memory_usage_mb = 0; // Rough object-count-based estimate, not real bytes.
  float frame_rate = 0.0f;    // ImGui-reported framerate at snapshot time.
  std::chrono::time_point<std::chrono::steady_clock> timestamp;
};
|
||||
|
||||
// Main test manager - singleton
// Central registry and runner for all test suites. Owns the registered
// suites, the last run's results, a resource-usage history, and the
// optional ImGui Test Engine context. Access via TestManager::Get().
// NOTE(review): no internal locking is visible — appears intended for
// use from the UI thread only; confirm before calling from elsewhere.
class TestManager {
 public:
  static TestManager& Get();

  // Core test execution. Each returns FailedPrecondition if a run is
  // already in progress; results accumulate in GetLastResults().
  absl::Status RunAllTests();
  absl::Status RunTestsByCategory(TestCategory category);
  absl::Status RunTestSuite(const std::string& suite_name);

  // Test suite management. The manager takes ownership of registered
  // suites; GetTestSuite returns a non-owning pointer (nullptr if absent).
  void RegisterTestSuite(std::unique_ptr<TestSuite> suite);
  std::vector<std::string> GetTestSuiteNames() const;
  TestSuite* GetTestSuite(const std::string& name);

  // Results access
  const TestResults& GetLastResults() const { return last_results_; }
  void ClearResults() { last_results_.Clear(); }

  // Configuration
  // NOTE(review): max_concurrent_tests_ and test_timeout_ are stored but
  // not consulted by the visible execution paths — confirm intended use.
  void SetMaxConcurrentTests(size_t max_concurrent) {
    max_concurrent_tests_ = max_concurrent;
  }
  void SetTestTimeout(std::chrono::seconds timeout) {
    test_timeout_ = timeout;
  }

  // Resource monitoring
  void UpdateResourceStats();
  const std::vector<ResourceStats>& GetResourceHistory() const {
    return resource_history_;
  }

  // UI Testing (ImGui Test Engine integration)
#ifdef YAZE_ENABLE_IMGUI_TEST_ENGINE
  ImGuiTestEngine* GetUITestEngine() { return ui_test_engine_; }
  void InitializeUITesting();
  void StopUITesting();  // Stop test engine while ImGui context is valid
  void DestroyUITestingContext();  // Destroy test engine after ImGui context is destroyed
  void ShutdownUITesting();  // Complete shutdown (calls both Stop and Destroy)
#else
  // No-op stand-ins so callers need no #ifdefs of their own.
  void* GetUITestEngine() { return nullptr; }
  void InitializeUITesting() {}
  void StopUITesting() {}
  void DestroyUITestingContext() {}
  void ShutdownUITesting() {}
#endif

  // Status queries
  bool IsTestRunning() const { return is_running_; }
  const std::string& GetCurrentTestName() const { return current_test_name_; }
  float GetProgress() const { return progress_; }  // 0.0 .. 1.0

  // UI Interface: draws the dashboard (and resource monitor) each frame.
  void DrawTestDashboard();

 private:
  TestManager();
  ~TestManager();

  // Test execution helpers
  absl::Status ExecuteTestSuite(TestSuite* suite);
  void UpdateProgress();

  // Resource monitoring helpers
  void CollectResourceStats();
  void TrimResourceHistory();

  // Member variables
  std::vector<std::unique_ptr<TestSuite>> test_suites_;   // Owning, in registration order.
  std::unordered_map<std::string, TestSuite*> suite_lookup_;  // Non-owning, name -> suite.

  TestResults last_results_;
  bool is_running_ = false;
  std::string current_test_name_;
  float progress_ = 0.0f;

  // Configuration
  size_t max_concurrent_tests_ = 1;
  std::chrono::seconds test_timeout_{30};

  // Resource monitoring (ring-buffer-like, trimmed to the cap below).
  std::vector<ResourceStats> resource_history_;
  static constexpr size_t kMaxResourceHistorySize = 1000;

  // UI Testing
#ifdef YAZE_ENABLE_IMGUI_TEST_ENGINE
  ImGuiTestEngine* ui_test_engine_ = nullptr;  // Owned; freed in DestroyUITestingContext().
#endif

  // UI State
  bool show_dashboard_ = false;
  bool show_resource_monitor_ = false;
  std::string test_filter_;   // Substring filter for the results list.
  TestCategory category_filter_ = TestCategory::kUnit;
};
|
||||
|
||||
// Utility functions for test result formatting
|
||||
const char* TestStatusToString(TestStatus status);
|
||||
const char* TestCategoryToString(TestCategory category);
|
||||
ImVec4 GetTestStatusColor(TestStatus status);
|
||||
|
||||
} // namespace test
|
||||
} // namespace yaze
|
||||
|
||||
#endif // YAZE_APP_TEST_TEST_MANAGER_H
|
||||
299
src/app/test/unit_test_suite.h
Normal file
299
src/app/test/unit_test_suite.h
Normal file
@@ -0,0 +1,299 @@
|
||||
#ifndef YAZE_APP_TEST_UNIT_TEST_SUITE_H
|
||||
#define YAZE_APP_TEST_UNIT_TEST_SUITE_H
|
||||
|
||||
#include <chrono>
#include <memory>
#include <sstream>
#include <string>

#include "app/gfx/arena.h"
#include "app/test/test_manager.h"

#ifdef YAZE_ENABLE_GTEST
#include <gtest/gtest.h>
#endif
|
||||
|
||||
// Note: ImGui Test Engine is handled through YAZE_ENABLE_IMGUI_TEST_ENGINE in TestManager
|
||||
|
||||
namespace yaze {
|
||||
namespace test {
|
||||
|
||||
#ifdef YAZE_ENABLE_GTEST
|
||||
// Custom test listener to capture Google Test results
|
||||
class TestResultCapture : public ::testing::TestEventListener {
|
||||
public:
|
||||
explicit TestResultCapture(TestResults* results) : results_(results) {}
|
||||
|
||||
void OnTestStart(const ::testing::TestInfo& test_info) override {
|
||||
current_test_start_ = std::chrono::steady_clock::now();
|
||||
current_test_name_ =
|
||||
std::string(test_info.test_case_name()) + "." + test_info.name();
|
||||
}
|
||||
|
||||
void OnTestEnd(const ::testing::TestInfo& test_info) override {
|
||||
auto end_time = std::chrono::steady_clock::now();
|
||||
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
end_time - current_test_start_);
|
||||
|
||||
TestResult result;
|
||||
result.name = test_info.name();
|
||||
result.suite_name = test_info.test_case_name();
|
||||
result.category = TestCategory::kUnit;
|
||||
result.duration = duration;
|
||||
result.timestamp = current_test_start_;
|
||||
|
||||
if (test_info.result()->Passed()) {
|
||||
result.status = TestStatus::kPassed;
|
||||
} else if (test_info.result()->Skipped()) {
|
||||
result.status = TestStatus::kSkipped;
|
||||
} else {
|
||||
result.status = TestStatus::kFailed;
|
||||
|
||||
// Capture failure message
|
||||
std::stringstream error_stream;
|
||||
for (int i = 0; i < test_info.result()->total_part_count(); ++i) {
|
||||
const auto& part = test_info.result()->GetTestPartResult(i);
|
||||
if (part.failed()) {
|
||||
error_stream << part.file_name() << ":" << part.line_number() << " "
|
||||
<< part.message() << "\n";
|
||||
}
|
||||
}
|
||||
result.error_message = error_stream.str();
|
||||
}
|
||||
|
||||
if (results_) {
|
||||
results_->AddResult(result);
|
||||
}
|
||||
}
|
||||
|
||||
// Required overrides (can be empty)
|
||||
void OnTestProgramStart(const ::testing::UnitTest&) override {}
|
||||
void OnTestIterationStart(const ::testing::UnitTest&, int) override {}
|
||||
void OnEnvironmentsSetUpStart(const ::testing::UnitTest&) override {}
|
||||
void OnEnvironmentsSetUpEnd(const ::testing::UnitTest&) override {}
|
||||
void OnTestCaseStart(const ::testing::TestCase&) override {}
|
||||
void OnTestCaseEnd(const ::testing::TestCase&) override {}
|
||||
void OnEnvironmentsTearDownStart(const ::testing::UnitTest&) override {}
|
||||
void OnEnvironmentsTearDownEnd(const ::testing::UnitTest&) override {}
|
||||
void OnTestIterationEnd(const ::testing::UnitTest&, int) override {}
|
||||
void OnTestProgramEnd(const ::testing::UnitTest&) override {}
|
||||
|
||||
private:
|
||||
TestResults* results_;
|
||||
std::chrono::time_point<std::chrono::steady_clock> current_test_start_;
|
||||
std::string current_test_name_;
|
||||
};
|
||||
#endif // YAZE_ENABLE_GTEST
|
||||
|
||||
// Unit test suite that runs Google Test cases
|
||||
// Runs the process's registered Google Test cases through the in-app
// test framework, capturing per-test results via TestResultCapture.
// Falls back to recording a single skipped placeholder result when the
// build has no Google Test support.
class UnitTestSuite : public TestSuite {
 public:
  UnitTestSuite() = default;
  ~UnitTestSuite() override = default;

  std::string GetName() const override { return "Google Test Unit Tests"; }
  TestCategory GetCategory() const override { return TestCategory::kUnit; }

  // Runs all gtest cases; returns InternalError when any test failed
  // (individual outcomes are still recorded in `results`).
  absl::Status RunTests(TestResults& results) override {
#ifdef YAZE_ENABLE_GTEST
    // Set up Google Test to capture results
    auto& listeners = ::testing::UnitTest::GetInstance()->listeners();

    // Remove default console output (we'll capture it ourselves).
    // NOTE(review): this permanently deletes the default printer — on a
    // second invocation Release() returns nullptr (deleting nullptr is
    // a no-op), so repeated runs are safe but console output is gone
    // for good. Confirm that is intended.
    delete listeners.Release(listeners.default_result_printer());

    // Add our custom listener
    auto capture_listener = new TestResultCapture(&results);
    listeners.Append(capture_listener);

    // Configure test execution
    // NOTE(review): InitGoogleTest is invoked on every run; gtest
    // expects a single initialization per process — confirm repeated
    // in-app runs behave as expected.
    int argc = 1;
    const char* argv[] = {"yaze_tests"};
    ::testing::InitGoogleTest(&argc, const_cast<char**>(argv));

    // Run the tests
    int result = RUN_ALL_TESTS();

    // Clean up: Release() detaches without deleting, so the explicit
    // delete afterwards is correct (no double-free).
    listeners.Release(capture_listener);
    delete capture_listener;

    return result == 0 ? absl::OkStatus()
                       : absl::InternalError("Some unit tests failed");
#else
    // Google Test not available - add a placeholder test
    TestResult result;
    result.name = "Placeholder Test";
    result.suite_name = GetName();
    result.category = GetCategory();
    result.status = TestStatus::kSkipped;
    result.error_message = "Google Test not available in this build";
    result.duration = std::chrono::milliseconds{0};
    result.timestamp = std::chrono::steady_clock::now();
    results.AddResult(result);

    return absl::OkStatus();
#endif
  }

  // ImGui widgets for run options.
  // NOTE(review): these settings are stored but not applied in
  // RunTests() above (no GTEST_FLAG usage visible) — confirm wiring.
  void DrawConfiguration() override {
    ImGui::Text("Google Test Configuration");
    ImGui::Checkbox("Run disabled tests", &run_disabled_tests_);
    ImGui::Checkbox("Shuffle tests", &shuffle_tests_);
    ImGui::InputInt("Repeat count", &repeat_count_);
    if (repeat_count_ < 1) repeat_count_ = 1;  // Clamp to a sane minimum.

    ImGui::InputText("Test filter", test_filter_, sizeof(test_filter_));
    ImGui::SameLine();
    if (ImGui::Button("Clear")) {
      test_filter_[0] = '\0';
    }
  }

 private:
  bool run_disabled_tests_ = false;
  bool shuffle_tests_ = false;
  int repeat_count_ = 1;
  char test_filter_[256] = "";
};
|
||||
|
||||
// Arena-specific test suite for memory management
|
||||
class ArenaTestSuite : public TestSuite {
|
||||
public:
|
||||
ArenaTestSuite() = default;
|
||||
~ArenaTestSuite() override = default;
|
||||
|
||||
std::string GetName() const override { return "Arena Memory Tests"; }
|
||||
TestCategory GetCategory() const override { return TestCategory::kMemory; }
|
||||
|
||||
absl::Status RunTests(TestResults& results) override {
|
||||
// Test Arena resource management
|
||||
RunArenaAllocationTest(results);
|
||||
RunArenaCleanupTest(results);
|
||||
RunArenaResourceTrackingTest(results);
|
||||
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
||||
void DrawConfiguration() override {
|
||||
ImGui::Text("Arena Test Configuration");
|
||||
ImGui::InputInt("Test allocations", &test_allocation_count_);
|
||||
ImGui::InputInt("Test texture size", &test_texture_size_);
|
||||
ImGui::Checkbox("Test cleanup order", &test_cleanup_order_);
|
||||
}
|
||||
|
||||
private:
|
||||
void RunArenaAllocationTest(TestResults& results) {
|
||||
auto start_time = std::chrono::steady_clock::now();
|
||||
|
||||
TestResult result;
|
||||
result.name = "Arena_Allocation_Test";
|
||||
result.suite_name = GetName();
|
||||
result.category = GetCategory();
|
||||
result.timestamp = start_time;
|
||||
|
||||
try {
|
||||
auto& arena = gfx::Arena::Get();
|
||||
size_t initial_texture_count = arena.GetTextureCount();
|
||||
size_t initial_surface_count = arena.GetSurfaceCount();
|
||||
|
||||
// Test texture allocation (would need a valid renderer)
|
||||
// This is a simplified test - in real implementation we'd mock the
|
||||
// renderer
|
||||
|
||||
size_t final_texture_count = arena.GetTextureCount();
|
||||
size_t final_surface_count = arena.GetSurfaceCount();
|
||||
|
||||
// For now, just verify the Arena can be accessed
|
||||
result.status = TestStatus::kPassed;
|
||||
|
||||
} catch (const std::exception& e) {
|
||||
result.status = TestStatus::kFailed;
|
||||
result.error_message =
|
||||
"Arena allocation test failed: " + std::string(e.what());
|
||||
}
|
||||
|
||||
auto end_time = std::chrono::steady_clock::now();
|
||||
result.duration = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
end_time - start_time);
|
||||
|
||||
results.AddResult(result);
|
||||
}
|
||||
|
||||
void RunArenaCleanupTest(TestResults& results) {
|
||||
auto start_time = std::chrono::steady_clock::now();
|
||||
|
||||
TestResult result;
|
||||
result.name = "Arena_Cleanup_Test";
|
||||
result.suite_name = GetName();
|
||||
result.category = GetCategory();
|
||||
result.timestamp = start_time;
|
||||
|
||||
try {
|
||||
auto& arena = gfx::Arena::Get();
|
||||
|
||||
// Test that shutdown doesn't crash
|
||||
// Note: We can't actually call Shutdown() here as it would affect the
|
||||
// running app This test verifies the methods exist and are callable
|
||||
size_t texture_count = arena.GetTextureCount();
|
||||
size_t surface_count = arena.GetSurfaceCount();
|
||||
|
||||
result.status = TestStatus::kPassed;
|
||||
|
||||
} catch (const std::exception& e) {
|
||||
result.status = TestStatus::kFailed;
|
||||
result.error_message =
|
||||
"Arena cleanup test failed: " + std::string(e.what());
|
||||
}
|
||||
|
||||
auto end_time = std::chrono::steady_clock::now();
|
||||
result.duration = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
end_time - start_time);
|
||||
|
||||
results.AddResult(result);
|
||||
}
|
||||
|
||||
void RunArenaResourceTrackingTest(TestResults& results) {
|
||||
auto start_time = std::chrono::steady_clock::now();
|
||||
|
||||
TestResult result;
|
||||
result.name = "Arena_Resource_Tracking_Test";
|
||||
result.suite_name = GetName();
|
||||
result.category = GetCategory();
|
||||
result.timestamp = start_time;
|
||||
|
||||
try {
|
||||
auto& arena = gfx::Arena::Get();
|
||||
|
||||
// Test resource tracking methods
|
||||
size_t texture_count = arena.GetTextureCount();
|
||||
size_t surface_count = arena.GetSurfaceCount();
|
||||
|
||||
// Verify tracking methods work
|
||||
if (texture_count >= 0 && surface_count >= 0) {
|
||||
result.status = TestStatus::kPassed;
|
||||
} else {
|
||||
result.status = TestStatus::kFailed;
|
||||
result.error_message = "Invalid resource counts returned";
|
||||
}
|
||||
|
||||
} catch (const std::exception& e) {
|
||||
result.status = TestStatus::kFailed;
|
||||
result.error_message =
|
||||
"Resource tracking test failed: " + std::string(e.what());
|
||||
}
|
||||
|
||||
auto end_time = std::chrono::steady_clock::now();
|
||||
result.duration = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
end_time - start_time);
|
||||
|
||||
results.AddResult(result);
|
||||
}
|
||||
|
||||
int test_allocation_count_ = 10;
|
||||
int test_texture_size_ = 64;
|
||||
bool test_cleanup_order_ = true;
|
||||
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace yaze
|
||||
|
||||
#endif // YAZE_APP_TEST_UNIT_TEST_SUITE_H
|
||||
Reference in New Issue
Block a user