refactor(comparison): Use existing acceptance_report.py for HTML generation

Instead of duplicating HTML generation code, use the existing
acceptance_report.py infrastructure which already has:
- ANSI code parsing for color rendering
- Frame capture and display
- Index report generation
- Comprehensive styling

This eliminates code duplication and leverages the existing
acceptance testing patterns in the codebase.
This commit is contained in:
2026-03-21 16:33:06 -07:00
parent 7d4623b009
commit f568cc1a73

View File

def generate_html_report(
    comparison_results: List[Dict[str, Any]],
    output_dir: Path = Path("tests/comparison_output"),
) -> Path:
    """Generate HTML report from comparison results using acceptance_report.py.

    Delegates all HTML rendering to the existing ``tests.acceptance_report``
    infrastructure (ANSI parsing, frame display, index page styling) instead
    of duplicating report markup here.

    Args:
        comparison_results: List of comparison result dicts. Each entry is
            expected to carry ``preset``, ``stats["total_frames_compared"]``,
            ``performance_comparison["sideline"]["total_time_ms"]``, and an
            optional ``status`` key ("success" maps to PASS).
        output_dir: Directory to write the index report and summary JSON into.
            Created (with parents) if it does not exist.

    Returns:
        Path to the generated HTML index report.
    """
    # Function-scope import keeps the test-support dependency out of
    # module import time (and avoids a cycle with the tests package).
    from datetime import datetime, timezone

    from tests.acceptance_report import save_index_report

    output_dir.mkdir(parents=True, exist_ok=True)

    # One index entry per comparison run; status collapses to PASS/FAIL.
    reports = [
        {
            "test_name": f"comparison-{result['preset']}",
            "status": "PASS" if result.get("status") == "success" else "FAIL",
            "frame_count": result["stats"]["total_frames_compared"],
            "duration_ms": result["performance_comparison"]["sideline"][
                "total_time_ms"
            ],
        }
        for result in comparison_results
    ]

    # Generate the index report with links to all comparison results.
    index_file = save_index_report(reports, str(output_dir))

    # Also save a summary JSON file for programmatic access.
    summary_file = output_dir / "comparison_summary.json"
    with open(summary_file, "w", encoding="utf-8") as f:
        json.dump(
            {
                # Timezone-aware timestamp; the previous
                # __import__("datetime") inline hack produced a naive time.
                "timestamp": datetime.now(timezone.utc).isoformat(),
                "results": comparison_results,
            },
            f,
            indent=2,
        )

    return Path(index_file)