#!/bin/bash
#
# Local Benchmark Runner
# Mimics the GitHub Actions workflow for local execution
#
# Artifacts for each run are written under <project root>/.local-artifacts/run_<timestamp>.

# Strict mode: abort on the first failed command (-e) and on any use of an
# unset variable (-u). pipefail is deliberately omitted to keep the
# best-effort pipelines below (ls | head, grep | head) non-fatal.
set -eu

# Resolve the directory this script lives in, independent of the caller's cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
LOCAL_ARTIFACTS_DIR="$PROJECT_ROOT/.local-artifacts"
# UTC timestamp keeps run IDs sortable and unambiguous across timezones.
TIMESTAMP=$(date -u +"%Y%m%d_%H%M%S")
RUN_DIR="$LOCAL_ARTIFACTS_DIR/run_$TIMESTAMP"
readonly SCRIPT_DIR PROJECT_ROOT LOCAL_ARTIFACTS_DIR TIMESTAMP RUN_DIR

# Create artifacts directory
mkdir -p "$RUN_DIR"

echo "=== Local Benchmark Runner ==="
echo "Run ID: $TIMESTAMP"
echo "Artifacts: $RUN_DIR"
echo ""
# Step 1: Run benchmarks
echo "Step 1: Running benchmarks..."
cd "$PROJECT_ROOT"
# Capture stdout+stderr so results and failures are both preserved. Without
# this guard, `set -e` would kill the script on a benchmark failure with no
# hint that the details are sitting in the redirected log file.
if ! go test -bench=. -benchmem ./tests/benchmarks/... > "$RUN_DIR/benchmark_results.txt" 2>&1; then
  echo "Benchmarks failed; see $RUN_DIR/benchmark_results.txt" >&2
  exit 1
fi

# Extract only the per-benchmark result lines (e.g. "BenchmarkFoo-8 ...");
# `|| true` keeps the script alive when no benchmarks matched.
grep "Benchmark.*-[0-9].*" "$RUN_DIR/benchmark_results.txt" > "$RUN_DIR/clean_benchmarks.txt" || true
# Step 2: Convert to Prometheus metrics
echo "Step 2: Converting to Prometheus metrics..."
# Write the Prometheus exposition-format headers. The quoted delimiter
# ('EOF') makes the here-doc literal, so no accidental $-expansion can
# corrupt the metadata lines.
cat > "$RUN_DIR/prometheus_metrics.txt" << 'EOF'
# HELP benchmark_time_per_op Time per operation in nanoseconds
# TYPE benchmark_time_per_op gauge
# HELP benchmark_memory_per_op Memory per operation in bytes
# TYPE benchmark_memory_per_op gauge
# HELP benchmark_allocs_per_op Allocations per operation
# TYPE benchmark_allocs_per_op gauge
EOF
# Parse benchmark results and convert to Prometheus format
#
# parse_bench_line LINE
#   Reads one `go test -bench` result line and prints a Prometheus gauge
#   sample on stdout for each numeric value found. Standard Go benchmark
#   output separates values from their units ("1234 ns/op 56 B/op 3
#   allocs/op"); attached units ("1234ns/op") are accepted as well.
#   Non-numeric values and empty lines are skipped.
parse_bench_line() {
  local -a f
  read -ra f <<< "$1"
  (( ${#f[@]} )) || return 0

  # Drop the trailing GOMAXPROCS suffix (e.g. "BenchmarkFoo-8" -> "BenchmarkFoo").
  local name=${f[0]}
  if [[ "$name" =~ ^(.*)-[0-9]*$ ]]; then
    name=${BASH_REMATCH[1]}
  fi
  # Clean benchmark name for Prometheus: anything outside [a-zA-Z0-9_] becomes '_'.
  local clean=${name//[^a-zA-Z0-9_]/_}

  local i metric val=""
  for (( i = 1; i < ${#f[@]}; i++ )); do
    metric=""
    case "${f[i]}" in
      ns/op)      metric=benchmark_time_per_op;   val=${f[i-1]} ;;
      B/op)       metric=benchmark_memory_per_op; val=${f[i-1]} ;;
      allocs/op)  metric=benchmark_allocs_per_op; val=${f[i-1]} ;;
      *ns/op)     metric=benchmark_time_per_op;   val=${f[i]%ns/op} ;;
      *B/op)      metric=benchmark_memory_per_op; val=${f[i]%B/op} ;;
      *allocs/op) metric=benchmark_allocs_per_op; val=${f[i]%allocs/op} ;;
    esac
    # Only add metrics if we have valid numeric values
    if [[ -n "$metric" && "$val" =~ ^[0-9.]+$ ]]; then
      echo "${metric}{benchmark=\"$clean\"} ${val}"
    fi
  done
}

# NOTE(review): the original read awk fields $3/$4/$5 as time/memory/allocs,
# but Go emits units as separate fields, so $4 was the literal "ns/op" (the
# memory metric was never emitted) and $5 was the B/op value (the allocs
# metric carried the wrong number). Parsing by unit token fixes both, and
# replaces five echo|awk|sed subshells per line with in-shell expansion.
while IFS= read -r line; do
  parse_bench_line "$line" >> "$RUN_DIR/prometheus_metrics.txt"
done < "$RUN_DIR/clean_benchmarks.txt"
# Step 3: Push to local Pushgateway (if running)
# Best-effort: skip silently-ish when curl is missing, and tell the user how
# to start the gateway when it is not reachable.
echo "Step 3: Pushing to Prometheus..."
if ! command -v curl >/dev/null 2>&1; then
  echo "curl not available, skipping push to Pushgateway"
elif curl -s http://localhost:9091 >/dev/null 2>&1; then
  echo "Pushgateway detected, pushing metrics..."
  curl --data-binary @"$RUN_DIR/prometheus_metrics.txt" \
    "http://localhost:9091/metrics/job/benchmark/instance/local_$TIMESTAMP"
else
  echo "Pushgateway not running at http://localhost:9091"
  echo "Start it with: make monitoring-performance"
fi
# Step 4: Display results
echo ""
echo "=== Results Summary ==="
echo "Benchmark results saved to: $RUN_DIR/benchmark_results.txt"
echo "Prometheus metrics saved to: $RUN_DIR/prometheus_metrics.txt"
echo ""

# Show top 10 results
echo "Top 10 benchmark times:"
# grep reads the file directly (no useless `cat |`); head caps the output at 10.
grep "benchmark_time_per_op" "$RUN_DIR/prometheus_metrics.txt" | head -10
# Step 5: Generate HTML report
echo "Step 5: Generating HTML report..."
# Unquoted EOF: the here-doc intentionally expands $TIMESTAMP, $(date) and
# the table-building command substitution below.
# NOTE(review): go bench fields are "name iters time ns/op mem B/op allocs
# allocs/op"; the original took awk $4/$5 for the Memory/Allocs columns,
# which printed the literal "ns/op" as memory and the B/op value as allocs.
# The read below picks the value fields and skips the unit fields.
# NOTE(review): the <pre> block embeds raw benchmark output unescaped —
# fine for trusted local output, but any '<' in it will be parsed as HTML.
cat > "$RUN_DIR/report.html" << EOF
<!DOCTYPE html>
<html>
<head>
<title>Benchmark Report - $TIMESTAMP</title>
<style>
body { font-family: Arial, sans-serif; margin: 20px; }
table { border-collapse: collapse; width: 100%; }
th, td { border: 1px solid #ddd; padding: 8px; text-align: left; }
th { background-color: #f2f2f2; }
.metric { font-family: monospace; }
</style>
</head>
<body>
<h1>Benchmark Report</h1>
<p><strong>Run ID:</strong> $TIMESTAMP</p>
<p><strong>Date:</strong> $(date)</p>

<h2>Results</h2>
<table>
<tr>
<th>Benchmark</th>
<th>Time (ns/op)</th>
<th>Memory (B/op)</th>
<th>Allocs (allocs/op)</th>
</tr>
$(while read -r BENCHMARK_NAME _ TIME_PER_OP _ MEMORY_PER_OP _ ALLOCS_PER_OP _; do
if [[ -n "$BENCHMARK_NAME" ]]; then
echo " <tr>"
echo " <td class=\"metric\">$BENCHMARK_NAME</td>"
echo " <td>$TIME_PER_OP</td>"
echo " <td>$MEMORY_PER_OP</td>"
echo " <td>$ALLOCS_PER_OP</td>"
echo " </tr>"
fi
done < "$RUN_DIR/clean_benchmarks.txt")
</table>

<h2>Raw Output</h2>
<pre>$(cat "$RUN_DIR/benchmark_results.txt")</pre>
</body>
</html>
EOF

echo "HTML report saved to: $RUN_DIR/report.html"
echo "Open with: open $RUN_DIR/report.html"
# Step 6: Artifact management — report where this run's outputs live and
# how many historical runs remain on disk.
printf '\n%s\n' "=== Artifact Management ==="
printf '%s\n' "All artifacts saved in: $RUN_DIR"
printf '%s\n' "Total runs: $(ls -1d "$LOCAL_ARTIFACTS_DIR"/run_* 2>/dev/null | wc -l)"
printf '\n'

# Show recent runs
printf '%s\n' "Recent runs:"
ls -lt "$LOCAL_ARTIFACTS_DIR"/run_* 2>/dev/null | head -5 || echo "No previous runs found"
# Step 7: Comprehensive cleanup
echo ""
echo "=== Cleanup Procedures ==="

# Prefer the dedicated cleanup script when it exists alongside this one.
if [ -f "$SCRIPT_DIR/cleanup-benchmarks.sh" ]; then
  echo "Running standard benchmark cleanup..."
  "$SCRIPT_DIR/cleanup-benchmarks.sh" benchmarks
else
  # Fallback cleanup if script not available
  echo "Cleaning old benchmark runs (keeping last 10)..."
  # Explicit guard: never let the rm below run in the wrong directory.
  cd "$LOCAL_ARTIFACTS_DIR" || exit 1
  # run_* names are run_<YYYYmmdd_HHMMSS> (no whitespace), so ls|xargs is
  # safe here; `--` keeps rm from treating odd names as options.
  ls -1t run_* 2>/dev/null | tail -n +11 | xargs rm -rf -- 2>/dev/null || echo "No old runs to clean"

  # Clean temporary files older than an hour (best-effort).
  echo "Cleaning temporary files..."
  find /tmp -name "benchmark_*" -type f -mmin +60 -delete 2>/dev/null || true
  find /var/tmp -name "benchmark_*" -type f -mmin +60 -delete 2>/dev/null || true

  # Clean Go build cache (best-effort; tolerate missing go toolchain).
  echo "Cleaning Go build cache..."
  go clean -testcache 2>/dev/null || true
fi
# Show final status: how many runs survived cleanup and current disk usage.
printf '\n%s\n' "=== Final Status ==="
printf '%s\n' "Active runs remaining: $(ls -1d "$LOCAL_ARTIFACTS_DIR"/run_* 2>/dev/null | wc -l)"
printf '%s\n' "Disk usage: $(du -sh "$LOCAL_ARTIFACTS_DIR" 2>/dev/null | cut -f1 || echo "N/A")"

printf '\n%s\n' "=== Complete! ==="
printf '%s\n' "View results: open $RUN_DIR/report.html"
printf '%s\n' "Push metrics: Available at http://localhost:9091 (if running)"