# fetch_ml/.github/workflows/benchmark-metrics.yml
#
# 87 lines
# 3.4 KiB
# YAML

name: Benchmark Metrics

# Manual trigger only — run on demand from the Actions tab.
on:
  workflow_dispatch:

jobs:
  benchmark:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.21'

      - name: Cache Go modules
        # actions/cache@v3 is deprecated (old artifact backend); v4 is the
        # supported release with identical inputs.
        uses: actions/cache@v4
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
          restore-keys: |
            ${{ runner.os }}-go-

      - name: Run benchmarks
        run: |
          echo "Running performance benchmarks..."
          go test -bench=. -benchmem ./tests/benchmarks/... > benchmark_results.txt 2>&1
          # Keep only result lines, e.g.:
          #   BenchmarkFoo-8  1000000  1234 ns/op  456 B/op  7 allocs/op
          # "|| true" keeps the step green when nothing matched.
          grep "Benchmark.*-[0-9].*" benchmark_results.txt > clean_benchmarks.txt || true

      - name: Convert to Prometheus metrics
        run: |
          # Emit Prometheus text-exposition-format gauges for each benchmark.
          {
            echo "# HELP benchmark_time_per_op Time per operation in nanoseconds"
            echo "# TYPE benchmark_time_per_op gauge"
            echo "# HELP benchmark_memory_per_op Memory per operation in bytes"
            echo "# TYPE benchmark_memory_per_op gauge"
            echo "# HELP benchmark_allocs_per_op Allocations per operation"
            echo "# TYPE benchmark_allocs_per_op gauge"
          } > prometheus_metrics.txt
          # A "go test -benchmem" result line has 8 whitespace-separated fields:
          #   $1 name  $2 iterations  $3 ns value  $4 "ns/op"
          #   $5 bytes value  $6 "B/op"  $7 allocs value  $8 "allocs/op"
          # (The previous version read $3/$4/$5, which wrote the literal string
          # "ns/op" as the memory sample and the byte count as the alloc count.)
          while IFS= read -r line; do
            if [[ -n "$line" ]]; then
              # Strip the "-<GOMAXPROCS>" suffix from the benchmark name.
              BENCHMARK_NAME=$(echo "$line" | awk '{print $1}' | sed 's/-[0-9]*$//')
              TIME_PER_OP=$(echo "$line" | awk '{print $3}')
              MEMORY_PER_OP=$(echo "$line" | awk '{print $5}')
              ALLOCS_PER_OP=$(echo "$line" | awk '{print $7}')
              # Prometheus label values: map anything outside [a-zA-Z0-9_] to "_".
              CLEAN_NAME=$(echo "$BENCHMARK_NAME" | sed 's/[^a-zA-Z0-9_]/_/g')
              echo "benchmark_time_per_op{benchmark=\"$CLEAN_NAME\"} $TIME_PER_OP" >> prometheus_metrics.txt
              echo "benchmark_memory_per_op{benchmark=\"$CLEAN_NAME\"} $MEMORY_PER_OP" >> prometheus_metrics.txt
              echo "benchmark_allocs_per_op{benchmark=\"$CLEAN_NAME\"} $ALLOCS_PER_OP" >> prometheus_metrics.txt
            fi
          done < clean_benchmarks.txt

      - name: Push to Prometheus Pushgateway
        env:
          PROMETHEUS_PUSHGATEWAY_URL: ${{ secrets.PROMETHEUS_PUSHGATEWAY_URL }}
        run: |
          # Push is optional: skip quietly when the secret is not configured.
          if [ -n "$PROMETHEUS_PUSHGATEWAY_URL" ]; then
            echo "Pushing metrics to Prometheus..."
            # --fail makes the step fail on an HTTP error instead of silently
            # succeeding with an error page in the log.
            curl --fail --silent --show-error --data-binary @prometheus_metrics.txt \
              "$PROMETHEUS_PUSHGATEWAY_URL/metrics/job/benchmark/instance/${{ github.run_id }}"
          else
            echo "PROMETHEUS_PUSHGATEWAY_URL not configured, skipping push"
          fi

      - name: Upload benchmark results
        # upload-artifact@v3 was shut down by GitHub; v4 is required.
        uses: actions/upload-artifact@v4
        if: always()  # keep results for debugging even when an earlier step failed
        with:
          name: benchmark-results-${{ github.run_id }}
          path: |
            benchmark_results.txt
            clean_benchmarks.txt
            prometheus_metrics.txt
          retention-days: 30
- name: Display results summary
run: |
echo "=== Benchmark Results Summary ==="
cat prometheus_metrics.txt | grep "benchmark_time_per_op" | head -10