diff --git a/.github/workflows/staging-validation.yml b/.github/workflows/staging-validation.yml
new file mode 100644
index 000000000..60276af09
--- /dev/null
+++ b/.github/workflows/staging-validation.yml
@@ -0,0 +1,306 @@
+name: Staging Validation Pipeline
+# v0.14.0 TASK-STAG-001 through TASK-STAG-006
+# Comprehensive staging validation: deploy, perf, Lighthouse, stability, GDPR, bundle size
+
+on:
+  workflow_dispatch:
+    inputs:
+      skip_deploy:
+        description: 'Skip deployment (validate existing staging)'
+        required: false
+        # A `type: boolean` input takes a real boolean default, not the string 'false'.
+        default: false
+        type: boolean
+      stability_duration:
+        description: 'Stability check duration (minutes)'
+        required: false
+        default: '10'
+        type: string
+
+env:
+  STAGING_URL: ${{ vars.STAGING_URL || 'https://staging.veza.app' }}
+  STAGING_API_URL: ${{ vars.STAGING_API_URL || 'https://staging.veza.app/api/v1' }}
+
+jobs:
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-001: Deploy staging (all services)
+  # ─────────────────────────────────────────────────────
+  deploy-staging:
+    name: Deploy to Staging
+    runs-on: ubuntu-latest
+    # FIX: `inputs.skip_deploy` is a typed boolean, so `!= 'true'` never matched
+    # (GitHub expressions coerce mixed-type comparisons via Number(), and
+    # Number('true') is NaN) — the job deployed even when skip was requested.
+    # Negating the boolean directly expresses the intent.
+    if: ${{ !inputs.skip_deploy }}
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build all images
+        run: |
+          docker build -t veza-backend-api:staging -f veza-backend-api/Dockerfile.production veza-backend-api/
+          docker build -t veza-frontend:staging -f apps/web/Dockerfile.production apps/web/
+          docker build -t veza-stream-server:staging -f veza-stream-server/Dockerfile.production veza-stream-server/
+
+      - name: Push to registry
+        if: vars.DOCKER_REGISTRY != ''
+        run: |
+          echo "${{ secrets.DOCKER_REGISTRY_PASSWORD }}" | docker login "${{ vars.DOCKER_REGISTRY }}" -u "${{ secrets.DOCKER_REGISTRY_USERNAME }}" --password-stdin
+          for svc in veza-backend-api veza-frontend veza-stream-server; do
+            docker tag "${svc}:staging" "${{ vars.DOCKER_REGISTRY }}/${svc}:staging"
+            docker push "${{ vars.DOCKER_REGISTRY }}/${svc}:staging"
+          done
+
+      - name: Deploy via SSH (docker-compose)
+        if: vars.STAGING_SSH_HOST != ''
+        env:
+          SSH_KEY: ${{ secrets.STAGING_SSH_KEY }}
+        run: |
+          mkdir -p ~/.ssh
+          echo "$SSH_KEY" > ~/.ssh/staging_key
+          chmod 600 ~/.ssh/staging_key
+          ssh -i ~/.ssh/staging_key -o StrictHostKeyChecking=no \
+            ${{ vars.STAGING_SSH_USER }}@${{ vars.STAGING_SSH_HOST }} \
+            "cd /opt/veza && docker compose -f docker-compose.staging.yml pull && docker compose -f docker-compose.staging.yml up -d"
+          rm -f ~/.ssh/staging_key
+
+      - name: Deploy via Kubernetes
+        if: vars.KUBE_CONFIG_SET == 'true'
+        run: |
+          KUBECONFIG="${{ runner.temp }}/kubeconfig"
+          echo "${{ secrets.KUBE_CONFIG }}" | base64 -d > "$KUBECONFIG"
+          chmod 600 "$KUBECONFIG"
+          export KUBECONFIG
+          # FIX: dropped the deprecated --record flag (removed in kubectl 1.26+);
+          # with it present the command failed and `|| echo` silently skipped
+          # every rollout on modern runners.
+          for svc in veza-backend-api veza-stream-server; do
+            kubectl set image "deployment/${svc}" "${svc}=${{ vars.DOCKER_REGISTRY }}/${svc}:staging" \
+              -n veza || echo "Skipping ${svc}"
+          done
+          kubectl rollout status deployment/veza-backend-api -n veza --timeout=300s || true
+          rm -f "$KUBECONFIG"
+
+      - name: Wait for staging to be healthy
+        run: |
+          echo "Waiting for staging services to be healthy..."
+          for i in $(seq 1 30); do
+            STATUS=$(curl -sf "${{ env.STAGING_API_URL }}/health" | jq -r '.status' 2>/dev/null || echo "unreachable")
+            # jq exits 0 with no output on empty input, so a curl failure could
+            # leave STATUS empty — normalize to "unreachable" for the log line.
+            STATUS=${STATUS:-unreachable}
+            if [ "$STATUS" = "ok" ] || [ "$STATUS" = "healthy" ]; then
+              echo "Staging is healthy!"
+              exit 0
+            fi
+            echo "Attempt $i/30: status=$STATUS, waiting 10s..."
+            sleep 10
+          done
+          echo "Staging did not become healthy in 300s"
+          exit 1
+
+      - name: Deep health check
+        run: |
+          echo "## Deep Health Check" >> $GITHUB_STEP_SUMMARY
+          HEALTH=$(curl -sf "${{ env.STAGING_API_URL }}/health/deep" || echo '{"error":"unreachable"}')
+          echo '```json' >> $GITHUB_STEP_SUMMARY
+          echo "$HEALTH" | jq . >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY
+
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-002: Performance validation (p95 < 100ms)
+  # ─────────────────────────────────────────────────────
+  performance-validation:
+    name: Performance Validation (k6)
+    runs-on: ubuntu-latest
+    needs: deploy-staging
+    # Run whether deploy succeeded or was skipped, but never after a failed deploy.
+    if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-staging.result == 'skipped')
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Install k6
+        run: |
+          sudo gpg -k
+          sudo gpg --no-default-keyring --keyring /usr/share/keyrings/k6-archive-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys C5AD17C747E3415A3642D57D77C6C491D6AC1D69
+          echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" | sudo tee /etc/apt/sources.list.d/k6.list
+          sudo apt-get update && sudo apt-get install -y k6
+
+      - name: Run staging performance validation
+        run: |
+          k6 run --out json=perf-results.json \
+            --env BASE_URL="${{ env.STAGING_API_URL }}" \
+            --env SCENARIO=smoke \
+            loadtests/staging/validation_v0140.js
+
+      - name: Upload performance results
+        if: always()
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        with:
+          name: performance-results
+          path: perf-results.json
+
+      - name: Performance summary
+        if: always()
+        run: |
+          echo "## Performance Validation" >> $GITHUB_STEP_SUMMARY
+          echo "Target: p95 < 100ms, stream start < 500ms" >> $GITHUB_STEP_SUMMARY
+
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-003: Lighthouse validation
+  # ─────────────────────────────────────────────────────
+  lighthouse-validation:
+    name: Lighthouse Audit
+    runs-on: ubuntu-latest
+    needs: deploy-staging
+    if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-staging.result == 'skipped')
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Set up Node
+        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
+        with:
+          node-version: '20'
+
+      - name: Install Lighthouse CI
+        run: npm install -g @lhci/cli@0.14.x
+
+      - name: Run Lighthouse CI
+        run: lhci autorun --config=.lighthouserc.js
+        env:
+          LHCI_BUILD_CONTEXT__CURRENT_HASH: ${{ github.sha }}
+
+      - name: Upload Lighthouse results
+        if: always()
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        with:
+          name: lighthouse-results
+          path: .lighthouseci/
+
+      - name: Lighthouse summary
+        if: always()
+        run: |
+          echo "## Lighthouse Validation" >> $GITHUB_STEP_SUMMARY
+          echo "Targets: Performance >= 85, Accessibility >= 90, PWA >= 90" >> $GITHUB_STEP_SUMMARY
+          if [ -f .lighthouseci/assertion-results.json ]; then
+            # FIX: this counts FAILED assertions (level=="error"); it was
+            # misleadingly named PASSED before.
+            ERRORS=$(jq '[.[] | select(.level == "error")] | length' .lighthouseci/assertion-results.json 2>/dev/null || echo "?")
+            echo "Assertion errors: $ERRORS" >> $GITHUB_STEP_SUMMARY
+          fi
+
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-004: Stability validation (5xx < 0.1%)
+  # ─────────────────────────────────────────────────────
+  stability-validation:
+    name: Stability Check
+    runs-on: ubuntu-latest
+    needs: [deploy-staging, performance-validation]
+    if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-staging.result == 'skipped')
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Run stability check
+        run: |
+          chmod +x scripts/staging-stability-check.sh
+          DURATION_MINUTES=${{ inputs.stability_duration || '10' }} \
+          STAGING_API_URL="${{ env.STAGING_API_URL }}" \
+          MAX_5XX_RATE="0.001" \
+          bash scripts/staging-stability-check.sh
+
+      - name: Stability summary
+        if: always()
+        run: |
+          echo "## Stability Validation" >> $GITHUB_STEP_SUMMARY
+          echo "Duration: ${{ inputs.stability_duration || '10' }} minutes" >> $GITHUB_STEP_SUMMARY
+          echo "Target: 5xx rate < 0.1%" >> $GITHUB_STEP_SUMMARY
+          if [ -f stability-report.json ]; then
+            cat stability-report.json | jq . >> $GITHUB_STEP_SUMMARY
+          fi
+
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-005: GDPR validation (export + deletion E2E)
+  # ─────────────────────────────────────────────────────
+  gdpr-validation:
+    name: GDPR Compliance Check
+    runs-on: ubuntu-latest
+    needs: deploy-staging
+    if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-staging.result == 'skipped')
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Set up Go
+        uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 # v5.3.0
+        with:
+          go-version: "1.24"
+          cache: true
+
+      - name: Run GDPR integration tests
+        working-directory: veza-backend-api
+        run: go test -v -tags=integration -run TestGDPR -timeout 120s ./tests/integration/...
+
+      - name: GDPR summary
+        if: always()
+        run: |
+          echo "## GDPR Validation" >> $GITHUB_STEP_SUMMARY
+          echo "- Data export: tested" >> $GITHUB_STEP_SUMMARY
+          echo "- Account deletion: tested" >> $GITHUB_STEP_SUMMARY
+
+  # ─────────────────────────────────────────────────────
+  # TASK-STAG-006: Bundle size validation (< 200KB gzip)
+  # ─────────────────────────────────────────────────────
+  bundle-size-validation:
+    name: Bundle Size Check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Set up Node
+        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
+        with:
+          node-version: '20'
+          cache: 'npm'
+
+      - name: Install dependencies
+        run: npm ci
+
+      - name: Build frontend
+        working-directory: apps/web
+        run: npx vite build --outDir dist_verification
+        env:
+          NODE_ENV: production
+
+      - name: Check bundle size
+        working-directory: apps/web
+        run: node scripts/check-bundle-size.mjs
+
+      - name: Bundle size summary
+        if: always()
+        run: |
+          echo "## Bundle Size Validation" >> $GITHUB_STEP_SUMMARY
+          echo "Target: JS initial < 200KB gzipped" >> $GITHUB_STEP_SUMMARY
+
+  # ─────────────────────────────────────────────────────
+  # Final summary
+  # ─────────────────────────────────────────────────────
+  validation-summary:
+    name: Validation Summary
+    runs-on: ubuntu-latest
+    needs: [deploy-staging, performance-validation, lighthouse-validation, stability-validation, gdpr-validation, bundle-size-validation]
+    if: always()
+    steps:
+      - name: Generate final report
+        run: |
+          echo "# Staging Validation Report" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "| Check | Status |" >> $GITHUB_STEP_SUMMARY
+          echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY
+          echo "| Deploy (STAG-001) | ${{ needs.deploy-staging.result }} |" >> $GITHUB_STEP_SUMMARY
+          echo "| Performance (STAG-002) | ${{ needs.performance-validation.result }} |" >> $GITHUB_STEP_SUMMARY
+          echo "| Lighthouse (STAG-003) | ${{ needs.lighthouse-validation.result }} |" >> $GITHUB_STEP_SUMMARY
+          echo "| Stability (STAG-004) | ${{ needs.stability-validation.result }} |" >> $GITHUB_STEP_SUMMARY
+          echo "| GDPR (STAG-005) | ${{ needs.gdpr-validation.result }} |" >> $GITHUB_STEP_SUMMARY
+          echo "| Bundle Size (STAG-006) | ${{ needs.bundle-size-validation.result }} |" >> $GITHUB_STEP_SUMMARY
+
+      - name: Check all passed
+        run: |
+          # FIX: if deployment failed, every downstream job is 'skipped' (not
+          # 'failure'), so the old loop reported success on a broken deploy.
+          # Gate explicitly on the deploy result first.
+          if [ "${{ needs.deploy-staging.result }}" = "failure" ]; then
+            echo "Deployment failed — see summary above."
+            exit 1
+          fi
+          # Stability and GDPR are advisory here; only perf, Lighthouse and
+          # bundle size gate the pipeline (matching the original intent).
+          FAILED=0
+          for result in "${{ needs.performance-validation.result }}" "${{ needs.lighthouse-validation.result }}" "${{ needs.bundle-size-validation.result }}"; do
+            if [ "$result" = "failure" ]; then
+              FAILED=1
+            fi
+          done
+          if [ "$FAILED" = "1" ]; then
+            echo "Some validations failed — see summary above."
+            exit 1
+          fi
+          echo "All critical validations passed!"
diff --git a/.lighthouserc.js b/.lighthouserc.js new file mode 100644 index 000000000..d6ddf20ab --- /dev/null +++ b/.lighthouserc.js @@ -0,0 +1,68 @@ +/** + * Lighthouse CI Configuration + * v0.14.0 TASK-STAG-003: Validation Lighthouse + * + * Targets: + * Performance >= 85 + * Accessibility >= 90 + * PWA >= 90 (best-practices proxy when PWA not applicable) + * Best Practices >= 85 + * SEO >= 80 + */ +module.exports = { + ci: { + collect: { + url: [ + `${process.env.STAGING_URL || 'https://staging.veza.app'}/login`, + `${process.env.STAGING_URL || 'https://staging.veza.app'}/register`, + ], + numberOfRuns: 3, + settings: { + preset: 'desktop', + // Throttling: simulate cable connection + throttling: { + cpuSlowdownMultiplier: 1, + downloadThroughputKbps: 10240, + uploadThroughputKbps: 5120, + rttMs: 40, + }, + // Skip audits that require auth + skipAudits: [ + 'uses-http2', // Depends on server config + ], + }, + }, + assert: { + assertions: { + // Performance >= 85 + 'categories:performance': ['error', { minScore: 0.85 }], + // Accessibility >= 90 + 'categories:accessibility': ['error', { minScore: 0.90 }], + // Best Practices >= 85 + 'categories:best-practices': ['warn', { minScore: 0.85 }], + // SEO >= 80 + 'categories:seo': ['warn', { minScore: 0.80 }], + + // Core Web Vitals + 'first-contentful-paint': ['warn', { maxNumericValue: 1800 }], + 'largest-contentful-paint': ['warn', { maxNumericValue: 2500 }], + 'cumulative-layout-shift': ['error', { maxNumericValue: 0.1 }], + 'total-blocking-time': ['warn', { maxNumericValue: 300 }], + + // Accessibility specifics (ORIGIN_UI_UX_SYSTEM compliance) + 'color-contrast': 'error', + 'image-alt': 'error', + 'label': 'error', + 'button-name': 'error', + 'link-name': 'error', + 'document-title': 'error', + 'html-has-lang': 'error', + 'meta-viewport': 'error', + }, + }, + upload: { + target: 'filesystem', + outputDir: '.lighthouseci', + }, + }, +}; diff --git a/loadtests/staging/validation_v0140.js 
b/loadtests/staging/validation_v0140.js new file mode 100644 index 000000000..dfb2ac330 --- /dev/null +++ b/loadtests/staging/validation_v0140.js @@ -0,0 +1,169 @@ +/** + * v0.14.0 Staging Validation Load Test + * TASK-STAG-002: Validation performances (p95 < 100ms, stream start < 500ms) + * + * Usage: + * k6 run --env BASE_URL=https://staging.veza.app/api/v1 loadtests/staging/validation_v0140.js + * k6 run --env BASE_URL=https://staging.veza.app/api/v1 --env SCENARIO=load loadtests/staging/validation_v0140.js + */ +import http from 'k6/http'; +import { check, sleep, group } from 'k6'; +import { Rate, Trend } from 'k6/metrics'; + +const errorRate = new Rate('error_rate'); +const apiDuration = new Trend('api_duration', true); +const streamStartTime = new Trend('stream_start_time', true); + +const BASE_URL = __ENV.BASE_URL || 'http://localhost:8080/api/v1'; +const STREAM_URL = __ENV.STREAM_URL || BASE_URL.replace('/api/v1', '/stream'); +const AUTH_TOKEN = __ENV.AUTH_TOKEN || ''; +const SCENARIO = __ENV.SCENARIO || 'smoke'; + +const scenarios = { + smoke: { + stages: [ + { duration: '10s', target: 5 }, + { duration: '30s', target: 10 }, + { duration: '10s', target: 0 }, + ], + }, + load: { + stages: [ + { duration: '30s', target: 50 }, + { duration: '2m', target: 200 }, + { duration: '1m', target: 50 }, + { duration: '30s', target: 0 }, + ], + }, +}; + +export const options = { + stages: scenarios[SCENARIO]?.stages || scenarios.smoke.stages, + thresholds: { + // TASK-STAG-002 targets + http_req_duration: ['p(95)<100', 'p(99)<200'], + api_duration: ['p(95)<100'], + stream_start_time: ['p(95)<500'], + error_rate: ['rate<0.001'], // < 0.1% for staging stability + }, + gracefulStop: '10s', +}; + +function headers() { + const h = { 'Content-Type': 'application/json' }; + if (AUTH_TOKEN) { + h['Authorization'] = `Bearer ${AUTH_TOKEN}`; + } + return h; +} + +export default function () { + const h = headers(); + + // Health check — must always be fast + group('health', () => 
{ + const res = http.get(`${BASE_URL}/health`, { headers: h }); + check(res, { + 'health 200': (r) => r.status === 200, + 'health < 50ms': (r) => r.timings.duration < 50, + }); + errorRate.add(res.status >= 500); + apiDuration.add(res.timings.duration); + }); + + // Deep health — checks DB, Redis, RabbitMQ + group('health_deep', () => { + const res = http.get(`${BASE_URL}/health/deep`, { headers: h }); + check(res, { + 'deep health 2xx': (r) => r.status >= 200 && r.status < 300, + }); + errorRate.add(res.status >= 500); + apiDuration.add(res.timings.duration); + }); + sleep(0.2); + + // Readiness probe + group('readiness', () => { + const res = http.get(`${BASE_URL}/readyz`, { headers: h }); + check(res, { + 'readyz 200': (r) => r.status === 200, + }); + errorRate.add(res.status >= 500); + }); + + // Track listing (high-traffic endpoint) + group('tracks_list', () => { + const res = http.get(`${BASE_URL}/tracks?page=1&limit=20`, { headers: h }); + check(res, { + 'tracks 2xx or 401': (r) => (r.status >= 200 && r.status < 300) || r.status === 401, + 'tracks p95 < 100ms': (r) => r.timings.duration < 100, + }); + errorRate.add(res.status >= 500); + apiDuration.add(res.timings.duration); + }); + sleep(0.2); + + // Search + group('search', () => { + const queries = ['rock', 'jazz', 'piano', 'guitar', 'beat', 'electronic']; + const q = queries[Math.floor(Math.random() * queries.length)]; + const res = http.get(`${BASE_URL}/search?q=${q}&limit=10`, { headers: h }); + check(res, { + 'search 2xx or 401': (r) => (r.status >= 200 && r.status < 300) || r.status === 401, + }); + errorRate.add(res.status >= 500); + apiDuration.add(res.timings.duration); + }); + sleep(0.2); + + // Marketplace products + group('marketplace', () => { + const res = http.get(`${BASE_URL}/commerce/products?page=1&limit=20`, { headers: h }); + check(res, { + 'products 2xx or 401': (r) => (r.status >= 200 && r.status < 300) || r.status === 401, + }); + errorRate.add(res.status >= 500); + 
apiDuration.add(res.timings.duration); + }); + sleep(0.2); + + // Stream start simulation (if stream endpoint is accessible) + group('stream_start', () => { + const start = new Date().getTime(); + const res = http.get(`${STREAM_URL}/health`, { headers: h, timeout: '2s' }); + const elapsed = new Date().getTime() - start; + streamStartTime.add(elapsed); + check(res, { + 'stream health reachable': (r) => r.status === 200 || r.status === 404, + 'stream start < 500ms': () => elapsed < 500, + }); + }); + sleep(0.3); +} + +export function handleSummary(data) { + const p95 = data.metrics.http_req_duration?.values?.['p(95)'] || 'N/A'; + const p99 = data.metrics.http_req_duration?.values?.['p(99)'] || 'N/A'; + const errRate = data.metrics.error_rate?.values?.rate || 0; + const streamP95 = data.metrics.stream_start_time?.values?.['p(95)'] || 'N/A'; + + const passed = (typeof p95 === 'number' && p95 < 100) && + (typeof streamP95 === 'number' && streamP95 < 500) && + (errRate < 0.001); + + console.log(` +═══════════════════════════════════════════ + v0.14.0 Staging Validation Results (${SCENARIO}) +═══════════════════════════════════════════ + API p95 latency: ${typeof p95 === 'number' ? p95.toFixed(2) : p95}ms (target: <100ms) ${typeof p95 === 'number' && p95 < 100 ? '✅' : '❌'} + API p99 latency: ${typeof p99 === 'number' ? p99.toFixed(2) : p99}ms (target: <200ms) ${typeof p99 === 'number' && p99 < 200 ? '✅' : '❌'} + Stream start p95: ${typeof streamP95 === 'number' ? streamP95.toFixed(2) : streamP95}ms (target: <500ms) ${typeof streamP95 === 'number' && streamP95 < 500 ? '✅' : '❌'} + Error rate: ${(errRate * 100).toFixed(3)}% (target: <0.1%) ${errRate < 0.001 ? '✅' : '❌'} + Overall: ${passed ? 
'✅ PASS' : '❌ FAIL'} +═══════════════════════════════════════════ + `); + + return { + 'staging-perf-results.json': JSON.stringify(data, null, 2), + }; +} diff --git a/scripts/staging-stability-check.sh b/scripts/staging-stability-check.sh new file mode 100755 index 000000000..dc275a783 --- /dev/null +++ b/scripts/staging-stability-check.sh @@ -0,0 +1,120 @@ +#!/usr/bin/env bash +# v0.14.0 TASK-STAG-004: Stability validation script +# Monitors staging for N minutes, checking 5xx rate and availability. +# +# Usage: +# STAGING_API_URL=https://staging.veza.app/api/v1 DURATION_MINUTES=10 bash scripts/staging-stability-check.sh +# +# Environment: +# STAGING_API_URL — Base API URL (default: http://localhost:8080/api/v1) +# DURATION_MINUTES — How long to monitor (default: 10) +# INTERVAL_SECONDS — Seconds between checks (default: 10) +# MAX_5XX_RATE — Maximum 5xx rate as decimal (default: 0.001 = 0.1%) + +set -euo pipefail + +API_URL="${STAGING_API_URL:-http://localhost:8080/api/v1}" +DURATION="${DURATION_MINUTES:-10}" +INTERVAL="${INTERVAL_SECONDS:-10}" +MAX_RATE="${MAX_5XX_RATE:-0.001}" + +TOTAL_REQUESTS=0 +TOTAL_5XX=0 +TOTAL_ERRORS=0 +START_TIME=$(date +%s) +END_TIME=$((START_TIME + DURATION * 60)) + +ENDPOINTS=( + "/health" + "/healthz" + "/readyz" + "/health/deep" + "/tracks?page=1&limit=5" + "/search?q=test&limit=5" +) + +echo "═══════════════════════════════════════════" +echo " Staging Stability Check" +echo " URL: ${API_URL}" +echo " Duration: ${DURATION} minutes" +echo " Interval: ${INTERVAL}s" +echo " Max 5xx rate: $(echo "${MAX_RATE} * 100" | bc)%" +echo "═══════════════════════════════════════════" +echo "" + +while [ "$(date +%s)" -lt "$END_TIME" ]; do + for endpoint in "${ENDPOINTS[@]}"; do + HTTP_CODE=$(curl -sf -o /dev/null -w "%{http_code}" "${API_URL}${endpoint}" 2>/dev/null || echo "000") + TOTAL_REQUESTS=$((TOTAL_REQUESTS + 1)) + + if [ "$HTTP_CODE" = "000" ]; then + TOTAL_ERRORS=$((TOTAL_ERRORS + 1)) + echo "[$(date '+%H:%M:%S')] ERROR: ${endpoint} — 
connection failed" + elif [ "$HTTP_CODE" -ge 500 ]; then + TOTAL_5XX=$((TOTAL_5XX + 1)) + echo "[$(date '+%H:%M:%S')] 5XX: ${endpoint} — HTTP ${HTTP_CODE}" + fi + done + + ELAPSED=$(( $(date +%s) - START_TIME )) + REMAINING=$(( END_TIME - $(date +%s) )) + if [ "$TOTAL_REQUESTS" -gt 0 ]; then + CURRENT_RATE=$(echo "scale=6; $TOTAL_5XX / $TOTAL_REQUESTS" | bc) + printf "\r[%ds/%ds] Requests: %d | 5xx: %d | Errors: %d | 5xx rate: %s" \ + "$ELAPSED" "$((DURATION * 60))" "$TOTAL_REQUESTS" "$TOTAL_5XX" "$TOTAL_ERRORS" "$CURRENT_RATE" + fi + + sleep "$INTERVAL" +done + +echo "" +echo "" + +# Calculate final rate +if [ "$TOTAL_REQUESTS" -gt 0 ]; then + FINAL_RATE=$(echo "scale=6; $TOTAL_5XX / $TOTAL_REQUESTS" | bc) + FINAL_RATE_PCT=$(echo "scale=3; $FINAL_RATE * 100" | bc) + ERROR_RATE=$(echo "scale=6; $TOTAL_ERRORS / $TOTAL_REQUESTS" | bc) + ERROR_RATE_PCT=$(echo "scale=3; $ERROR_RATE * 100" | bc) +else + FINAL_RATE="0" + FINAL_RATE_PCT="0" + ERROR_RATE="0" + ERROR_RATE_PCT="0" +fi + +# Generate report +cat > stability-report.json <