veza/loadtests/backend/uploads.js
senke fef7e7fc7c feat(loadtests): audit 3.2 — tests de charge k6 complets
- loadtests: centraliser scripts (backend, stream, chat)
- backend: health, auth, tracks, uploads, playlists, marketplace
- stream: http health, healthz, readyz
- chat: WebSocket load (register -> login -> chat token -> WS)
- ci: workflow nightly load-test-nightly.yml
- docs: README loadtests
- make: load-test-smoke, load-test-backend, load-test-all
- fix: veza-backend-api Makefile load-test (scripts/load_test_uploads.js -> loadtests)
2026-02-15 15:22:48 +01:00

183 lines
5.7 KiB
JavaScript

/**
* Load test: upload simple, chunked, batch
* Usage: k6 run loadtests/backend/uploads.js
* Requires: AUTH_TOKEN (JWT)
*/
import http from 'k6/http';
import { check, sleep } from 'k6';
import { Rate, Trend, Counter } from 'k6/metrics';
// Custom k6 metrics shared by all scenarios in this script.
const errorRate = new Rate('errors');
const uploadDuration = new Trend('upload_duration');
const chunkedUploadDuration = new Trend('chunked_upload_duration');
const uploadFailures = new Counter('upload_failures');

// Environment-driven configuration (k6 -e KEY=VALUE or exported env vars).
const BASE_URL = __ENV.BASE_URL || __ENV.API_ORIGIN || 'http://localhost:8080';
const AUTH_TOKEN = __ENV.AUTH_TOKEN || '';
// Chunk size in bytes (default 1 MiB) and chunk count for the chunked flow.
// FIX: always pass the radix to parseInt.
const CHUNK_SIZE = parseInt(__ENV.CHUNK_SIZE || '1048576', 10);
const TOTAL_CHUNKS = parseInt(__ENV.TOTAL_CHUNKS || '5', 10);
// k6 run profile: ramp to 5 VUs, climb to and hold 10 VUs, then ramp down.
export const options = {
stages: [
{ duration: '30s', target: 5 }, // warm-up
{ duration: '2m', target: 10 }, // ramp to nominal load
{ duration: '2m', target: 10 }, // sustained load
{ duration: '30s', target: 0 }, // ramp-down
],
// Pass/fail criteria evaluated at the end of the run (durations in ms).
thresholds: {
http_req_duration: ['p(95)<5000', 'p(99)<10000'],
errors: ['rate<0.10'], // abort criteria: at most 10% failed checks
upload_duration: ['p(95)<3000'],
chunked_upload_duration: ['p(95)<8000'],
},
};
/**
 * Build a pseudo-random payload of `size` bytes standing in for file content.
 * @param {number} size - payload length in bytes
 * @returns {Uint8Array} buffer of random byte values (0-255)
 */
function generateTestFile(size) {
  return Uint8Array.from({ length: size }, () => Math.floor(Math.random() * 256));
}
/**
 * Hand-build a multipart/form-data request body (text fields + one file part).
 * The binary payload is embedded byte-for-byte as a latin-1 string.
 * @param {Object<string,string>} fields - plain text form fields
 * @param {string} fileField - form field name carrying the file part
 * @param {Uint8Array} fileData - file payload bytes
 * @param {string} filename - filename reported in Content-Disposition
 * @param {string} contentType - MIME type of the file part
 * @returns {{body: string, contentType: string}} the body string and the
 *   matching `multipart/form-data; boundary=...` Content-Type header value
 */
function createMultipartBody(fields, fileField, fileData, filename, contentType) {
  const boundary = `----WebKitFormBoundary${Date.now()}${Math.random().toString(36)}`;
  let body = '';
  for (const [key, value] of Object.entries(fields)) {
    body += `--${boundary}\r\n`;
    body += `Content-Disposition: form-data; name="${key}"\r\n\r\n`;
    body += `${value}\r\n`;
  }
  body += `--${boundary}\r\n`;
  // FIX: the `filename` parameter was ignored — the literal "$(unknown)" was
  // sent as the filename. Interpolate the real name instead.
  body += `Content-Disposition: form-data; name="${fileField}"; filename="${filename}"\r\n`;
  body += `Content-Type: ${contentType}\r\n\r\n`;
  // FIX: String.fromCharCode.apply(null, fileData) passes one argument per
  // byte and hits the engine's argument-count limit on large buffers.
  // Convert in bounded slices instead.
  const STEP = 8192;
  let fileString = '';
  for (let i = 0; i < fileData.length; i += STEP) {
    fileString += String.fromCharCode.apply(null, fileData.subarray(i, i + STEP));
  }
  body += fileString;
  body += `\r\n--${boundary}--\r\n`;
  return {
    body,
    contentType: `multipart/form-data; boundary=${boundary}`,
  };
}
/**
 * Upload one small file in a single multipart request
 * (POST /api/v1/tracks) and record duration + error metrics.
 * @returns {boolean} true when the API answered 200 or 201
 */
function testSimpleUpload() {
  const suffix = Math.random().toString(36).substring(7);
  const filename = `test_${Date.now()}_${suffix}.mp3`;
  const payloadSize = Math.min(CHUNK_SIZE * 2, 1024 * 1024);
  const payload = generateTestFile(payloadSize);
  const formFields = {
    title: `Test Track ${Date.now()}`,
    artist: 'Test Artist',
    file_type: 'audio',
  };
  const multipart = createMultipartBody(formFields, 'file', payload, filename, 'audio/mpeg');
  const headers = {
    Authorization: `Bearer ${AUTH_TOKEN}`,
    'Content-Type': multipart.contentType,
  };
  const t0 = Date.now();
  const res = http.post(`${BASE_URL}/api/v1/tracks`, multipart.body, { headers });
  uploadDuration.add(Date.now() - t0);
  const ok = check(res, {
    'simple upload status is 201 or 200': (r) => r.status === 201 || r.status === 200,
  });
  errorRate.add(!ok);
  if (!ok) uploadFailures.add(1);
  return ok;
}
/**
 * Exercise the chunked upload flow: initiate -> N chunk POSTs -> complete.
 * Total duration lands in `chunked_upload_duration`; every failure path feeds
 * both the `errors` rate and the `upload_failures` counter.
 * @returns {boolean} true when the whole flow succeeded
 */
function testChunkedUpload() {
  const filename = `test_chunked_${Date.now()}_${Math.random().toString(36).substring(7)}.mp3`;
  const totalSize = CHUNK_SIZE * TOTAL_CHUNKS;
  const startTime = Date.now();
  // Local helper so every early-exit path updates the metrics consistently.
  const fail = () => {
    errorRate.add(true);
    uploadFailures.add(1);
    return false;
  };
  const jsonHeaders = {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${AUTH_TOKEN}`,
  };
  // Step 1: declare the upload and obtain an upload_id.
  const initiateRes = http.post(
    `${BASE_URL}/api/v1/tracks/initiate`,
    JSON.stringify({ total_chunks: TOTAL_CHUNKS, total_size: totalSize, filename }),
    { headers: jsonHeaders }
  );
  if (initiateRes.status !== 200) return fail();
  let uploadID;
  try {
    const parsed = JSON.parse(initiateRes.body);
    uploadID = parsed.data?.upload_id;
  } catch (e) {
    // FIX: previously returned false without recording the failure.
    return fail();
  }
  // FIX: missing upload_id now also counts as a failure in the metrics.
  if (!uploadID) return fail();
  // Step 2: send each chunk as multipart form data.
  for (let chunkNum = 1; chunkNum <= TOTAL_CHUNKS; chunkNum++) {
    const chunkData = generateTestFile(Math.min(CHUNK_SIZE, 1024 * 1024));
    const fields = {
      upload_id: uploadID,
      chunk_number: chunkNum.toString(),
      total_chunks: TOTAL_CHUNKS.toString(),
      total_size: totalSize.toString(),
      filename,
    };
    const multipart = createMultipartBody(fields, 'chunk', chunkData, `chunk${chunkNum}.bin`, 'application/octet-stream');
    const chunkRes = http.post(`${BASE_URL}/api/v1/tracks/chunk`, multipart.body, {
      headers: {
        Authorization: `Bearer ${AUTH_TOKEN}`,
        'Content-Type': multipart.contentType,
      },
    });
    if (chunkRes.status !== 200) return fail();
    sleep(0.1); // small pacing gap between chunk requests
  }
  // Step 3: finalize the upload.
  const completeRes = http.post(
    `${BASE_URL}/api/v1/tracks/complete`,
    JSON.stringify({ upload_id: uploadID }),
    { headers: jsonHeaders }
  );
  chunkedUploadDuration.add(Date.now() - startTime);
  const success = check(completeRes, {
    'complete returns 201 or 200': (r) => r.status === 201 || r.status === 200,
  });
  errorRate.add(!success);
  if (!success) uploadFailures.add(1);
  return success;
}
/**
 * VU entry point: each iteration runs one randomly chosen scenario —
 * 50% simple upload, 40% chunked upload, 10% batch upload.
 * Requires AUTH_TOKEN; without it the iteration is a no-op (logged once per call).
 */
export default function () {
  if (!AUTH_TOKEN) {
    console.error('AUTH_TOKEN is required for upload tests');
    return;
  }
  const rand = Math.random();
  if (rand < 0.5) {
    testSimpleUpload();
  } else if (rand < 0.9) {
    testChunkedUpload();
  } else {
    const filename = `test_batch_${Date.now()}.mp3`;
    const fileData = generateTestFile(Math.min(CHUNK_SIZE, 1024 * 1024));
    const multipart = createMultipartBody({}, 'files', fileData, filename, 'audio/mpeg');
    const res = http.post(`${BASE_URL}/api/v1/uploads/batch`, multipart.body, {
      headers: {
        Authorization: `Bearer ${AUTH_TOKEN}`,
        'Content-Type': multipart.contentType,
      },
    });
    const ok = check(res, { 'batch status 200 or 201': (r) => r.status === 200 || r.status === 201 });
    // FIX: batch failures previously bypassed the shared error metrics, so the
    // `errors` threshold never saw them. Record them like the other scenarios.
    errorRate.add(!ok);
    if (!ok) uploadFailures.add(1);
  }
  sleep(2); // think time between iterations
}