- Created k6 load test script for concurrent and chunked uploads
- Added Go performance tests for upload endpoints
- Updated README with usage instructions for upload load tests
- Tests cover simple upload, chunked upload (initiate/chunk/complete), and batch upload
- Performance thresholds defined for upload operations

Phase: PHASE-5 | Priority: P2 | Progress: 136/267 (50.94%)
381 lines · 12 KiB · JavaScript
// k6 load test for the veza-backend-api upload endpoints.
//
// Installation: https://k6.io/docs/get-started/installation/
//
// Usage: k6 run scripts/loadtest/k6_upload_load_test.js
//
// Environment variables:
//   BASE_URL:     API base URL (default: http://localhost:8080)
//   AUTH_TOKEN:   JWT token used for authentication (required for uploads)
//   CHUNK_SIZE:   Chunk size in bytes (default: 1024 * 1024 = 1MB)
//   TOTAL_CHUNKS: Number of chunks per chunked upload (default: 5)
|
import http from 'k6/http';
|
|
import { check, sleep } from 'k6';
|
|
import { Rate, Trend, Counter } from 'k6/metrics';
|
|
|
|
// Custom metrics reported alongside k6's built-in ones.
const errorRate = new Rate('errors'); // fraction of iterations whose checks failed
const uploadDuration = new Trend('upload_duration'); // wall-clock ms per simple upload
const chunkedUploadDuration = new Trend('chunked_upload_duration'); // wall-clock ms for a full initiate/chunk/complete flow
const concurrentUploads = new Counter('concurrent_uploads'); // simple uploads attempted
const chunkedUploads = new Counter('chunked_uploads'); // chunked uploads attempted
const uploadFailures = new Counter('upload_failures'); // failed uploads, any kind
|
|
|
|
// k6 run configuration: load profile (stages) and pass/fail thresholds.
export const options = {
  stages: [
    { duration: '30s', target: 5 },  // ramp-up: 0 -> 5 VUs over 30s
    { duration: '2m', target: 10 },  // growth: 5 -> 10 VUs over 2m
    { duration: '2m', target: 10 },  // steady state: 10 VUs for 2m
    { duration: '30s', target: 0 },  // ramp-down: 10 -> 0 VUs over 30s
  ],
  thresholds: {
    'http_req_duration': ['p(95)<5000', 'p(99)<10000'], // uploads are allowed to be slower
    'errors': ['rate<0.10'],                            // < 10% errors (uploads are more fragile)
    'upload_duration': ['p(95)<3000'],                  // simple upload < 3s
    'chunked_upload_duration': ['p(95)<8000'],          // chunked upload < 8s
  },
};
|
|
|
|
// Environment-driven configuration (see the header comment for usage).
const BASE_URL = __ENV.BASE_URL || 'http://localhost:8080'; // API base URL
const AUTH_TOKEN = __ENV.AUTH_TOKEN || ''; // JWT bearer token (required for uploads)
// Always pass the radix to parseInt so a leading "0x"/"0" in the env var
// cannot silently change the base.
const CHUNK_SIZE = parseInt(__ENV.CHUNK_SIZE || '1048576', 10); // 1MB default
const TOTAL_CHUNKS = parseInt(__ENV.TOTAL_CHUNKS || '5', 10);
|
|
|
|
// Build a pseudo-random payload of `size` bytes that stands in for a real
// uploaded file. k6 represents binary data as Uint8Array.
//
// @param {number} size - number of bytes to generate
// @returns {Uint8Array} buffer of uniformly random bytes in 0..255
function generateTestFile(size) {
  return Uint8Array.from({ length: size }, () => Math.floor(Math.random() * 256));
}
|
|
|
|
// Manually build a multipart/form-data request body for k6.
//
// @param {Object} fields - plain form fields (name -> value)
// @param {string} fileField - form field name for the file part
// @param {Uint8Array} fileData - raw file bytes
// @param {string} filename - filename reported in Content-Disposition
// @param {string} contentType - MIME type of the file part
// @returns {{body: string, contentType: string}} the encoded body plus the
//   full multipart Content-Type header value (including the boundary)
function createMultipartBody(fields, fileField, fileData, filename, contentType) {
  const boundary = `----WebKitFormBoundary${Date.now()}${Math.random().toString(36)}`;
  let body = '';

  // Plain form fields first.
  for (const [key, value] of Object.entries(fields)) {
    body += `--${boundary}\r\n`;
    body += `Content-Disposition: form-data; name="${key}"\r\n\r\n`;
    body += `${value}\r\n`;
  }

  // File part. Fix: interpolate the filename parameter — previously a
  // literal placeholder was sent instead of the actual filename.
  body += `--${boundary}\r\n`;
  body += `Content-Disposition: form-data; name="${fileField}"; filename="${filename}"\r\n`;
  body += `Content-Type: ${contentType}\r\n\r\n`;

  // Convert the bytes to a binary string (approximation — a production-grade
  // test would send a true binary body). Done byte-by-byte rather than
  // String.fromCharCode.apply(null, fileData), which overflows the argument
  // stack for large buffers.
  let fileString = '';
  for (let i = 0; i < fileData.length; i++) {
    fileString += String.fromCharCode(fileData[i]);
  }
  body += fileString;
  body += `\r\n--${boundary}--\r\n`;

  return {
    body: body,
    contentType: `multipart/form-data; boundary=${boundary}`,
  };
}
|
|
|
|
// Exercise the single-request upload endpoint (POST /api/v1/tracks).
// Records upload_duration / concurrent_uploads and returns whether all
// checks passed.
function testSimpleUpload() {
  const filename = `test_${Date.now()}_${Math.random().toString(36).substring(7)}.mp3`;
  // Cap the payload at 1MB to keep per-VU memory usage reasonable.
  const payload = generateTestFile(Math.min(CHUNK_SIZE * 2, 1024 * 1024));

  const startedAt = Date.now();

  const multipart = createMultipartBody(
    {
      title: `Test Track ${Date.now()}`,
      artist: 'Test Artist',
      file_type: 'audio',
    },
    'file',
    payload,
    filename,
    'audio/mpeg'
  );

  // POST /api/v1/tracks (simple upload)
  const res = http.post(`${BASE_URL}/api/v1/tracks`, multipart.body, {
    headers: {
      'Authorization': `Bearer ${AUTH_TOKEN}`,
      'Content-Type': multipart.contentType,
    },
  });

  uploadDuration.add(Date.now() - startedAt);
  concurrentUploads.add(1);

  const success = check(res, {
    'simple upload status is 201 or 200': (r) => r.status === 201 || r.status === 200,
    'simple upload has response data': (r) => {
      try {
        const body = JSON.parse(r.body);
        return body.success !== false && body.data !== undefined;
      } catch (e) {
        return false;
      }
    },
  });

  errorRate.add(!success);
  if (!success) {
    uploadFailures.add(1);
  }

  return success;
}
|
|
|
|
// Exercise the three-step chunked upload flow:
//   1. POST /api/v1/tracks/initiate  -> obtain an upload_id
//   2. POST /api/v1/tracks/chunk     -> send TOTAL_CHUNKS multipart chunks
//   3. POST /api/v1/tracks/complete  -> finalize and create the track
// On any step failure the function records the error and returns false early.
function testChunkedUpload() {
  const filename = `test_chunked_${Date.now()}_${Math.random().toString(36).substring(7)}.mp3`;
  const totalSize = CHUNK_SIZE * TOTAL_CHUNKS;

  // Duration covers the whole initiate/chunk/complete sequence, including
  // the inter-chunk sleeps below.
  const startTime = Date.now();

  // Step 1: Initiate chunked upload
  const initiatePayload = JSON.stringify({
    total_chunks: TOTAL_CHUNKS,
    total_size: totalSize,
    filename: filename,
  });

  const initiateRes = http.post(
    `${BASE_URL}/api/v1/tracks/initiate`,
    initiatePayload,
    {
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${AUTH_TOKEN}`,
      },
    }
  );

  const initiateCheck = check(initiateRes, {
    'initiate returns 200': (r) => r.status === 200,
    'initiate returns upload_id': (r) => {
      try {
        const body = JSON.parse(r.body);
        return body.success === true && body.data && body.data.upload_id;
      } catch (e) {
        return false;
      }
    },
  });

  // Abort the whole flow if the server did not hand back an upload_id.
  if (!initiateCheck) {
    errorRate.add(true);
    uploadFailures.add(1);
    return false;
  }

  const initiateBody = JSON.parse(initiateRes.body);
  const uploadID = initiateBody.data.upload_id;

  // Step 2: Upload chunks (1-indexed, sequential)
  for (let chunkNum = 1; chunkNum <= TOTAL_CHUNKS; chunkNum++) {
    const chunkData = generateTestFile(Math.min(CHUNK_SIZE, 1024 * 1024)); // cap at 1MB per chunk

    // Build the multipart form manually; the server correlates chunks via
    // upload_id + chunk_number.
    const fields = {
      upload_id: uploadID,
      chunk_number: chunkNum.toString(),
      total_chunks: TOTAL_CHUNKS.toString(),
      total_size: totalSize.toString(),
      filename: filename,
    };

    const multipart = createMultipartBody(fields, 'chunk', chunkData, `chunk${chunkNum}.bin`, 'application/octet-stream');

    const chunkRes = http.post(
      `${BASE_URL}/api/v1/tracks/chunk`,
      multipart.body,
      {
        headers: {
          'Authorization': `Bearer ${AUTH_TOKEN}`,
          'Content-Type': multipart.contentType,
        },
      }
    );

    const chunkCheck = check(chunkRes, {
      [`chunk ${chunkNum} returns 200`]: (r) => r.status === 200,
      [`chunk ${chunkNum} has progress`]: (r) => {
        try {
          const body = JSON.parse(r.body);
          return body.success === true && body.data && body.data.progress !== undefined;
        } catch (e) {
          return false;
        }
      },
    });

    // A failed chunk aborts the flow — remaining chunks are not sent.
    if (!chunkCheck) {
      errorRate.add(true);
      uploadFailures.add(1);
      return false;
    }

    // Small delay between chunks to simulate realistic client behavior
    sleep(0.1);
  }

  // Step 3: Complete the chunked upload
  const completePayload = JSON.stringify({
    upload_id: uploadID,
  });

  const completeRes = http.post(
    `${BASE_URL}/api/v1/tracks/complete`,
    completePayload,
    {
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${AUTH_TOKEN}`,
      },
    }
  );

  const duration = Date.now() - startTime;
  chunkedUploadDuration.add(duration);
  chunkedUploads.add(1);

  const completeCheck = check(completeRes, {
    'complete returns 201 or 200': (r) => r.status === 201 || r.status === 200,
    'complete has track data': (r) => {
      try {
        const body = JSON.parse(r.body);
        return body.success === true && body.data && body.data.track;
      } catch (e) {
        return false;
      }
    },
  });

  errorRate.add(!completeCheck);
  if (!completeCheck) {
    uploadFailures.add(1);
  }

  return completeCheck;
}
|
|
|
|
// Exercise the batch upload endpoint (POST /api/v1/uploads/batch).
// A real batch would carry several files; a single file is enough to put
// load on the endpoint. Returns whether the status check passed.
function testBatchUpload() {
  const filename = `test_batch_${Date.now()}_${Math.random().toString(36).substring(7)}.mp3`;
  // Cap the file at 1MB.
  const fileData = generateTestFile(Math.min(CHUNK_SIZE, 1024 * 1024));

  // No extra form fields — only the file part.
  const multipart = createMultipartBody({}, 'files', fileData, filename, 'audio/mpeg');

  const res = http.post(`${BASE_URL}/api/v1/uploads/batch`, multipart.body, {
    headers: {
      'Authorization': `Bearer ${AUTH_TOKEN}`,
      'Content-Type': multipart.contentType,
    },
  });

  const success = check(res, {
    'batch upload status is 200 or 201': (r) => r.status === 200 || r.status === 201,
  });

  errorRate.add(!success);
  if (!success) {
    uploadFailures.add(1);
  }

  return success;
}
|
|
|
|
// Entry point executed by each VU on every iteration. Requires AUTH_TOKEN;
// picks a scenario at random: 50% simple, 40% chunked, 10% batch.
export default function () {
  if (!AUTH_TOKEN) {
    console.error('AUTH_TOKEN is required for upload tests');
    return;
  }

  const roll = Math.random();
  if (roll < 0.5) {
    testSimpleUpload(); // 50% simple upload
  } else if (roll < 0.9) {
    testChunkedUpload(); // 40% chunked upload
  } else {
    testBatchUpload(); // 10% batch upload
  }

  // Pause between iterations to mimic user pacing.
  sleep(2);
}
|
|
|
|
// k6 end-of-test hook: emit a human-readable summary on stdout and dump the
// raw metrics to a JSON file for later analysis.
export function handleSummary(data) {
  const outputs = {};
  outputs['stdout'] = textSummary(data, { indent: ' ', enableColors: true });
  outputs['scripts/loadtest/k6_upload_summary.json'] = JSON.stringify(data);
  return outputs;
}
|
|
|
|
// Render a plain-text report from the k6 summary data object.
//
// @param {Object} data - k6 summary data (data.metrics.<name>.values)
// @param {Object} [options] - { indent } (enableColors is accepted but unused)
// @returns {string} multi-line human-readable report
function textSummary(data, options = {}) {
  const indent = options.indent ?? '';
  const metrics = data.metrics ?? {};

  let summary = '\n';
  summary += `${indent}Upload Load Test Summary\n`;
  summary += `${indent}========================\n\n`;

  // HTTP request totals. Guard every section against absent metrics (e.g. a
  // run aborted before any request was issued) instead of crashing — the
  // custom-metric sections below were already guarded; the built-ins were not.
  if (metrics.http_reqs && metrics.http_req_failed) {
    summary += `${indent}HTTP Requests:\n`;
    summary += `${indent} Total: ${metrics.http_reqs.values.count}\n`;
    summary += `${indent} Failed: ${(metrics.http_req_failed.values.rate * 100).toFixed(2)}%\n\n`;
  }

  // Overall request durations
  if (metrics.http_req_duration) {
    summary += `${indent}Durations:\n`;
    summary += `${indent} P95: ${metrics.http_req_duration.values['p(95)']}ms\n`;
    summary += `${indent} P99: ${metrics.http_req_duration.values['p(99)']}ms\n\n`;
  }

  // Custom upload metrics (present only when the matching scenario ran)
  if (metrics.upload_duration) {
    summary += `${indent}Simple Upload Duration:\n`;
    summary += `${indent} P95: ${metrics.upload_duration.values['p(95)']}ms\n`;
    summary += `${indent} P99: ${metrics.upload_duration.values['p(99)']}ms\n`;
  }

  if (metrics.chunked_upload_duration) {
    summary += `${indent}Chunked Upload Duration:\n`;
    summary += `${indent} P95: ${metrics.chunked_upload_duration.values['p(95)']}ms\n`;
    summary += `${indent} P99: ${metrics.chunked_upload_duration.values['p(99)']}ms\n`;
  }

  if (metrics.concurrent_uploads) {
    summary += `${indent}Concurrent Uploads: ${metrics.concurrent_uploads.values.count}\n`;
  }

  if (metrics.chunked_uploads) {
    summary += `${indent}Chunked Uploads: ${metrics.chunked_uploads.values.count}\n`;
  }

  if (metrics.upload_failures) {
    summary += `${indent}Upload Failures: ${metrics.upload_failures.values.count}\n`;
  }

  return summary;
}
|
|
|