// veza/apps/web/src/features/tracks/services/chunkedUploadService.ts
import { Track } from '../types/track';
import * as trackService from '../services/uploadService';
/**
 * Default size of a single upload chunk (5 MB).
 */
export const CHUNK_SIZE = 5 * 1024 * 1024; // 5MB
/**
 * Size threshold above which chunked upload is used (10 MB).
 *
 * NOTE(review): a previous docstring said 100MB, but the value and inline
 * comment below both say 10MB — the code value is treated as authoritative
 * here; confirm the intended cutoff.
 */
const CHUNKED_UPLOAD_THRESHOLD = 10 * 1024 * 1024; // 10MB
/**
 * Splits a file into fixed-size chunks.
 * @param file File to split
 * @param chunkSize Size of each chunk in bytes (defaults to CHUNK_SIZE)
 * @returns Array of Blobs covering the file in order; the last chunk may be
 *   smaller than chunkSize. An empty file yields an empty array.
 */
export function splitFileIntoChunks(
  file: File,
  chunkSize: number = CHUNK_SIZE,
): Blob[] {
  const totalChunks = Math.ceil(file.size / chunkSize);
  const pieces: Blob[] = [];
  for (let index = 0; index < totalChunks; index++) {
    const offset = index * chunkSize;
    // Clamp the end bound so the final chunk stops at the file size.
    pieces.push(file.slice(offset, Math.min(offset + chunkSize, file.size)));
  }
  return pieces;
}
/**
 * Computes how many chunks are needed to cover a file of the given size.
 * @param totalSize Total file size in bytes
 * @param chunkSize Size of each chunk in bytes (defaults to CHUNK_SIZE)
 * @returns Number of chunks (0 for an empty file)
 */
export function calculateTotalChunks(
  totalSize: number,
  chunkSize: number = CHUNK_SIZE,
): number {
  // Equivalent to Math.ceil(totalSize / chunkSize): round up any remainder.
  const wholeChunks = Math.floor(totalSize / chunkSize);
  return totalSize % chunkSize === 0 ? wholeChunks : wholeChunks + 1;
}
/**
 * Observable state of a chunked upload.
 */
export interface ChunkedUploadState {
  // Upload session id returned by initiateChunkedUpload; null until initiated.
  uploadId: string | null;
  // Total number of chunks the file was split into.
  totalChunks: number;
  // Chunk count reported back by the server (received_chunks) so far.
  uploadedChunks: number;
  // Overall progress, clamped to 0-100 by updateProgress.
  progress: number;
  // True while the caller has paused the upload.
  isPaused: boolean;
  // True once completeChunkedUpload has succeeded.
  isComplete: boolean;
  // Last error message, or null when no error has occurred.
  error: string | null;
  // Track returned by completeChunkedUpload; null before completion.
  track: Track | null;
}
/**
* Gestionnaire d'upload par chunks
* Gère l'upload d'un fichier volumineux en le divisant en chunks
*/
export class ChunkedUploadManager {
private file: File;
private chunks: Blob[];
private state: ChunkedUploadState;
private onProgress?: (progress: number) => void;
private isCancelled = false;
private currentChunkIndex = 0;
private readonly MAX_RETRIES = 3;
constructor(file: File, onProgress?: (progress: number) => void) {
this.file = file;
this.chunks = splitFileIntoChunks(file, CHUNK_SIZE);
this.onProgress = onProgress;
this.state = {
uploadId: null,
totalChunks: this.chunks.length,
uploadedChunks: 0,
progress: 0,
isPaused: false,
isComplete: false,
error: null,
track: null,
};
}
/**
* Démarre l'upload par chunks
*/
async start(): Promise<Track> {
if (this.state.isComplete) {
if (this.state.track) {
return this.state.track;
}
throw new Error('Upload already completed but no track available');
}
if (this.isCancelled) {
throw new Error('Upload was cancelled');
}
try {
// 1. Initier l'upload
if (!this.state.uploadId) {
const uploadId = await trackService.initiateChunkedUpload(
this.state.totalChunks,
this.file.size,
this.file.name,
);
this.state.uploadId = uploadId;
}
// 2. Uploader chaque chunk séquentiellement
for (let i = this.currentChunkIndex; i < this.chunks.length; i++) {
2025-12-22 21:00:50 +00:00
if (this.isCancelled || this.state.isPaused) {
break;
}
this.currentChunkIndex = i;
const chunk = this.chunks[i];
const chunkNumber = i + 1; // 1-based
// Retry logic pour chaque chunk
let success = false;
let lastError: Error | null = null;
for (let retry = 0; retry <= this.MAX_RETRIES; retry++) {
try {
const response = await trackService.uploadChunk(
this.state.uploadId!,
chunkNumber,
this.state.totalChunks,
this.file.size,
this.file.name,
chunk,
(chunkProgress) => {
// Progression globale = (chunks uploadés + progression du chunk actuel) / total
const globalProgress =
((i * 100 + chunkProgress) / this.state.totalChunks) * 0.9; // 90% pour l'upload, 10% pour l'assemblage
this.updateProgress(globalProgress);
},
);
// Le backend retourne la progression dans la réponse
this.state.uploadedChunks = response.received_chunks;
this.updateProgress(response.progress * 0.9); // 90% pour l'upload
success = true;
break;
} catch (error) {
lastError = error as Error;
if (retry < this.MAX_RETRIES) {
// Attendre avant de réessayer (backoff exponentiel)
await new Promise((resolve) =>
setTimeout(resolve, Math.pow(2, retry) * 1000),
);
}
}
}
if (!success) {
throw lastError || new Error('Failed to upload chunk after retries');
}
}
// 3. Vérifier que tous les chunks ont été uploadés
if (this.isCancelled) {
throw new Error('Upload was cancelled');
}
if (this.state.uploadedChunks < this.state.totalChunks) {
throw new Error(
`Not all chunks uploaded: ${this.state.uploadedChunks}/${this.state.totalChunks}`,
);
}
// 4. Compléter l'upload
this.updateProgress(95);
const track = await trackService.completeChunkedUpload(
this.state.uploadId!,
);
this.state.track = track;
this.state.isComplete = true;
this.updateProgress(100);
return track;
} catch (error) {
this.state.error =
error instanceof Error ? error.message : 'Unknown error occurred';
throw error;
}
}
/**
* Met en pause l'upload
*/
pause(): void {
this.state.isPaused = true;
}
/**
* Reprend l'upload
*/
async resume(): Promise<Track> {
if (this.state.isComplete) {
if (this.state.track) {
return this.state.track;
}
throw new Error('Upload already completed but no track available');
}
this.state.isPaused = false;
return this.start();
}
/**
* Annule l'upload
*/
cancel(): void {
this.isCancelled = true;
this.state.isPaused = true;
this.state.error = 'Upload cancelled';
}
/**
* Retourne l'état actuel de l'upload
*/
getState(): ChunkedUploadState {
return { ...this.state };
}
/**
* Met à jour la progression et appelle le callback
*/
private updateProgress(progress: number): void {
this.state.progress = Math.min(100, Math.max(0, progress));
if (this.onProgress) {
this.onProgress(this.state.progress);
}
}
}
/**
 * Determines whether a file is large enough to use chunked upload.
 *
 * NOTE(review): the threshold constant is 10MB even though an earlier comment
 * said 100MB — confirm the intended cutoff.
 *
 * @param file File to check
 * @returns true when file.size exceeds CHUNKED_UPLOAD_THRESHOLD (10MB)
 */
export function shouldUseChunkedUpload(file: File): boolean {
return file.size > CHUNKED_UPLOAD_THRESHOLD;
}