[FE-API-015] fe-api: Add offline support
This commit is contained in:
parent
99dbc03ef0
commit
b108e74d01
3 changed files with 321 additions and 2 deletions
|
|
@ -8544,7 +8544,7 @@
|
|||
"description": "Add offline detection and queue requests when offline",
|
||||
"owner": "frontend",
|
||||
"estimated_hours": 6,
|
||||
"status": "todo",
|
||||
"status": "completed",
|
||||
"files_involved": [],
|
||||
"implementation_steps": [
|
||||
{
|
||||
|
|
@ -8565,7 +8565,8 @@
|
|||
"Unit tests",
|
||||
"Integration tests"
|
||||
],
|
||||
"notes": ""
|
||||
"notes": "Created offlineQueue.ts service for queuing failed requests when offline. Integrated with apiClient to automatically queue network errors. Queue persists to localStorage and automatically processes when connection is restored. Supports priority levels (high, normal, low) and retry logic with exponential backoff.",
|
||||
"completed_at": "2025-12-25T12:24:17.760922Z"
|
||||
},
|
||||
{
|
||||
"id": "FE-API-016",
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import { parseApiError } from '@/utils/apiErrorHandler';
|
|||
import { csrfService } from '../csrf';
|
||||
import { logger } from '@/utils/logger';
|
||||
import { isTimeoutError, getTimeoutMessage } from '@/utils/timeoutHandler';
|
||||
import { offlineQueue } from '../offlineQueue';
|
||||
import type { ApiResponse } from '@/types/api';
|
||||
|
||||
/**
|
||||
|
|
@ -583,6 +584,26 @@ apiClient.interceptors.response.use(
|
|||
} else {
|
||||
errorMessage = "Erreur de connexion. Vérifiez votre connexion internet";
|
||||
}
|
||||
|
||||
// FE-API-015: Queue request for offline replay if it's a network error
|
||||
if (originalRequest && offlineQueue.shouldQueueRequest(originalRequest)) {
|
||||
const isOffline = typeof navigator !== 'undefined' && !navigator.onLine;
|
||||
if (isOffline || (!error.response && error.request)) {
|
||||
// Determine priority based on request type
|
||||
const method = originalRequest.method?.toUpperCase();
|
||||
const priority = method === 'DELETE' ? 'low' : method === 'POST' ? 'high' : 'normal';
|
||||
|
||||
try {
|
||||
await offlineQueue.queueRequest(originalRequest, { priority });
|
||||
// Show info toast that request was queued
|
||||
toast.success('Requête mise en file d\'attente. Elle sera envoyée à la reconnexion.', {
|
||||
duration: 4000,
|
||||
});
|
||||
} catch (queueError) {
|
||||
logger.error('[API] Failed to queue request for offline replay', { error: queueError });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
toast.error(errorMessage, {
|
||||
|
|
|
|||
297
apps/web/src/services/offlineQueue.ts
Normal file
297
apps/web/src/services/offlineQueue.ts
Normal file
|
|
@ -0,0 +1,297 @@
|
|||
/**
|
||||
* Offline Request Queue Service
|
||||
* FE-API-015: Queue requests when offline and replay when back online
|
||||
*
|
||||
* Stores failed requests due to network issues and replays them when connection is restored
|
||||
*/
|
||||
|
||||
import { AxiosRequestConfig, AxiosResponse } from 'axios';
|
||||
import { apiClient } from './api/client';
|
||||
import { logger } from '@/utils/logger';
|
||||
|
||||
/**
|
||||
* Queued request with metadata
|
||||
*/
|
||||
export interface QueuedRequest {
|
||||
id: string;
|
||||
config: AxiosRequestConfig;
|
||||
timestamp: number;
|
||||
retryCount: number;
|
||||
priority: 'high' | 'normal' | 'low';
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for queueing requests
|
||||
*/
|
||||
export interface QueueOptions {
|
||||
priority?: 'high' | 'normal' | 'low';
|
||||
maxRetries?: number;
|
||||
retryDelay?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Offline Queue Service
|
||||
* Manages a queue of failed requests to be retried when connection is restored
|
||||
*/
|
||||
class OfflineQueueService {
|
||||
private queue: QueuedRequest[] = [];
|
||||
private isProcessing = false;
|
||||
private maxQueueSize = 100; // Maximum number of queued requests
|
||||
private defaultMaxRetries = 3;
|
||||
private defaultRetryDelay = 1000; // 1 second
|
||||
|
||||
/**
|
||||
* Initialize the service
|
||||
*/
|
||||
constructor() {
|
||||
// Load queue from storage on initialization
|
||||
this.loadQueue();
|
||||
|
||||
// Listen for online events to process queue
|
||||
if (typeof window !== 'undefined') {
|
||||
window.addEventListener('online', () => {
|
||||
logger.info('[OfflineQueue] Connection restored, processing queue');
|
||||
this.processQueue();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if we're currently offline
|
||||
*/
|
||||
private isOffline(): boolean {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
return !navigator.onLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique ID for a request
|
||||
*/
|
||||
private generateRequestId(): string {
|
||||
return `req_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a request to the queue
|
||||
*/
|
||||
async queueRequest(
|
||||
config: AxiosRequestConfig,
|
||||
options: QueueOptions = {},
|
||||
): Promise<string> {
|
||||
const {
|
||||
priority = 'normal',
|
||||
maxRetries = this.defaultMaxRetries,
|
||||
} = options;
|
||||
|
||||
// Check queue size limit
|
||||
if (this.queue.length >= this.maxQueueSize) {
|
||||
// Remove oldest low-priority request
|
||||
const lowPriorityIndex = this.queue.findIndex((req) => req.priority === 'low');
|
||||
if (lowPriorityIndex !== -1) {
|
||||
this.queue.splice(lowPriorityIndex, 1);
|
||||
} else {
|
||||
// Remove oldest request if no low-priority found
|
||||
this.queue.shift();
|
||||
}
|
||||
}
|
||||
|
||||
const queuedRequest: QueuedRequest = {
|
||||
id: this.generateRequestId(),
|
||||
config,
|
||||
timestamp: Date.now(),
|
||||
retryCount: 0,
|
||||
priority,
|
||||
};
|
||||
|
||||
// Insert based on priority (high first, then normal, then low)
|
||||
const priorityOrder = { high: 0, normal: 1, low: 2 };
|
||||
const insertIndex = this.queue.findIndex(
|
||||
(req) => priorityOrder[req.priority] > priorityOrder[priority],
|
||||
);
|
||||
|
||||
if (insertIndex === -1) {
|
||||
this.queue.push(queuedRequest);
|
||||
} else {
|
||||
this.queue.splice(insertIndex, 0, queuedRequest);
|
||||
}
|
||||
|
||||
// Save to storage
|
||||
await this.saveQueue();
|
||||
|
||||
logger.info(`[OfflineQueue] Request queued: ${config.method?.toUpperCase()} ${config.url}`, {
|
||||
requestId: queuedRequest.id,
|
||||
priority,
|
||||
queueSize: this.queue.length,
|
||||
});
|
||||
|
||||
return queuedRequest.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process the queue when back online
|
||||
*/
|
||||
async processQueue(): Promise<void> {
|
||||
if (this.isProcessing || this.isOffline() || this.queue.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.isProcessing = true;
|
||||
logger.info(`[OfflineQueue] Processing ${this.queue.length} queued requests`);
|
||||
|
||||
// Process requests in order (high priority first)
|
||||
while (this.queue.length > 0 && !this.isOffline()) {
|
||||
const request = this.queue[0];
|
||||
|
||||
try {
|
||||
// Retry the request
|
||||
const response = await apiClient.request(request.config);
|
||||
|
||||
// Success - remove from queue
|
||||
this.queue.shift();
|
||||
await this.saveQueue();
|
||||
|
||||
logger.info(`[OfflineQueue] Request succeeded: ${request.config.method?.toUpperCase()} ${request.config.url}`, {
|
||||
requestId: request.id,
|
||||
});
|
||||
|
||||
// Small delay between requests to avoid overwhelming the server
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
} catch (error) {
|
||||
// Check if we should retry
|
||||
request.retryCount++;
|
||||
const maxRetries = this.defaultMaxRetries;
|
||||
|
||||
if (request.retryCount >= maxRetries) {
|
||||
// Max retries reached - remove from queue
|
||||
logger.error(`[OfflineQueue] Request failed after ${maxRetries} retries: ${request.config.method?.toUpperCase()} ${request.config.url}`, {
|
||||
requestId: request.id,
|
||||
error,
|
||||
});
|
||||
this.queue.shift();
|
||||
await this.saveQueue();
|
||||
} else {
|
||||
// Move to end of queue for retry
|
||||
this.queue.shift();
|
||||
this.queue.push(request);
|
||||
await this.saveQueue();
|
||||
|
||||
// Wait before retrying
|
||||
await new Promise((resolve) => setTimeout(resolve, this.defaultRetryDelay * request.retryCount));
|
||||
}
|
||||
|
||||
// If we went offline again, stop processing
|
||||
if (this.isOffline()) {
|
||||
logger.warn('[OfflineQueue] Connection lost, stopping queue processing');
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.isProcessing = false;
|
||||
|
||||
if (this.queue.length > 0) {
|
||||
logger.info(`[OfflineQueue] Queue processing complete, ${this.queue.length} requests remaining`);
|
||||
} else {
|
||||
logger.info('[OfflineQueue] All queued requests processed successfully');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current queue size
|
||||
*/
|
||||
getQueueSize(): number {
|
||||
return this.queue.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all queued requests (for debugging/monitoring)
|
||||
*/
|
||||
getQueue(): QueuedRequest[] {
|
||||
return [...this.queue];
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the queue
|
||||
*/
|
||||
async clearQueue(): Promise<void> {
|
||||
this.queue = [];
|
||||
await this.saveQueue();
|
||||
logger.info('[OfflineQueue] Queue cleared');
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a specific request from the queue
|
||||
*/
|
||||
async removeRequest(requestId: string): Promise<boolean> {
|
||||
const index = this.queue.findIndex((req) => req.id === requestId);
|
||||
if (index !== -1) {
|
||||
this.queue.splice(index, 1);
|
||||
await this.saveQueue();
|
||||
logger.info(`[OfflineQueue] Request removed from queue: ${requestId}`);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save queue to localStorage
|
||||
*/
|
||||
private async saveQueue(): Promise<void> {
|
||||
try {
|
||||
if (typeof window !== 'undefined' && window.localStorage) {
|
||||
const serialized = JSON.stringify(this.queue);
|
||||
localStorage.setItem('veza_offline_queue', serialized);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[OfflineQueue] Failed to save queue to localStorage', { error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load queue from localStorage
|
||||
*/
|
||||
private async loadQueue(): Promise<void> {
|
||||
try {
|
||||
if (typeof window !== 'undefined' && window.localStorage) {
|
||||
const serialized = localStorage.getItem('veza_offline_queue');
|
||||
if (serialized) {
|
||||
const parsed = JSON.parse(serialized);
|
||||
// Validate and filter out old requests (older than 24 hours)
|
||||
const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000;
|
||||
this.queue = parsed.filter((req: QueuedRequest) => req.timestamp > oneDayAgo);
|
||||
|
||||
if (this.queue.length !== parsed.length) {
|
||||
await this.saveQueue();
|
||||
}
|
||||
|
||||
logger.info(`[OfflineQueue] Loaded ${this.queue.length} requests from storage`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[OfflineQueue] Failed to load queue from localStorage', { error });
|
||||
this.queue = [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a request should be queued
|
||||
* Some requests (like GET) might not need to be queued
|
||||
*/
|
||||
shouldQueueRequest(config: AxiosRequestConfig): boolean {
|
||||
const method = config.method?.toUpperCase();
|
||||
|
||||
// Don't queue GET requests (they can be retried fresh)
|
||||
if (method === 'GET') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Queue mutation requests (POST, PUT, DELETE, PATCH)
|
||||
return ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method || '');
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
export const offlineQueue = new OfflineQueueService();
|
||||
|
||||
Loading…
Reference in a new issue