feat(cloud): GDPR data export and automatic backup cron

This commit is contained in:
senke 2026-02-25 13:35:16 +01:00
parent dced768c01
commit 8162d1b419
5 changed files with 262 additions and 0 deletions

View file

@ -170,6 +170,19 @@ func main() {
logger.Info("Transfer Retry Worker skipped — Stripe Connect not enabled")
}
// v0.802: Start Cloud Backup Worker (copies cloud files to backup prefix every 24h)
if cfg.S3StorageService != nil {
backupWorker := services.NewCloudBackupWorker(db.GormDB, cfg.S3StorageService, logger)
backupCtx, backupCancel := context.WithCancel(context.Background())
go backupWorker.Start(backupCtx)
logger.Info("Cloud Backup Worker started (24h interval)")
shutdownManager.Register(shutdown.NewShutdownFunc("cloud_backup_worker", func(ctx context.Context) error {
backupCancel()
return nil
}))
}
// Configuration du mode Gin
// Correction: Utilisation directe de la variable d'env car non exposée dans Config
appEnv := os.Getenv("APP_ENV")

View file

@ -140,6 +140,25 @@ func (r *APIRouter) setupUserRoutes(router *gin.RouterGroup) {
c.Data(http.StatusOK, "application/json", jsonData)
}
protected.GET("/me/export", exportHandler)
// POST /me/export: async GDPR export (ZIP to S3, notification when ready)
gdprExportService := services.NewGDPRExportService(
r.db.GormDB, dataExportService, r.config.S3StorageService, r.notificationService, r.logger,
)
protected.POST("/me/export", func(c *gin.Context) {
userID, exists := c.Get("user_id")
if !exists {
c.JSON(http.StatusUnauthorized, gin.H{"error": "User ID not found"})
return
}
userUUID, ok := userID.(uuid.UUID)
if !ok {
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
return
}
gdprExportService.ExportUserDataAsync(c.Request.Context(), userUUID)
c.JSON(http.StatusAccepted, gin.H{"message": "Export started, you will be notified when ready"})
})
}
}
}

View file

@ -0,0 +1,83 @@
package services
import (
"context"
"fmt"
"time"
"go.uber.org/zap"
"gorm.io/gorm"
"veza-backend-api/internal/models"
)
// CloudBackupWorker copies cloud files to backup prefix periodically.
// It is started once from main() and stopped by cancelling the context
// passed to Start.
type CloudBackupWorker struct {
	db        *gorm.DB          // source of UserFile rows to back up
	s3Service *S3StorageService // storage backend; nil disables the worker (Start returns early)
	logger    *zap.Logger
	interval  time.Duration // time between backup passes (24h by default, see NewCloudBackupWorker)
}
// NewCloudBackupWorker builds a backup worker over the given database,
// storage backend, and logger. The backup interval is fixed at 24 hours.
func NewCloudBackupWorker(db *gorm.DB, s3Service *S3StorageService, logger *zap.Logger) *CloudBackupWorker {
	worker := &CloudBackupWorker{
		db:        db,
		s3Service: s3Service,
		logger:    logger,
	}
	worker.interval = 24 * time.Hour
	return worker
}
// Start runs the backup loop until ctx is cancelled.
//
// When S3 is not configured the method logs and returns immediately.
// Otherwise a backup pass runs once per interval; the first pass happens
// one full interval after startup, and a failed pass is logged without
// stopping the loop.
func (w *CloudBackupWorker) Start(ctx context.Context) {
	if w.s3Service == nil {
		w.logger.Info("Cloud backup worker: S3 not configured, skipping")
		return
	}
	tick := time.NewTicker(w.interval)
	defer tick.Stop()
	w.logger.Info("Cloud backup worker started", zap.Duration("interval", w.interval))
	for {
		select {
		case <-tick.C:
			if err := w.runBackup(ctx); err != nil {
				w.logger.Error("Cloud backup failed", zap.Error(err))
			}
		case <-ctx.Done():
			return
		}
	}
}
// runBackup copies every tracked user file into a date-stamped backup
// prefix ("backup/YYYY-MM-DD/<original key>").
//
// Per-file download/upload failures are logged and skipped so one bad
// object cannot abort the whole pass; only the initial database listing
// (and context cancellation) is treated as fatal.
//
// NOTE(review): this loads all UserFile rows at once and buffers each
// object fully in memory — presumably fine at current volumes, but
// consider gorm's FindInBatches and streaming copies if the file set
// grows large. TODO confirm expected scale.
func (w *CloudBackupWorker) runBackup(ctx context.Context) error {
	prefix := "backup/" + time.Now().Format("2006-01-02") + "/"
	var files []models.UserFile
	if err := w.db.WithContext(ctx).Find(&files).Error; err != nil {
		return fmt.Errorf("list files: %w", err)
	}
	copied := 0
	for _, f := range files {
		// Fix: honor shutdown between files. Previously a cancelled context
		// only stopped the pass indirectly, once a download call failed.
		if err := ctx.Err(); err != nil {
			return err
		}
		backupKey := prefix + f.S3Key
		data, err := w.s3Service.DownloadFile(ctx, f.S3Key)
		if err != nil {
			w.logger.Warn("backup: skip file", zap.String("key", f.S3Key), zap.Error(err))
			continue
		}
		if _, err := w.s3Service.UploadFile(ctx, data, backupKey, f.MimeType); err != nil {
			w.logger.Warn("backup: upload failed", zap.String("key", backupKey), zap.Error(err))
			continue
		}
		copied++
	}
	if copied > 0 {
		w.logger.Info("Cloud backup completed", zap.Int("files_copied", copied), zap.String("prefix", prefix))
	}
	return nil
}

View file

@ -40,6 +40,27 @@ type UserDataExport struct {
Analytics []AnalyticsExport `json:"analytics"`
FederatedIDs []FederatedIDExport `json:"federated_identities"`
Roles []RoleExport `json:"roles"`
CloudFiles []CloudFileExport `json:"cloud_files,omitempty"`
Gear []GearExport `json:"gear,omitempty"`
}
// CloudFileExport represents cloud file metadata for export.
// Only metadata is included in the GDPR export — the file contents
// themselves are not embedded.
type CloudFileExport struct {
	ID        uuid.UUID `json:"id"`
	Filename  string    `json:"filename"`
	SizeBytes int64     `json:"size_bytes"`
	MimeType  string    `json:"mime_type"`
	CreatedAt time.Time `json:"created_at"`
}
// GearExport represents gear item metadata for export,
// mirroring the user-owned fields of models.GearItem.
type GearExport struct {
	ID        uuid.UUID `json:"id"`
	Name      string    `json:"name"`
	Category  string    `json:"category"`
	Brand     string    `json:"brand"`
	Model     string    `json:"model"`
	CreatedAt time.Time `json:"created_at"`
}
// UserProfileExport représente les données de profil utilisateur

View file

@ -0,0 +1,126 @@
package services
import (
"archive/zip"
"bytes"
"context"
"encoding/json"
"fmt"
"time"
"github.com/google/uuid"
"go.uber.org/zap"
"gorm.io/gorm"
"veza-backend-api/internal/models"
)
// GDPRExportService handles async GDPR data export with ZIP upload and notification.
// The export runs in a background goroutine: data is gathered via
// DataExportService, enriched with cloud-file and gear metadata, zipped,
// uploaded to S3, and the user is notified with a presigned download link.
type GDPRExportService struct {
	db                  *gorm.DB           // used to query cloud files and gear for the export
	dataExportService   *DataExportService // produces the base per-user data export
	s3Service           *S3StorageService  // destination for the ZIP archive; may be nil (upload skipped)
	notificationService *NotificationService // notifies the user of success/failure; may be nil
	logger              *zap.Logger
}
// NewGDPRExportService wires together the dependencies required for
// asynchronous GDPR exports and returns the ready-to-use service.
func NewGDPRExportService(
	db *gorm.DB,
	dataExportService *DataExportService,
	s3Service *S3StorageService,
	notificationService *NotificationService,
	logger *zap.Logger,
) *GDPRExportService {
	svc := &GDPRExportService{}
	svc.db = db
	svc.dataExportService = dataExportService
	svc.s3Service = s3Service
	svc.notificationService = notificationService
	svc.logger = logger
	return svc
}
// ExportUserDataAsync runs the GDPR export in a background goroutine and
// notifies the user when the archive is ready — or when the export fails.
//
// The incoming ctx is deliberately not used for the job itself: the HTTP
// request returns 202 immediately and the export must outlive it, so the
// goroutine runs under its own 15-minute timeout.
//
// NOTE(review): the goroutine is fire-and-forget — nothing waits for it on
// shutdown, so an in-flight export can be killed mid-way. Consider wiring
// it into the shutdown manager if that matters.
func (s *GDPRExportService) ExportUserDataAsync(ctx context.Context, userID uuid.UUID) {
	go func() {
		exportCtx, cancel := context.WithTimeout(context.Background(), 15*time.Minute)
		defer cancel()

		export, err := s.dataExportService.ExportUserData(exportCtx, userID)
		if err != nil {
			s.logger.Error("GDPR export failed", zap.Error(err), zap.String("user_id", userID.String()))
			s.notifyExportFailed(userID)
			return
		}

		// Enrich with cloud file metadata. Best-effort: a query error leaves
		// the section empty rather than failing the whole export.
		var cloudFiles []models.UserFile
		if err := s.db.WithContext(exportCtx).Where("user_id = ?", userID).Find(&cloudFiles).Error; err == nil {
			export.CloudFiles = make([]CloudFileExport, len(cloudFiles))
			for i, f := range cloudFiles {
				export.CloudFiles[i] = CloudFileExport{
					ID: f.ID, Filename: f.Filename, SizeBytes: f.SizeBytes, MimeType: f.MimeType, CreatedAt: f.CreatedAt,
				}
			}
		}

		// Enrich with gear metadata (same best-effort policy).
		var gear []models.GearItem
		if err := s.db.WithContext(exportCtx).Where("user_id = ?", userID).Find(&gear).Error; err == nil {
			export.Gear = make([]GearExport, len(gear))
			for i, g := range gear {
				export.Gear[i] = GearExport{
					ID: g.ID, Name: g.Name, Category: g.Category, Brand: g.Brand, Model: g.Model, CreatedAt: g.CreatedAt,
				}
			}
		}

		zipData, err := s.buildZip(export)
		if err != nil {
			s.logger.Error("GDPR export packaging failed", zap.Error(err), zap.String("user_id", userID.String()))
			// Fix: previously marshal/zip failures returned silently and the
			// user — already told "you will be notified" — never heard back.
			s.notifyExportFailed(userID)
			return
		}

		if s.s3Service == nil {
			s.logger.Warn("GDPR export: S3 not configured, skipping upload")
			return
		}
		key := fmt.Sprintf("exports/%s/veza-export-%s.zip", userID, time.Now().Format("20060102-150405"))
		if _, err := s.s3Service.UploadFile(exportCtx, zipData, key, "application/zip"); err != nil {
			s.logger.Error("GDPR export S3 upload failed", zap.Error(err))
			s.notifyExportFailed(userID) // fix: upload failures now notify the user too
			return
		}
		presignCtx, presignCancel := context.WithTimeout(context.Background(), 10*time.Second)
		defer presignCancel()
		url, err := s.s3Service.GetPresignedURL(presignCtx, key)
		if err != nil {
			s.logger.Error("GDPR export presign failed", zap.Error(err))
			s.notifyExportFailed(userID) // fix: presign failures now notify the user too
			return
		}
		if s.notificationService != nil {
			_ = s.notificationService.CreateNotification(userID, "export_ready", "Your data export is ready", "Click to download your personal data export (link expires in 1 hour).", url)
		}
	}()
}

// buildZip marshals the export payload to indented JSON and wraps it in a
// single-file ZIP archive named "veza-export.json".
func (s *GDPRExportService) buildZip(export interface{}) ([]byte, error) {
	jsonData, err := json.MarshalIndent(export, "", " ")
	if err != nil {
		return nil, fmt.Errorf("marshal export: %w", err)
	}
	var buf bytes.Buffer
	zw := zip.NewWriter(&buf)
	w, err := zw.Create("veza-export.json")
	if err != nil {
		return nil, fmt.Errorf("create zip entry: %w", err)
	}
	if _, err := w.Write(jsonData); err != nil {
		zw.Close()
		return nil, fmt.Errorf("write zip entry: %w", err)
	}
	if err := zw.Close(); err != nil {
		return nil, fmt.Errorf("close zip: %w", err)
	}
	return buf.Bytes(), nil
}

// notifyExportFailed sends a best-effort failure notification to the user;
// it is a no-op when the notification service is not configured.
func (s *GDPRExportService) notifyExportFailed(userID uuid.UUID) {
	if s.notificationService == nil {
		return
	}
	_ = s.notificationService.CreateNotification(userID, "export_failed", "Data export failed", "Your data export could not be completed. Please try again.", "")
}