adding initial backend API (Go)
This commit is contained in:
parent
6e2e16fbb5
commit
2425c15b09
707 changed files with 166497 additions and 0 deletions
61
veza-backend-api/.dockerignore
Normal file
61
veza-backend-api/.dockerignore
Normal file
|
|
@ -0,0 +1,61 @@
|
||||||
|
# Binaries
|
||||||
|
*.exe
|
||||||
|
*.exe~
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
bin/
|
||||||
|
veza-api
|
||||||
|
veza-api-simple
|
||||||
|
|
||||||
|
# Test files
|
||||||
|
*_test.go
|
||||||
|
*.test
|
||||||
|
testdata/
|
||||||
|
**/*_test.go
|
||||||
|
|
||||||
|
# Documentation
|
||||||
|
*.md
|
||||||
|
docs/
|
||||||
|
README.md
|
||||||
|
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
*~
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
logs/
|
||||||
|
|
||||||
|
# Environment
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
|
.env.*.local
|
||||||
|
|
||||||
|
# Build artifacts
|
||||||
|
*.out
|
||||||
|
coverage/
|
||||||
|
|
||||||
|
# Temporary files
|
||||||
|
tmp/
|
||||||
|
temp/
|
||||||
|
*.tmp
|
||||||
|
|
||||||
|
# Dependencies (will be installed in container)
|
||||||
|
vendor/
|
||||||
|
|
||||||
|
# Scripts (not needed in container)
|
||||||
|
scripts/
|
||||||
|
|
||||||
1017
veza-backend-api/AUDIT_BACKEND_GO.md
Normal file
1017
veza-backend-api/AUDIT_BACKEND_GO.md
Normal file
File diff suppressed because it is too large
Load diff
63
veza-backend-api/Dockerfile
Normal file
63
veza-backend-api/Dockerfile
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
# Build stage
|
||||||
|
FROM golang:1.23-alpine AS builder
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install build dependencies
|
||||||
|
RUN apk add --no-cache git ca-certificates tzdata
|
||||||
|
|
||||||
|
# Copy go mod files first for better caching
|
||||||
|
COPY go.mod go.sum ./
|
||||||
|
|
||||||
|
# Download dependencies (this layer will be cached if go.mod/go.sum don't change)
|
||||||
|
RUN go mod download
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build the application
|
||||||
|
# Using CGO_ENABLED=0 for static binary and smaller size
|
||||||
|
# Using -ldflags to reduce binary size
|
||||||
|
RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build \
|
||||||
|
-a -installsuffix cgo \
|
||||||
|
-ldflags="-w -s" \
|
||||||
|
-o veza-api \
|
||||||
|
./cmd/api/main.go
|
||||||
|
|
||||||
|
# Runtime stage
|
||||||
|
FROM alpine:latest
|
||||||
|
|
||||||
|
# Install runtime dependencies
|
||||||
|
RUN apk --no-cache add ca-certificates tzdata wget
|
||||||
|
|
||||||
|
# Create non-root user for security
|
||||||
|
RUN addgroup -g 1001 -S app && \
|
||||||
|
adduser -S app -u 1001 -G app
|
||||||
|
|
||||||
|
# Create app directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy binary from builder
|
||||||
|
COPY --from=builder /app/veza-api /app/veza-api
|
||||||
|
|
||||||
|
# Copy docs directory if it exists (generated by swaggo)
|
||||||
|
COPY --from=builder /app/docs /app/docs
|
||||||
|
|
||||||
|
# Copy migrations if they exist
|
||||||
|
COPY --from=builder /app/migrations /app/migrations
|
||||||
|
|
||||||
|
# Change ownership to non-root user
|
||||||
|
RUN chown -R app:app /app
|
||||||
|
|
||||||
|
# Switch to non-root user
|
||||||
|
USER app
|
||||||
|
|
||||||
|
# Expose port
|
||||||
|
EXPOSE 8080
|
||||||
|
|
||||||
|
# Health check
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
|
CMD wget --no-verbose --tries=1 --spider http://localhost:8080/health || exit 1
|
||||||
|
|
||||||
|
# Run the application
|
||||||
|
CMD ["./veza-api"]
|
||||||
67
veza-backend-api/Dockerfile.production
Normal file
67
veza-backend-api/Dockerfile.production
Normal file
|
|
@ -0,0 +1,67 @@
|
||||||
|
# Production Dockerfile for Backend API
|
||||||
|
# Optimized for smaller size and security
|
||||||
|
|
||||||
|
# Build stage
|
||||||
|
FROM golang:1.23-alpine AS builder
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install build dependencies
|
||||||
|
RUN apk add --no-cache git ca-certificates tzdata
|
||||||
|
|
||||||
|
# Copy go mod files first for better caching
|
||||||
|
COPY go.mod go.sum ./
|
||||||
|
|
||||||
|
# Download dependencies (this layer will be cached if go.mod/go.sum don't change)
|
||||||
|
RUN go mod download
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build the application with optimizations
|
||||||
|
# - CGO_ENABLED=0: static binary, no C dependencies
|
||||||
|
# - -ldflags="-w -s": strip debug info and symbol table
|
||||||
|
# - -trimpath: remove file system paths from binaries
|
||||||
|
RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build \
|
||||||
|
-a -installsuffix cgo \
|
||||||
|
-ldflags="-w -s -extldflags '-static'" \
|
||||||
|
-trimpath \
|
||||||
|
-o veza-api \
|
||||||
|
./main.go
|
||||||
|
|
||||||
|
# Runtime stage - minimal alpine
|
||||||
|
FROM alpine:latest
|
||||||
|
|
||||||
|
# Install only runtime dependencies
|
||||||
|
RUN apk --no-cache add ca-certificates tzdata && \
|
||||||
|
# Add wget for health checks
|
||||||
|
apk --no-cache add wget && \
|
||||||
|
# Clean up apk cache
|
||||||
|
rm -rf /var/cache/apk/*
|
||||||
|
|
||||||
|
# Create non-root user for security
|
||||||
|
RUN addgroup -g 1001 -S app && \
|
||||||
|
adduser -S app -u 1001 -G app -h /app -s /bin/sh
|
||||||
|
|
||||||
|
# Create app directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy binary from builder
|
||||||
|
COPY --from=builder --chown=app:app /app/veza-api /app/veza-api
|
||||||
|
|
||||||
|
# Copy migrations if they exist
|
||||||
|
COPY --from=builder --chown=app:app /app/migrations /app/migrations 2>/dev/null || true
|
||||||
|
|
||||||
|
# Switch to non-root user
|
||||||
|
USER app
|
||||||
|
|
||||||
|
# Expose port
|
||||||
|
EXPOSE 8080
|
||||||
|
|
||||||
|
# Health check
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
|
CMD wget --no-verbose --tries=1 --spider http://localhost:8080/health || exit 1
|
||||||
|
|
||||||
|
# Run the application
|
||||||
|
ENTRYPOINT ["./veza-api"]
|
||||||
|
|
||||||
185
veza-backend-api/Makefile
Normal file
185
veza-backend-api/Makefile
Normal file
|
|
@ -0,0 +1,185 @@
|
||||||
|
# Makefile pour Veza Backend API
|
||||||
|
# Ce Makefile facilite le développement et la maintenance du backend Go
|
||||||
|
|
||||||
|
.PHONY: help build test clean lint format vet tidy deps install run dev docker-build docker-run
|
||||||
|
|
||||||
|
# Variables
|
||||||
|
BINARY_NAME=veza-backend-api
|
||||||
|
DOCKER_IMAGE=veza-backend-api
|
||||||
|
DOCKER_TAG=latest
|
||||||
|
GO_VERSION=1.21
|
||||||
|
LINT_VERSION=1.54.2
|
||||||
|
|
||||||
|
# Couleurs pour les messages
|
||||||
|
GREEN=\033[0;32m
|
||||||
|
YELLOW=\033[1;33m
|
||||||
|
RED=\033[0;31m
|
||||||
|
NC=\033[0m # No Color
|
||||||
|
|
||||||
|
# Aide par défaut
|
||||||
|
help: ## Affiche cette aide
|
||||||
|
@echo "$(GREEN)Veza Backend API - Makefile$(NC)"
|
||||||
|
@echo ""
|
||||||
|
@echo "$(YELLOW)Commandes disponibles:$(NC)"
|
||||||
|
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf " $(GREEN)%-15s$(NC) %s\n", $$1, $$2}' $(MAKEFILE_LIST)
|
||||||
|
|
||||||
|
# Développement
|
||||||
|
build: ## Compile l'application
|
||||||
|
@echo "$(GREEN)🔨 Compilation de l'application...$(NC)"
|
||||||
|
@go build -o bin/$(BINARY_NAME) ./cmd/modern-server/main.go
|
||||||
|
@echo "$(GREEN)✅ Compilation terminée: bin/$(BINARY_NAME)$(NC)"
|
||||||
|
|
||||||
|
build-linux: ## Compile l'application pour Linux
|
||||||
|
@echo "$(GREEN)🔨 Compilation pour Linux...$(NC)"
|
||||||
|
@GOOS=linux GOARCH=amd64 go build -o bin/$(BINARY_NAME)-linux ./cmd/modern-server/main.go
|
||||||
|
@echo "$(GREEN)✅ Compilation Linux terminée: bin/$(BINARY_NAME)-linux$(NC)"
|
||||||
|
|
||||||
|
# Tests
|
||||||
|
test: ## Exécute tous les tests
|
||||||
|
@echo "$(GREEN)🧪 Exécution des tests...$(NC)"
|
||||||
|
@go test -v ./...
|
||||||
|
|
||||||
|
test-coverage: ## Exécute les tests avec couverture
|
||||||
|
@echo "$(GREEN)🧪 Tests avec couverture...$(NC)"
|
||||||
|
@go test -coverprofile=coverage.out ./...
|
||||||
|
@go tool cover -html=coverage.out -o coverage.html
|
||||||
|
@echo "$(GREEN)✅ Rapport de couverture généré: coverage.html$(NC)"
|
||||||
|
|
||||||
|
test-race: ## Exécute les tests avec détection de race conditions
|
||||||
|
@echo "$(GREEN)🧪 Tests avec détection de race conditions...$(NC)"
|
||||||
|
@go test -race ./...
|
||||||
|
|
||||||
|
# Qualité du code
|
||||||
|
lint: ## Exécute golangci-lint
|
||||||
|
@echo "$(GREEN)🔍 Vérification avec golangci-lint...$(NC)"
|
||||||
|
@if command -v golangci-lint >/dev/null 2>&1; then \
|
||||||
|
golangci-lint run; \
|
||||||
|
else \
|
||||||
|
echo "$(YELLOW)⚠️ golangci-lint non installé. Installation...$(NC)"; \
|
||||||
|
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v$(LINT_VERSION); \
|
||||||
|
golangci-lint run; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
format: ## Formate le code Go
|
||||||
|
@echo "$(GREEN)🎨 Formatage du code...$(NC)"
|
||||||
|
@go fmt ./...
|
||||||
|
@if command -v goimports >/dev/null 2>&1; then \
|
||||||
|
find . -name "*.go" -not -path "./vendor/*" | xargs goimports -w; \
|
||||||
|
else \
|
||||||
|
echo "$(YELLOW)⚠️ goimports non installé. Installation...$(NC)"; \
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest; \
|
||||||
|
find . -name "*.go" -not -path "./vendor/*" | xargs goimports -w; \
|
||||||
|
fi
|
||||||
|
@echo "$(GREEN)✅ Code formaté$(NC)"
|
||||||
|
|
||||||
|
vet: ## Exécute go vet
|
||||||
|
@echo "$(GREEN)🔍 Vérification avec go vet...$(NC)"
|
||||||
|
@go vet ./...
|
||||||
|
|
||||||
|
# Dépendances
|
||||||
|
deps: ## Installe les dépendances
|
||||||
|
@echo "$(GREEN)📦 Installation des dépendances...$(NC)"
|
||||||
|
@go mod download
|
||||||
|
@go mod tidy
|
||||||
|
@echo "$(GREEN)✅ Dépendances installées$(NC)"
|
||||||
|
|
||||||
|
install: ## Installe l'application
|
||||||
|
@echo "$(GREEN)📦 Installation de l'application...$(NC)"
|
||||||
|
@go install ./cmd/modern-server/main.go
|
||||||
|
@echo "$(GREEN)✅ Application installée$(NC)"
|
||||||
|
|
||||||
|
# Nettoyage
|
||||||
|
clean: ## Nettoie les fichiers générés
|
||||||
|
@echo "$(GREEN)🧹 Nettoyage...$(NC)"
|
||||||
|
@rm -rf bin/
|
||||||
|
@rm -f coverage.out coverage.html
|
||||||
|
@go clean
|
||||||
|
@echo "$(GREEN)✅ Nettoyage terminé$(NC)"
|
||||||
|
|
||||||
|
# Exécution
|
||||||
|
run: build ## Compile et exécute l'application
|
||||||
|
@echo "$(GREEN)🚀 Démarrage de l'application...$(NC)"
|
||||||
|
@./bin/$(BINARY_NAME)
|
||||||
|
|
||||||
|
dev: ## Exécute l'application en mode développement
|
||||||
|
@echo "$(GREEN)🚀 Mode développement...$(NC)"
|
||||||
|
@go run ./cmd/modern-server/main.go
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-build: ## Construit l'image Docker
|
||||||
|
@echo "$(GREEN)🐳 Construction de l'image Docker...$(NC)"
|
||||||
|
@docker build -t $(DOCKER_IMAGE):$(DOCKER_TAG) .
|
||||||
|
@echo "$(GREEN)✅ Image Docker construite: $(DOCKER_IMAGE):$(DOCKER_TAG)$(NC)"
|
||||||
|
|
||||||
|
docker-run: docker-build ## Construit et exécute l'image Docker
|
||||||
|
@echo "$(GREEN)🐳 Exécution de l'image Docker...$(NC)"
|
||||||
|
@docker run -p 8080:8080 $(DOCKER_IMAGE):$(DOCKER_TAG)
|
||||||
|
|
||||||
|
# Outils de développement
|
||||||
|
install-tools: ## Installe les outils de développement
|
||||||
|
@echo "$(GREEN)🛠️ Installation des outils de développement...$(NC)"
|
||||||
|
@go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
@go install github.com/golangci/golangci-lint/cmd/golangci-lint@v$(LINT_VERSION)
|
||||||
|
@go install github.com/securecodewarrior/gosec/v2/cmd/gosec@latest
|
||||||
|
@go install honnef.co/go/tools/cmd/staticcheck@latest
|
||||||
|
@echo "$(GREEN)✅ Outils installés$(NC)"
|
||||||
|
|
||||||
|
# Sécurité
|
||||||
|
security: ## Exécute les vérifications de sécurité
|
||||||
|
@echo "$(GREEN)🔒 Vérifications de sécurité...$(NC)"
|
||||||
|
@if command -v gosec >/dev/null 2>&1; then \
|
||||||
|
gosec ./...; \
|
||||||
|
else \
|
||||||
|
echo "$(YELLOW)⚠️ gosec non installé. Installation...$(NC)"; \
|
||||||
|
go install github.com/securecodewarrior/gosec/v2/cmd/gosec@latest; \
|
||||||
|
gosec ./...; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Performance
|
||||||
|
benchmark: ## Exécute les benchmarks
|
||||||
|
@echo "$(GREEN)⚡ Exécution des benchmarks...$(NC)"
|
||||||
|
@go test -bench=. ./...
|
||||||
|
|
||||||
|
# Documentation
|
||||||
|
docs: ## Génère la documentation
|
||||||
|
@echo "$(GREEN)📚 Génération de la documentation...$(NC)"
|
||||||
|
@go doc -all ./... > docs.txt
|
||||||
|
@echo "$(GREEN)✅ Documentation générée: docs.txt$(NC)"
|
||||||
|
|
||||||
|
# Scripts personnalisés
|
||||||
|
cleanup: ## Exécute le script de nettoyage
|
||||||
|
@echo "$(GREEN)🧹 Exécution du script de nettoyage...$(NC)"
|
||||||
|
@./scripts/cleanup-go.sh
|
||||||
|
|
||||||
|
# CI/CD
|
||||||
|
ci: deps lint test build ## Pipeline CI complet
|
||||||
|
@echo "$(GREEN)✅ Pipeline CI terminé$(NC)"
|
||||||
|
|
||||||
|
# Déploiement
|
||||||
|
deploy-staging: build-linux ## Déploie en staging
|
||||||
|
@echo "$(GREEN)🚀 Déploiement en staging...$(NC)"
|
||||||
|
@echo "$(YELLOW)⚠️ Déploiement en staging non implémenté$(NC)"
|
||||||
|
|
||||||
|
deploy-production: build-linux ## Déploie en production
|
||||||
|
@echo "$(GREEN)🚀 Déploiement en production...$(NC)"
|
||||||
|
@echo "$(YELLOW)⚠️ Déploiement en production non implémenté$(NC)"
|
||||||
|
|
||||||
|
# Monitoring
|
||||||
|
health: ## Vérifie la santé de l'application
|
||||||
|
@echo "$(GREEN)🏥 Vérification de la santé...$(NC)"
|
||||||
|
@curl -f http://localhost:8080/health || echo "$(RED)❌ Application non accessible$(NC)"
|
||||||
|
|
||||||
|
# Base de données
|
||||||
|
migrate: ## Exécute les migrations de base de données
|
||||||
|
@echo "$(GREEN)🗄️ Exécution des migrations...$(NC)"
|
||||||
|
@go run cmd/migrate_tool/main.go
|
||||||
|
@echo "$(GREEN)✅ Migrations terminées$(NC)"
|
||||||
|
|
||||||
|
db-migrate: migrate ## Alias pour migrate
|
||||||
|
|
||||||
|
db-seed: ## Peuple la base de données avec des données de test
|
||||||
|
@echo "$(GREEN)🌱 Peuplement de la base de données...$(NC)"
|
||||||
|
@echo "$(YELLOW)⚠️ Seeding non implémenté$(NC)"
|
||||||
|
|
||||||
|
# Par défaut
|
||||||
|
.DEFAULT_GOAL := help
|
||||||
133
veza-backend-api/cmd/api/main.go
Normal file
133
veza-backend-api/cmd/api/main.go
Normal file
|
|
@ -0,0 +1,133 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/joho/godotenv"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/api"
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
|
||||||
|
_ "veza-backend-api/docs" // Import docs for swagger
|
||||||
|
)
|
||||||
|
|
||||||
|
// @title Veza Backend API
|
||||||
|
// @version 1.2.0
|
||||||
|
// @description Backend API for Veza platform.
|
||||||
|
// @termsOfService http://swagger.io/terms/
|
||||||
|
|
||||||
|
// @contact.name API Support
|
||||||
|
// @contact.url http://www.veza.app/support
|
||||||
|
// @contact.email support@veza.app
|
||||||
|
|
||||||
|
// @license.name Apache 2.0
|
||||||
|
// @license.url http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
|
||||||
|
// @host localhost:8080
|
||||||
|
// @BasePath /api/v1
|
||||||
|
|
||||||
|
// @securityDefinitions.apikey BearerAuth
|
||||||
|
// @in header
|
||||||
|
// @name Authorization
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Charger les variables d'environnement
|
||||||
|
if err := godotenv.Load(); err != nil {
|
||||||
|
log.Printf("ℹ️ Note: Fichier .env non trouvé, utilisation des variables d'environnement système")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration du logger
|
||||||
|
logger, err := zap.NewProduction()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Impossible d'initialiser le logger: %v", err)
|
||||||
|
}
|
||||||
|
defer logger.Sync()
|
||||||
|
|
||||||
|
logger.Info("🚀 Démarrage de Veza Backend API")
|
||||||
|
|
||||||
|
// Charger la configuration
|
||||||
|
cfg, err := config.NewConfig()
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal("❌ Impossible de charger la configuration", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider la configuration
|
||||||
|
if err := cfg.Validate(); err != nil {
|
||||||
|
logger.Fatal("❌ Configuration invalide", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialisation de la base de données
|
||||||
|
db := cfg.Database
|
||||||
|
if db == nil {
|
||||||
|
logger.Fatal("❌ Base de données non initialisée")
|
||||||
|
}
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
if err := db.Initialize(); err != nil {
|
||||||
|
logger.Fatal("❌ Impossible d'initialiser la base de données", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration du mode Gin
|
||||||
|
// Correction: Utilisation directe de la variable d'env car non exposée dans Config
|
||||||
|
appEnv := os.Getenv("APP_ENV")
|
||||||
|
if appEnv == "production" {
|
||||||
|
gin.SetMode(gin.ReleaseMode)
|
||||||
|
} else {
|
||||||
|
gin.SetMode(gin.DebugMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer le router Gin
|
||||||
|
router := gin.New()
|
||||||
|
|
||||||
|
// Middleware globaux (Logger, Recovery) recommandés par ORIGIN
|
||||||
|
router.Use(gin.Logger(), gin.Recovery())
|
||||||
|
|
||||||
|
// Configuration des routes
|
||||||
|
apiRouter := api.NewAPIRouter(db, cfg) // Instantiate APIRouter
|
||||||
|
apiRouter.Setup(router) // Call its Setup method
|
||||||
|
|
||||||
|
// Configuration du serveur HTTP
|
||||||
|
port := fmt.Sprintf("%d", cfg.AppPort)
|
||||||
|
if cfg.AppPort == 0 {
|
||||||
|
port = "8080"
|
||||||
|
}
|
||||||
|
|
||||||
|
server := &http.Server{
|
||||||
|
Addr: fmt.Sprintf(":%s", port),
|
||||||
|
Handler: router,
|
||||||
|
ReadTimeout: 30 * time.Second, // Standards ORIGIN
|
||||||
|
WriteTimeout: 30 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gestion de l'arrêt gracieux
|
||||||
|
quit := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
logger.Info("🌐 Serveur HTTP démarré", zap.String("port", port))
|
||||||
|
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||||
|
logger.Fatal("❌ Erreur du serveur HTTP", zap.Error(err))
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
<-quit
|
||||||
|
logger.Info("🔄 Arrêt du serveur...")
|
||||||
|
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
if err := server.Shutdown(ctx); err != nil {
|
||||||
|
logger.Error("❌ Erreur lors de l'arrêt", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
logger.Info("✅ Serveur arrêté proprement")
|
||||||
|
}
|
||||||
|
}
|
||||||
32
veza-backend-api/cmd/generate-config-docs/main.go
Normal file
32
veza-backend-api/cmd/generate-config-docs/main.go
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Générer la documentation
|
||||||
|
docs := config.GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Déterminer le chemin du fichier (relatif à la racine du projet)
|
||||||
|
outputPath := filepath.Join("docs", "CONFIGURATION.md")
|
||||||
|
|
||||||
|
// Créer le répertoire docs s'il n'existe pas
|
||||||
|
docsDir := filepath.Dir(outputPath)
|
||||||
|
if err := os.MkdirAll(docsDir, 0755); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error creating docs directory: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Écrire le fichier
|
||||||
|
if err := os.WriteFile(outputPath, []byte(docs), 0644); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error writing file: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("✅ CONFIGURATION.md generated successfully at %s\n", outputPath)
|
||||||
|
}
|
||||||
78
veza-backend-api/cmd/main.go.legacy
Normal file
78
veza-backend-api/cmd/main.go.legacy
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Initialiser la configuration
|
||||||
|
cfg, err := config.NewConfig()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to initialize configuration: %v", err)
|
||||||
|
}
|
||||||
|
defer cfg.Close()
|
||||||
|
|
||||||
|
// Configurer Gin
|
||||||
|
if os.Getenv("GIN_MODE") == "release" {
|
||||||
|
gin.SetMode(gin.ReleaseMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer le router
|
||||||
|
router := gin.New()
|
||||||
|
|
||||||
|
// Configurer les middlewares globaux
|
||||||
|
cfg.SetupMiddleware(router)
|
||||||
|
|
||||||
|
// Configurer les routes
|
||||||
|
cfg.SetupRoutes(router)
|
||||||
|
|
||||||
|
// Configuration du serveur
|
||||||
|
port := os.Getenv("PORT")
|
||||||
|
if port == "" {
|
||||||
|
port = "8080"
|
||||||
|
}
|
||||||
|
|
||||||
|
server := &http.Server{
|
||||||
|
Addr: ":" + port,
|
||||||
|
Handler: router,
|
||||||
|
ReadTimeout: 15 * time.Second,
|
||||||
|
WriteTimeout: 15 * time.Second,
|
||||||
|
IdleTimeout: 60 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Démarrer le serveur en arrière-plan
|
||||||
|
go func() {
|
||||||
|
cfg.Logger.Info("Starting server", zap.String("port", port))
|
||||||
|
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||||
|
cfg.Logger.Fatal("Failed to start server", zap.Error(err))
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Attendre un signal d'arrêt
|
||||||
|
quit := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
<-quit
|
||||||
|
|
||||||
|
cfg.Logger.Info("Shutting down server...")
|
||||||
|
|
||||||
|
// Arrêter le serveur gracieusement
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
if err := server.Shutdown(ctx); err != nil {
|
||||||
|
cfg.Logger.Fatal("Server forced to shutdown", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.Logger.Info("Server exited")
|
||||||
|
}
|
||||||
45
veza-backend-api/cmd/migrate_tool/main.go
Normal file
45
veza-backend-api/cmd/migrate_tool/main.go
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/database"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
logger, _ := zap.NewProduction()
|
||||||
|
|
||||||
|
// Override config from env
|
||||||
|
cfg := &database.Config{
|
||||||
|
Host: getEnv("DB_HOST", "localhost"),
|
||||||
|
Port: getEnv("DB_PORT", "5432"),
|
||||||
|
Username: getEnv("DB_USER", "veza"),
|
||||||
|
Password: getEnv("DB_PASSWORD", "veza"),
|
||||||
|
Database: getEnv("DB_NAME", "veza"),
|
||||||
|
SSLMode: "disable",
|
||||||
|
MaxRetries: 5,
|
||||||
|
RetryInterval: 2 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := database.NewDatabaseWithRetry(cfg, logger)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to connect: %v", err)
|
||||||
|
}
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
if err := db.RunMigrations(); err != nil {
|
||||||
|
log.Fatalf("Migration failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("Migrations completed successfully")
|
||||||
|
}
|
||||||
|
|
||||||
|
func getEnv(key, fallback string) string {
|
||||||
|
if v := os.Getenv(key); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
return fallback
|
||||||
|
}
|
||||||
142
veza-backend-api/cmd/modern-server/main.go
Normal file
142
veza-backend-api/cmd/modern-server/main.go
Normal file
|
|
@ -0,0 +1,142 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/joho/godotenv"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/api"
|
||||||
|
// TODO: Réactiver internal/api/handlers après stabilisation du noyau
|
||||||
|
// "veza-backend-api/internal/api/handlers"
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
// TODO: Réactiver services après stabilisation du noyau
|
||||||
|
// "veza-backend-api/internal/services"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Charger les variables d'environnement depuis le fichier .env
|
||||||
|
if err := godotenv.Load(); err != nil {
|
||||||
|
log.Printf("⚠️ Impossible de charger le fichier .env: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration du logger
|
||||||
|
logger, err := zap.NewProduction()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Impossible d'initialiser le logger: %v", err)
|
||||||
|
}
|
||||||
|
defer logger.Sync()
|
||||||
|
|
||||||
|
logger.Info("🚀 Démarrage du serveur Veza Backend API (Architecture Moderne)")
|
||||||
|
|
||||||
|
// Charger la configuration
|
||||||
|
cfg, err := config.NewConfig()
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal("❌ Impossible de charger la configuration", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider la configuration
|
||||||
|
if err := cfg.Validate(); err != nil {
|
||||||
|
logger.Fatal("❌ Configuration invalide", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("✅ Configuration validée avec succès")
|
||||||
|
|
||||||
|
// La base de données est déjà initialisée dans config.NewConfig()
|
||||||
|
db := cfg.Database
|
||||||
|
if db == nil {
|
||||||
|
logger.Fatal("❌ Base de données non initialisée")
|
||||||
|
}
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
// Initialiser la base de données (migrations, etc.)
|
||||||
|
if err := db.Initialize(); err != nil {
|
||||||
|
logger.Fatal("❌ Impossible d'initialiser la base de données", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Réactiver les services après stabilisation du noyau et alignement des signatures
|
||||||
|
// Initialiser les services
|
||||||
|
// authService := services.NewAuthService(db, &cfg.JWT, logger)
|
||||||
|
// oauthService := services.NewOAuthService(db, cfg, logger)
|
||||||
|
// chatService := services.NewChatService(db, logger)
|
||||||
|
// twoFactorService := services.NewTwoFactorService(db, logger)
|
||||||
|
// rbacService := services.NewRBACService(db, logger)
|
||||||
|
|
||||||
|
// TODO: Réactiver les handlers après stabilisation du noyau et alignement des services
|
||||||
|
// Initialiser les handlers
|
||||||
|
// handlers.InitHandlers(authService, logger)
|
||||||
|
// handlers.InitOAuthHandlers(oauthService, authService, logger)
|
||||||
|
// handlers.InitChatHandlers(chatService, logger)
|
||||||
|
// handlers.InitTwoFactorHandlers(twoFactorService, authService, logger)
|
||||||
|
// handlers.InitRBACHandlers(rbacService, logger)
|
||||||
|
|
||||||
|
// Configuration de Gin selon l'environnement
|
||||||
|
gin.SetMode(gin.DebugMode) // TODO: Utiliser cfg.LogLevel pour déterminer le mode
|
||||||
|
|
||||||
|
// Créer le router Gin
|
||||||
|
router := gin.New()
|
||||||
|
|
||||||
|
// Configuration des routes avec la nouvelle architecture
|
||||||
|
apiRouter := api.NewAPIRouter(db, cfg) // Instantiate APIRouter
|
||||||
|
apiRouter.Setup(router) // Call its Setup method
|
||||||
|
|
||||||
|
// Configuration du serveur HTTP
|
||||||
|
port := fmt.Sprintf("%d", cfg.AppPort)
|
||||||
|
if port == "0" {
|
||||||
|
port = "8080"
|
||||||
|
}
|
||||||
|
server := &http.Server{
|
||||||
|
Addr: fmt.Sprintf(":%s", port),
|
||||||
|
Handler: router,
|
||||||
|
// TODO: Ajouter ReadTimeout et WriteTimeout si nécessaire
|
||||||
|
}
|
||||||
|
|
||||||
|
// Canal pour écouter les signaux du système
|
||||||
|
quit := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
|
||||||
|
// Démarrer le serveur dans une goroutine
|
||||||
|
go func() {
|
||||||
|
logger.Info("🌐 Serveur HTTP démarré",
|
||||||
|
zap.String("port", port),
|
||||||
|
)
|
||||||
|
|
||||||
|
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||||
|
logger.Fatal("❌ Erreur du serveur HTTP", zap.Error(err))
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
logger.Info("✅ Serveur Veza Backend API prêt à recevoir des requêtes")
|
||||||
|
logger.Info("📋 Endpoints disponibles:")
|
||||||
|
logger.Info(" - GET /health - Health check global")
|
||||||
|
logger.Info(" - POST /api/v1/auth/register - Inscription utilisateur")
|
||||||
|
logger.Info(" - POST /api/v1/auth/login - Connexion utilisateur")
|
||||||
|
logger.Info(" - POST /api/v1/auth/refresh - Renouvellement de token")
|
||||||
|
logger.Info(" - POST /api/v1/auth/logout - Déconnexion utilisateur")
|
||||||
|
logger.Info(" - GET /api/v1/profile - Profil utilisateur")
|
||||||
|
logger.Info(" - PUT /api/v1/profile - Mise à jour profil")
|
||||||
|
logger.Info(" - GET /api/v1/health/detailed - Health check détaillé")
|
||||||
|
|
||||||
|
// Attendre un signal d'arrêt
|
||||||
|
<-quit
|
||||||
|
logger.Info("🔄 Arrêt du serveur en cours...")
|
||||||
|
|
||||||
|
// Créer un contexte avec timeout pour l'arrêt gracieux
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) // TODO: Utiliser config pour timeout
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// Arrêt gracieux du serveur
|
||||||
|
if err := server.Shutdown(ctx); err != nil {
|
||||||
|
logger.Error("❌ Erreur lors de l'arrêt du serveur", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
logger.Info("✅ Serveur arrêté proprement")
|
||||||
|
}
|
||||||
|
}
|
||||||
143
veza-backend-api/cmd/simple_main.go
Normal file
143
veza-backend-api/cmd/simple_main.go
Normal file
|
|
@ -0,0 +1,143 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/redis/go-redis/v9"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Initialiser le logger
|
||||||
|
logger, err := zap.NewProduction()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to initialize logger: %v", err)
|
||||||
|
}
|
||||||
|
defer logger.Sync()
|
||||||
|
|
||||||
|
// Initialiser Redis
|
||||||
|
redisClient, err := initRedis("redis://localhost:6379")
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to initialize Redis", zap.Error(err))
|
||||||
|
// Continuer sans Redis pour les tests
|
||||||
|
redisClient = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configurer Gin
|
||||||
|
if os.Getenv("GIN_MODE") == "release" {
|
||||||
|
gin.SetMode(gin.ReleaseMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer le router
|
||||||
|
router := gin.New()
|
||||||
|
|
||||||
|
// Middleware de logging
|
||||||
|
router.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
|
||||||
|
logger.Info("HTTP Request",
|
||||||
|
zap.String("method", param.Method),
|
||||||
|
zap.String("path", param.Path),
|
||||||
|
zap.Int("status", param.StatusCode),
|
||||||
|
zap.Duration("latency", param.Latency),
|
||||||
|
zap.String("client_ip", param.ClientIP),
|
||||||
|
)
|
||||||
|
return ""
|
||||||
|
}))
|
||||||
|
|
||||||
|
// Middleware de récupération d'erreurs
|
||||||
|
router.Use(gin.Recovery())
|
||||||
|
|
||||||
|
// Middleware CORS
|
||||||
|
router.Use(func(c *gin.Context) {
|
||||||
|
c.Header("Access-Control-Allow-Origin", "*")
|
||||||
|
c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
|
||||||
|
c.Header("Access-Control-Allow-Headers", "Origin, Content-Type, Accept, Authorization")
|
||||||
|
c.Header("Access-Control-Max-Age", "86400")
|
||||||
|
|
||||||
|
if c.Request.Method == "OPTIONS" {
|
||||||
|
c.AbortWithStatus(204)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Next()
|
||||||
|
})
|
||||||
|
|
||||||
|
// Routes de test
|
||||||
|
router.GET("/health", func(c *gin.Context) {
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"status": "ok",
|
||||||
|
"timestamp": time.Now(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
router.GET("/test", func(c *gin.Context) {
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"message": "Test endpoint",
|
||||||
|
"redis_connected": redisClient != nil,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Configuration du serveur
|
||||||
|
port := os.Getenv("PORT")
|
||||||
|
if port == "" {
|
||||||
|
port = "8080"
|
||||||
|
}
|
||||||
|
|
||||||
|
server := &http.Server{
|
||||||
|
Addr: ":" + port,
|
||||||
|
Handler: router,
|
||||||
|
ReadTimeout: 15 * time.Second,
|
||||||
|
WriteTimeout: 15 * time.Second,
|
||||||
|
IdleTimeout: 60 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Démarrer le serveur en arrière-plan
|
||||||
|
go func() {
|
||||||
|
logger.Info("Starting server", zap.String("port", port))
|
||||||
|
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||||
|
logger.Fatal("Failed to start server", zap.Error(err))
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Attendre un signal d'arrêt
|
||||||
|
quit := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
<-quit
|
||||||
|
|
||||||
|
logger.Info("Shutting down server...")
|
||||||
|
|
||||||
|
// Arrêter le serveur gracieusement
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
if err := server.Shutdown(ctx); err != nil {
|
||||||
|
logger.Fatal("Server forced to shutdown", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("Server exited")
|
||||||
|
}
|
||||||
|
|
||||||
|
// initRedis initialise la connexion Redis
|
||||||
|
func initRedis(redisURL string) (*redis.Client, error) {
|
||||||
|
opts, err := redis.ParseURL(redisURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
client := redis.NewClient(opts)
|
||||||
|
|
||||||
|
// Test de connexion
|
||||||
|
ctx := context.Background()
|
||||||
|
_, err = client.Ping(ctx).Result()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return client, nil
|
||||||
|
}
|
||||||
30
veza-backend-api/coverage.out
Normal file
30
veza-backend-api/coverage.out
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
mode: set
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:21.59,23.2 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:26.94,28.71 2 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:28.71,30.3 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:31.2,31.25 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:35.116,37.85 2 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:37.85,38.45 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:38.45,40.4 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:41.3,41.62 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:43.2,43.25 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:47.104,48.71 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:48.71,50.3 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:51.2,51.12 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:55.111,60.75 2 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:60.75,62.3 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:63.2,63.12 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:67.113,71.25 2 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:71.25,73.3 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:74.2,74.30 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:74.30,76.3 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:77.2,77.12 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:81.116,87.40 2 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:87.40,89.3 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:90.2,90.25 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:94.107,101.16 3 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:101.16,103.3 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:104.2,104.23 1 0
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:108.119,116.16 3 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:116.16,118.3 1 1
|
||||||
|
/home/senke/Documents/veza-full-stack/veza-backend-api/internal/services/permission_service.go:119.2,119.23 1 0
|
||||||
446
veza-backend-api/docs/docs.go
Normal file
446
veza-backend-api/docs/docs.go
Normal file
|
|
@ -0,0 +1,446 @@
|
||||||
|
// Package docs Code generated by swaggo/swag. DO NOT EDIT
|
||||||
|
package docs
|
||||||
|
|
||||||
|
import "github.com/swaggo/swag"
|
||||||
|
|
||||||
|
const docTemplate = `{
|
||||||
|
"schemes": {{ marshal .Schemes }},
|
||||||
|
"swagger": "2.0",
|
||||||
|
"info": {
|
||||||
|
"description": "{{escape .Description}}",
|
||||||
|
"title": "{{.Title}}",
|
||||||
|
"termsOfService": "http://swagger.io/terms/",
|
||||||
|
"contact": {
|
||||||
|
"name": "API Support",
|
||||||
|
"url": "http://www.veza.app/support",
|
||||||
|
"email": "support@veza.app"
|
||||||
|
},
|
||||||
|
"license": {
|
||||||
|
"name": "Apache 2.0",
|
||||||
|
"url": "http://www.apache.org/licenses/LICENSE-2.0.html"
|
||||||
|
},
|
||||||
|
"version": "{{.Version}}"
|
||||||
|
},
|
||||||
|
"host": "{{.Host}}",
|
||||||
|
"basePath": "{{.BasePath}}",
|
||||||
|
"paths": {
|
||||||
|
"/api/v1/marketplace/download/{product_id}": {
|
||||||
|
"get": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Get a secure download URL for a purchased product",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Get download URL",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Product ID",
|
||||||
|
"name": "product_id",
|
||||||
|
"in": "path",
|
||||||
|
"required": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "OK",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"403": {
|
||||||
|
"description": "No license",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"404": {
|
||||||
|
"description": "Not Found",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"/api/v1/marketplace/orders": {
|
||||||
|
"post": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Purchase products",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Create a new order",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"description": "Order items",
|
||||||
|
"name": "order",
|
||||||
|
"in": "body",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/handlers.CreateOrderRequest"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"201": {
|
||||||
|
"description": "Created",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/marketplace.Order"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"400": {
|
||||||
|
"description": "Bad Request",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"401": {
|
||||||
|
"description": "Unauthorized",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"/api/v1/marketplace/products": {
|
||||||
|
"get": {
|
||||||
|
"description": "List marketplace products with filters",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "List products",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Product status",
|
||||||
|
"name": "status",
|
||||||
|
"in": "query"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Seller ID",
|
||||||
|
"name": "seller_id",
|
||||||
|
"in": "query"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "OK",
|
||||||
|
"schema": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/marketplace.Product"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Create a product (Track, Pack, Service) for sale",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Create a new product",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"description": "Product info",
|
||||||
|
"name": "product",
|
||||||
|
"in": "body",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/handlers.CreateProductRequest"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"201": {
|
||||||
|
"description": "Created",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/marketplace.Product"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"400": {
|
||||||
|
"description": "Bad Request",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"401": {
|
||||||
|
"description": "Unauthorized",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"definitions": {
|
||||||
|
"handlers.CreateOrderRequest": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"items"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"product_id"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"product_id": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"handlers.CreateProductRequest": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"price",
|
||||||
|
"product_type",
|
||||||
|
"title"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"description": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"license_type": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0
|
||||||
|
},
|
||||||
|
"product_type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"track",
|
||||||
|
"pack",
|
||||||
|
"service"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"track_id": {
|
||||||
|
"description": "UUID string",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.LicenseType": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"basic",
|
||||||
|
"premium",
|
||||||
|
"exclusive"
|
||||||
|
],
|
||||||
|
"x-enum-varnames": [
|
||||||
|
"LicenseBasic",
|
||||||
|
"LicensePremium",
|
||||||
|
"LicenseExclusive"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"marketplace.Order": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"buyer_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"created_at": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"currency": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/marketplace.OrderItem"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"payment_intent": {
|
||||||
|
"description": "Stripe PaymentIntent ID",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"description": "pending, paid, failed, refunded",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"total_amount": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.OrderItem": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"order_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"product_id": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.Product": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"created_at": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"currency": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"license_type": {
|
||||||
|
"$ref": "#/definitions/marketplace.LicenseType"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"product_type": {
|
||||||
|
"description": "\"track\", \"pack\", \"service\"",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"seller_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"$ref": "#/definitions/marketplace.ProductStatus"
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"track_id": {
|
||||||
|
"description": "Liaison optionnelle avec un Track (si ProductType == \"track\")",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.ProductStatus": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"draft",
|
||||||
|
"active",
|
||||||
|
"archived"
|
||||||
|
],
|
||||||
|
"x-enum-varnames": [
|
||||||
|
"ProductStatusDraft",
|
||||||
|
"ProductStatusActive",
|
||||||
|
"ProductStatusArchived"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"securityDefinitions": {
|
||||||
|
"BearerAuth": {
|
||||||
|
"type": "apiKey",
|
||||||
|
"name": "Authorization",
|
||||||
|
"in": "header"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
|
||||||
|
// NOTE: this file is generated by swaggo/swag (see the file header); changes
// here are overwritten on the next `swag init` run.

// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = &swag.Spec{
	Version:          "1.2.0",
	Host:             "localhost:8080",
	BasePath:         "/api/v1",
	Schemes:          []string{},
	Title:            "Veza Backend API",
	Description:      "Backend API for Veza platform.",
	InfoInstanceName: "swagger",
	SwaggerTemplate:  docTemplate,
	LeftDelim:        "{{",
	RightDelim:       "}}",
}

func init() {
	// Register the spec with swag so the swagger HTTP handler can serve it.
	swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
}
|
||||||
422
veza-backend-api/docs/swagger.json
Normal file
422
veza-backend-api/docs/swagger.json
Normal file
|
|
@ -0,0 +1,422 @@
|
||||||
|
{
|
||||||
|
"swagger": "2.0",
|
||||||
|
"info": {
|
||||||
|
"description": "Backend API for Veza platform.",
|
||||||
|
"title": "Veza Backend API",
|
||||||
|
"termsOfService": "http://swagger.io/terms/",
|
||||||
|
"contact": {
|
||||||
|
"name": "API Support",
|
||||||
|
"url": "http://www.veza.app/support",
|
||||||
|
"email": "support@veza.app"
|
||||||
|
},
|
||||||
|
"license": {
|
||||||
|
"name": "Apache 2.0",
|
||||||
|
"url": "http://www.apache.org/licenses/LICENSE-2.0.html"
|
||||||
|
},
|
||||||
|
"version": "1.2.0"
|
||||||
|
},
|
||||||
|
"host": "localhost:8080",
|
||||||
|
"basePath": "/api/v1",
|
||||||
|
"paths": {
|
||||||
|
"/api/v1/marketplace/download/{product_id}": {
|
||||||
|
"get": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Get a secure download URL for a purchased product",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Get download URL",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Product ID",
|
||||||
|
"name": "product_id",
|
||||||
|
"in": "path",
|
||||||
|
"required": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "OK",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"403": {
|
||||||
|
"description": "No license",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"404": {
|
||||||
|
"description": "Not Found",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"/api/v1/marketplace/orders": {
|
||||||
|
"post": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Purchase products",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Create a new order",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"description": "Order items",
|
||||||
|
"name": "order",
|
||||||
|
"in": "body",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/handlers.CreateOrderRequest"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"201": {
|
||||||
|
"description": "Created",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/marketplace.Order"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"400": {
|
||||||
|
"description": "Bad Request",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"401": {
|
||||||
|
"description": "Unauthorized",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"/api/v1/marketplace/products": {
|
||||||
|
"get": {
|
||||||
|
"description": "List marketplace products with filters",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "List products",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Product status",
|
||||||
|
"name": "status",
|
||||||
|
"in": "query"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Seller ID",
|
||||||
|
"name": "seller_id",
|
||||||
|
"in": "query"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "OK",
|
||||||
|
"schema": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/marketplace.Product"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BearerAuth": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "Create a product (Track, Pack, Service) for sale",
|
||||||
|
"consumes": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"produces": [
|
||||||
|
"application/json"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"Marketplace"
|
||||||
|
],
|
||||||
|
"summary": "Create a new product",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"description": "Product info",
|
||||||
|
"name": "product",
|
||||||
|
"in": "body",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/handlers.CreateProductRequest"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"responses": {
|
||||||
|
"201": {
|
||||||
|
"description": "Created",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/definitions/marketplace.Product"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"400": {
|
||||||
|
"description": "Bad Request",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"401": {
|
||||||
|
"description": "Unauthorized",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"definitions": {
|
||||||
|
"handlers.CreateOrderRequest": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"items"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"minItems": 1,
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"product_id"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"product_id": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"handlers.CreateProductRequest": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"price",
|
||||||
|
"product_type",
|
||||||
|
"title"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"description": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"license_type": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number",
|
||||||
|
"minimum": 0
|
||||||
|
},
|
||||||
|
"product_type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"track",
|
||||||
|
"pack",
|
||||||
|
"service"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"track_id": {
|
||||||
|
"description": "UUID string",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.LicenseType": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"basic",
|
||||||
|
"premium",
|
||||||
|
"exclusive"
|
||||||
|
],
|
||||||
|
"x-enum-varnames": [
|
||||||
|
"LicenseBasic",
|
||||||
|
"LicensePremium",
|
||||||
|
"LicenseExclusive"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"marketplace.Order": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"buyer_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"created_at": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"currency": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/marketplace.OrderItem"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"payment_intent": {
|
||||||
|
"description": "Stripe PaymentIntent ID",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"description": "pending, paid, failed, refunded",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"total_amount": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.OrderItem": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"order_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"product_id": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.Product": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"created_at": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"currency": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"license_type": {
|
||||||
|
"$ref": "#/definitions/marketplace.LicenseType"
|
||||||
|
},
|
||||||
|
"price": {
|
||||||
|
"type": "number"
|
||||||
|
},
|
||||||
|
"product_type": {
|
||||||
|
"description": "\"track\", \"pack\", \"service\"",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"seller_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"$ref": "#/definitions/marketplace.ProductStatus"
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"track_id": {
|
||||||
|
"description": "Liaison optionnelle avec un Track (si ProductType == \"track\")",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"marketplace.ProductStatus": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"draft",
|
||||||
|
"active",
|
||||||
|
"archived"
|
||||||
|
],
|
||||||
|
"x-enum-varnames": [
|
||||||
|
"ProductStatusDraft",
|
||||||
|
"ProductStatusActive",
|
||||||
|
"ProductStatusArchived"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"securityDefinitions": {
|
||||||
|
"BearerAuth": {
|
||||||
|
"type": "apiKey",
|
||||||
|
"name": "Authorization",
|
||||||
|
"in": "header"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
281
veza-backend-api/docs/swagger.yaml
Normal file
281
veza-backend-api/docs/swagger.yaml
Normal file
|
|
@ -0,0 +1,281 @@
|
||||||
|
basePath: /api/v1
|
||||||
|
definitions:
|
||||||
|
handlers.CreateOrderRequest:
|
||||||
|
properties:
|
||||||
|
items:
|
||||||
|
items:
|
||||||
|
properties:
|
||||||
|
product_id:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- product_id
|
||||||
|
type: object
|
||||||
|
minItems: 1
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- items
|
||||||
|
type: object
|
||||||
|
handlers.CreateProductRequest:
|
||||||
|
properties:
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
license_type:
|
||||||
|
type: string
|
||||||
|
price:
|
||||||
|
minimum: 0
|
||||||
|
type: number
|
||||||
|
product_type:
|
||||||
|
enum:
|
||||||
|
- track
|
||||||
|
- pack
|
||||||
|
- service
|
||||||
|
type: string
|
||||||
|
title:
|
||||||
|
type: string
|
||||||
|
track_id:
|
||||||
|
description: UUID string
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- price
|
||||||
|
- product_type
|
||||||
|
- title
|
||||||
|
type: object
|
||||||
|
marketplace.LicenseType:
|
||||||
|
enum:
|
||||||
|
- basic
|
||||||
|
- premium
|
||||||
|
- exclusive
|
||||||
|
type: string
|
||||||
|
x-enum-varnames:
|
||||||
|
- LicenseBasic
|
||||||
|
- LicensePremium
|
||||||
|
- LicenseExclusive
|
||||||
|
marketplace.Order:
|
||||||
|
properties:
|
||||||
|
buyer_id:
|
||||||
|
type: string
|
||||||
|
created_at:
|
||||||
|
type: string
|
||||||
|
currency:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
items:
|
||||||
|
items:
|
||||||
|
$ref: '#/definitions/marketplace.OrderItem'
|
||||||
|
type: array
|
||||||
|
payment_intent:
|
||||||
|
description: Stripe PaymentIntent ID
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
description: pending, paid, failed, refunded
|
||||||
|
type: string
|
||||||
|
total_amount:
|
||||||
|
type: number
|
||||||
|
updated_at:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
marketplace.OrderItem:
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
order_id:
|
||||||
|
type: string
|
||||||
|
price:
|
||||||
|
type: number
|
||||||
|
product_id:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
marketplace.Product:
|
||||||
|
properties:
|
||||||
|
created_at:
|
||||||
|
type: string
|
||||||
|
currency:
|
||||||
|
type: string
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
license_type:
|
||||||
|
$ref: '#/definitions/marketplace.LicenseType'
|
||||||
|
price:
|
||||||
|
type: number
|
||||||
|
product_type:
|
||||||
|
description: '"track", "pack", "service"'
|
||||||
|
type: string
|
||||||
|
seller_id:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
$ref: '#/definitions/marketplace.ProductStatus'
|
||||||
|
title:
|
||||||
|
type: string
|
||||||
|
track_id:
|
||||||
|
description: Liaison optionnelle avec un Track (si ProductType == "track")
|
||||||
|
type: string
|
||||||
|
updated_at:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
marketplace.ProductStatus:
|
||||||
|
enum:
|
||||||
|
- draft
|
||||||
|
- active
|
||||||
|
- archived
|
||||||
|
type: string
|
||||||
|
x-enum-varnames:
|
||||||
|
- ProductStatusDraft
|
||||||
|
- ProductStatusActive
|
||||||
|
- ProductStatusArchived
|
||||||
|
host: localhost:8080
|
||||||
|
info:
|
||||||
|
contact:
|
||||||
|
email: support@veza.app
|
||||||
|
name: API Support
|
||||||
|
url: http://www.veza.app/support
|
||||||
|
description: Backend API for Veza platform.
|
||||||
|
license:
|
||||||
|
name: Apache 2.0
|
||||||
|
url: http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
termsOfService: http://swagger.io/terms/
|
||||||
|
title: Veza Backend API
|
||||||
|
version: 1.2.0
|
||||||
|
paths:
|
||||||
|
/api/v1/marketplace/download/{product_id}:
|
||||||
|
get:
|
||||||
|
consumes:
|
||||||
|
- application/json
|
||||||
|
description: Get a secure download URL for a purchased product
|
||||||
|
parameters:
|
||||||
|
- description: Product ID
|
||||||
|
in: path
|
||||||
|
name: product_id
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
produces:
|
||||||
|
- application/json
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: OK
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
"403":
|
||||||
|
description: No license
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
"404":
|
||||||
|
description: Not Found
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
security:
|
||||||
|
- BearerAuth: []
|
||||||
|
summary: Get download URL
|
||||||
|
tags:
|
||||||
|
- Marketplace
|
||||||
|
/api/v1/marketplace/orders:
|
||||||
|
post:
|
||||||
|
consumes:
|
||||||
|
- application/json
|
||||||
|
description: Purchase products
|
||||||
|
parameters:
|
||||||
|
- description: Order items
|
||||||
|
in: body
|
||||||
|
name: order
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/handlers.CreateOrderRequest'
|
||||||
|
produces:
|
||||||
|
- application/json
|
||||||
|
responses:
|
||||||
|
"201":
|
||||||
|
description: Created
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/marketplace.Order'
|
||||||
|
"400":
|
||||||
|
description: Bad Request
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
"401":
|
||||||
|
description: Unauthorized
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
security:
|
||||||
|
- BearerAuth: []
|
||||||
|
summary: Create a new order
|
||||||
|
tags:
|
||||||
|
- Marketplace
|
||||||
|
/api/v1/marketplace/products:
|
||||||
|
get:
|
||||||
|
consumes:
|
||||||
|
- application/json
|
||||||
|
description: List marketplace products with filters
|
||||||
|
parameters:
|
||||||
|
- description: Product status
|
||||||
|
in: query
|
||||||
|
name: status
|
||||||
|
type: string
|
||||||
|
- description: Seller ID
|
||||||
|
in: query
|
||||||
|
name: seller_id
|
||||||
|
type: string
|
||||||
|
produces:
|
||||||
|
- application/json
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: OK
|
||||||
|
schema:
|
||||||
|
items:
|
||||||
|
$ref: '#/definitions/marketplace.Product'
|
||||||
|
type: array
|
||||||
|
summary: List products
|
||||||
|
tags:
|
||||||
|
- Marketplace
|
||||||
|
post:
|
||||||
|
consumes:
|
||||||
|
- application/json
|
||||||
|
description: Create a product (Track, Pack, Service) for sale
|
||||||
|
parameters:
|
||||||
|
- description: Product info
|
||||||
|
in: body
|
||||||
|
name: product
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/handlers.CreateProductRequest'
|
||||||
|
produces:
|
||||||
|
- application/json
|
||||||
|
responses:
|
||||||
|
"201":
|
||||||
|
description: Created
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/marketplace.Product'
|
||||||
|
"400":
|
||||||
|
description: Bad Request
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
"401":
|
||||||
|
description: Unauthorized
|
||||||
|
schema:
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
security:
|
||||||
|
- BearerAuth: []
|
||||||
|
summary: Create a new product
|
||||||
|
tags:
|
||||||
|
- Marketplace
|
||||||
|
securityDefinitions:
|
||||||
|
BearerAuth:
|
||||||
|
in: header
|
||||||
|
name: Authorization
|
||||||
|
type: apiKey
|
||||||
|
swagger: "2.0"
|
||||||
134
veza-backend-api/go.mod
Normal file
134
veza-backend-api/go.mod
Normal file
|
|
@ -0,0 +1,134 @@
|
||||||
|
module veza-backend-api
|
||||||
|
|
||||||
|
go 1.23.8
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8
|
||||||
|
github.com/disintegration/imaging v1.6.2
|
||||||
|
github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0
|
||||||
|
github.com/gin-gonic/gin v1.9.1
|
||||||
|
github.com/go-playground/validator/v10 v10.16.0
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0
|
||||||
|
github.com/google/uuid v1.6.0
|
||||||
|
github.com/gorilla/websocket v1.5.3
|
||||||
|
github.com/joho/godotenv v1.5.1
|
||||||
|
github.com/lib/pq v1.10.9
|
||||||
|
github.com/pquerna/otp v1.5.0
|
||||||
|
github.com/prometheus/client_golang v1.22.0
|
||||||
|
github.com/prometheus/client_model v0.6.2
|
||||||
|
github.com/rabbitmq/amqp091-go v1.10.0
|
||||||
|
github.com/redis/go-redis/v9 v9.16.0
|
||||||
|
github.com/stretchr/testify v1.11.1
|
||||||
|
github.com/swaggo/files v1.0.1
|
||||||
|
github.com/swaggo/gin-swagger v1.6.1
|
||||||
|
github.com/swaggo/swag v1.16.6
|
||||||
|
github.com/testcontainers/testcontainers-go v0.33.0
|
||||||
|
github.com/testcontainers/testcontainers-go/modules/postgres v0.33.0
|
||||||
|
go.uber.org/zap v1.27.0
|
||||||
|
golang.org/x/crypto v0.37.0
|
||||||
|
golang.org/x/oauth2 v0.30.0
|
||||||
|
golang.org/x/time v0.12.0
|
||||||
|
gopkg.in/natefinch/lumberjack.v2 v2.2.1
|
||||||
|
gorm.io/driver/postgres v1.6.0
|
||||||
|
gorm.io/driver/sqlite v1.6.0
|
||||||
|
gorm.io/gorm v1.30.0
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
cloud.google.com/go/compute/metadata v0.3.0 // indirect
|
||||||
|
dario.cat/mergo v1.0.0 // indirect
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
|
||||||
|
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||||
|
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||||
|
github.com/PuerkitoBio/purell v1.1.1 // indirect
|
||||||
|
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
|
||||||
|
github.com/beorn7/perks v1.0.1 // indirect
|
||||||
|
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
|
||||||
|
github.com/bytedance/sonic v1.9.1 // indirect
|
||||||
|
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
|
||||||
|
github.com/containerd/containerd v1.7.18 // indirect
|
||||||
|
github.com/containerd/log v0.1.0 // indirect
|
||||||
|
github.com/containerd/platforms v0.2.1 // indirect
|
||||||
|
github.com/cpuguy83/dockercfg v0.3.1 // indirect
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||||
|
github.com/distribution/reference v0.6.0 // indirect
|
||||||
|
github.com/docker/docker v27.1.1+incompatible // indirect
|
||||||
|
github.com/docker/go-connections v0.5.0 // indirect
|
||||||
|
github.com/docker/go-units v0.5.0 // indirect
|
||||||
|
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
|
||||||
|
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||||
|
github.com/go-logr/logr v1.4.1 // indirect
|
||||||
|
github.com/go-logr/stdr v1.2.2 // indirect
|
||||||
|
github.com/go-ole/go-ole v1.2.6 // indirect
|
||||||
|
github.com/go-openapi/jsonpointer v0.19.5 // indirect
|
||||||
|
github.com/go-openapi/jsonreference v0.19.6 // indirect
|
||||||
|
github.com/go-openapi/spec v0.20.4 // indirect
|
||||||
|
github.com/go-openapi/swag v0.19.15 // indirect
|
||||||
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
|
github.com/goccy/go-json v0.10.2 // indirect
|
||||||
|
github.com/gogo/protobuf v1.3.2 // indirect
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
|
github.com/jackc/pgx/v5 v5.6.0 // indirect
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||||
|
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||||
|
github.com/jinzhu/now v1.1.5 // indirect
|
||||||
|
github.com/josharian/intern v1.0.0 // indirect
|
||||||
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
|
github.com/klauspost/compress v1.18.0 // indirect
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.4 // indirect
|
||||||
|
github.com/leodido/go-urn v1.2.4 // indirect
|
||||||
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
||||||
|
github.com/magiconair/properties v1.8.7 // indirect
|
||||||
|
github.com/mailru/easyjson v0.7.6 // indirect
|
||||||
|
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.22 // indirect
|
||||||
|
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||||
|
github.com/moby/patternmatcher v0.6.0 // indirect
|
||||||
|
github.com/moby/sys/sequential v0.5.0 // indirect
|
||||||
|
github.com/moby/sys/user v0.1.0 // indirect
|
||||||
|
github.com/moby/term v0.5.0 // indirect
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
|
github.com/morikuni/aec v1.0.0 // indirect
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||||
|
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||||
|
github.com/opencontainers/image-spec v1.1.0 // indirect
|
||||||
|
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
|
||||||
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
|
||||||
|
github.com/prometheus/common v0.63.0 // indirect
|
||||||
|
github.com/prometheus/procfs v0.16.1 // indirect
|
||||||
|
github.com/shirou/gopsutil/v3 v3.23.12 // indirect
|
||||||
|
github.com/shoenig/go-m1cpu v0.1.6 // indirect
|
||||||
|
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||||
|
github.com/stretchr/objx v0.5.2 // indirect
|
||||||
|
github.com/tklauser/go-sysconf v0.3.12 // indirect
|
||||||
|
github.com/tklauser/numcpus v0.6.1 // indirect
|
||||||
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
|
github.com/ugorji/go/codec v1.2.11 // indirect
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.3 // indirect
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
|
||||||
|
go.opentelemetry.io/otel v1.24.0 // indirect
|
||||||
|
go.opentelemetry.io/otel/metric v1.24.0 // indirect
|
||||||
|
go.opentelemetry.io/otel/trace v1.24.0 // indirect
|
||||||
|
go.uber.org/multierr v1.10.0 // indirect
|
||||||
|
golang.org/x/arch v0.3.0 // indirect
|
||||||
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 // indirect
|
||||||
|
golang.org/x/mod v0.25.0 // indirect
|
||||||
|
golang.org/x/net v0.39.0 // indirect
|
||||||
|
golang.org/x/sync v0.16.0 // indirect
|
||||||
|
golang.org/x/sys v0.35.0 // indirect
|
||||||
|
golang.org/x/text v0.24.0 // indirect
|
||||||
|
golang.org/x/tools v0.32.0 // indirect
|
||||||
|
google.golang.org/protobuf v1.36.8 // indirect
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
|
)
|
||||||
397
veza-backend-api/go.sum
Normal file
397
veza-backend-api/go.sum
Normal file
|
|
@ -0,0 +1,397 @@
|
||||||
|
cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc=
|
||||||
|
cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
|
||||||
|
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
|
||||||
|
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||||
|
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
|
||||||
|
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||||
|
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
|
||||||
|
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||||
|
github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
|
||||||
|
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
|
||||||
|
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
|
||||||
|
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||||
|
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||||
|
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||||
|
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
|
||||||
|
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||||
|
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||||
|
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||||
|
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||||
|
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||||
|
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||||
|
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
|
||||||
|
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
|
||||||
|
github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
|
||||||
|
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
||||||
|
github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=
|
||||||
|
github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4=
|
||||||
|
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||||
|
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||||
|
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
|
||||||
|
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||||
|
github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E=
|
||||||
|
github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
||||||
|
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||||
|
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
||||||
|
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||||
|
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||||
|
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 h1:OtSeLS5y0Uy01jaKK4mA/WVIYtpzVm63vLVAPzJXigg=
|
||||||
|
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8/go.mod h1:apkPC/CR3s48O2D7Y++n1XWEpgPNNCjXYga3PPbJe2E=
|
||||||
|
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
|
||||||
|
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
|
||||||
|
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||||
|
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||||
|
github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY=
|
||||||
|
github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
|
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||||
|
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||||
|
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||||
|
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
|
github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e h1:rcHHSQqzCgvlwP0I/fQ8rQMn/MpHE5gWSLdtpxtP6KQ=
|
||||||
|
github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e/go.mod h1:Byz7q8MSzSPkouskHJhX0er2mZY/m0Vj5bMeMCkkyY4=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
||||||
|
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||||
|
github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
|
||||||
|
github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
|
||||||
|
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||||
|
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||||
|
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
|
||||||
|
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
|
||||||
|
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||||
|
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||||
|
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||||
|
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||||
|
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||||
|
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
||||||
|
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||||
|
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||||
|
github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
|
||||||
|
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||||
|
github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
|
||||||
|
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
|
||||||
|
github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
|
||||||
|
github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
|
||||||
|
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||||
|
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
|
||||||
|
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
|
||||||
|
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||||
|
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||||
|
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||||
|
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||||
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
|
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
|
||||||
|
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||||
|
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||||
|
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||||
|
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
||||||
|
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||||
|
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
|
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
|
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||||
|
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||||
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
|
||||||
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||||
|
github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY=
|
||||||
|
github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
|
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||||
|
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||||
|
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||||
|
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||||
|
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||||
|
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||||
|
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||||
|
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||||
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
|
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||||
|
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||||
|
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||||
|
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||||
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk=
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
|
||||||
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
|
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||||
|
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||||
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
|
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||||
|
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||||
|
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||||
|
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||||
|
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||||
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
|
||||||
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||||
|
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
|
||||||
|
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||||
|
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||||
|
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||||
|
github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
|
||||||
|
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||||
|
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||||
|
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||||
|
github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
|
||||||
|
github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
|
||||||
|
github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
|
||||||
|
github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
|
||||||
|
github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
|
||||||
|
github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
|
||||||
|
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||||
|
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||||
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
|
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||||
|
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||||
|
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
|
||||||
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||||
|
github.com/pquerna/otp v1.5.0 h1:NMMR+WrmaqXU4EzdGJEE1aUUI0AMRzsp96fFFWNPwxs=
|
||||||
|
github.com/pquerna/otp v1.5.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
|
||||||
|
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
|
||||||
|
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
|
||||||
|
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||||
|
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||||
|
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
|
||||||
|
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
|
||||||
|
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
|
||||||
|
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
|
||||||
|
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
|
||||||
|
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
|
||||||
|
github.com/redis/go-redis/v9 v9.16.0 h1:OotgqgLSRCmzfqChbQyG1PHC3tLNR89DG4jdOERSEP4=
|
||||||
|
github.com/redis/go-redis/v9 v9.16.0/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370=
|
||||||
|
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
|
||||||
|
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
|
||||||
|
github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
|
||||||
|
github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
|
||||||
|
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
|
||||||
|
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
|
||||||
|
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
|
||||||
|
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
|
||||||
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
|
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||||
|
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
|
github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
|
||||||
|
github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
|
||||||
|
github.com/swaggo/gin-swagger v1.6.1 h1:Ri06G4gc9N4t4k8hekMigJ9zKTFSlqj/9paAQCQs7cY=
|
||||||
|
github.com/swaggo/gin-swagger v1.6.1/go.mod h1:LQ+hJStHakCWRiK/YNYtJOu4mR2FP+pxLnILT/qNiTw=
|
||||||
|
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
|
||||||
|
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
|
||||||
|
github.com/testcontainers/testcontainers-go v0.33.0 h1:zJS9PfXYT5O0ZFXM2xxXfk4J5UMw/kRiISng037Gxdw=
|
||||||
|
github.com/testcontainers/testcontainers-go v0.33.0/go.mod h1:W80YpTa8D5C3Yy16icheD01UTDu+LmXIA2Keo+jWtT8=
|
||||||
|
github.com/testcontainers/testcontainers-go/modules/postgres v0.33.0 h1:c+Gt+XLJjqFAejgX4hSpnHIpC9eAhvgI/TFWL/PbrFI=
|
||||||
|
github.com/testcontainers/testcontainers-go/modules/postgres v0.33.0/go.mod h1:I4DazHBoWDyf69ByOIyt3OdNjefiUx372459txOpQ3o=
|
||||||
|
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
|
||||||
|
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||||
|
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
||||||
|
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||||
|
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||||
|
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||||
|
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
||||||
|
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||||
|
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
|
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||||
|
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
|
||||||
|
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
|
||||||
|
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U=
|
||||||
|
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE=
|
||||||
|
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg=
|
||||||
|
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU=
|
||||||
|
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
|
||||||
|
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
|
||||||
|
go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o=
|
||||||
|
go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A=
|
||||||
|
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
|
||||||
|
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||||
|
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
|
||||||
|
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
|
||||||
|
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||||
|
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||||
|
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
|
||||||
|
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||||
|
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||||
|
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||||
|
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
|
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
|
||||||
|
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
|
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
|
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
|
||||||
|
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
|
||||||
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
|
||||||
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||||
|
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
|
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
|
||||||
|
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
|
||||||
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||||
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
|
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
||||||
|
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
||||||
|
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
|
||||||
|
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
|
||||||
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||||
|
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
|
||||||
|
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
|
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o=
|
||||||
|
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
|
||||||
|
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
|
||||||
|
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
||||||
|
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||||
|
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
|
||||||
|
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
google.golang.org/genproto v0.0.0-20230920204549-e6e6cdab5c13 h1:vlzZttNJGVqTsRFU9AmdnrcO1Znh8Ew9kCD//yjigk0=
|
||||||
|
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4=
|
||||||
|
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc=
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
|
||||||
|
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
|
||||||
|
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
|
||||||
|
google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
|
||||||
|
google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
|
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
|
||||||
|
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
|
||||||
|
gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
|
||||||
|
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
|
||||||
|
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
|
||||||
|
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
|
||||||
|
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
|
||||||
|
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
|
||||||
|
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
|
||||||
|
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||||
55
veza-backend-api/internal/api/admin/service.go
Normal file
55
veza-backend-api/internal/api/admin/service.go
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
package admin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"veza-backend-api/internal/database"
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Service provides administrative operations (role checks, dashboard
// statistics, user/content analytics) backed by the SQL database.
type Service struct {
	// db is the shared database handle used for all admin queries.
	db *database.DB
}

// NewService returns a Service that runs its queries against db.
func NewService(db *database.DB) *Service {
	return &Service{db: db}
}
|
||||||
|
|
||||||
|
func (s *Service) IsAdmin(userID int64) bool {
|
||||||
|
var role string
|
||||||
|
err := s.db.QueryRow("SELECT role FROM users WHERE id = $1", userID).Scan(&role)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return role == "admin" || role == "super_admin"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) GetDashboardStats() (*models.DashboardStats, error) {
|
||||||
|
stats := &models.DashboardStats{}
|
||||||
|
|
||||||
|
// Récupérer les statistiques
|
||||||
|
if err := s.db.QueryRow("SELECT COUNT(*) FROM users WHERE is_active = true").Scan(&stats.TotalUsers); err != nil {
|
||||||
|
// Ignorer l'erreur pour l'instant
|
||||||
|
}
|
||||||
|
if err := s.db.QueryRow("SELECT COUNT(*) FROM tracks").Scan(&stats.TotalTracks); err != nil {
|
||||||
|
// Ignorer l'erreur pour l'instant
|
||||||
|
}
|
||||||
|
if err := s.db.QueryRow("SELECT COUNT(*) FROM listings WHERE status = 'open'").Scan(&stats.ActiveListings); err != nil {
|
||||||
|
// Ignorer l'erreur pour l'instant
|
||||||
|
}
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) GetUsers(page, limit int, search, role string) ([]models.UserAnalytics, int, error) {
|
||||||
|
// TODO: Implement based on doc_admin_handler.md
|
||||||
|
return []models.UserAnalytics{}, 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) GetAnalytics() (*models.AdminContentAnalytics, error) {
|
||||||
|
// TODO: Implement based on doc_admin_handler.md
|
||||||
|
return &models.AdminContentAnalytics{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Service) GetCategories() ([]interface{}, error) {
|
||||||
|
// TODO: Implement categories
|
||||||
|
return []interface{}{}, nil
|
||||||
|
}
|
||||||
786
veza-backend-api/internal/api/api_manager.go
Normal file
786
veza-backend-api/internal/api/api_manager.go
Normal file
|
|
@ -0,0 +1,786 @@
|
||||||
|
//go:build ignore
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// TODO: Réactiver api_manager.go après stabilisation du noyau et alignement des services (graphql, grpc, websocket, features)
|
||||||
|
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/api/graphql"
|
||||||
|
"veza-backend-api/internal/api/grpc"
|
||||||
|
"veza-backend-api/internal/api/websocket"
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
"veza-backend-api/internal/database"
|
||||||
|
"veza-backend-api/internal/features"
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
)
|
||||||
|
|
||||||
|
// APIManager manages all API protocols (REST, GraphQL, gRPC, WebSocket)
|
||||||
|
// APIManager manages all API protocols (REST, GraphQL, gRPC, WebSocket)
// behind a single lifecycle: Initialize wires each enabled protocol, and
// the HTTP server hosts the Gin router that REST/GraphQL traffic shares.
type APIManager struct {
	config *config.Config
	db     *database.DB

	// API Servers — each remains nil until its protocol is enabled and
	// initialized via Initialize.
	restRouter       *gin.Engine
	graphqlServer    *graphql.GraphQLServer
	grpcServer       *grpc.GRPCServer
	websocketManager *websocket.WebSocketManager

	// Feature integration
	featureManager *features.FeatureManager

	// HTTP Server wrapping the Gin router.
	httpServer *http.Server

	isRunning bool         // guarded by mu
	mu        sync.RWMutex // protects isRunning and initialization of the server fields
}
|
||||||
|
|
||||||
|
// APIConfig contains configuration for all API protocols
|
||||||
|
// APIConfig contains configuration for all API protocols, plus the
// Global settings (timeouts, CORS, rate limiting, security) that apply
// across protocols.
type APIConfig struct {
	REST      RESTConfig                `yaml:"rest"`
	GraphQL   graphql.GraphQLConfig     `yaml:"graphql"`
	GRPC      grpc.GRPCConfig           `yaml:"grpc"`
	WebSocket websocket.WebSocketConfig `yaml:"websocket"`
	Global    GlobalAPIConfig           `yaml:"global"`
}
|
||||||
|
|
||||||
|
// RESTConfig contains REST API configuration
|
||||||
|
// RESTConfig contains REST API configuration for the Gin-based server.
type RESTConfig struct {
	Enabled            bool     `yaml:"enabled"`
	Host               string   `yaml:"host"`
	Port               int      `yaml:"port"`
	Mode               string   `yaml:"mode"` // debug, release, test (Gin mode)
	TrustedProxies     []string `yaml:"trusted_proxies"`
	MaxMultipartMemory int64    `yaml:"max_multipart_memory"` // bytes
}
|
||||||
|
|
||||||
|
// GlobalAPIConfig contains global API settings
|
||||||
|
// GlobalAPIConfig contains global API settings shared by all protocols:
// server timeouts plus cross-cutting CORS, rate-limit, and security policy.
type GlobalAPIConfig struct {
	Timeout         time.Duration   `yaml:"timeout"`
	ReadTimeout     time.Duration   `yaml:"read_timeout"`
	WriteTimeout    time.Duration   `yaml:"write_timeout"`
	IdleTimeout     time.Duration   `yaml:"idle_timeout"`
	ShutdownTimeout time.Duration   `yaml:"shutdown_timeout"`
	CORS            CORSConfig      `yaml:"cors"`
	RateLimit       RateLimitConfig `yaml:"rate_limit"`
	Security        SecurityConfig  `yaml:"security"`
}
|
||||||
|
|
||||||
|
// CORSConfig contains CORS configuration
|
||||||
|
// CORSConfig contains CORS (Cross-Origin Resource Sharing) configuration.
type CORSConfig struct {
	Enabled          bool     `yaml:"enabled"`
	AllowOrigins     []string `yaml:"allow_origins"`
	AllowMethods     []string `yaml:"allow_methods"`
	AllowHeaders     []string `yaml:"allow_headers"`
	ExposeHeaders    []string `yaml:"expose_headers"`
	AllowCredentials bool     `yaml:"allow_credentials"`
	MaxAge           int      `yaml:"max_age"` // preflight cache duration — presumably seconds; confirm against middleware
}
|
||||||
|
|
||||||
|
// RateLimitConfig contains rate limiting configuration
|
||||||
|
// RateLimitConfig contains rate limiting configuration.
type RateLimitConfig struct {
	Enabled   bool          `yaml:"enabled"`
	RPS       int           `yaml:"rps"`   // sustained requests per second
	Burst     int           `yaml:"burst"` // short-term burst allowance
	Window    time.Duration `yaml:"window"`
	KeyFunc   string        `yaml:"key_func"` // ip, user, api_key
	SkipPaths []string      `yaml:"skip_paths"`
}
|
||||||
|
|
||||||
|
// SecurityConfig contains security configuration
|
||||||
|
// SecurityConfig contains security configuration for the API layer.
type SecurityConfig struct {
	Enabled           bool     `yaml:"enabled"`
	JWTSecret         string   `yaml:"jwt_secret"` // NOTE(review): secret in config — ensure it is injected, not committed
	APIKeyHeader      string   `yaml:"api_key_header"`
	AllowedUserAgents []string `yaml:"allowed_user_agents"`
	CSRFProtection    bool     `yaml:"csrf_protection"`
	HTTPSOnly         bool     `yaml:"https_only"`
}
|
||||||
|
|
||||||
|
// NewAPIManager creates a new API manager instance
|
||||||
|
func NewAPIManager(config *config.Config, db *database.DB, featureManager *features.FeatureManager) *APIManager {
|
||||||
|
return &APIManager{
|
||||||
|
config: config,
|
||||||
|
db: db,
|
||||||
|
featureManager: featureManager,
|
||||||
|
isRunning: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize sets up all API protocols
|
||||||
|
func (am *APIManager) Initialize(apiConfig APIConfig) error {
|
||||||
|
am.mu.Lock()
|
||||||
|
defer am.mu.Unlock()
|
||||||
|
|
||||||
|
// Initialize REST API (Gin)
|
||||||
|
if err := am.initializeREST(apiConfig.REST, apiConfig.Global); err != nil {
|
||||||
|
return fmt.Errorf("failed to initialize REST API: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize GraphQL server
|
||||||
|
if apiConfig.GraphQL.Enabled {
|
||||||
|
if err := am.initializeGraphQL(apiConfig.GraphQL); err != nil {
|
||||||
|
return fmt.Errorf("failed to initialize GraphQL: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize gRPC server
|
||||||
|
if apiConfig.GRPC.Enabled {
|
||||||
|
if err := am.initializeGRPC(apiConfig.GRPC); err != nil {
|
||||||
|
return fmt.Errorf("failed to initialize gRPC: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize WebSocket manager
|
||||||
|
if apiConfig.WebSocket.Enabled {
|
||||||
|
if err := am.initializeWebSocket(apiConfig.WebSocket); err != nil {
|
||||||
|
return fmt.Errorf("failed to initialize WebSocket: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setup HTTP server
|
||||||
|
am.setupHTTPServer(apiConfig)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// initializeREST sets up the REST API with Gin. It is a no-op when REST is
// disabled, leaving am.restRouter nil. The call order below is significant:
// gin.SetMode must run before gin.New so the engine picks up the mode, and
// middleware must be installed before routes are registered.
func (am *APIManager) initializeREST(restConfig RESTConfig, globalConfig GlobalAPIConfig) error {
	if !restConfig.Enabled {
		return nil
	}

	// Set Gin mode (e.g. "debug", "release", "test") before creating the engine.
	gin.SetMode(restConfig.Mode)

	// Create a bare Gin engine (no default middleware; we add our own below).
	am.restRouter = gin.New()

	// Setup global middleware (recovery, logging, CORS, rate limiting, ...).
	am.setupGlobalMiddleware(globalConfig)

	// Setup existing REST routes (from router.go).
	am.setupExistingRESTRoutes()

	// Setup feature-specific /api/v2 and /api/admin routes.
	am.setupFeatureRoutes()

	return nil
}
|
||||||
|
|
||||||
|
// initializeGraphQL sets up the GraphQL server and mounts its routes on the
// REST router. Assumes am.restRouter is already initialized (REST enabled).
func (am *APIManager) initializeGraphQL(graphqlConfig graphql.GraphQLConfig) error {
	// NOTE(review): a nil logger is passed here — confirm NewGraphQLServer
	// tolerates nil, or wire a real logger when one becomes available.
	am.graphqlServer = graphql.NewGraphQLServer(am.config, am.db, nil) // logger would be added
	am.graphqlServer.Configure(graphqlConfig)
	am.graphqlServer.SetupRoutes(am.restRouter, graphqlConfig)
	return nil
}
|
||||||
|
|
||||||
|
// initializeGRPC sets up the gRPC server
|
||||||
|
func (am *APIManager) initializeGRPC(grpcConfig grpc.GRPCConfig) error {
|
||||||
|
am.grpcServer = grpc.NewGRPCServer(am.config, am.db)
|
||||||
|
return am.grpcServer.Initialize(grpcConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
// initializeWebSocket sets up the WebSocket manager
|
||||||
|
func (am *APIManager) initializeWebSocket(wsConfig websocket.WebSocketConfig) error {
|
||||||
|
am.websocketManager = websocket.NewWebSocketManager(am.config, am.db)
|
||||||
|
if err := am.websocketManager.Initialize(wsConfig); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
am.websocketManager.SetupRoutes(am.restRouter, wsConfig)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupGlobalMiddleware configures global middleware for REST API
|
||||||
|
func (am *APIManager) setupGlobalMiddleware(globalConfig GlobalAPIConfig) {
|
||||||
|
// Recovery middleware
|
||||||
|
am.restRouter.Use(gin.Recovery())
|
||||||
|
|
||||||
|
// Logger middleware
|
||||||
|
am.restRouter.Use(middleware.Logger())
|
||||||
|
|
||||||
|
// CORS middleware
|
||||||
|
if globalConfig.CORS.Enabled {
|
||||||
|
am.restRouter.Use(middleware.CORS())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rate limiting middleware
|
||||||
|
if globalConfig.RateLimit.Enabled {
|
||||||
|
am.restRouter.Use(middleware.RateLimiter(globalConfig.RateLimit.RPS, globalConfig.RateLimit.Window))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Security middleware
|
||||||
|
if globalConfig.Security.Enabled {
|
||||||
|
am.restRouter.Use(middleware.Security())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Request ID middleware
|
||||||
|
am.restRouter.Use(middleware.RequestID())
|
||||||
|
|
||||||
|
// Timeout middleware
|
||||||
|
am.restRouter.Use(middleware.Timeout(globalConfig.Timeout))
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupExistingRESTRoutes sets up the pre-existing v1 REST routes by
// delegating to the package-level SetupRoutes (defined in router.go).
func (am *APIManager) setupExistingRESTRoutes() {
	// Use the existing APIRouter setup.
	SetupRoutes(am.restRouter, am.db, am.config)
}
|
||||||
|
|
||||||
|
// setupFeatureRoutes sets up feature-specific API routes
|
||||||
|
func (am *APIManager) setupFeatureRoutes() {
|
||||||
|
if am.featureManager == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// API v2 group for new feature-based endpoints
|
||||||
|
v2 := am.restRouter.Group("/api/v2")
|
||||||
|
{
|
||||||
|
// User domain features
|
||||||
|
am.setupUserDomainRoutes(v2)
|
||||||
|
|
||||||
|
// Communication domain features
|
||||||
|
am.setupCommunicationDomainRoutes(v2)
|
||||||
|
|
||||||
|
// Media domain features
|
||||||
|
am.setupMediaDomainRoutes(v2)
|
||||||
|
|
||||||
|
// AI domain features
|
||||||
|
am.setupAIDomainRoutes(v2)
|
||||||
|
|
||||||
|
// Analytics domain features
|
||||||
|
am.setupAnalyticsDomainRoutes(v2)
|
||||||
|
|
||||||
|
// Integration domain features
|
||||||
|
am.setupIntegrationDomainRoutes(v2)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Feature management endpoints
|
||||||
|
admin := am.restRouter.Group("/api/admin")
|
||||||
|
{
|
||||||
|
admin.GET("/features", am.handleGetFeatures)
|
||||||
|
admin.GET("/features/:id", am.handleGetFeature)
|
||||||
|
admin.POST("/features/:id/start", am.handleStartFeature)
|
||||||
|
admin.POST("/features/:id/stop", am.handleStopFeature)
|
||||||
|
admin.GET("/features/health", am.handleFeaturesHealth)
|
||||||
|
admin.GET("/features/metrics", am.handleFeaturesMetrics)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupUserDomainRoutes sets up user domain feature routes
|
||||||
|
func (am *APIManager) setupUserDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
userGroup := router.Group("/user")
|
||||||
|
{
|
||||||
|
// User Profiles Feature endpoints
|
||||||
|
userGroup.GET("/profiles/:id", am.handleGetUserProfile)
|
||||||
|
userGroup.PUT("/profiles/:id", am.handleUpdateUserProfile)
|
||||||
|
|
||||||
|
// Social Graph Feature endpoints
|
||||||
|
userGroup.POST("/follow/:id", am.handleFollowUser)
|
||||||
|
userGroup.DELETE("/follow/:id", am.handleUnfollowUser)
|
||||||
|
userGroup.GET("/followers/:id", am.handleGetFollowers)
|
||||||
|
userGroup.GET("/following/:id", am.handleGetFollowing)
|
||||||
|
|
||||||
|
// Gamification Feature endpoints
|
||||||
|
userGroup.GET("/achievements/:id", am.handleGetAchievements)
|
||||||
|
userGroup.GET("/leaderboard", am.handleGetLeaderboard)
|
||||||
|
userGroup.POST("/achievements/:id/claim", am.handleClaimAchievement)
|
||||||
|
|
||||||
|
// User Verification Feature endpoints
|
||||||
|
userGroup.POST("/verify", am.handleStartVerification)
|
||||||
|
userGroup.GET("/verify/status", am.handleGetVerificationStatus)
|
||||||
|
userGroup.GET("/trust-score/:id", am.handleGetTrustScore)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupCommunicationDomainRoutes sets up communication domain feature routes
|
||||||
|
func (am *APIManager) setupCommunicationDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
commGroup := router.Group("/communication")
|
||||||
|
{
|
||||||
|
// Chat Rooms Feature endpoints
|
||||||
|
commGroup.GET("/rooms", am.handleGetRooms)
|
||||||
|
commGroup.POST("/rooms", am.handleCreateRoom)
|
||||||
|
commGroup.GET("/rooms/:id", am.handleGetRoom)
|
||||||
|
commGroup.POST("/rooms/:id/join", am.handleJoinRoom)
|
||||||
|
commGroup.POST("/rooms/:id/leave", am.handleLeaveRoom)
|
||||||
|
|
||||||
|
// Voice Chat Feature endpoints
|
||||||
|
commGroup.POST("/voice/start", am.handleStartVoiceChat)
|
||||||
|
commGroup.POST("/voice/stop", am.handleStopVoiceChat)
|
||||||
|
commGroup.GET("/voice/status", am.handleGetVoiceStatus)
|
||||||
|
|
||||||
|
// Video Streaming Feature endpoints
|
||||||
|
commGroup.POST("/video/start", am.handleStartVideoStream)
|
||||||
|
commGroup.POST("/video/stop", am.handleStopVideoStream)
|
||||||
|
commGroup.GET("/video/streams", am.handleGetVideoStreams)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupMediaDomainRoutes sets up media domain feature routes
|
||||||
|
func (am *APIManager) setupMediaDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
mediaGroup := router.Group("/media")
|
||||||
|
{
|
||||||
|
// Audio Streaming Feature endpoints
|
||||||
|
mediaGroup.POST("/audio/upload", am.handleUploadAudio)
|
||||||
|
mediaGroup.GET("/audio/:id/stream", am.handleStreamAudio)
|
||||||
|
mediaGroup.GET("/audio/:id/metadata", am.handleGetAudioMetadata)
|
||||||
|
|
||||||
|
// Smart Playlists Feature endpoints
|
||||||
|
mediaGroup.GET("/playlists/smart", am.handleGetSmartPlaylists)
|
||||||
|
mediaGroup.POST("/playlists/smart", am.handleCreateSmartPlaylist)
|
||||||
|
mediaGroup.GET("/playlists/smart/:id", am.handleGetSmartPlaylist)
|
||||||
|
|
||||||
|
// Content Discovery Feature endpoints
|
||||||
|
mediaGroup.GET("/discover", am.handleDiscoverContent)
|
||||||
|
mediaGroup.GET("/trending", am.handleGetTrending)
|
||||||
|
mediaGroup.GET("/similar/:id", am.handleGetSimilarContent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupAIDomainRoutes sets up AI domain feature routes
|
||||||
|
func (am *APIManager) setupAIDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
aiGroup := router.Group("/ai")
|
||||||
|
{
|
||||||
|
// Smart Recommendations Feature endpoints
|
||||||
|
aiGroup.GET("/recommendations", am.handleGetRecommendations)
|
||||||
|
aiGroup.POST("/recommendations/feedback", am.handleRecommendationFeedback)
|
||||||
|
|
||||||
|
// Content Moderation Feature endpoints
|
||||||
|
aiGroup.POST("/moderate", am.handleModerateContent)
|
||||||
|
aiGroup.GET("/moderation/history", am.handleGetModerationHistory)
|
||||||
|
|
||||||
|
// Sentiment Analysis Feature endpoints
|
||||||
|
aiGroup.POST("/sentiment", am.handleAnalyzeSentiment)
|
||||||
|
aiGroup.GET("/sentiment/trends", am.handleGetSentimentTrends)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupAnalyticsDomainRoutes sets up analytics domain feature routes
|
||||||
|
func (am *APIManager) setupAnalyticsDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
analyticsGroup := router.Group("/analytics")
|
||||||
|
{
|
||||||
|
// Realtime Dashboards Feature endpoints
|
||||||
|
analyticsGroup.GET("/dashboard", am.handleGetDashboard)
|
||||||
|
analyticsGroup.GET("/metrics/realtime", am.handleGetRealtimeMetrics)
|
||||||
|
|
||||||
|
// User Behavior Analytics Feature endpoints
|
||||||
|
analyticsGroup.GET("/behavior/:id", am.handleGetUserBehavior)
|
||||||
|
analyticsGroup.GET("/engagement", am.handleGetEngagementMetrics)
|
||||||
|
|
||||||
|
// Business Analytics Feature endpoints
|
||||||
|
analyticsGroup.GET("/business/revenue", am.handleGetRevenueAnalytics)
|
||||||
|
analyticsGroup.GET("/business/conversion", am.handleGetConversionMetrics)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupIntegrationDomainRoutes sets up integration domain feature routes
|
||||||
|
func (am *APIManager) setupIntegrationDomainRoutes(router *gin.RouterGroup) {
|
||||||
|
integrationGroup := router.Group("/integration")
|
||||||
|
{
|
||||||
|
// External API Gateway Feature endpoints
|
||||||
|
integrationGroup.POST("/external/request", am.handleExternalAPIRequest)
|
||||||
|
integrationGroup.GET("/external/status", am.handleGetExternalAPIStatus)
|
||||||
|
|
||||||
|
// Webhook System Feature endpoints
|
||||||
|
integrationGroup.POST("/webhooks", am.handleCreateWebhook)
|
||||||
|
integrationGroup.GET("/webhooks", am.handleGetWebhooks)
|
||||||
|
integrationGroup.DELETE("/webhooks/:id", am.handleDeleteWebhook)
|
||||||
|
|
||||||
|
// Payment Gateways Feature endpoints
|
||||||
|
integrationGroup.POST("/payments/process", am.handleProcessPayment)
|
||||||
|
integrationGroup.GET("/payments/methods", am.handleGetPaymentMethods)
|
||||||
|
integrationGroup.GET("/payments/history", am.handleGetPaymentHistory)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupHTTPServer configures the shared HTTP server fronting the REST (and
// GraphQL/WebSocket) routes. Timeouts come from the global API config so the
// server cannot hang indefinitely on slow clients.
func (am *APIManager) setupHTTPServer(apiConfig APIConfig) {
	// Bind address comes from the REST section even though the server also
	// serves GraphQL/WebSocket — they share this single listener.
	addr := fmt.Sprintf("%s:%d", apiConfig.REST.Host, apiConfig.REST.Port)

	am.httpServer = &http.Server{
		Addr:         addr,
		Handler:      am.restRouter,
		ReadTimeout:  apiConfig.Global.ReadTimeout,
		WriteTimeout: apiConfig.Global.WriteTimeout,
		IdleTimeout:  apiConfig.Global.IdleTimeout,
	}
}
|
||||||
|
|
||||||
|
// Start starts all API servers: gRPC and WebSocket first (if configured),
// then the shared HTTP server (REST + GraphQL) in a background goroutine.
// Returns an error if the manager is already running or a component fails
// to start. Safe for concurrent use via am.mu.
func (am *APIManager) Start(ctx context.Context) error {
	am.mu.Lock()
	defer am.mu.Unlock()

	if am.isRunning {
		return fmt.Errorf("API manager is already running")
	}

	// Start gRPC server if enabled.
	if am.grpcServer != nil {
		if err := am.grpcServer.Start(ctx); err != nil {
			return fmt.Errorf("failed to start gRPC server: %w", err)
		}
	}

	// Start WebSocket manager if enabled.
	if am.websocketManager != nil {
		if err := am.websocketManager.Start(ctx); err != nil {
			return fmt.Errorf("failed to start WebSocket manager: %w", err)
		}
	}

	// Start HTTP server (REST + GraphQL) in the background; ListenAndServe
	// blocks until shutdown or failure.
	go func() {
		if err := am.httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			// Handle error
			// NOTE(review): the error is silently dropped here — a bind
			// failure (e.g. port in use) leaves the manager "running" with
			// no HTTP listener. Surface it via a logger or error channel
			// once one is available.
		}
	}()

	am.isRunning = true
	return nil
}
|
||||||
|
|
||||||
|
// Stop stops all API servers
|
||||||
|
func (am *APIManager) Stop(ctx context.Context) error {
|
||||||
|
am.mu.Lock()
|
||||||
|
defer am.mu.Unlock()
|
||||||
|
|
||||||
|
if !am.isRunning {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop HTTP server
|
||||||
|
if am.httpServer != nil {
|
||||||
|
if err := am.httpServer.Shutdown(ctx); err != nil {
|
||||||
|
return fmt.Errorf("failed to stop HTTP server: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop WebSocket manager
|
||||||
|
if am.websocketManager != nil {
|
||||||
|
if err := am.websocketManager.Stop(ctx); err != nil {
|
||||||
|
return fmt.Errorf("failed to stop WebSocket manager: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop gRPC server
|
||||||
|
if am.grpcServer != nil {
|
||||||
|
if err := am.grpcServer.Stop(ctx); err != nil {
|
||||||
|
return fmt.Errorf("failed to stop gRPC server: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop GraphQL server
|
||||||
|
if am.graphqlServer != nil {
|
||||||
|
if err := am.graphqlServer.Shutdown(ctx); err != nil {
|
||||||
|
return fmt.Errorf("failed to stop GraphQL server: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
am.isRunning = false
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsHealthy checks if all API servers are healthy
|
||||||
|
func (am *APIManager) IsHealthy() bool {
|
||||||
|
am.mu.RLock()
|
||||||
|
defer am.mu.RUnlock()
|
||||||
|
|
||||||
|
if !am.isRunning {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check each server's health
|
||||||
|
if am.grpcServer != nil && !am.grpcServer.IsHealthy() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if am.websocketManager != nil && !am.websocketManager.IsHealthy() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if am.graphqlServer != nil && !am.graphqlServer.IsHealthy() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAPIStatus returns comprehensive API status
|
||||||
|
func (am *APIManager) GetAPIStatus() map[string]interface{} {
|
||||||
|
am.mu.RLock()
|
||||||
|
defer am.mu.RUnlock()
|
||||||
|
|
||||||
|
status := map[string]interface{}{
|
||||||
|
"status": "healthy",
|
||||||
|
"running": am.isRunning,
|
||||||
|
"timestamp": time.Now(),
|
||||||
|
"apis": map[string]interface{}{},
|
||||||
|
}
|
||||||
|
|
||||||
|
apis := status["apis"].(map[string]interface{})
|
||||||
|
|
||||||
|
// REST API status
|
||||||
|
apis["rest"] = map[string]interface{}{
|
||||||
|
"enabled": am.restRouter != nil,
|
||||||
|
"status": "healthy",
|
||||||
|
}
|
||||||
|
|
||||||
|
// GraphQL status
|
||||||
|
if am.graphqlServer != nil {
|
||||||
|
apis["graphql"] = am.graphqlServer.GetMetrics()
|
||||||
|
} else {
|
||||||
|
apis["graphql"] = map[string]interface{}{"enabled": false}
|
||||||
|
}
|
||||||
|
|
||||||
|
// gRPC status
|
||||||
|
if am.grpcServer != nil {
|
||||||
|
apis["grpc"] = am.grpcServer.GetMetrics()
|
||||||
|
} else {
|
||||||
|
apis["grpc"] = map[string]interface{}{"enabled": false}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WebSocket status
|
||||||
|
if am.websocketManager != nil {
|
||||||
|
apis["websocket"] = am.websocketManager.GetMetrics()
|
||||||
|
} else {
|
||||||
|
apis["websocket"] = map[string]interface{}{"enabled": false}
|
||||||
|
}
|
||||||
|
|
||||||
|
return status
|
||||||
|
}
|
||||||
|
|
||||||
|
// Feature management handlers
|
||||||
|
func (am *APIManager) handleGetFeatures(c *gin.Context) {
|
||||||
|
if am.featureManager == nil {
|
||||||
|
c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Feature manager not available"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
summary := am.featureManager.GetFeatureSummary()
|
||||||
|
c.JSON(http.StatusOK, gin.H{"data": summary})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (am *APIManager) handleGetFeature(c *gin.Context) {
|
||||||
|
featureID := c.Param("id")
|
||||||
|
|
||||||
|
if am.featureManager == nil {
|
||||||
|
c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Feature manager not available"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
feature, err := am.featureManager.GetFeature(c.Request.Context(), featureID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"data": map[string]interface{}{
|
||||||
|
"id": feature.GetID(),
|
||||||
|
"name": feature.GetName(),
|
||||||
|
"version": feature.GetVersion(),
|
||||||
|
"type": feature.GetType(),
|
||||||
|
"domain": feature.GetDomain(),
|
||||||
|
"healthy": feature.IsHealthy(),
|
||||||
|
"status": feature.GetHealthStatus(),
|
||||||
|
"metrics": feature.GetMetrics(),
|
||||||
|
}})
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleStartFeature is a stub for POST /api/admin/features/:id/start;
// it always answers 501 until feature lifecycle control is implemented.
func (am *APIManager) handleStartFeature(c *gin.Context) {
	// TODO: Implement feature start
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// handleStopFeature is a stub for POST /api/admin/features/:id/stop;
// it always answers 501 until feature lifecycle control is implemented.
func (am *APIManager) handleStopFeature(c *gin.Context) {
	// TODO: Implement feature stop
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
|
||||||
|
|
||||||
|
func (am *APIManager) handleFeaturesHealth(c *gin.Context) {
|
||||||
|
if am.featureManager == nil {
|
||||||
|
c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Feature manager not available"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
healthStatus := am.featureManager.GetFeatureHealthStatus()
|
||||||
|
c.JSON(http.StatusOK, gin.H{"data": healthStatus})
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleFeaturesMetrics serves GET /api/admin/features/metrics. Metrics
// aggregation is not implemented yet: when a feature manager exists it
// returns the placeholder string "metrics", otherwise 503.
func (am *APIManager) handleFeaturesMetrics(c *gin.Context) {
	if am.featureManager == nil {
		c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Feature manager not available"})
		return
	}

	// TODO: Implement comprehensive metrics collection
	c.JSON(http.StatusOK, gin.H{"data": "metrics"})
}
|
||||||
|
|
||||||
|
// Placeholder handlers for feature endpoints (to be implemented).
// Every handler below currently responds 501 Not Implemented; they exist so
// the route tables in the setup*DomainRoutes helpers compile and respond
// predictably until the corresponding feature logic lands.

// --- User domain: profiles, social graph, gamification, verification ---

func (am *APIManager) handleGetUserProfile(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleUpdateUserProfile(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleFollowUser(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleUnfollowUser(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetFollowers(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetFollowing(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetAchievements(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetLeaderboard(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleClaimAchievement(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStartVerification(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetVerificationStatus(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetTrustScore(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// --- Communication domain: chat rooms, voice chat, video streaming ---

func (am *APIManager) handleGetRooms(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleCreateRoom(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetRoom(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleJoinRoom(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleLeaveRoom(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStartVoiceChat(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStopVoiceChat(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetVoiceStatus(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStartVideoStream(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStopVideoStream(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetVideoStreams(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// --- Media domain: audio streaming, smart playlists, discovery ---

func (am *APIManager) handleUploadAudio(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleStreamAudio(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetAudioMetadata(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetSmartPlaylists(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleCreateSmartPlaylist(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetSmartPlaylist(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleDiscoverContent(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetTrending(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetSimilarContent(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// --- AI domain: recommendations, moderation, sentiment ---

func (am *APIManager) handleGetRecommendations(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleRecommendationFeedback(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleModerateContent(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetModerationHistory(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleAnalyzeSentiment(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetSentimentTrends(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// --- Analytics domain: dashboards, behavior, business metrics ---

func (am *APIManager) handleGetDashboard(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetRealtimeMetrics(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetUserBehavior(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetEngagementMetrics(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetRevenueAnalytics(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetConversionMetrics(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}

// --- Integration domain: external APIs, webhooks, payments ---

func (am *APIManager) handleExternalAPIRequest(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetExternalAPIStatus(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleCreateWebhook(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetWebhooks(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleDeleteWebhook(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleProcessPayment(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetPaymentMethods(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
func (am *APIManager) handleGetPaymentHistory(c *gin.Context) {
	c.JSON(http.StatusNotImplemented, gin.H{"error": "Not implemented"})
}
|
||||||
2
veza-backend-api/internal/api/chat/handler.go
Normal file
2
veza-backend-api/internal/api/chat/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package chat - TO BE IMPLEMENTED
|
||||||
|
package chat
|
||||||
2
veza-backend-api/internal/api/collaboration/handler.go
Normal file
2
veza-backend-api/internal/api/collaboration/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package collaboration - TO BE IMPLEMENTED
|
||||||
|
package collaboration
|
||||||
2
veza-backend-api/internal/api/contest/handler.go
Normal file
2
veza-backend-api/internal/api/contest/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package contest - TO BE IMPLEMENTED
|
||||||
|
package contest
|
||||||
868
veza-backend-api/internal/api/education/handlers.go
Normal file
868
veza-backend-api/internal/api/education/handlers.go
Normal file
|
|
@ -0,0 +1,868 @@
|
||||||
|
package education
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/common"
|
||||||
|
"veza-backend-api/internal/core/education"
|
||||||
|
"veza-backend-api/internal/response"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler handles HTTP requests for the education domain, delegating course
// and tutorial operations to their respective core managers.
type Handler struct {
	courseManager   *education.CourseManager   // course CRUD and lesson/exercise management
	tutorialManager *education.TutorialManager // tutorial CRUD and step management
	logger          *zap.Logger                // structured logger for request handling
}
|
||||||
|
|
||||||
|
// NewHandler creates a new education handler wired to the given course and
// tutorial managers and logger.
func NewHandler(courseManager *education.CourseManager, tutorialManager *education.TutorialManager, logger *zap.Logger) *Handler {
	return &Handler{
		courseManager:   courseManager,
		tutorialManager: tutorialManager,
		logger:          logger,
	}
}
|
||||||
|
|
||||||
|
// Request/Response structures
|
||||||
|
type CreateCourseRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description" binding:"required"`
|
||||||
|
Instructor string `json:"instructor" binding:"required"`
|
||||||
|
Category string `json:"category" binding:"required"`
|
||||||
|
Level education.CourseLevel `json:"level" binding:"required"`
|
||||||
|
Duration time.Duration `json:"duration" binding:"required"`
|
||||||
|
Price float64 `json:"price"`
|
||||||
|
Language string `json:"language" binding:"required"`
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UpdateCourseRequest struct {
|
||||||
|
Title *string `json:"title"`
|
||||||
|
Description *string `json:"description"`
|
||||||
|
Instructor *string `json:"instructor"`
|
||||||
|
Category *string `json:"category"`
|
||||||
|
Level *education.CourseLevel `json:"level"`
|
||||||
|
Duration *time.Duration `json:"duration"`
|
||||||
|
Price *float64 `json:"price"`
|
||||||
|
Language *string `json:"language"`
|
||||||
|
IsPublished *bool `json:"is_published"`
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type CreateTutorialRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description" binding:"required"`
|
||||||
|
Author string `json:"author" binding:"required"`
|
||||||
|
Category string `json:"category" binding:"required"`
|
||||||
|
VideoURL string `json:"video_url" binding:"required"`
|
||||||
|
Thumbnail string `json:"thumbnail"`
|
||||||
|
Duration time.Duration `json:"duration" binding:"required"`
|
||||||
|
Quality education.VideoQuality `json:"quality" binding:"required"`
|
||||||
|
Language string `json:"language" binding:"required"`
|
||||||
|
IsFree bool `json:"is_free"`
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTutorialRequest is the JSON payload accepted by UpdateTutorial.
// All fields are optional; only non-nil fields are applied as updates.
type UpdateTutorialRequest struct {
	Title       *string                 `json:"title"`
	Description *string                 `json:"description"`
	Author      *string                 `json:"author"`
	Category    *string                 `json:"category"`
	VideoURL    *string                 `json:"video_url"`
	Thumbnail   *string                 `json:"thumbnail"`
	Duration    *time.Duration          `json:"duration"`
	Quality     *education.VideoQuality `json:"quality"`
	IsPublished *bool                   `json:"is_published"`
	Tags        []string                `json:"tags"` // nil = untouched; non-nil replaces tags
}
|
||||||
|
|
||||||
|
// AddLessonRequest is the JSON payload accepted by AddLesson.
type AddLessonRequest struct {
	Title       string        `json:"title" binding:"required"`
	Description string        `json:"description" binding:"required"`
	Content     string        `json:"content" binding:"required"` // lesson body (text/markdown — format not enforced here)
	VideoURL    string        `json:"video_url"`                  // optional companion video
	Duration    time.Duration `json:"duration" binding:"required"`
	// NOTE(review): gin's binding:"required" rejects zero values, so Order
	// must start at 1 — confirm that lesson ordering is 1-based upstream.
	Order  int  `json:"order" binding:"required"` // position of the lesson within the course
	IsFree bool `json:"is_free"`                  // optional preview flag
}
|
||||||
|
|
||||||
|
// AddExerciseRequest is the JSON payload accepted by AddExercise.
type AddExerciseRequest struct {
	Title       string                 `json:"title" binding:"required"`
	Description string                 `json:"description" binding:"required"`
	Content     string                 `json:"content" binding:"required"`  // exercise statement
	Solution    string                 `json:"solution" binding:"required"` // reference solution
	Type        education.ExerciseType `json:"type" binding:"required"`     // exercise kind (domain enum)
	Points      int                    `json:"points" binding:"required"`   // score weight; must be non-zero (required)
	TimeLimit   time.Duration          `json:"time_limit"`                  // optional; 0 = no time limit
	IsRequired  bool                   `json:"is_required"`                 // whether completion is mandatory
}
|
||||||
|
|
||||||
|
// UpdateProgressRequest is the JSON payload accepted by UpdateUserProgress.
type UpdateProgressRequest struct {
	// NOTE(review): binding:"required" rejects 0.0, so a progress of exactly
	// zero cannot be submitted — confirm this is intended.
	Progress         float64       `json:"progress" binding:"required"` // completion ratio/percentage (unit defined by courseManager)
	CompletedLessons []string      `json:"completed_lessons"`           // IDs of lessons already finished
	CurrentLesson    string        `json:"current_lesson"`              // ID of the lesson in progress
	Score            float64       `json:"score"`                       // optional accumulated score
	TimeSpent        time.Duration `json:"time_spent"`                  // optional time spent on the course
}
|
||||||
|
|
||||||
|
// AddTutorialStepRequest is the JSON payload accepted by AddTutorialStep.
type AddTutorialStepRequest struct {
	Title       string `json:"title" binding:"required"`
	Description string `json:"description" binding:"required"`
	Content     string `json:"content" binding:"required"` // step body
	// NOTE(review): binding:"required" rejects 0, so step ordering must be 1-based.
	Order     int           `json:"order" binding:"required"` // position of the step within the tutorial
	Timestamp time.Duration `json:"timestamp"`                // optional offset into the video for this step
	IsFree    bool          `json:"is_free"`                  // optional preview flag
}
|
||||||
|
|
||||||
|
// AddTutorialCommentRequest is the JSON payload accepted by AddTutorialComment.
type AddTutorialCommentRequest struct {
	Content string `json:"content" binding:"required"`    // comment text
	Rating  int    `json:"rating" binding:"min=1,max=5"`  // star rating, 1–5
}
|
||||||
|
|
||||||
|
// COURSES HANDLERS
|
||||||
|
|
||||||
|
// CreateCourse crée un nouveau cours
|
||||||
|
func (h *Handler) CreateCourse(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req CreateCourseRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
course, err := h.courseManager.CreateCourse(
|
||||||
|
c.Request.Context(),
|
||||||
|
req.Title,
|
||||||
|
req.Description,
|
||||||
|
req.Instructor,
|
||||||
|
req.Category,
|
||||||
|
req.Level,
|
||||||
|
req.Duration,
|
||||||
|
req.Price,
|
||||||
|
req.Language,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de création du cours", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de création du cours")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, course, "Cours créé avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCourse récupère un cours par son ID
|
||||||
|
func (h *Handler) GetCourse(c *gin.Context) {
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
course, err := h.courseManager.GetCourse(c.Request.Context(), courseID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération du cours", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusNotFound, "Cours non trouvé")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, course, "Cours récupéré avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCourses liste tous les cours disponibles
|
||||||
|
func (h *Handler) ListCourses(c *gin.Context) {
|
||||||
|
filters := make(map[string]interface{})
|
||||||
|
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters["category"] = category
|
||||||
|
}
|
||||||
|
if level := c.Query("level"); level != "" {
|
||||||
|
filters["level"] = education.CourseLevel(level)
|
||||||
|
}
|
||||||
|
if isPublished := c.Query("is_published"); isPublished != "" {
|
||||||
|
if published, err := strconv.ParseBool(isPublished); err == nil {
|
||||||
|
filters["is_published"] = published
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isFree := c.Query("is_free"); isFree != "" {
|
||||||
|
if free, err := strconv.ParseBool(isFree); err == nil {
|
||||||
|
filters["is_free"] = free
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
courses, err := h.courseManager.ListCourses(c.Request.Context(), filters)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération des cours", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de récupération des cours")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, courses, "Cours récupérés avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCourse met à jour un cours
|
||||||
|
func (h *Handler) UpdateCourse(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UpdateCourseRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
updates := make(map[string]interface{})
|
||||||
|
if req.Title != nil {
|
||||||
|
updates["title"] = *req.Title
|
||||||
|
}
|
||||||
|
if req.Description != nil {
|
||||||
|
updates["description"] = *req.Description
|
||||||
|
}
|
||||||
|
if req.Instructor != nil {
|
||||||
|
updates["instructor"] = *req.Instructor
|
||||||
|
}
|
||||||
|
if req.Category != nil {
|
||||||
|
updates["category"] = *req.Category
|
||||||
|
}
|
||||||
|
if req.Level != nil {
|
||||||
|
updates["level"] = *req.Level
|
||||||
|
}
|
||||||
|
if req.Duration != nil {
|
||||||
|
updates["duration"] = *req.Duration
|
||||||
|
}
|
||||||
|
if req.Price != nil {
|
||||||
|
updates["price"] = *req.Price
|
||||||
|
}
|
||||||
|
if req.Language != nil {
|
||||||
|
updates["language"] = *req.Language
|
||||||
|
}
|
||||||
|
if req.IsPublished != nil {
|
||||||
|
updates["is_published"] = *req.IsPublished
|
||||||
|
}
|
||||||
|
if req.Tags != nil {
|
||||||
|
updates["tags"] = req.Tags
|
||||||
|
}
|
||||||
|
|
||||||
|
course, err := h.courseManager.UpdateCourse(c.Request.Context(), courseID, updates)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de mise à jour du cours", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de mise à jour du cours")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, course, "Cours mis à jour avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteCourse supprime un cours
|
||||||
|
func (h *Handler) DeleteCourse(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.courseManager.DeleteCourse(c.Request.Context(), courseID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de suppression du cours", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de suppression du cours")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil, "Cours supprimé avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddLesson ajoute une leçon à un cours
|
||||||
|
func (h *Handler) AddLesson(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req AddLessonRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
lesson, err := h.courseManager.AddLesson(
|
||||||
|
c.Request.Context(),
|
||||||
|
courseID,
|
||||||
|
req.Title,
|
||||||
|
req.Description,
|
||||||
|
req.Content,
|
||||||
|
req.VideoURL,
|
||||||
|
req.Duration,
|
||||||
|
req.Order,
|
||||||
|
req.IsFree,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout de leçon", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout de leçon")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, lesson, "Leçon ajoutée avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddExercise ajoute un exercice à un cours
|
||||||
|
func (h *Handler) AddExercise(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
lessonID := c.Param("lesson_id")
|
||||||
|
if courseID == "" || lessonID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours et de leçon requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req AddExerciseRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
exercise, err := h.courseManager.AddExercise(
|
||||||
|
c.Request.Context(),
|
||||||
|
courseID,
|
||||||
|
lessonID,
|
||||||
|
req.Title,
|
||||||
|
req.Description,
|
||||||
|
req.Content,
|
||||||
|
req.Solution,
|
||||||
|
req.Type,
|
||||||
|
req.Points,
|
||||||
|
req.TimeLimit,
|
||||||
|
req.IsRequired,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout d'exercice", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout d'exercice")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, exercise, "Exercice ajouté avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserProgress récupère la progression d'un utilisateur
|
||||||
|
func (h *Handler) GetUserProgress(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
progress, err := h.courseManager.GetUserProgress(c.Request.Context(), userID, courseID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération de la progression", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusNotFound, "Progression non trouvée")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, progress, "Progression récupérée avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUserProgress met à jour la progression d'un utilisateur
|
||||||
|
func (h *Handler) UpdateUserProgress(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UpdateProgressRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
progress, err := h.courseManager.UpdateUserProgress(
|
||||||
|
c.Request.Context(),
|
||||||
|
userID,
|
||||||
|
courseID,
|
||||||
|
req.Progress,
|
||||||
|
req.CompletedLessons,
|
||||||
|
req.CurrentLesson,
|
||||||
|
req.Score,
|
||||||
|
req.TimeSpent,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de mise à jour de la progression", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de mise à jour de la progression")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, progress, "Progression mise à jour avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// IssueCertificate émet un certificat
|
||||||
|
func (h *Handler) IssueCertificate(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseID := c.Param("course_id")
|
||||||
|
if courseID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de cours requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer les paramètres de la requête
|
||||||
|
title := c.Query("title")
|
||||||
|
description := c.Query("description")
|
||||||
|
scoreStr := c.Query("score")
|
||||||
|
maxScoreStr := c.Query("max_score")
|
||||||
|
|
||||||
|
if title == "" || description == "" || scoreStr == "" || maxScoreStr == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Tous les paramètres sont requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
score, err := strconv.ParseFloat(scoreStr, 64)
|
||||||
|
if err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Score invalide")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
maxScore, err := strconv.ParseFloat(maxScoreStr, 64)
|
||||||
|
if err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Score maximum invalide")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
certificate, err := h.courseManager.IssueCertificate(
|
||||||
|
c.Request.Context(),
|
||||||
|
courseID,
|
||||||
|
userID,
|
||||||
|
title,
|
||||||
|
description,
|
||||||
|
score,
|
||||||
|
maxScore,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'émission du certificat", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'émission du certificat")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, certificate, "Certificat émis avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TUTORIALS HANDLERS
|
||||||
|
|
||||||
|
// CreateTutorial crée un nouveau tutoriel
|
||||||
|
func (h *Handler) CreateTutorial(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req CreateTutorialRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial, err := h.tutorialManager.CreateTutorial(
|
||||||
|
c.Request.Context(),
|
||||||
|
req.Title,
|
||||||
|
req.Description,
|
||||||
|
req.Author,
|
||||||
|
req.Category,
|
||||||
|
req.VideoURL,
|
||||||
|
req.Thumbnail,
|
||||||
|
req.Language,
|
||||||
|
req.Duration,
|
||||||
|
req.Quality,
|
||||||
|
req.IsFree,
|
||||||
|
req.Tags,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de création du tutoriel", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de création du tutoriel")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, tutorial, "Tutoriel créé avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTutorial récupère un tutoriel par son ID
|
||||||
|
func (h *Handler) GetTutorial(c *gin.Context) {
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial, err := h.tutorialManager.GetTutorial(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération du tutoriel", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusNotFound, "Tutoriel non trouvé")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Incrémenter les vues
|
||||||
|
go func() {
|
||||||
|
if err := h.tutorialManager.IncrementViews(c.Request.Context(), tutorialID); err != nil {
|
||||||
|
h.logger.Error("Échec d'incrémentation des vues", zap.Error(err))
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
response.Success(c, tutorial, "Tutoriel récupéré avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListTutorials liste tous les tutoriels disponibles
|
||||||
|
func (h *Handler) ListTutorials(c *gin.Context) {
|
||||||
|
filters := make(map[string]interface{})
|
||||||
|
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters["category"] = category
|
||||||
|
}
|
||||||
|
if isPublished := c.Query("is_published"); isPublished != "" {
|
||||||
|
if published, err := strconv.ParseBool(isPublished); err == nil {
|
||||||
|
filters["is_published"] = published
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isFree := c.Query("is_free"); isFree != "" {
|
||||||
|
if free, err := strconv.ParseBool(isFree); err == nil {
|
||||||
|
filters["is_free"] = free
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if language := c.Query("language"); language != "" {
|
||||||
|
filters["language"] = language
|
||||||
|
}
|
||||||
|
if author := c.Query("author"); author != "" {
|
||||||
|
filters["author"] = author
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorials, err := h.tutorialManager.ListTutorials(c.Request.Context(), filters)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération des tutoriels", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de récupération des tutoriels")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, tutorials, "Tutoriels récupérés avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchTutorials recherche des tutoriels
|
||||||
|
func (h *Handler) SearchTutorials(c *gin.Context) {
|
||||||
|
query := c.Query("q")
|
||||||
|
if query == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Terme de recherche requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
filters := make(map[string]interface{})
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters["category"] = category
|
||||||
|
}
|
||||||
|
if isPublished := c.Query("is_published"); isPublished != "" {
|
||||||
|
if published, err := strconv.ParseBool(isPublished); err == nil {
|
||||||
|
filters["is_published"] = published
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isFree := c.Query("is_free"); isFree != "" {
|
||||||
|
if free, err := strconv.ParseBool(isFree); err == nil {
|
||||||
|
filters["is_free"] = free
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorials, err := h.tutorialManager.SearchTutorials(c.Request.Context(), query, filters)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de recherche des tutoriels", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de recherche des tutoriels")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, tutorials, "Recherche de tutoriels terminée")
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTutorial met à jour un tutoriel
|
||||||
|
func (h *Handler) UpdateTutorial(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UpdateTutorialRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
updates := make(map[string]interface{})
|
||||||
|
if req.Title != nil {
|
||||||
|
updates["title"] = *req.Title
|
||||||
|
}
|
||||||
|
if req.Description != nil {
|
||||||
|
updates["description"] = *req.Description
|
||||||
|
}
|
||||||
|
if req.Author != nil {
|
||||||
|
updates["author"] = *req.Author
|
||||||
|
}
|
||||||
|
if req.Category != nil {
|
||||||
|
updates["category"] = *req.Category
|
||||||
|
}
|
||||||
|
if req.VideoURL != nil {
|
||||||
|
updates["video_url"] = *req.VideoURL
|
||||||
|
}
|
||||||
|
if req.Thumbnail != nil {
|
||||||
|
updates["thumbnail"] = *req.Thumbnail
|
||||||
|
}
|
||||||
|
if req.Duration != nil {
|
||||||
|
updates["duration"] = *req.Duration
|
||||||
|
}
|
||||||
|
if req.Quality != nil {
|
||||||
|
updates["quality"] = *req.Quality
|
||||||
|
}
|
||||||
|
if req.IsPublished != nil {
|
||||||
|
updates["is_published"] = *req.IsPublished
|
||||||
|
}
|
||||||
|
if req.Tags != nil {
|
||||||
|
updates["tags"] = req.Tags
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial, err := h.tutorialManager.UpdateTutorial(c.Request.Context(), tutorialID, updates)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de mise à jour du tutoriel", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de mise à jour du tutoriel")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, tutorial, "Tutoriel mis à jour avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteTutorial supprime un tutoriel
|
||||||
|
func (h *Handler) DeleteTutorial(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.tutorialManager.DeleteTutorial(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de suppression du tutoriel", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de suppression du tutoriel")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil, "Tutoriel supprimé avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTutorialStep ajoute une étape à un tutoriel
|
||||||
|
func (h *Handler) AddTutorialStep(c *gin.Context) {
|
||||||
|
_, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req AddTutorialStepRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
step, err := h.tutorialManager.AddTutorialStep(
|
||||||
|
c.Request.Context(),
|
||||||
|
tutorialID,
|
||||||
|
req.Title,
|
||||||
|
req.Description,
|
||||||
|
req.Content,
|
||||||
|
req.Order,
|
||||||
|
req.Timestamp,
|
||||||
|
req.IsFree,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout d'étape de tutoriel", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout d'étape de tutoriel")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, step, "Étape de tutoriel ajoutée avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTutorialSteps récupère les étapes d'un tutoriel
|
||||||
|
func (h *Handler) GetTutorialSteps(c *gin.Context) {
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
steps, err := h.tutorialManager.GetTutorialSteps(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération des étapes", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de récupération des étapes")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, steps, "Étapes récupérées avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTutorialComment ajoute un commentaire à un tutoriel
|
||||||
|
func (h *Handler) AddTutorialComment(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Error(c, http.StatusUnauthorized, "Utilisateur non authentifié")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req AddTutorialCommentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.Error(c, http.StatusBadRequest, "Données de requête invalides")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
username, _ := common.GetUsernameFromContext(c)
|
||||||
|
if username == "" {
|
||||||
|
username = "Utilisateur anonyme"
|
||||||
|
}
|
||||||
|
|
||||||
|
comment, err := h.tutorialManager.AddTutorialComment(
|
||||||
|
c.Request.Context(),
|
||||||
|
tutorialID,
|
||||||
|
userID.String(),
|
||||||
|
username,
|
||||||
|
req.Content,
|
||||||
|
req.Rating,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout de commentaire", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout de commentaire")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, comment, "Commentaire ajouté avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTutorialComments récupère les commentaires d'un tutoriel
|
||||||
|
func (h *Handler) GetTutorialComments(c *gin.Context) {
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
comments, err := h.tutorialManager.GetTutorialComments(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec de récupération des commentaires", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec de récupération des commentaires")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, comments, "Commentaires récupérés avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// LikeTutorial ajoute un like à un tutoriel
|
||||||
|
func (h *Handler) LikeTutorial(c *gin.Context) {
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.tutorialManager.LikeTutorial(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout de like", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout de like")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil, "Like ajouté avec succès")
|
||||||
|
}
|
||||||
|
|
||||||
|
// DislikeTutorial ajoute un dislike à un tutoriel
|
||||||
|
func (h *Handler) DislikeTutorial(c *gin.Context) {
|
||||||
|
tutorialID := c.Param("tutorial_id")
|
||||||
|
if tutorialID == "" {
|
||||||
|
response.Error(c, http.StatusBadRequest, "ID de tutoriel requis")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.tutorialManager.DislikeTutorial(c.Request.Context(), tutorialID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Échec d'ajout de dislike", zap.Error(err))
|
||||||
|
response.Error(c, http.StatusInternalServerError, "Échec d'ajout de dislike")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil, "Dislike ajouté avec succès")
|
||||||
|
}
|
||||||
54
veza-backend-api/internal/api/education/routes.go
Normal file
54
veza-backend-api/internal/api/education/routes.go
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
package education
|
||||||
|
|
||||||
|
import (
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SetupRoutes configure les routes d'éducation
|
||||||
|
func SetupRoutes(router *gin.RouterGroup, handler *Handler, jwtSecret string, authMiddleware *middleware.AuthMiddleware) { // Added authMiddleware parameter
|
||||||
|
// Groupe de routes pour l'éducation
|
||||||
|
edu := router.Group("/education")
|
||||||
|
{
|
||||||
|
// Routes des cours
|
||||||
|
courses := edu.Group("/courses")
|
||||||
|
courses.Use(authMiddleware.RequireAuth()) // Changed to authMiddleware.RequireAuth()
|
||||||
|
{
|
||||||
|
courses.POST("/create", handler.CreateCourse)
|
||||||
|
courses.GET("/list", handler.ListCourses)
|
||||||
|
courses.GET("/:course_id", handler.GetCourse)
|
||||||
|
courses.PUT("/:course_id", handler.UpdateCourse)
|
||||||
|
courses.DELETE("/:course_id", handler.DeleteCourse)
|
||||||
|
courses.POST("/:course_id/lessons", handler.AddLesson)
|
||||||
|
courses.POST("/:course_id/lessons/:lesson_id/exercises", handler.AddExercise)
|
||||||
|
courses.GET("/:course_id/progress", handler.GetUserProgress)
|
||||||
|
courses.PUT("/:course_id/progress", handler.UpdateUserProgress)
|
||||||
|
courses.POST("/:course_id/certificate", handler.IssueCertificate)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Routes des tutoriels
|
||||||
|
tutorials := edu.Group("/tutorials")
|
||||||
|
{
|
||||||
|
// Routes publiques (sans authentification)
|
||||||
|
tutorials.GET("/list", handler.ListTutorials)
|
||||||
|
tutorials.GET("/search", handler.SearchTutorials)
|
||||||
|
tutorials.GET("/:tutorial_id", handler.GetTutorial)
|
||||||
|
tutorials.GET("/:tutorial_id/steps", handler.GetTutorialSteps)
|
||||||
|
tutorials.GET("/:tutorial_id/comments", handler.GetTutorialComments)
|
||||||
|
tutorials.POST("/:tutorial_id/like", handler.LikeTutorial)
|
||||||
|
tutorials.POST("/:tutorial_id/dislike", handler.DislikeTutorial)
|
||||||
|
|
||||||
|
// Routes protégées (avec authentification)
|
||||||
|
protected := tutorials.Group("")
|
||||||
|
protected.Use(authMiddleware.RequireAuth()) // Changed to authMiddleware.RequireAuth()
|
||||||
|
{
|
||||||
|
protected.POST("/create", handler.CreateTutorial)
|
||||||
|
protected.PUT("/:tutorial_id", handler.UpdateTutorial)
|
||||||
|
protected.DELETE("/:tutorial_id", handler.DeleteTutorial)
|
||||||
|
protected.POST("/:tutorial_id/steps", handler.AddTutorialStep)
|
||||||
|
protected.POST("/:tutorial_id/comments", handler.AddTutorialComment)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
2
veza-backend-api/internal/api/graphql/handler.go
Normal file
2
veza-backend-api/internal/api/graphql/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package graphql - TO BE IMPLEMENTED
|
||||||
|
package graphql
|
||||||
2
veza-backend-api/internal/api/grpc/handler.go
Normal file
2
veza-backend-api/internal/api/grpc/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package grpc - TO BE IMPLEMENTED
|
||||||
|
package grpc
|
||||||
377
veza-backend-api/internal/api/handlers/chat_handlers.go
Normal file
377
veza-backend-api/internal/api/handlers/chat_handlers.go
Normal file
|
|
@ -0,0 +1,377 @@
|
||||||
|
//go:build ignore
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// TODO: Réactiver chat_handlers après stabilisation du noyau et alignement des services (ChatService, MessageType, RoomType)
|
||||||
|
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ChatHandlers handles chat-related API endpoints
|
||||||
|
type ChatHandlers struct {
|
||||||
|
chatService *services.ChatService
|
||||||
|
logger *zap.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewChatHandlers creates new chat handlers
|
||||||
|
func NewChatHandlers(chatService *services.ChatService, logger *zap.Logger) *ChatHandlers {
|
||||||
|
return &ChatHandlers{
|
||||||
|
chatService: chatService,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// InitChatHandlers initializes chat handlers
|
||||||
|
func InitChatHandlers(chatService *services.ChatService, logger *zap.Logger) {
|
||||||
|
handlers := NewChatHandlers(chatService, logger)
|
||||||
|
|
||||||
|
// Store handlers globally for route registration
|
||||||
|
ChatHandlersInstance = handlers
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChatHandlersInstance holds the global chat handlers instance
|
||||||
|
var ChatHandlersInstance *ChatHandlers
|
||||||
|
|
||||||
|
// CreateMessage creates a new message in a room
|
||||||
|
func (h *ChatHandlers) CreateMessage(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
roomID, err := strconv.ParseInt(c.Param("room_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid room ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Content string `json:"content" binding:"required"`
|
||||||
|
Type services.MessageType `json:"type"`
|
||||||
|
ParentID *int64 `json:"parent_id,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Type == "" {
|
||||||
|
req.Type = services.MessageTypeText
|
||||||
|
}
|
||||||
|
|
||||||
|
message, err := h.chatService.CreateMessage(c.Request.Context(), roomID, userID, req.Content, req.Type, req.ParentID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create message", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create message"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": message,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessages retrieves messages for a room
|
||||||
|
func (h *ChatHandlers) GetMessages(c *gin.Context) {
|
||||||
|
roomID, err := strconv.ParseInt(c.Param("room_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid room ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
|
||||||
|
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "50"))
|
||||||
|
beforeIDStr := c.Query("before_id")
|
||||||
|
|
||||||
|
var beforeID *int64
|
||||||
|
if beforeIDStr != "" {
|
||||||
|
if id, err := strconv.ParseInt(beforeIDStr, 10, 64); err == nil {
|
||||||
|
beforeID = &id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
messages, err := h.chatService.GetMessages(c.Request.Context(), roomID, page, limit, beforeID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get messages", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get messages"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"messages": messages,
|
||||||
|
"page": page,
|
||||||
|
"limit": limit,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddReaction adds a reaction to a message
|
||||||
|
func (h *ChatHandlers) AddReaction(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
messageID, err := strconv.ParseInt(c.Param("message_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid message ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Emoji string `json:"emoji" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
reaction, err := h.chatService.AddReaction(c.Request.Context(), messageID, userID, req.Emoji)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to add reaction", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to add reaction"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"reaction": reaction,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveReaction removes a reaction from a message
|
||||||
|
func (h *ChatHandlers) RemoveReaction(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
messageID, err := strconv.ParseInt(c.Param("message_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid message ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
emoji := c.Param("emoji")
|
||||||
|
if emoji == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Emoji is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.chatService.RemoveReaction(c.Request.Context(), messageID, userID, emoji)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to remove reaction", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to remove reaction"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Reaction removed",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateRoom creates a new chat room
|
||||||
|
func (h *ChatHandlers) CreateRoom(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Name string `json:"name" binding:"required"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
Type services.RoomType `json:"type"`
|
||||||
|
IsPrivate bool `json:"is_private"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Type == "" {
|
||||||
|
req.Type = services.RoomTypePublic
|
||||||
|
}
|
||||||
|
|
||||||
|
room, err := h.chatService.CreateRoom(c.Request.Context(), req.Name, req.Description, req.Type, req.IsPrivate, userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create room", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create room"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"room": room,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRooms retrieves available rooms
|
||||||
|
func (h *ChatHandlers) GetRooms(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
includePrivate := c.DefaultQuery("include_private", "false") == "true"
|
||||||
|
|
||||||
|
rooms, err := h.chatService.GetRooms(c.Request.Context(), userID, includePrivate)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get rooms", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get rooms"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"rooms": rooms,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// JoinRoom adds a user to a room
|
||||||
|
func (h *ChatHandlers) JoinRoom(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
roomID, err := strconv.ParseInt(c.Param("room_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid room ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.chatService.JoinRoom(c.Request.Context(), roomID, userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to join room", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to join room"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Successfully joined room",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// LeaveRoom removes a user from a room
|
||||||
|
func (h *ChatHandlers) LeaveRoom(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
roomID, err := strconv.ParseInt(c.Param("room_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid room ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.chatService.LeaveRoom(c.Request.Context(), roomID, userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to leave room", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to leave room"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Successfully left room",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateDirectMessage creates a DM room between two users
|
||||||
|
func (h *ChatHandlers) CreateDirectMessage(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
UserID int64 `json:"user_id" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
room, err := h.chatService.CreateDirectMessage(c.Request.Context(), userID, req.UserID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create DM", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create direct message"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"room": room,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchMessages searches for messages in a room
|
||||||
|
func (h *ChatHandlers) SearchMessages(c *gin.Context) {
|
||||||
|
roomID, err := strconv.ParseInt(c.Param("room_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid room ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
query := c.Query("q")
|
||||||
|
if query == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Search query is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "20"))
|
||||||
|
|
||||||
|
messages, err := h.chatService.SearchMessages(c.Request.Context(), roomID, query, limit)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to search messages", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to search messages"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"messages": messages,
|
||||||
|
"query": query,
|
||||||
|
"limit": limit,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// EditMessage edits an existing message
|
||||||
|
func (h *ChatHandlers) EditMessage(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
messageID, err := strconv.ParseInt(c.Param("message_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid message ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Content string `json:"content" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
message, err := h.chatService.EditMessage(c.Request.Context(), messageID, userID, req.Content)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to edit message", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to edit message"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": message,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteMessage deletes a message
|
||||||
|
func (h *ChatHandlers) DeleteMessage(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
messageID, err := strconv.ParseInt(c.Param("message_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid message ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.chatService.DeleteMessage(c.Request.Context(), messageID, userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to delete message", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to delete message"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Message deleted successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
256
veza-backend-api/internal/api/handlers/rbac_handlers.go
Normal file
256
veza-backend-api/internal/api/handlers/rbac_handlers.go
Normal file
|
|
@ -0,0 +1,256 @@
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RBACHandlers handles RBAC-related API endpoints
|
||||||
|
type RBACHandlers struct {
|
||||||
|
rbacService *services.RBACService
|
||||||
|
logger *zap.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRBACHandlers creates new RBAC handlers
|
||||||
|
func NewRBACHandlers(rbacService *services.RBACService, logger *zap.Logger) *RBACHandlers {
|
||||||
|
return &RBACHandlers{
|
||||||
|
rbacService: rbacService,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// InitRBACHandlers initializes RBAC handlers
|
||||||
|
func InitRBACHandlers(rbacService *services.RBACService, logger *zap.Logger) {
|
||||||
|
handlers := NewRBACHandlers(rbacService, logger)
|
||||||
|
|
||||||
|
// Store handlers globally for route registration
|
||||||
|
RBACHandlersInstance = handlers
|
||||||
|
}
|
||||||
|
|
||||||
|
// RBACHandlersInstance holds the global RBAC handlers instance
|
||||||
|
var RBACHandlersInstance *RBACHandlers
|
||||||
|
|
||||||
|
// CreateRole creates a new role
|
||||||
|
func (h *RBACHandlers) CreateRole(c *gin.Context) {
|
||||||
|
var req struct {
|
||||||
|
Name string `json:"name" binding:"required"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
Permissions []int64 `json:"permissions"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
role, err := h.rbacService.CreateRole(c.Request.Context(), req.Name, req.Description, req.Permissions)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create role", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create role"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"role": role,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRole gets a role by ID
|
||||||
|
func (h *RBACHandlers) GetRole(c *gin.Context) {
|
||||||
|
roleID, err := strconv.ParseInt(c.Param("id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid role ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
role, err := h.rbacService.GetRoleByID(c.Request.Context(), roleID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get role", zap.Error(err))
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "Role not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"role": role,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAllRoles gets all roles
|
||||||
|
func (h *RBACHandlers) GetAllRoles(c *gin.Context) {
|
||||||
|
roles, err := h.rbacService.GetAllRoles(c.Request.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get roles", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get roles"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"roles": roles,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// AssignRoleToUser assigns a role to a user
|
||||||
|
func (h *RBACHandlers) AssignRoleToUser(c *gin.Context) {
|
||||||
|
userID, err := uuid.Parse(c.Param("user_id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
RoleID int64 `json:"role_id" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.rbacService.AssignRoleToUser(c.Request.Context(), userID, req.RoleID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to assign role to user", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to assign role to user"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Role assigned to user successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveRoleFromUser removes a role from a user
|
||||||
|
func (h *RBACHandlers) RemoveRoleFromUser(c *gin.Context) {
|
||||||
|
userID, err := uuid.Parse(c.Param("user_id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
roleID, err := strconv.ParseInt(c.Param("role_id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid role ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = h.rbacService.RemoveRoleFromUser(c.Request.Context(), userID, roleID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to remove role from user", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to remove role from user"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "Role removed from user successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserRoles gets all roles for a user
|
||||||
|
func (h *RBACHandlers) GetUserRoles(c *gin.Context) {
|
||||||
|
userID, err := uuid.Parse(c.Param("user_id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
roles, err := h.rbacService.GetUserRoles(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get user roles", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get user roles"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"roles": roles,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserPermissions gets all permissions for a user
|
||||||
|
func (h *RBACHandlers) GetUserPermissions(c *gin.Context) {
|
||||||
|
userID, err := uuid.Parse(c.Param("user_id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
permissions, err := h.rbacService.GetUserPermissions(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get user permissions", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get user permissions"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"permissions": permissions,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckPermission checks if a user has a specific permission
|
||||||
|
func (h *RBACHandlers) CheckPermission(c *gin.Context) {
|
||||||
|
userID, err := uuid.Parse(c.Param("user_id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid user ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
resource := c.Query("resource")
|
||||||
|
action := c.Query("action")
|
||||||
|
|
||||||
|
if resource == "" || action == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Resource and action are required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
hasPermission, err := h.rbacService.CheckPermission(c.Request.Context(), userID, resource, action)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to check permission", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to check permission"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"has_permission": hasPermission,
|
||||||
|
"resource": resource,
|
||||||
|
"action": action,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatePermission creates a new permission
|
||||||
|
func (h *RBACHandlers) CreatePermission(c *gin.Context) {
|
||||||
|
var req struct {
|
||||||
|
Name string `json:"name" binding:"required"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
Resource string `json:"resource" binding:"required"`
|
||||||
|
Action string `json:"action" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
permission, err := h.rbacService.CreatePermission(c.Request.Context(), req.Name, req.Description, req.Resource, req.Action)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create permission", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create permission"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"permission": permission,
|
||||||
|
})
|
||||||
|
}
|
||||||
209
veza-backend-api/internal/api/handlers/two_factor_handlers.go
Normal file
209
veza-backend-api/internal/api/handlers/two_factor_handlers.go
Normal file
|
|
@ -0,0 +1,209 @@
|
||||||
|
//go:build ignore
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
// TODO: Réactiver two_factor_handlers après stabilisation du noyau et alignement des services (AuthService.GetUserByID)
|
||||||
|
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TwoFactorHandlers handles 2FA-related API endpoints
|
||||||
|
type TwoFactorHandlers struct {
|
||||||
|
twoFactorService *services.TwoFactorService
|
||||||
|
authService *services.AuthService
|
||||||
|
logger *zap.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTwoFactorHandlers creates new 2FA handlers
|
||||||
|
func NewTwoFactorHandlers(twoFactorService *services.TwoFactorService, authService *services.AuthService, logger *zap.Logger) *TwoFactorHandlers {
|
||||||
|
return &TwoFactorHandlers{
|
||||||
|
twoFactorService: twoFactorService,
|
||||||
|
authService: authService,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// InitTwoFactorHandlers initializes 2FA handlers
|
||||||
|
func InitTwoFactorHandlers(twoFactorService *services.TwoFactorService, authService *services.AuthService, logger *zap.Logger) {
|
||||||
|
handlers := NewTwoFactorHandlers(twoFactorService, authService, logger)
|
||||||
|
|
||||||
|
// Store handlers globally for route registration
|
||||||
|
TwoFactorHandlersInstance = handlers
|
||||||
|
}
|
||||||
|
|
||||||
|
// TwoFactorHandlersInstance holds the global 2FA handlers instance
|
||||||
|
var TwoFactorHandlersInstance *TwoFactorHandlers
|
||||||
|
|
||||||
|
// SetupTwoFactor initiates 2FA setup for a user
|
||||||
|
func (h *TwoFactorHandlers) SetupTwoFactor(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
// Get user information
|
||||||
|
user, err := h.authService.GetUserByID(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get user", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get user information"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if 2FA is already enabled
|
||||||
|
enabled, err := h.twoFactorService.GetTwoFactorStatus(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get 2FA status", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get 2FA status"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if enabled {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "2FA is already enabled"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate 2FA setup
|
||||||
|
setup, err := h.twoFactorService.GenerateSecret(user)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to generate 2FA setup", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate 2FA setup"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"setup": setup,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnableTwoFactor enables 2FA for a user
|
||||||
|
func (h *TwoFactorHandlers) EnableTwoFactor(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Secret string `json:"secret" binding:"required"`
|
||||||
|
Code string `json:"code" binding:"required"`
|
||||||
|
RecoveryCodes []string `json:"recovery_codes" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the code first
|
||||||
|
valid, err := h.twoFactorService.VerifyTwoFactor(c.Request.Context(), userID, req.Code)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to verify 2FA code", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !valid {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enable 2FA
|
||||||
|
err = h.twoFactorService.EnableTwoFactor(c.Request.Context(), userID, req.Secret, req.RecoveryCodes)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to enable 2FA", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to enable 2FA"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "2FA enabled successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DisableTwoFactor disables 2FA for a user
|
||||||
|
func (h *TwoFactorHandlers) DisableTwoFactor(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Code string `json:"code" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the code first
|
||||||
|
valid, err := h.twoFactorService.VerifyTwoFactor(c.Request.Context(), userID, req.Code)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to verify 2FA code", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !valid {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Disable 2FA
|
||||||
|
err = h.twoFactorService.DisableTwoFactor(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to disable 2FA", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to disable 2FA"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "2FA disabled successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// VerifyTwoFactor verifies a 2FA code
|
||||||
|
func (h *TwoFactorHandlers) VerifyTwoFactor(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
var req services.TwoFactorVerification
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the code
|
||||||
|
valid, err := h.twoFactorService.VerifyTwoFactor(c.Request.Context(), userID, req.Code)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to verify 2FA code", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !valid {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid 2FA code"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"message": "2FA code verified successfully",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTwoFactorStatus gets the 2FA status for a user
|
||||||
|
func (h *TwoFactorHandlers) GetTwoFactorStatus(c *gin.Context) {
|
||||||
|
userID := c.GetInt64("user_id")
|
||||||
|
|
||||||
|
enabled, err := h.twoFactorService.GetTwoFactorStatus(c.Request.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to get 2FA status", zap.Error(err))
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get 2FA status"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"success": true,
|
||||||
|
"enabled": enabled,
|
||||||
|
})
|
||||||
|
}
|
||||||
2
veza-backend-api/internal/api/listing/handler.go
Normal file
2
veza-backend-api/internal/api/listing/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package listing - TO BE IMPLEMENTED
|
||||||
|
package listing
|
||||||
2
veza-backend-api/internal/api/message/handler.go
Normal file
2
veza-backend-api/internal/api/message/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package message - TO BE IMPLEMENTED
|
||||||
|
package message
|
||||||
2
veza-backend-api/internal/api/offer/handler.go
Normal file
2
veza-backend-api/internal/api/offer/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package offer - TO BE IMPLEMENTED
|
||||||
|
package offer
|
||||||
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package production_challenge - TO BE IMPLEMENTED
|
||||||
|
package production_challenge
|
||||||
2
veza-backend-api/internal/api/room/handler.go
Normal file
2
veza-backend-api/internal/api/room/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package room - TO BE IMPLEMENTED
|
||||||
|
package room
|
||||||
528
veza-backend-api/internal/api/router.go
Normal file
528
veza-backend-api/internal/api/router.go
Normal file
|
|
@ -0,0 +1,528 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/config"
|
||||||
|
"veza-backend-api/internal/database"
|
||||||
|
"veza-backend-api/internal/handlers" // Single handlers import
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/repositories"
|
||||||
|
|
||||||
|
// swaggerFiles "github.com/swaggo/files" // Uncommented
|
||||||
|
// ginSwagger "github.com/swaggo/gin-swagger" // Uncommented
|
||||||
|
|
||||||
|
// Add missing imports.
|
||||||
|
swaggerFiles "github.com/swaggo/files"
|
||||||
|
ginSwagger "github.com/swaggo/gin-swagger"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/core/marketplace"
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
authcore "veza-backend-api/internal/core/auth"
|
||||||
|
trackcore "veza-backend-api/internal/core/track"
|
||||||
|
"veza-backend-api/internal/validators"
|
||||||
|
"veza-backend-api/internal/workers"
|
||||||
|
|
||||||
|
// swaggerFiles "github.com/swaggo/files"
|
||||||
|
// ginSwagger "github.com/swaggo/gin-swagger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// APIRouter gère la configuration des routes de l'API: it wires global
// middlewares, builds services/handlers, and attaches all route groups
// onto a gin engine via Setup.
type APIRouter struct {
	db     *database.Database // shared DB handle; nil-checked before use in the setup methods
	config *config.Config     // runtime config (middlewares, secrets, clients); may be nil
	engine *gin.Engine        // set by Setup; the engine the routes are attached to
	logger *zap.Logger        // structured logger; defaults to zap.L() in NewAPIRouter
}
|
||||||
|
|
||||||
|
// NewAPIRouter crée une nouvelle instance de APIRouter
|
||||||
|
func NewAPIRouter(db *database.Database, cfg *config.Config) *APIRouter {
|
||||||
|
return &APIRouter{
|
||||||
|
db: db,
|
||||||
|
config: cfg,
|
||||||
|
logger: zap.L(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setup configure toutes les routes de l'API.
//
// Registration order matters: global middlewares must be attached before
// any route group so every request passes through them. This method
// mutates the provided engine and stores it on the router.
func (r *APIRouter) Setup(router *gin.Engine) {
	r.engine = router

	// Middlewares globaux
	router.Use(middleware.RequestLogger(r.logger)) // Utilisation du structured logger
	router.Use(middleware.Metrics())               // Prometheus Metrics
	router.Use(middleware.Recovery(r.logger))
	if r.config != nil && len(r.config.CORSOrigins) > 0 {
		router.Use(middleware.CORS(r.config.CORSOrigins))
	} else {
		// No explicit origins configured: fall back to the default CORS policy.
		router.Use(middleware.CORSDefault())
	}
	router.Use(middleware.RequestID())
	// Rate limiting via config.RateLimiter si disponible, sinon utiliser SimpleRateLimiter
	if r.config != nil && r.config.RateLimiter != nil {
		router.Use(r.config.RateLimiter.RateLimitMiddleware())
	} else if r.config != nil && r.config.SimpleRateLimiter != nil {
		router.Use(r.config.SimpleRateLimiter.Middleware())
	}

	// Swagger Documentation
	router.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler))

	// Routes core publiques (health, metrics, upload info)
	r.setupCorePublicRoutes(router)

	// Groupe API v1 (nouveau frontend React)
	v1 := router.Group("/api/v1")
	{
		// Routes core protégées (sessions, uploads, audit, admin, conversations)
		r.setupCoreProtectedRoutes(v1)

		r.setupAuthRoutes(v1)

		// Réactivation des routes User et Track pour Phase 1
		r.setupUserRoutes(v1)
		r.setupTrackRoutes(v1)

		// Réactivation des routes Chat pour Phase 4
		r.setupChatRoutes(v1)
		// Réactivation des routes Playlists pour Phase 5
		r.setupPlaylistRoutes(v1)
		// Réactivation des routes Webhooks
		r.setupWebhookRoutes(v1)

		// Marketplace Routes (v1.2.0)
		r.setupMarketplaceRoutes(v1)
	}
}
|
||||||
|
|
||||||
|
// Méthodes de configuration des routes par module
|
||||||
|
// setupMarketplaceRoutes configure les routes de la marketplace
|
||||||
|
func (r *APIRouter) setupMarketplaceRoutes(router *gin.RouterGroup) {
|
||||||
|
uploadDir := r.config.UploadDir
|
||||||
|
if uploadDir == "" {
|
||||||
|
uploadDir = "uploads/tracks"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Storage service (reused from tracks logic)
|
||||||
|
storageService := services.NewTrackStorageService(uploadDir, false, r.logger)
|
||||||
|
|
||||||
|
// Marketplace service
|
||||||
|
marketService := marketplace.NewService(r.db.GormDB, r.logger, storageService)
|
||||||
|
marketHandler := handlers.NewMarketplaceHandler(marketService)
|
||||||
|
|
||||||
|
group := router.Group("/marketplace")
|
||||||
|
// Public routes
|
||||||
|
group.GET("/products", marketHandler.ListProducts)
|
||||||
|
|
||||||
|
// Protected routes
|
||||||
|
if r.config.AuthMiddleware != nil {
|
||||||
|
protected := group.Group("")
|
||||||
|
protected.Use(r.config.AuthMiddleware.RequireAuth())
|
||||||
|
|
||||||
|
// GO-012: Create product requires creator/premium/admin role
|
||||||
|
createGroup := protected.Group("")
|
||||||
|
createGroup.Use(r.config.AuthMiddleware.RequireContentCreatorRole())
|
||||||
|
createGroup.POST("/products", marketHandler.CreateProduct)
|
||||||
|
protected.POST("/orders", marketHandler.CreateOrder)
|
||||||
|
protected.GET("/download/:product_id", marketHandler.GetDownloadURL)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupAuthRoutes configure les routes d'authentification avec toutes les dépendances
|
||||||
|
func (r *APIRouter) setupAuthRoutes(router *gin.RouterGroup) {
|
||||||
|
// 1. Instanciation des dépendances
|
||||||
|
emailValidator := validators.NewEmailValidator(r.db.GormDB)
|
||||||
|
passwordValidator := validators.NewPasswordValidator()
|
||||||
|
passwordService := services.NewPasswordService(r.db, r.logger)
|
||||||
|
jwtService := services.NewJWTService(r.config.JWTSecret)
|
||||||
|
refreshTokenService := services.NewRefreshTokenService(r.db.GormDB)
|
||||||
|
emailVerificationService := services.NewEmailVerificationService(r.db, r.logger)
|
||||||
|
emailService := services.NewEmailService(r.db, r.logger)
|
||||||
|
sessionService := services.NewSessionService(r.db, r.logger)
|
||||||
|
|
||||||
|
// 2. Service Auth complet
|
||||||
|
authService := authcore.NewAuthService(
|
||||||
|
r.db.GormDB,
|
||||||
|
emailValidator,
|
||||||
|
passwordValidator,
|
||||||
|
passwordService,
|
||||||
|
jwtService,
|
||||||
|
refreshTokenService,
|
||||||
|
emailVerificationService,
|
||||||
|
emailService,
|
||||||
|
r.logger,
|
||||||
|
)
|
||||||
|
|
||||||
|
// 3. Handlers
|
||||||
|
authGroup := router.Group("/auth")
|
||||||
|
{
|
||||||
|
authGroup.POST("/register", handlers.Register(authService))
|
||||||
|
authGroup.POST("/login", handlers.Login(authService, sessionService, r.logger))
|
||||||
|
authGroup.POST("/refresh", handlers.Refresh(authService))
|
||||||
|
authGroup.POST("/verify-email", handlers.VerifyEmail(authService))
|
||||||
|
authGroup.POST("/resend-verification", handlers.ResendVerification(authService))
|
||||||
|
authGroup.GET("/check-username", handlers.CheckUsername(authService))
|
||||||
|
|
||||||
|
// Protected routes (authentification JWT requise)
|
||||||
|
protected := authGroup.Group("")
|
||||||
|
protected.Use(r.config.AuthMiddleware.RequireAuth()) // Changed to RequireAuth()
|
||||||
|
{
|
||||||
|
protected.POST("/logout", handlers.Logout(authService, sessionService))
|
||||||
|
protected.GET("/me", handlers.GetMe())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// setupUserRoutes configure les routes utilisateur
|
||||||
|
func (r *APIRouter) setupUserRoutes(router *gin.RouterGroup) {
|
||||||
|
userRepo := repositories.NewGormUserRepository(r.db.GormDB)
|
||||||
|
userService := services.NewUserServiceWithDB(userRepo, r.db.GormDB)
|
||||||
|
profileHandler := handlers.NewProfileHandler(userService)
|
||||||
|
|
||||||
|
users := router.Group("/users")
|
||||||
|
{
|
||||||
|
users.GET("/:id", profileHandler.GetProfile)
|
||||||
|
users.GET("/by-username/:username", profileHandler.GetProfileByUsername)
|
||||||
|
|
||||||
|
// Protected routes
|
||||||
|
if r.config.AuthMiddleware != nil {
|
||||||
|
protected := users.Group("")
|
||||||
|
protected.Use(r.config.AuthMiddleware.RequireAuth())
|
||||||
|
protected.PUT("/:id", profileHandler.UpdateProfile)
|
||||||
|
protected.GET("/:id/completion", profileHandler.GetProfileCompletion)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupTrackRoutes configure les routes de gestion des tracks: public
// reads, authenticated CRUD, chunked uploads, likes, sharing, and the
// internal stream-ready callback used by the streaming server.
func (r *APIRouter) setupTrackRoutes(router *gin.RouterGroup) {
	uploadDir := r.config.UploadDir
	if uploadDir == "" {
		// Default on-disk location when none is configured.
		uploadDir = "uploads/tracks"
	}
	chunksDir := uploadDir + "/chunks"

	// Service layer: core track CRUD plus upload/chunk/like/stream helpers.
	trackService := trackcore.NewTrackService(r.db.GormDB, r.logger, uploadDir)
	trackUploadService := services.NewTrackUploadService(r.db.GormDB, r.logger)
	chunkService := services.NewTrackChunkService(chunksDir, r.logger)
	likeService := services.NewTrackLikeService(r.db.GormDB, r.logger)
	streamService := services.NewStreamService(r.config.StreamServerURL, r.logger)

	trackHandler := trackcore.NewTrackHandler(
		trackService,
		trackUploadService,
		chunkService,
		likeService,
		streamService,
	)

	tracks := router.Group("/tracks")
	{
		// Public routes
		tracks.GET("", trackHandler.ListTracks)
		tracks.GET("/:id", trackHandler.GetTrack)
		tracks.GET("/:id/stats", trackHandler.GetTrackStats)
		tracks.GET("/:id/history", trackHandler.GetTrackHistory)
		tracks.GET("/:id/download", trackHandler.DownloadTrack)
		tracks.GET("/shared/:token", trackHandler.GetSharedTrack)

		// Protected routes
		if r.config.AuthMiddleware != nil {
			protected := tracks.Group("")
			protected.Use(r.config.AuthMiddleware.RequireAuth())

			// GO-012: Upload track requires creator/premium/admin role
			uploadGroup := protected.Group("")
			uploadGroup.Use(r.config.AuthMiddleware.RequireContentCreatorRole())
			uploadGroup.POST("", trackHandler.UploadTrack)
			protected.PUT("/:id", trackHandler.UpdateTrack)
			protected.DELETE("/:id", trackHandler.DeleteTrack)

			// Upload
			protected.GET("/:id/status", trackHandler.GetUploadStatus)
			protected.POST("/initiate", trackHandler.InitiateChunkedUpload)
			protected.POST("/chunk", trackHandler.UploadChunk)
			protected.POST("/complete", trackHandler.CompleteChunkedUpload)
			protected.GET("/quota/:id", trackHandler.GetUploadQuota)
			protected.GET("/resume/:uploadId", trackHandler.ResumeUpload)

			// Batch operations
			protected.POST("/batch/delete", trackHandler.BatchDeleteTracks)
			protected.POST("/batch/update", trackHandler.BatchUpdateTracks)

			// Social
			protected.POST("/:id/like", trackHandler.LikeTrack)
			protected.DELETE("/:id/like", trackHandler.UnlikeTrack)
			protected.GET("/:id/likes", trackHandler.GetTrackLikes)

			// Sharing
			protected.POST("/:id/share", trackHandler.CreateShare)
			protected.DELETE("/share/:id", trackHandler.RevokeShare)
		}
	}

	// Deprecated /internal routes
	// NOTE(review): "router" here is the /api/v1 group (see Setup), so this
	// actually registers /api/v1/internal/... with a deprecation warning —
	// confirm that is the intended deprecated path.
	internalDeprecated := router.Group("/internal")
	internalDeprecated.Use(middleware.DeprecationWarning(r.logger))
	{
		internalDeprecated.POST("/tracks/:id/stream-ready", trackHandler.HandleStreamCallback)
	}

	// New /api/v1/internal routes
	// NOTE(review): because "router" is already the /api/v1 group, this
	// registers /api/v1/api/v1/internal/... — almost certainly unintended.
	// It cannot simply be changed to "/internal" here (that would collide
	// with the deprecated registration above); the stream server's callback
	// URL should be confirmed and one of the two registrations removed.
	v1Internal := router.Group("/api/v1/internal")
	{
		v1Internal.POST("/tracks/:id/stream-ready", trackHandler.HandleStreamCallback)
	}

	// Per-user liked tracks. This creates a second "/users" group alongside
	// the one in setupUserRoutes; gin allows multiple groups with the same
	// prefix as long as the routes themselves do not collide.
	users := router.Group("/users")
	{
		users.GET("/:id/likes", trackHandler.GetUserLikedTracks)
	}
}
|
||||||
|
|
||||||
|
// setupChatRoutes configure les routes de chat
|
||||||
|
func (r *APIRouter) setupChatRoutes(router *gin.RouterGroup) {
|
||||||
|
chatService := services.NewChatService(r.config.ChatJWTSecret, r.logger)
|
||||||
|
userRepo := repositories.NewGormUserRepository(r.db.GormDB)
|
||||||
|
userService := services.NewUserServiceWithDB(userRepo, r.db.GormDB)
|
||||||
|
|
||||||
|
chatHandler := handlers.NewChatHandler(chatService, userService, r.logger)
|
||||||
|
|
||||||
|
chat := router.Group("/chat")
|
||||||
|
{
|
||||||
|
if r.config.AuthMiddleware != nil {
|
||||||
|
chat.Use(r.config.AuthMiddleware.RequireAuth())
|
||||||
|
chat.POST("/token", chatHandler.GetToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupPlaylistRoutes configure les routes pour les playlists
|
||||||
|
func (r *APIRouter) setupPlaylistRoutes(router *gin.RouterGroup) {
|
||||||
|
playlistRepo := repositories.NewPlaylistRepository(r.db.GormDB)
|
||||||
|
playlistTrackRepo := repositories.NewPlaylistTrackRepository(r.db.GormDB)
|
||||||
|
playlistCollaboratorRepo := repositories.NewPlaylistCollaboratorRepository(r.db.GormDB)
|
||||||
|
userRepo := repositories.NewGormUserRepository(r.db.GormDB)
|
||||||
|
|
||||||
|
playlistService := services.NewPlaylistService(
|
||||||
|
playlistRepo,
|
||||||
|
playlistTrackRepo,
|
||||||
|
playlistCollaboratorRepo,
|
||||||
|
userRepo,
|
||||||
|
r.logger,
|
||||||
|
)
|
||||||
|
|
||||||
|
playlistHandler := handlers.NewPlaylistHandler(playlistService)
|
||||||
|
|
||||||
|
// Protected routes for playlists
|
||||||
|
playlists := router.Group("/playlists")
|
||||||
|
if r.config.AuthMiddleware != nil {
|
||||||
|
playlists.Use(r.config.AuthMiddleware.RequireAuth())
|
||||||
|
{
|
||||||
|
playlists.GET("", playlistHandler.GetPlaylists)
|
||||||
|
playlists.POST("", playlistHandler.CreatePlaylist)
|
||||||
|
playlists.GET("/:id", playlistHandler.GetPlaylist)
|
||||||
|
playlists.PUT("/:id", playlistHandler.UpdatePlaylist)
|
||||||
|
playlists.DELETE("/:id", playlistHandler.DeletePlaylist)
|
||||||
|
|
||||||
|
// Playlist Tracks
|
||||||
|
playlists.POST("/:id/tracks", playlistHandler.AddTrack)
|
||||||
|
playlists.DELETE("/:id/tracks/:track_id", playlistHandler.RemoveTrack)
|
||||||
|
playlists.PUT("/:id/tracks/reorder", playlistHandler.ReorderTracks)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// setupWebhookRoutes configure les routes pour les webhooks.
//
// Besides registering the endpoints, this starts a background webhook
// delivery worker.
// NOTE(review): the worker is started with context.Background(), so there
// is no way to stop it on shutdown, and calling this method more than once
// would start multiple workers — confirm this is invoked exactly once per
// process and consider wiring a cancellable context instead.
func (r *APIRouter) setupWebhookRoutes(router *gin.RouterGroup) {
	webhookService := services.NewWebhookService(r.db.GormDB, r.logger, r.config.JWTSecret)

	webhookWorker := workers.NewWebhookWorker(
		r.db.GormDB,
		webhookService,
		r.logger,
		100, // Queue size
		5,   // Workers
		3,   // Max retries
	)

	// Start worker in background
	go webhookWorker.Start(context.Background())

	webhookHandler := handlers.NewWebhookHandler(webhookService, webhookWorker, r.logger)

	webhooks := router.Group("/webhooks")
	if r.config.AuthMiddleware != nil {
		webhooks.Use(r.config.AuthMiddleware.RequireAuth())
	}
	{
		webhooks.POST("", webhookHandler.RegisterWebhook())
		webhooks.GET("", webhookHandler.ListWebhooks())
		webhooks.DELETE("/:id", webhookHandler.DeleteWebhook())
		webhooks.GET("/stats", webhookHandler.GetWebhookStats())
		webhooks.POST("/:id/test", webhookHandler.TestWebhook())
	}
}
|
||||||
|
|
||||||
|
// setupCorePublicRoutes configure les routes publiques core (health, metrics, upload info).
//
// Two sets of routes are registered: legacy root-level paths (wrapped with
// a deprecation-warning middleware) and their /api/v1 equivalents. Health
// handlers degrade gracefully: with no database available, a static health
// check replaces the dependency-aware one.
func (r *APIRouter) setupCorePublicRoutes(router *gin.Engine) {
	// Middleware for deprecated routes
	deprecated := router.Group("/")
	deprecated.Use(middleware.DeprecationWarning(r.logger))

	// Health check handlers
	var healthCheckHandler gin.HandlerFunc
	var livenessHandler gin.HandlerFunc
	var readinessHandler gin.HandlerFunc

	if r.db != nil && r.db.GormDB != nil {
		// Optional dependencies are passed as interface{} so the health
		// handler can probe them only when configured.
		var redisClient interface{}
		if r.config != nil {
			redisClient = r.config.RedisClient
		}
		var rabbitMQEventBus interface{}
		if r.config != nil {
			rabbitMQEventBus = r.config.RabbitMQEventBus
		}
		healthHandler := handlers.NewHealthHandler(r.db.GormDB, r.logger, redisClient, rabbitMQEventBus)
		healthCheckHandler = healthHandler.Check
		livenessHandler = healthHandler.Liveness
		readinessHandler = healthHandler.Readiness
	} else {
		// No database: all probes fall back to a static response.
		healthCheckHandler = handlers.SimpleHealthCheck
		livenessHandler = handlers.SimpleHealthCheck
		readinessHandler = handlers.SimpleHealthCheck
	}

	// Deprecated Public Core Routes
	deprecated.GET("/health", healthCheckHandler)
	deprecated.GET("/healthz", livenessHandler)
	deprecated.GET("/readyz", readinessHandler)
	deprecated.GET("/metrics", handlers.PrometheusMetrics())
	if r.config != nil && r.config.ErrorMetrics != nil {
		deprecated.GET("/metrics/aggregated", handlers.AggregatedMetrics(r.config.ErrorMetrics))
	}
	deprecated.GET("/system/metrics", handlers.SystemMetrics)

	// New /api/v1 Public Core Routes
	v1Public := router.Group("/api/v1")
	{
		v1Public.GET("/health", healthCheckHandler)
		v1Public.GET("/healthz", livenessHandler)
		v1Public.GET("/readyz", readinessHandler)
		v1Public.GET("/metrics", handlers.PrometheusMetrics())
		if r.config != nil && r.config.ErrorMetrics != nil {
			v1Public.GET("/metrics/aggregated", handlers.AggregatedMetrics(r.config.ErrorMetrics))
		}
		v1Public.GET("/system/metrics", handlers.SystemMetrics)

		// Upload info endpoints (public, already in /api/v1)
		if r.db != nil && r.db.GormDB != nil {
			uploadConfig := services.DefaultUploadConfig()
			uploadValidator, err := services.NewUploadValidator(uploadConfig, r.logger)
			// A validator construction failure silently skips these two
			// routes; the error is logged by setupCoreProtectedRoutes.
			if err == nil {
				auditService := services.NewAuditService(r.db, r.logger)
				uploadHandler := handlers.NewUploadHandler(uploadValidator, auditService, r.logger)
				v1Public.GET("/upload/limits", uploadHandler.GetUploadLimits())
				v1Public.GET("/upload/validate-type", uploadHandler.ValidateFileType())
			}
		}
	}
}
|
||||||
|
|
||||||
|
// setupCoreProtectedRoutes configure les routes protégées core (sessions,
// uploads, audit, admin, conversations).
//
// Bails out when DB or config is missing, since every service built below
// needs them. All groups are mounted under the provided /api/v1 group and
// guarded by the auth middleware when one is configured; the admin group
// additionally requires admin permissions.
func (r *APIRouter) setupCoreProtectedRoutes(v1 *gin.RouterGroup) {
	if r.db == nil || r.db.GormDB == nil || r.config == nil {
		return
	}

	// Middleware d'authentification pour routes protégées
	protected := v1.Group("/")
	if r.config.AuthMiddleware != nil {
		protected.Use(r.config.AuthMiddleware.RequireAuth())
	}

	// Services nécessaires
	sessionService := services.NewSessionService(r.db, r.logger)
	uploadConfig := services.DefaultUploadConfig()
	uploadValidator, err := services.NewUploadValidator(uploadConfig, r.logger)
	if err != nil {
		// Without a validator the upload routes cannot work; skip all
		// protected core routes rather than registering a broken subset.
		r.logger.Error("Failed to create upload validator", zap.Error(err))
		return
	}
	auditService := services.NewAuditService(r.db, r.logger)

	// Handlers
	sessionHandler := handlers.NewSessionHandler(sessionService, auditService, r.logger)
	uploadHandler := handlers.NewUploadHandler(uploadValidator, auditService, r.logger)
	auditHandler := handlers.NewAuditHandler(auditService, r.logger)

	// Routes de session
	sessions := protected.Group("/sessions")
	{
		sessions.POST("/logout", sessionHandler.Logout())
		sessions.POST("/logout-all", sessionHandler.LogoutAll())
		sessions.GET("/", sessionHandler.GetSessions())
		sessions.DELETE("/:session_id", sessionHandler.RevokeSession())
		sessions.GET("/stats", sessionHandler.GetSessionStats())
		sessions.POST("/refresh", sessionHandler.RefreshSession())
	}

	// Routes d'upload avec rate limiting spécifique
	uploads := protected.Group("/uploads")
	{
		if r.config.RedisClient != nil {
			// Redis-backed rate limit applies only to the upload endpoints.
			uploads.Use(middleware.UploadRateLimit(r.config.RedisClient))
		}
		uploads.POST("/", uploadHandler.UploadFile())
		uploads.POST("/batch", uploadHandler.BatchUpload())
		uploads.GET("/:id/status", uploadHandler.GetUploadStatus())
		uploads.GET("/:id/progress", uploadHandler.UploadProgress())
		uploads.DELETE("/:id", uploadHandler.DeleteUpload())
		uploads.GET("/stats", uploadHandler.GetUploadStats())
	}

	// Routes d'audit
	audit := protected.Group("/audit")
	{
		audit.GET("/logs", auditHandler.SearchLogs())
		audit.GET("/stats", auditHandler.GetStats())
		audit.GET("/activity", auditHandler.GetUserActivity())
		audit.GET("/suspicious", auditHandler.DetectSuspiciousActivity())
		audit.GET("/ip/:ip", auditHandler.GetIPActivity())
		audit.GET("/logs/:id", auditHandler.GetAuditLog())
		audit.POST("/cleanup", auditHandler.CleanupOldLogs())
	}

	// Routes de conversations (chat rooms)
	roomRepo := repositories.NewRoomRepository(r.db.GormDB)
	messageRepo := repositories.NewChatMessageRepository(r.db.GormDB) // New
	roomService := services.NewRoomService(roomRepo, messageRepo, r.logger) // Updated constructor
	roomHandler := handlers.NewRoomHandler(roomService, r.logger)

	conversations := protected.Group("/conversations")
	{
		conversations.GET("", roomHandler.GetUserRooms)
		conversations.POST("", roomHandler.CreateRoom)
		conversations.GET("/:id", roomHandler.GetRoom)
		conversations.POST("/:id/members", roomHandler.AddMember)
		conversations.GET("/:id/history", roomHandler.GetRoomHistory)
	}

	// Routes administrateur (avec authentification + permissions admin)
	admin := v1.Group("/admin")
	{
		if r.config.AuthMiddleware != nil {
			admin.Use(r.config.AuthMiddleware.RequireAuth())
			admin.Use(r.config.AuthMiddleware.RequireAdmin())
		}

		// Audit logs (disponibles)
		admin.GET("/audit/logs", auditHandler.SearchLogs())
		admin.GET("/audit/stats", auditHandler.GetStats())
		admin.GET("/audit/suspicious", auditHandler.DetectSuspiciousActivity())
	}
}
|
||||||
2
veza-backend-api/internal/api/search/handler.go
Normal file
2
veza-backend-api/internal/api/search/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package search - TO BE IMPLEMENTED
|
||||||
|
package search
|
||||||
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package shared_resources - TO BE IMPLEMENTED
|
||||||
|
package shared_resources
|
||||||
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package sound_design_contest - TO BE IMPLEMENTED
|
||||||
|
package sound_design_contest
|
||||||
2
veza-backend-api/internal/api/tag/handler.go
Normal file
2
veza-backend-api/internal/api/tag/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package tag - TO BE IMPLEMENTED
|
||||||
|
package tag
|
||||||
2
veza-backend-api/internal/api/track/handler.go
Normal file
2
veza-backend-api/internal/api/track/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package track - TO BE IMPLEMENTED
|
||||||
|
package track
|
||||||
357
veza-backend-api/internal/api/user/handler.go
Normal file
357
veza-backend-api/internal/api/user/handler.go
Normal file
|
|
@ -0,0 +1,357 @@
|
||||||
|
// veza-backend-api/internal/api/user/handler.go
|
||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/common"
|
||||||
|
"veza-backend-api/internal/response"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid" // Added import
|
||||||
|
)
|
||||||
|
|
||||||
|
type Handler struct {
|
||||||
|
service *Service
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewHandler(service *Service) *Handler {
|
||||||
|
return &Handler{service: service}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMe récupère le profil de l'utilisateur connecté
|
||||||
|
func (h *Handler) GetMe(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := h.service.GetUserByID(userID)
|
||||||
|
if err != nil {
|
||||||
|
response.NotFound(c, "User not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, user)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateMe met à jour le profil de l'utilisateur connecté
|
||||||
|
func (h *Handler) UpdateMe(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UpdateUserRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := h.service.UpdateUser(userID, req)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, user)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChangePassword change le mot de passe de l'utilisateur
|
||||||
|
func (h *Handler) ChangePassword(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
CurrentPassword string `json:"current_password" binding:"required"`
|
||||||
|
NewPassword string `json:"new_password" binding:"required,min=8"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.service.ChangePassword(userID, req.CurrentPassword, req.NewPassword)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUsers liste tous les utilisateurs
|
||||||
|
func (h *Handler) GetUsers(c *gin.Context) {
|
||||||
|
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
|
||||||
|
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "20"))
|
||||||
|
search := c.Query("search")
|
||||||
|
|
||||||
|
users, total, err := h.service.GetUsers(page, limit, search)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to retrieve users")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, gin.H{
|
||||||
|
"data": users,
|
||||||
|
"pagination": gin.H{
|
||||||
|
"page": page,
|
||||||
|
"limit": limit,
|
||||||
|
"total": total,
|
||||||
|
"total_pages": (total + limit - 1) / limit,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUsersExceptMe liste tous les utilisateurs sauf l'utilisateur connecté
|
||||||
|
func (h *Handler) GetUsersExceptMe(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
|
||||||
|
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "20"))
|
||||||
|
search := c.Query("search")
|
||||||
|
|
||||||
|
// Ajouter le filtre pour exclure l'utilisateur actuel
|
||||||
|
users, total, err := h.service.GetUsers(page, limit, search)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to retrieve users")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filtrer l'utilisateur connecté
|
||||||
|
filteredUsers := []UserResponse{}
|
||||||
|
for _, user := range users {
|
||||||
|
if user.ID != userID { // Direct comparison of uuid.UUID
|
||||||
|
filteredUsers = append(filteredUsers, user)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, gin.H{
|
||||||
|
"data": filteredUsers,
|
||||||
|
"pagination": gin.H{
|
||||||
|
"page": page,
|
||||||
|
"limit": limit,
|
||||||
|
"total": total - 1, // -1 car on exclut l'utilisateur connecté
|
||||||
|
"total_pages": (total + limit - 2) / limit,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchUsers recherche des utilisateurs
|
||||||
|
func (h *Handler) SearchUsers(c *gin.Context) {
|
||||||
|
query := c.Query("q")
|
||||||
|
if query == "" {
|
||||||
|
response.BadRequest(c, "Query parameter 'q' is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
|
||||||
|
limit, _ := strconv.Atoi(c.DefaultQuery("limit", "20"))
|
||||||
|
|
||||||
|
users, total, err := h.service.GetUsers(page, limit, query)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to search users")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, gin.H{
|
||||||
|
"data": users,
|
||||||
|
"pagination": gin.H{
|
||||||
|
"page": page,
|
||||||
|
"limit": limit,
|
||||||
|
"total": total,
|
||||||
|
"total_pages": (total + limit - 1) / limit,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) GetUserAvatar(c *gin.Context) {
|
||||||
|
idStr := c.Param("id")
|
||||||
|
userID, err := uuid.Parse(idStr)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, "Invalid user ID")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := h.service.GetUserByID(userID)
|
||||||
|
if err != nil {
|
||||||
|
response.NotFound(c, "User not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ Correct way to handle sql.NullString
|
||||||
|
if !user.Avatar.Valid || user.Avatar.String == "" {
|
||||||
|
response.NotFound(c, "No avatar found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rediriger vers l'URL de l'avatar ou servir le fichier
|
||||||
|
c.Redirect(http.StatusFound, user.Avatar.String)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPreferences récupère les préférences de l'utilisateur connecté
|
||||||
|
func (h *Handler) GetPreferences(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
preferences, err := h.service.GetUserPreferences(userID)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to get preferences")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, preferences)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatePreferences met à jour les préférences de l'utilisateur connecté
|
||||||
|
func (h *Handler) UpdatePreferences(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UserPreferencesRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
preferences, err := h.service.UpdateUserPreferences(userID, req)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, preferences)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteAccount supprime le compte de l'utilisateur (soft delete)
|
||||||
|
func (h *Handler) DeleteAccount(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Password string `json:"password" binding:"required"`
|
||||||
|
Reason string `json:"reason"`
|
||||||
|
ConfirmText string `json:"confirm_text" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier le texte de confirmation
|
||||||
|
if req.ConfirmText != "DELETE" {
|
||||||
|
response.BadRequest(c, "Confirmation text must be 'DELETE'")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.service.DeleteAccount(userID, req.Password, req.Reason)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecoverAccount récupère un compte supprimé
|
||||||
|
func (h *Handler) RecoverAccount(c *gin.Context) {
|
||||||
|
var req struct {
|
||||||
|
Email string `json:"email" binding:"required,email"`
|
||||||
|
Password string `json:"password" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.service.RecoverAccount(req.Email, req.Password)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExportData exporte les données de l'utilisateur (RGPD)
|
||||||
|
func (h *Handler) ExportData(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
exportData, err := h.service.ExportUserData(userID)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to export user data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, exportData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RequestDataDeletion demande la suppression définitive des données (RGPD)
|
||||||
|
func (h *Handler) RequestDataDeletion(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Password string `json:"password" binding:"required"`
|
||||||
|
Reason string `json:"reason"`
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "Invalid request data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.service.RequestDataDeletion(userID, req.Password, req.Reason)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAccountStatus récupère le statut du compte
|
||||||
|
func (h *Handler) GetAccountStatus(c *gin.Context) {
|
||||||
|
userID, exists := common.GetUserIDFromContext(c)
|
||||||
|
if !exists {
|
||||||
|
response.Unauthorized(c, "User ID not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
status, err := h.service.GetAccountStatus(userID)
|
||||||
|
if err != nil {
|
||||||
|
response.InternalServerError(c, "Failed to get account status")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, status)
|
||||||
|
}
|
||||||
94
veza-backend-api/internal/api/user/routes.go
Normal file
94
veza-backend-api/internal/api/user/routes.go
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RouteGroup représente un groupe de routes pour le module utilisateur
|
||||||
|
type RouteGroup struct {
|
||||||
|
handler *Handler
|
||||||
|
secret string
|
||||||
|
authMiddleware *middleware.AuthMiddleware // Added authMiddleware
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewRouteGroup crée une nouvelle instance de RouteGroup
|
||||||
|
func NewRouteGroup(handler *Handler, jwtSecret string, authMiddleware *middleware.AuthMiddleware) *RouteGroup { // Added authMiddleware parameter
|
||||||
|
return &RouteGroup{
|
||||||
|
handler: handler,
|
||||||
|
secret: jwtSecret,
|
||||||
|
authMiddleware: authMiddleware, // Assign authMiddleware
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register enregistre toutes les routes du module utilisateur
|
||||||
|
func (rg *RouteGroup) Register(router *gin.RouterGroup) {
|
||||||
|
// Groupe principal des utilisateurs
|
||||||
|
users := router.Group("/users")
|
||||||
|
{
|
||||||
|
// Routes publiques
|
||||||
|
rg.registerPublicRoutes(users)
|
||||||
|
|
||||||
|
// Routes protégées
|
||||||
|
rg.registerProtectedRoutes(users)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// registerPublicRoutes enregistre les routes publiques
|
||||||
|
func (rg *RouteGroup) registerPublicRoutes(router *gin.RouterGroup) {
|
||||||
|
// GET /api/v1/users - Liste des utilisateurs
|
||||||
|
router.GET("", rg.handler.GetUsers)
|
||||||
|
|
||||||
|
// GET /api/v1/users/:id/avatar - Avatar d'un utilisateur
|
||||||
|
router.GET("/:id/avatar", rg.handler.GetUserAvatar)
|
||||||
|
|
||||||
|
// POST /api/v1/users/recover - Récupérer un compte supprimé
|
||||||
|
router.POST("/recover", rg.handler.RecoverAccount)
|
||||||
|
}
|
||||||
|
|
||||||
|
// registerProtectedRoutes enregistre les routes protégées
|
||||||
|
func (rg *RouteGroup) registerProtectedRoutes(router *gin.RouterGroup) {
|
||||||
|
protected := router.Group("")
|
||||||
|
protected.Use(rg.authMiddleware.RequireAuth()) // Changed to RequireAuth()
|
||||||
|
{
|
||||||
|
// GET /api/v1/users/me - Informations de l'utilisateur connecté
|
||||||
|
protected.GET("/me", rg.handler.GetMe)
|
||||||
|
|
||||||
|
// PUT /api/v1/users/me - Mise à jour des informations de l'utilisateur
|
||||||
|
protected.PUT("/me", rg.handler.UpdateMe)
|
||||||
|
|
||||||
|
// PUT /api/v1/users/me/password - Changement de mot de passe
|
||||||
|
protected.PUT("/me/password", rg.handler.ChangePassword)
|
||||||
|
|
||||||
|
// GET /api/v1/users/me/preferences - Récupérer les préférences
|
||||||
|
protected.GET("/me/preferences", rg.handler.GetPreferences)
|
||||||
|
|
||||||
|
// PUT /api/v1/users/me/preferences - Mettre à jour les préférences
|
||||||
|
protected.PUT("/me/preferences", rg.handler.UpdatePreferences)
|
||||||
|
|
||||||
|
// DELETE /api/v1/users/me - Supprimer le compte
|
||||||
|
protected.DELETE("/me", rg.handler.DeleteAccount)
|
||||||
|
|
||||||
|
// GET /api/v1/users/me/status - Statut du compte
|
||||||
|
protected.GET("/me/status", rg.handler.GetAccountStatus)
|
||||||
|
|
||||||
|
// GET /api/v1/users/me/export - Exporter les données (RGPD)
|
||||||
|
protected.GET("/me/export", rg.handler.ExportData)
|
||||||
|
|
||||||
|
// POST /api/v1/users/me/request-deletion - Demander suppression définitive
|
||||||
|
protected.POST("/me/request-deletion", rg.handler.RequestDataDeletion)
|
||||||
|
|
||||||
|
// GET /api/v1/users/except-me - Liste des utilisateurs sauf l'utilisateur connecté
|
||||||
|
protected.GET("/except-me", rg.handler.GetUsersExceptMe)
|
||||||
|
|
||||||
|
// GET /api/v1/users/search - Recherche d'utilisateurs
|
||||||
|
protected.GET("/search", rg.handler.SearchUsers)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetupRoutes configure les routes du module utilisateur (pour la compatibilité)
|
||||||
|
// func SetupRoutes(router *gin.RouterGroup, handler *Handler, jwtSecret string) {
|
||||||
|
// rg := NewRouteGroup(handler, jwtSecret)
|
||||||
|
// rg.Register(router)
|
||||||
|
// }
|
||||||
710
veza-backend-api/internal/api/user/service.go
Normal file
710
veza-backend-api/internal/api/user/service.go
Normal file
|
|
@ -0,0 +1,710 @@
|
||||||
|
// veza-backend-api/internal/api/user/service.go
|
||||||
|
package user
|
||||||
|
|
||||||
|
import (
	"database/sql"
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"github.com/google/uuid"

	"veza-backend-api/internal/database"
	"veza-backend-api/internal/utils"
)
|
||||||
|
|
||||||
|
// Service handles user business logic
|
||||||
|
type Service struct {
|
||||||
|
db *database.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewService creates a new user service
|
||||||
|
func NewService(db *database.DB) *Service {
|
||||||
|
return &Service{
|
||||||
|
db: db,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUsers retrieves users with pagination and optional search
|
||||||
|
func (s *Service) GetUsers(page, limit int, search string) ([]UserResponse, int, error) {
|
||||||
|
offset := (page - 1) * limit
|
||||||
|
|
||||||
|
// Build the query with optional search
|
||||||
|
baseQuery := `
|
||||||
|
SELECT id, email, first_name, last_name, username, avatar, bio,
|
||||||
|
role, is_active, is_verified, last_login_at, created_at, updated_at
|
||||||
|
FROM users
|
||||||
|
`
|
||||||
|
countQuery := "SELECT COUNT(*) FROM users"
|
||||||
|
|
||||||
|
var whereClause string
|
||||||
|
var args []interface{}
|
||||||
|
argIndex := 1
|
||||||
|
|
||||||
|
if search != "" {
|
||||||
|
whereClause = ` WHERE (
|
||||||
|
email ILIKE $` + fmt.Sprintf("%d", argIndex) + ` OR
|
||||||
|
first_name ILIKE $` + fmt.Sprintf("%d", argIndex) + ` OR
|
||||||
|
last_name ILIKE $` + fmt.Sprintf("%d", argIndex) + ` OR
|
||||||
|
username ILIKE $` + fmt.Sprintf("%d", argIndex) + `
|
||||||
|
)`
|
||||||
|
args = append(args, "%"+search+"%")
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get total count
|
||||||
|
var total int
|
||||||
|
err := s.db.QueryRow(countQuery+whereClause, args...).Scan(&total)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to count users: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get users
|
||||||
|
orderClause := " ORDER BY created_at DESC"
|
||||||
|
limitClause := fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIndex, argIndex+1)
|
||||||
|
args = append(args, limit, offset)
|
||||||
|
|
||||||
|
query := baseQuery + whereClause + orderClause + limitClause
|
||||||
|
rows, err := s.db.Query(query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to query users: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var users []UserResponse
|
||||||
|
for rows.Next() {
|
||||||
|
var user UserResponse
|
||||||
|
err := rows.Scan(
|
||||||
|
&user.ID, &user.Email, &user.FirstName, &user.LastName,
|
||||||
|
&user.Username, &user.Avatar, &user.Bio, &user.Role,
|
||||||
|
&user.IsActive, &user.IsVerified, &user.LastLoginAt,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to scan user: %w", err)
|
||||||
|
}
|
||||||
|
users = append(users, user)
|
||||||
|
}
|
||||||
|
|
||||||
|
return users, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserByID retrieves a user by ID
|
||||||
|
func (s *Service) GetUserByID(userID uuid.UUID) (*UserResponse, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, email, first_name, last_name, username, avatar, bio,
|
||||||
|
role, is_active, is_verified, last_login_at, created_at, updated_at
|
||||||
|
FROM users
|
||||||
|
WHERE id = $1 AND is_active = true
|
||||||
|
`
|
||||||
|
|
||||||
|
var user UserResponse
|
||||||
|
err := s.db.QueryRow(query, userID).Scan(
|
||||||
|
&user.ID, &user.Email, &user.FirstName, &user.LastName,
|
||||||
|
&user.Username, &user.Avatar, &user.Bio, &user.Role,
|
||||||
|
&user.IsActive, &user.IsVerified, &user.LastLoginAt,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserByEmail retrieves a user by email (includes password hash for auth)
|
||||||
|
func (s *Service) GetUserByEmail(email string) (*User, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, email, password_hash, first_name, last_name, username,
|
||||||
|
avatar, bio, role, is_active, is_verified, last_login_at,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM users
|
||||||
|
WHERE email = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var user User
|
||||||
|
err := s.db.QueryRow(query, email).Scan(
|
||||||
|
&user.ID, &user.Email, &user.Password, &user.FirstName,
|
||||||
|
&user.LastName, &user.Username, &user.Avatar, &user.Bio,
|
||||||
|
&user.Role, &user.IsActive, &user.IsVerified, &user.LastLoginAt,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateUser creates a new user
|
||||||
|
func (s *Service) CreateUser(req CreateUserRequest) (*UserResponse, error) {
|
||||||
|
// Hash the password
|
||||||
|
passwordHash, err := utils.HashPassword(req.Password)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to hash password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set default role if not provided
|
||||||
|
role := req.Role
|
||||||
|
if role == "" {
|
||||||
|
role = "user"
|
||||||
|
}
|
||||||
|
|
||||||
|
query := `
|
||||||
|
INSERT INTO users (email, password_hash, first_name, last_name, username, role, is_active, is_verified, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, true, false, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
|
||||||
|
RETURNING id, email, first_name, last_name, username, role, is_active, is_verified, created_at, updated_at
|
||||||
|
`
|
||||||
|
|
||||||
|
var user UserResponse
|
||||||
|
err = s.db.QueryRow(
|
||||||
|
query, req.Email, passwordHash, req.FirstName, req.LastName,
|
||||||
|
req.Username, role,
|
||||||
|
).Scan(
|
||||||
|
&user.ID, &user.Email, &user.FirstName, &user.LastName,
|
||||||
|
&user.Username, &user.Role, &user.IsActive, &user.IsVerified,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if strings.Contains(err.Error(), "unique") {
|
||||||
|
return nil, fmt.Errorf("email already exists")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to create user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUser updates an existing user
|
||||||
|
func (s *Service) UpdateUser(userID uuid.UUID, req UpdateUserRequest) (*UserResponse, error) {
|
||||||
|
// Build dynamic update query
|
||||||
|
setParts := []string{"updated_at = CURRENT_TIMESTAMP"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIndex := 1
|
||||||
|
|
||||||
|
if req.FirstName != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("first_name = $%d", argIndex))
|
||||||
|
args = append(args, req.FirstName)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.LastName != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("last_name = $%d", argIndex))
|
||||||
|
args = append(args, req.LastName)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Username != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("username = $%d", argIndex))
|
||||||
|
args = append(args, req.Username)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Avatar != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("avatar = $%d", argIndex))
|
||||||
|
args = append(args, req.Avatar)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Bio != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("bio = $%d", argIndex))
|
||||||
|
args = append(args, req.Bio)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.IsActive != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("is_active = $%d", argIndex))
|
||||||
|
args = append(args, req.IsActive)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.IsVerified != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("is_verified = $%d", argIndex))
|
||||||
|
args = append(args, req.IsVerified)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Role != nil {
|
||||||
|
setParts = append(setParts, fmt.Sprintf("role = $%d", argIndex))
|
||||||
|
args = append(args, req.Role)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add user ID as the last argument
|
||||||
|
args = append(args, userID)
|
||||||
|
|
||||||
|
query := fmt.Sprintf(`
|
||||||
|
UPDATE users
|
||||||
|
SET %s
|
||||||
|
WHERE id = $%d
|
||||||
|
RETURNING id, email, first_name, last_name, username, avatar, bio,
|
||||||
|
role, is_active, is_verified, last_login_at, created_at, updated_at
|
||||||
|
`, strings.Join(setParts, ", "), argIndex)
|
||||||
|
|
||||||
|
var user UserResponse
|
||||||
|
err := s.db.QueryRow(query, args...).Scan(
|
||||||
|
&user.ID, &user.Email, &user.FirstName, &user.LastName,
|
||||||
|
&user.Username, &user.Avatar, &user.Bio, &user.Role,
|
||||||
|
&user.IsActive, &user.IsVerified, &user.LastLoginAt,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to update user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteUser soft deletes a user (sets is_active to false)
|
||||||
|
func (s *Service) DeleteUser(userID uuid.UUID) error {
|
||||||
|
query := `
|
||||||
|
UPDATE users
|
||||||
|
SET is_active = false, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = $1 AND is_active = true
|
||||||
|
`
|
||||||
|
|
||||||
|
result, err := s.db.Exec(query, userID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to delete user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
rowsAffected, err := result.RowsAffected()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get rows affected: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rowsAffected == 0 {
|
||||||
|
return fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateLastLogin updates the user's last login timestamp
|
||||||
|
func (s *Service) UpdateLastLogin(userID uuid.UUID) error {
|
||||||
|
query := `
|
||||||
|
UPDATE users
|
||||||
|
SET last_login_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err := s.db.Exec(query, userID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to update last login: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChangePassword updates a user's password
|
||||||
|
func (s *Service) ChangePassword(userID uuid.UUID, currentPassword, newPassword string) error {
|
||||||
|
// First, get the current password hash
|
||||||
|
var currentHash string
|
||||||
|
err := s.db.QueryRow("SELECT password_hash FROM users WHERE id = $1", userID).Scan(¤tHash)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return fmt.Errorf("failed to get user password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify current password
|
||||||
|
if err := utils.CheckPasswordHash(currentPassword, currentHash); err != nil {
|
||||||
|
return fmt.Errorf("current password is incorrect")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash new password
|
||||||
|
newHash, err := utils.HashPassword(newPassword)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to hash new password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update password
|
||||||
|
query := `
|
||||||
|
UPDATE users
|
||||||
|
SET password_hash = $1, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = $2
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err = s.db.Exec(query, newHash, userID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to update password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserStats returns basic user statistics
|
||||||
|
func (s *Service) GetUserStats() (map[string]interface{}, error) {
|
||||||
|
stats := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Total users
|
||||||
|
var totalUsers int
|
||||||
|
err := s.db.QueryRow("SELECT COUNT(*) FROM users WHERE is_active = true").Scan(&totalUsers)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get total users: %w", err)
|
||||||
|
}
|
||||||
|
stats["total_users"] = totalUsers
|
||||||
|
|
||||||
|
// Verified users
|
||||||
|
var verifiedUsers int
|
||||||
|
err = s.db.QueryRow("SELECT COUNT(*) FROM users WHERE is_active = true AND is_verified = true").Scan(&verifiedUsers)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get verified users: %w", err)
|
||||||
|
}
|
||||||
|
stats["verified_users"] = verifiedUsers
|
||||||
|
|
||||||
|
// Active users (logged in within last 30 days)
|
||||||
|
var activeUsers int
|
||||||
|
err = s.db.QueryRow(`
|
||||||
|
SELECT COUNT(*) FROM users
|
||||||
|
WHERE is_active = true AND last_login_at > CURRENT_TIMESTAMP - INTERVAL '30 days'
|
||||||
|
`).Scan(&activeUsers)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get active users: %w", err)
|
||||||
|
}
|
||||||
|
stats["active_users"] = activeUsers
|
||||||
|
|
||||||
|
// New users this month
|
||||||
|
var newUsersThisMonth int
|
||||||
|
err = s.db.QueryRow(`
|
||||||
|
SELECT COUNT(*) FROM users
|
||||||
|
WHERE is_active = true AND created_at >= date_trunc('month', CURRENT_TIMESTAMP)
|
||||||
|
`).Scan(&newUsersThisMonth)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get new users this month: %w", err)
|
||||||
|
}
|
||||||
|
stats["new_users_this_month"] = newUsersThisMonth
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserPreferences récupère les préférences d'un utilisateur
|
||||||
|
func (s *Service) GetUserPreferences(userID uuid.UUID) (*UserPreferencesResponse, error) {
|
||||||
|
query := `
|
||||||
|
SELECT user_id, theme, language, timezone,
|
||||||
|
COALESCE(notifications, '{}') as notifications,
|
||||||
|
COALESCE(privacy, '{}') as privacy,
|
||||||
|
COALESCE(audio, '{}') as audio,
|
||||||
|
updated_at
|
||||||
|
FROM user_preferences
|
||||||
|
WHERE user_id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var preferences UserPreferencesResponse
|
||||||
|
var notificationsJSON, privacyJSON, audioJSON string
|
||||||
|
|
||||||
|
err := s.db.QueryRow(query, userID).Scan(
|
||||||
|
&preferences.UserID, &preferences.Theme, &preferences.Language,
|
||||||
|
&preferences.Timezone, ¬ificationsJSON, &privacyJSON,
|
||||||
|
&audioJSON, &preferences.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
// Retourner les préférences par défaut
|
||||||
|
return &UserPreferencesResponse{
|
||||||
|
UserID: userID,
|
||||||
|
Theme: "light",
|
||||||
|
Language: "en",
|
||||||
|
Timezone: "UTC",
|
||||||
|
Notifications: NotificationSettings{
|
||||||
|
Email: true, Push: true, Desktop: true,
|
||||||
|
NewFollowers: true, TrackComments: true,
|
||||||
|
DirectMessages: true, Mentions: true, Likes: false,
|
||||||
|
},
|
||||||
|
Privacy: PrivacySettings{
|
||||||
|
ShowEmail: false, ShowActivity: true, AllowDM: true,
|
||||||
|
TrackVisibility: "public", ProfileVisibility: "public",
|
||||||
|
},
|
||||||
|
Audio: AudioSettings{
|
||||||
|
AutoPlay: true, Quality: "high", Volume: 0.8, Crossfade: 5,
|
||||||
|
},
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get user preferences: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Parse JSON strings to structs (simplified for now)
|
||||||
|
preferences.Notifications = NotificationSettings{
|
||||||
|
Email: true, Push: true, Desktop: true,
|
||||||
|
NewFollowers: true, TrackComments: true,
|
||||||
|
DirectMessages: true, Mentions: true, Likes: false,
|
||||||
|
}
|
||||||
|
preferences.Privacy = PrivacySettings{
|
||||||
|
ShowEmail: false, ShowActivity: true, AllowDM: true,
|
||||||
|
TrackVisibility: "public", ProfileVisibility: "public",
|
||||||
|
}
|
||||||
|
preferences.Audio = AudioSettings{
|
||||||
|
AutoPlay: true, Quality: "high", Volume: 0.8, Crossfade: 5,
|
||||||
|
}
|
||||||
|
|
||||||
|
return &preferences, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUserPreferences met à jour les préférences d'un utilisateur
|
||||||
|
func (s *Service) UpdateUserPreferences(userID uuid.UUID, req UserPreferencesRequest) (*UserPreferencesResponse, error) {
|
||||||
|
// Récupérer les préférences actuelles
|
||||||
|
current, err := s.GetUserPreferences(userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer les mises à jour
|
||||||
|
if req.Theme != nil {
|
||||||
|
current.Theme = *req.Theme
|
||||||
|
}
|
||||||
|
if req.Language != nil {
|
||||||
|
current.Language = *req.Language
|
||||||
|
}
|
||||||
|
if req.Timezone != nil {
|
||||||
|
current.Timezone = *req.Timezone
|
||||||
|
}
|
||||||
|
if req.Notifications != nil {
|
||||||
|
current.Notifications = *req.Notifications
|
||||||
|
}
|
||||||
|
if req.Privacy != nil {
|
||||||
|
current.Privacy = *req.Privacy
|
||||||
|
}
|
||||||
|
if req.Audio != nil {
|
||||||
|
current.Audio = *req.Audio
|
||||||
|
}
|
||||||
|
|
||||||
|
current.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
// Sauvegarder en base (upsert)
|
||||||
|
query := `
|
||||||
|
INSERT INTO user_preferences (user_id, theme, language, timezone, notifications, privacy, audio, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
|
ON CONFLICT (user_id) DO UPDATE SET
|
||||||
|
theme = EXCLUDED.theme,
|
||||||
|
language = EXCLUDED.language,
|
||||||
|
timezone = EXCLUDED.timezone,
|
||||||
|
notifications = EXCLUDED.notifications,
|
||||||
|
privacy = EXCLUDED.privacy,
|
||||||
|
audio = EXCLUDED.audio,
|
||||||
|
updated_at = EXCLUDED.updated_at
|
||||||
|
`
|
||||||
|
|
||||||
|
// TODO: Serialize structs to JSON (simplified for now)
|
||||||
|
notificationsJSON := "{}"
|
||||||
|
privacyJSON := "{}"
|
||||||
|
audioJSON := "{}"
|
||||||
|
|
||||||
|
_, err = s.db.Exec(query, userID, current.Theme, current.Language, current.Timezone,
|
||||||
|
notificationsJSON, privacyJSON, audioJSON, current.UpdatedAt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to update user preferences: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return current, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteAccount supprime le compte d'un utilisateur (soft delete)
|
||||||
|
func (s *Service) DeleteAccount(userID uuid.UUID, password, reason string) error {
|
||||||
|
// Vérifier le mot de passe
|
||||||
|
var currentHash string
|
||||||
|
err := s.db.QueryRow("SELECT password_hash FROM users WHERE id = $1", userID).Scan(¤tHash)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return fmt.Errorf("failed to get user password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := utils.CheckPasswordHash(password, currentHash); err != nil {
|
||||||
|
return fmt.Errorf("invalid password")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marquer le compte comme supprimé avec période de grâce de 30 jours
|
||||||
|
recoveryDeadline := time.Now().Add(30 * 24 * time.Hour)
|
||||||
|
query := `
|
||||||
|
UPDATE users
|
||||||
|
SET is_active = false, deleted_at = CURRENT_TIMESTAMP,
|
||||||
|
deletion_reason = $2, recovery_deadline = $3, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err = s.db.Exec(query, userID, reason, recoveryDeadline)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to delete account: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecoverAccount récupère un compte supprimé
|
||||||
|
func (s *Service) RecoverAccount(email, password string) error {
|
||||||
|
// Vérifier l'utilisateur et son statut
|
||||||
|
var userID uuid.UUID
|
||||||
|
var currentHash string
|
||||||
|
var deletedAt sql.NullTime
|
||||||
|
var recoveryDeadline sql.NullTime
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT id, password_hash, deleted_at, recovery_deadline
|
||||||
|
FROM users
|
||||||
|
WHERE email = $1 AND deleted_at IS NOT NULL
|
||||||
|
`
|
||||||
|
|
||||||
|
err := s.db.QueryRow(query, email).Scan(&userID, ¤tHash, &deletedAt, &recoveryDeadline)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return fmt.Errorf("no deleted account found for this email")
|
||||||
|
}
|
||||||
|
return fmt.Errorf("failed to find account: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier que la période de récupération n'est pas expirée
|
||||||
|
if recoveryDeadline.Valid && time.Now().After(recoveryDeadline.Time) {
|
||||||
|
return fmt.Errorf("recovery period has expired")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier le mot de passe
|
||||||
|
if err := utils.CheckPasswordHash(password, currentHash); err != nil {
|
||||||
|
return fmt.Errorf("invalid password")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Réactiver le compte
|
||||||
|
updateQuery := `
|
||||||
|
UPDATE users
|
||||||
|
SET is_active = true, deleted_at = NULL, deletion_reason = NULL,
|
||||||
|
recovery_deadline = NULL, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err = s.db.Exec(updateQuery, userID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to recover account: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExportUserData exporte toutes les données d'un utilisateur (RGPD)
|
||||||
|
func (s *Service) ExportUserData(userID uuid.UUID) (*UserDataExport, error) {
|
||||||
|
// Récupérer le profil
|
||||||
|
profile, err := s.GetUserByID(userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get user profile: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer les préférences
|
||||||
|
preferences, err := s.GetUserPreferences(userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get user preferences: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer l'activité (simplifié)
|
||||||
|
activity := []UserActivity{
|
||||||
|
{ID: uuid.New(), Type: "login", Details: "User login", CreatedAt: time.Now()},
|
||||||
|
{ID: uuid.New(), Type: "profile_update", Details: "Profile updated", CreatedAt: time.Now()},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer le contenu (simplifié)
|
||||||
|
content := []UserContent{
|
||||||
|
{ID: uuid.New(), Type: "track", Title: "Sample Track", URL: "/tracks/1", CreatedAt: time.Now()},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer les interactions (simplifié)
|
||||||
|
interactions := []UserInteraction{
|
||||||
|
{ID: uuid.New(), Type: "like", TargetID: uuid.New(), CreatedAt: time.Now()},
|
||||||
|
}
|
||||||
|
|
||||||
|
export := &UserDataExport{
|
||||||
|
UserID: userID,
|
||||||
|
Profile: *profile,
|
||||||
|
Preferences: *preferences,
|
||||||
|
Activity: activity,
|
||||||
|
Content: content,
|
||||||
|
Interactions: interactions,
|
||||||
|
ExportedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
return export, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RequestDataDeletion demande la suppression définitive des données
|
||||||
|
func (s *Service) RequestDataDeletion(userID uuid.UUID, password, reason string) error {
|
||||||
|
// Vérifier le mot de passe
|
||||||
|
var currentHash string
|
||||||
|
err := s.db.QueryRow("SELECT password_hash FROM users WHERE id = $1", userID).Scan(¤tHash)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return fmt.Errorf("failed to get user password: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := utils.CheckPasswordHash(password, currentHash); err != nil {
|
||||||
|
return fmt.Errorf("invalid password")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer une demande de suppression définitive
|
||||||
|
query := `
|
||||||
|
INSERT INTO data_deletion_requests (user_id, reason, status, requested_at)
|
||||||
|
VALUES ($1, $2, 'pending', CURRENT_TIMESTAMP)
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err = s.db.Exec(query, userID, reason)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create deletion request: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAccountStatus récupère le statut du compte
|
||||||
|
func (s *Service) GetAccountStatus(userID uuid.UUID) (*AccountStatus, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, is_active, is_verified, created_at, deleted_at,
|
||||||
|
COALESCE(deletion_reason, '') as deletion_reason,
|
||||||
|
recovery_deadline
|
||||||
|
FROM users
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var status AccountStatus
|
||||||
|
var deletedAt sql.NullTime
|
||||||
|
var recoveryDeadline sql.NullTime
|
||||||
|
|
||||||
|
err := s.db.QueryRow(query, userID).Scan(
|
||||||
|
&status.UserID, &status.IsActive, &status.IsVerified,
|
||||||
|
&status.CreatedAt, &deletedAt, &status.DeletionReason, &recoveryDeadline,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, fmt.Errorf("user not found")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get account status: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Déterminer le statut
|
||||||
|
if deletedAt.Valid {
|
||||||
|
status.Status = "deleted"
|
||||||
|
status.DeletedAt = &deletedAt.Time
|
||||||
|
if recoveryDeadline.Valid {
|
||||||
|
status.RecoveryDeadline = &recoveryDeadline.Time
|
||||||
|
}
|
||||||
|
} else if !status.IsActive {
|
||||||
|
status.Status = "suspended"
|
||||||
|
} else {
|
||||||
|
status.Status = "active"
|
||||||
|
}
|
||||||
|
|
||||||
|
return &status, nil
|
||||||
|
}
|
||||||
167
veza-backend-api/internal/api/user/types.go
Normal file
167
veza-backend-api/internal/api/user/types.go
Normal file
|
|
@ -0,0 +1,167 @@
|
||||||
|
package user
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// User represents a user with password (for auth)
|
||||||
|
type User struct {
|
||||||
|
ID uuid.UUID `db:"id" json:"id"`
|
||||||
|
Username string `db:"username" json:"username"`
|
||||||
|
Email string `db:"email" json:"email"`
|
||||||
|
Password string `db:"password_hash" json:"-"` // Never serialize password
|
||||||
|
FirstName sql.NullString `db:"first_name" json:"first_name,omitempty"`
|
||||||
|
LastName sql.NullString `db:"last_name" json:"last_name,omitempty"`
|
||||||
|
Bio sql.NullString `db:"bio" json:"bio,omitempty"`
|
||||||
|
Avatar sql.NullString `db:"avatar" json:"avatar,omitempty"`
|
||||||
|
Role string `db:"role" json:"role"`
|
||||||
|
IsActive bool `db:"is_active" json:"is_active"`
|
||||||
|
IsVerified bool `db:"is_verified" json:"is_verified"`
|
||||||
|
LastLoginAt sql.NullTime `db:"last_login_at" json:"last_login_at,omitempty"`
|
||||||
|
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserResponse represents user data without sensitive information
|
||||||
|
type UserResponse struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
Username string `json:"username"`
|
||||||
|
Email string `json:"email"`
|
||||||
|
FirstName sql.NullString `json:"first_name,omitempty"`
|
||||||
|
LastName sql.NullString `json:"last_name,omitempty"`
|
||||||
|
Bio sql.NullString `json:"bio,omitempty"`
|
||||||
|
Avatar sql.NullString `json:"avatar,omitempty"`
|
||||||
|
Role string `json:"role"`
|
||||||
|
IsActive bool `json:"is_active"`
|
||||||
|
IsVerified bool `json:"is_verified"`
|
||||||
|
LastLoginAt sql.NullTime `json:"last_login_at,omitempty"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateUserRequest represents a request to create a new user
|
||||||
|
type CreateUserRequest struct {
|
||||||
|
Username string `json:"username" binding:"required,min=3,max=50"`
|
||||||
|
Email string `json:"email" binding:"required,email"`
|
||||||
|
Password string `json:"password" binding:"required,min=8"`
|
||||||
|
FirstName string `json:"first_name,omitempty"`
|
||||||
|
LastName string `json:"last_name,omitempty"`
|
||||||
|
Role string `json:"role,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUserRequest represents a request to update user data
|
||||||
|
type UpdateUserRequest struct {
|
||||||
|
Username *string `json:"username,omitempty"`
|
||||||
|
Email *string `json:"email,omitempty"`
|
||||||
|
FirstName *string `json:"first_name,omitempty"`
|
||||||
|
LastName *string `json:"last_name,omitempty"`
|
||||||
|
Bio *string `json:"bio,omitempty"`
|
||||||
|
Avatar *string `json:"avatar,omitempty"`
|
||||||
|
IsActive *bool `json:"is_active,omitempty"`
|
||||||
|
IsVerified *bool `json:"is_verified,omitempty"`
|
||||||
|
Role *string `json:"role,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserPreferencesRequest représente une requête de mise à jour des préférences
|
||||||
|
type UserPreferencesRequest struct {
|
||||||
|
Theme *string `json:"theme,omitempty"`
|
||||||
|
Language *string `json:"language,omitempty"`
|
||||||
|
Timezone *string `json:"timezone,omitempty"`
|
||||||
|
Notifications *NotificationSettings `json:"notifications,omitempty"`
|
||||||
|
Privacy *PrivacySettings `json:"privacy,omitempty"`
|
||||||
|
Audio *AudioSettings `json:"audio,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserPreferencesResponse représente les préférences utilisateur
|
||||||
|
type UserPreferencesResponse struct {
|
||||||
|
UserID uuid.UUID `json:"user_id"`
|
||||||
|
Theme string `json:"theme"`
|
||||||
|
Language string `json:"language"`
|
||||||
|
Timezone string `json:"timezone"`
|
||||||
|
Notifications NotificationSettings `json:"notifications"`
|
||||||
|
Privacy PrivacySettings `json:"privacy"`
|
||||||
|
Audio AudioSettings `json:"audio"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// NotificationSettings paramètres de notification
|
||||||
|
type NotificationSettings struct {
|
||||||
|
Email bool `json:"email"`
|
||||||
|
Push bool `json:"push"`
|
||||||
|
Desktop bool `json:"desktop"`
|
||||||
|
NewFollowers bool `json:"new_followers"`
|
||||||
|
TrackComments bool `json:"track_comments"`
|
||||||
|
DirectMessages bool `json:"direct_messages"`
|
||||||
|
Mentions bool `json:"mentions"`
|
||||||
|
Likes bool `json:"likes"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrivacySettings paramètres de confidentialité
|
||||||
|
type PrivacySettings struct {
|
||||||
|
ShowEmail bool `json:"show_email"`
|
||||||
|
ShowActivity bool `json:"show_activity"`
|
||||||
|
AllowDM bool `json:"allow_dm"`
|
||||||
|
TrackVisibility string `json:"track_visibility"` // public, followers, private
|
||||||
|
ProfileVisibility string `json:"profile_visibility"` // public, registered, private
|
||||||
|
}
|
||||||
|
|
||||||
|
// AudioSettings paramètres audio
|
||||||
|
type AudioSettings struct {
|
||||||
|
AutoPlay bool `json:"auto_play"`
|
||||||
|
Quality string `json:"quality"` // low, medium, high, lossless
|
||||||
|
Volume float64 `json:"volume"` // 0-1
|
||||||
|
Crossfade int `json:"crossfade"` // secondes
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountStatus statut du compte
|
||||||
|
type AccountStatus struct {
|
||||||
|
UserID uuid.UUID `json:"user_id"`
|
||||||
|
Status string `json:"status"` // active, suspended, deleted, pending_deletion
|
||||||
|
IsActive bool `json:"is_active"`
|
||||||
|
IsVerified bool `json:"is_verified"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
DeletedAt *time.Time `json:"deleted_at,omitempty"`
|
||||||
|
DeletionReason string `json:"deletion_reason,omitempty"`
|
||||||
|
RecoveryDeadline *time.Time `json:"recovery_deadline,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserDataExport export des données utilisateur (RGPD)
|
||||||
|
type UserDataExport struct {
|
||||||
|
UserID uuid.UUID `json:"user_id"`
|
||||||
|
Profile UserResponse `json:"profile"`
|
||||||
|
Preferences UserPreferencesResponse `json:"preferences"`
|
||||||
|
Activity []UserActivity `json:"activity"`
|
||||||
|
Content []UserContent `json:"content"`
|
||||||
|
Interactions []UserInteraction `json:"interactions"`
|
||||||
|
ExportedAt time.Time `json:"exported_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserActivity activité utilisateur
|
||||||
|
type UserActivity struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
Details string `json:"details"`
|
||||||
|
IPAddress string `json:"ip_address"`
|
||||||
|
UserAgent string `json:"user_agent"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserContent contenu utilisateur
|
||||||
|
type UserContent struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
URL string `json:"url"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserInteraction interaction utilisateur
|
||||||
|
type UserInteraction struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
TargetID uuid.UUID `json:"target_id"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
}
|
||||||
2
veza-backend-api/internal/api/voting_system/handler.go
Normal file
2
veza-backend-api/internal/api/voting_system/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package voting_system - TO BE IMPLEMENTED
|
||||||
|
package voting_system
|
||||||
2
veza-backend-api/internal/api/websocket/handler.go
Normal file
2
veza-backend-api/internal/api/websocket/handler.go
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
// Package websocket - TO BE IMPLEMENTED
|
||||||
|
package websocket
|
||||||
44
veza-backend-api/internal/benchmarks/example_test.go
Normal file
44
veza-backend-api/internal/benchmarks/example_test.go
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
package benchmarks
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/testutils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// BenchmarkDatabaseQuery benchmark pour une requête de base de données (T0044)
|
||||||
|
func BenchmarkDatabaseQuery(b *testing.B) {
|
||||||
|
db := testutils.SetupBenchmarkDB(b)
|
||||||
|
|
||||||
|
b.ResetTimer()
|
||||||
|
b.RunParallel(func(pb *testing.PB) {
|
||||||
|
for pb.Next() {
|
||||||
|
// Exemple de requête
|
||||||
|
var count int64
|
||||||
|
db.GormDB.Raw("SELECT COUNT(*) FROM users").Scan(&count)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// BenchmarkDatabaseQuerySequential benchmark séquentiel pour comparaison (T0044)
|
||||||
|
func BenchmarkDatabaseQuerySequential(b *testing.B) {
|
||||||
|
db := testutils.SetupBenchmarkDB(b)
|
||||||
|
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
// Exemple de requête séquentielle
|
||||||
|
var count int64
|
||||||
|
db.GormDB.Raw("SELECT COUNT(*) FROM users").Scan(&count)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BenchmarkSimpleQuery exemple de benchmark simple (T0044)
|
||||||
|
func BenchmarkSimpleQuery(b *testing.B) {
|
||||||
|
db := testutils.SetupBenchmarkDB(b)
|
||||||
|
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
var count int64
|
||||||
|
db.GormDB.Raw("SELECT COUNT(*) FROM users").Scan(&count)
|
||||||
|
}
|
||||||
|
}
|
||||||
43
veza-backend-api/internal/common/context.go
Normal file
43
veza-backend-api/internal/common/context.go
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
package common
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
UserIDContextKey = "user_id"
|
||||||
|
UsernameContextKey = "username"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GetUserIDFromContext retrieves user ID from gin context
|
||||||
|
func GetUserIDFromContext(c *gin.Context) (uuid.UUID, bool) {
|
||||||
|
userID, exists := c.Get(UserIDContextKey)
|
||||||
|
if !exists {
|
||||||
|
return uuid.Nil, false // Return uuid.Nil for non-existent UUID
|
||||||
|
}
|
||||||
|
|
||||||
|
id, ok := userID.(uuid.UUID)
|
||||||
|
return id, ok
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUserIDInContext sets user ID in gin context
|
||||||
|
func SetUserIDInContext(c *gin.Context, userID uuid.UUID) {
|
||||||
|
c.Set(UserIDContextKey, userID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUsernameFromContext retrieves username from gin context
|
||||||
|
func GetUsernameFromContext(c *gin.Context) (string, bool) {
|
||||||
|
username, exists := c.Get(UsernameContextKey)
|
||||||
|
if !exists {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
name, ok := username.(string)
|
||||||
|
return name, ok
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUsernameInContext sets username in gin context
|
||||||
|
func SetUsernameInContext(c *gin.Context, username string) {
|
||||||
|
c.Set(UsernameContextKey, username)
|
||||||
|
}
|
||||||
31
veza-backend-api/internal/common/types.go
Normal file
31
veza-backend-api/internal/common/types.go
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
package common
|
||||||
|
|
||||||
|
// Common types and utilities used across the application
|
||||||
|
|
||||||
|
// Response represents a standard API response
|
||||||
|
type Response struct {
|
||||||
|
Success bool `json:"success"`
|
||||||
|
Data interface{} `json:"data,omitempty"`
|
||||||
|
Error string `json:"error,omitempty"`
|
||||||
|
Message string `json:"message,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// PaginationMeta contains pagination metadata
|
||||||
|
type PaginationMeta struct {
|
||||||
|
Page int `json:"page"`
|
||||||
|
PerPage int `json:"per_page"`
|
||||||
|
Total int `json:"total"`
|
||||||
|
TotalPages int `json:"total_pages"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCode represents application error codes
|
||||||
|
type ErrorCode string
|
||||||
|
|
||||||
|
const (
|
||||||
|
ErrorCodeBadRequest ErrorCode = "BAD_REQUEST"
|
||||||
|
ErrorCodeUnauthorized ErrorCode = "UNAUTHORIZED"
|
||||||
|
ErrorCodeForbidden ErrorCode = "FORBIDDEN"
|
||||||
|
ErrorCodeNotFound ErrorCode = "NOT_FOUND"
|
||||||
|
ErrorCodeConflict ErrorCode = "CONFLICT"
|
||||||
|
ErrorCodeInternalServerError ErrorCode = "INTERNAL_SERVER_ERROR"
|
||||||
|
)
|
||||||
593
veza-backend-api/internal/config/config.go
Normal file
593
veza-backend-api/internal/config/config.go
Normal file
|
|
@ -0,0 +1,593 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/database"
|
||||||
|
"veza-backend-api/internal/eventbus" // Import the eventbus package
|
||||||
|
"veza-backend-api/internal/metrics"
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/redis/go-redis/v9"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config contient toute la configuration de l'application
|
||||||
|
type Config struct {
|
||||||
|
// Base de données
|
||||||
|
Database *database.Database
|
||||||
|
|
||||||
|
// Redis
|
||||||
|
RedisClient *redis.Client
|
||||||
|
|
||||||
|
// Services
|
||||||
|
SessionService *services.SessionService
|
||||||
|
AuditService *services.AuditService
|
||||||
|
TOTPService *services.TOTPService
|
||||||
|
UploadValidator *services.UploadValidator
|
||||||
|
CacheService *services.CacheService
|
||||||
|
PlaylistService *services.PlaylistService
|
||||||
|
PermissionService *services.PermissionService
|
||||||
|
|
||||||
|
// Middlewares
|
||||||
|
RateLimiter *middleware.RateLimiter
|
||||||
|
SimpleRateLimiter *middleware.SimpleRateLimiter // Rate limiter simple (T0015)
|
||||||
|
EndpointLimiter *middleware.EndpointLimiter
|
||||||
|
AuthMiddleware *middleware.AuthMiddleware
|
||||||
|
|
||||||
|
// Logger
|
||||||
|
Logger *zap.Logger
|
||||||
|
|
||||||
|
// Metrics (T0020)
|
||||||
|
ErrorMetrics *metrics.ErrorMetrics
|
||||||
|
|
||||||
|
// Secrets Provider (T0037)
|
||||||
|
SecretsProvider SecretsProvider
|
||||||
|
|
||||||
|
// Config Watcher (T0040)
|
||||||
|
ConfigWatcher *ConfigWatcher
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
AppPort int // Port pour le serveur HTTP (T0031)
|
||||||
|
JWTSecret string
|
||||||
|
ChatJWTSecret string // Secret pour les tokens WebSocket Chat
|
||||||
|
RedisURL string
|
||||||
|
DatabaseURL string
|
||||||
|
UploadDir string // Répertoire d'upload
|
||||||
|
StreamServerURL string // URL du serveur de streaming
|
||||||
|
CORSOrigins []string // Liste des origines CORS autorisées
|
||||||
|
RateLimitLimit int // Limite de requêtes pour le rate limiter simple
|
||||||
|
RateLimitWindow int // Fenêtre de temps en secondes pour le rate limiter simple
|
||||||
|
LogLevel string // Niveau de log (T0027)
|
||||||
|
DBMaxRetries int
|
||||||
|
DBRetryInterval time.Duration
|
||||||
|
|
||||||
|
// RabbitMQ
|
||||||
|
RabbitMQEventBus *eventbus.RabbitMQEventBus // Ajout de l'instance de l'EventBus
|
||||||
|
RabbitMQURL string
|
||||||
|
RabbitMQMaxRetries int
|
||||||
|
RabbitMQRetryInterval time.Duration
|
||||||
|
RabbitMQEnable bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewConfig crée une nouvelle configuration
|
||||||
|
func NewConfig() (*Config, error) {
|
||||||
|
// Déterminer l'environnement avec détection automatique améliorée (T0032, T0039)
|
||||||
|
env := DetectEnvironment()
|
||||||
|
|
||||||
|
// Charger les fichiers .env selon l'environnement (T0032)
|
||||||
|
// Charge dans l'ordre: .env.{env}, .env
|
||||||
|
// Les variables d'environnement système ont priorité
|
||||||
|
if err := LoadEnvFiles(env); err != nil {
|
||||||
|
// En cas d'erreur, continuer quand même (peut-être que les fichiers .env n'existent pas)
|
||||||
|
// Les variables d'environnement système seront utilisées
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser le logger
|
||||||
|
logger, err := zap.NewProduction()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Charger les origines CORS depuis les variables d'environnement
|
||||||
|
corsOrigins := getEnvStringSlice("CORS_ALLOWED_ORIGINS", []string{"*"})
|
||||||
|
|
||||||
|
// Charger la configuration du rate limiter simple
|
||||||
|
rateLimitLimit := getEnvInt("RATE_LIMIT_LIMIT", 100) // 100 requêtes par défaut
|
||||||
|
rateLimitWindow := getEnvInt("RATE_LIMIT_WINDOW", 60) // 60 secondes (1 minute) par défaut
|
||||||
|
|
||||||
|
// Charger le niveau de log depuis les variables d'environnement (T0027)
|
||||||
|
// Valeurs possibles: DEBUG, INFO, WARN, ERROR
|
||||||
|
// Par défaut: INFO
|
||||||
|
logLevel := getEnv("LOG_LEVEL", "INFO")
|
||||||
|
|
||||||
|
// Charger le port depuis les variables d'environnement (T0031)
|
||||||
|
appPort := getEnvInt("APP_PORT", 8080)
|
||||||
|
|
||||||
|
// Configuration depuis les variables d'environnement
|
||||||
|
jwtSecret := getEnv("JWT_SECRET", "your-super-secret-jwt-key")
|
||||||
|
config := &Config{
|
||||||
|
AppPort: appPort,
|
||||||
|
JWTSecret: jwtSecret,
|
||||||
|
ChatJWTSecret: getEnv("CHAT_JWT_SECRET", jwtSecret), // Fallback to main JWT secret if not set
|
||||||
|
RedisURL: getEnv("REDIS_URL", "redis://localhost:6379"),
|
||||||
|
DatabaseURL: getEnv("DATABASE_URL", "postgresql://veza:password@localhost:5432/veza_db"),
|
||||||
|
UploadDir: getEnv("UPLOAD_DIR", "uploads"),
|
||||||
|
StreamServerURL: getEnv("STREAM_SERVER_URL", "http://localhost:8082"),
|
||||||
|
CORSOrigins: corsOrigins,
|
||||||
|
RateLimitLimit: rateLimitLimit,
|
||||||
|
RateLimitWindow: rateLimitWindow,
|
||||||
|
LogLevel: logLevel,
|
||||||
|
Logger: logger,
|
||||||
|
DBMaxRetries: getEnvInt("DB_MAX_RETRIES", 5), // 5 tentatives par défaut
|
||||||
|
DBRetryInterval: getEnvDuration("DB_RETRY_INTERVAL", 5*time.Second), // 5 secondes par défaut
|
||||||
|
|
||||||
|
// Configuration RabbitMQ
|
||||||
|
RabbitMQURL: getEnv("RABBITMQ_URL", "amqp://guest:guest@localhost:5672/"),
|
||||||
|
RabbitMQMaxRetries: getEnvInt("RABBITMQ_MAX_RETRIES", 3), // 3 tentatives par défaut
|
||||||
|
RabbitMQRetryInterval: getEnvDuration("RABBITMQ_RETRY_INTERVAL", 2*time.Second), // 2 secondes par défaut
|
||||||
|
RabbitMQEnable: getEnvBool("RABBITMQ_ENABLE", true), // Activé par défaut
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser le SecretsProvider (T0037)
|
||||||
|
secretKeys := DefaultSecretKeys()
|
||||||
|
config.SecretsProvider = NewEnvSecretsProvider(secretKeys)
|
||||||
|
|
||||||
|
// Valider la configuration (T0031)
|
||||||
|
if err := config.Validate(); err != nil {
|
||||||
|
logger.Error("Configuration validation failed", zap.Error(err))
|
||||||
|
return nil, fmt.Errorf("invalid configuration: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser Redis
|
||||||
|
config.RedisClient, err = initRedis(config.RedisURL)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to initialize Redis", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser la base de données avec retry
|
||||||
|
config.Database, err = initDatabaseWithRetry(config.DatabaseURL, config.DBMaxRetries, config.DBRetryInterval, config.Logger)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to initialize database", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser RabbitMQ avec retry
|
||||||
|
config.RabbitMQEventBus, err = eventbus.NewRabbitMQEventBusWithRetry(&eventbus.RabbitMQConfig{
|
||||||
|
URL: config.RabbitMQURL,
|
||||||
|
MaxRetries: config.RabbitMQMaxRetries,
|
||||||
|
RetryInterval: config.RabbitMQRetryInterval,
|
||||||
|
Enable: config.RabbitMQEnable,
|
||||||
|
}, config.Logger)
|
||||||
|
if err != nil {
|
||||||
|
// En mode dégradé, l'erreur n'est pas fatale au démarrage du service
|
||||||
|
if _, ok := err.(*eventbus.EventBusUnavailableError); ok && !config.RabbitMQEnable {
|
||||||
|
logger.Warn("RabbitMQ EventBus est indisponible mais le service démarre en mode dégradé.", zap.Error(err))
|
||||||
|
} else if _, ok := err.(*eventbus.EventBusUnavailableError); ok {
|
||||||
|
// Si le service est censé être enabled et qu'il est injoignable après retries
|
||||||
|
logger.Fatal("Impossible de se connecter à RabbitMQ après plusieurs tentatives. Le service ne peut pas démarrer.", zap.Error(err))
|
||||||
|
return nil, err // Retourner l'erreur fatale
|
||||||
|
} else {
|
||||||
|
logger.Error("Failed to initialize RabbitMQ EventBus", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser les services
|
||||||
|
err = config.initServices()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to initialize services", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser les middlewares
|
||||||
|
err = config.initMiddlewares()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("Failed to initialize middlewares", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser les métriques d'erreurs (T0020)
|
||||||
|
config.ErrorMetrics = metrics.NewErrorMetrics()
|
||||||
|
|
||||||
|
// Logger la configuration avec masquage des secrets (T0037)
|
||||||
|
config.logConfigInitialized(logger)
|
||||||
|
|
||||||
|
// Initialiser le ConfigWatcher si activé (T0040)
|
||||||
|
// Le watcher peut être activé via une variable d'environnement CONFIG_WATCH=true
|
||||||
|
if getEnv("CONFIG_WATCH", "false") == "true" {
|
||||||
|
reloader := config.GetConfigReloader()
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("Failed to create config watcher", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
config.ConfigWatcher = watcher
|
||||||
|
// Surveiller les fichiers .env
|
||||||
|
envFiles := []string{".env", ".env." + env}
|
||||||
|
if err := watcher.Watch(envFiles); err != nil {
|
||||||
|
logger.Warn("Failed to start watching config files", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
logger.Info("Config watcher started", zap.Strings("files", watcher.GetWatchedFiles()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return config, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetConfigReloader retourne le ConfigReloader pour cette configuration (T0034)
|
||||||
|
func (c *Config) GetConfigReloader() *ConfigReloader {
|
||||||
|
return NewConfigReloader(c, c.Logger)
|
||||||
|
}
|
||||||
|
|
||||||
|
// initServices initialise tous les services
|
||||||
|
func (c *Config) initServices() error {
|
||||||
|
// Service de session
|
||||||
|
c.SessionService = services.NewSessionService(c.Database, c.Logger)
|
||||||
|
|
||||||
|
// Service d'audit
|
||||||
|
c.AuditService = services.NewAuditService(c.Database, c.Logger)
|
||||||
|
|
||||||
|
// Service TOTP
|
||||||
|
c.TOTPService = services.NewTOTPService(c.Database, c.Logger)
|
||||||
|
|
||||||
|
// Validateur d'upload
|
||||||
|
uploadConfig := services.DefaultUploadConfig()
|
||||||
|
var err error
|
||||||
|
c.UploadValidator, err = services.NewUploadValidator(uploadConfig, c.Logger)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Service de cache
|
||||||
|
c.CacheService = services.NewCacheService(c.RedisClient, c.Logger)
|
||||||
|
|
||||||
|
// Service de playlist
|
||||||
|
c.PlaylistService = services.NewPlaylistServiceWithDB(c.Database.GormDB, c.Logger)
|
||||||
|
|
||||||
|
// Service de permissions
|
||||||
|
c.PermissionService = services.NewPermissionService(c.Database.GormDB)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// initMiddlewares initialise tous les middlewares
|
||||||
|
func (c *Config) initMiddlewares() error {
|
||||||
|
// Rate limiter global (avec Redis)
|
||||||
|
rateLimiterConfig := &middleware.RateLimiterConfig{
|
||||||
|
IPRequestsPerMinute: 100,
|
||||||
|
IPBurst: 10,
|
||||||
|
UserRequestsPerMinute: 1000,
|
||||||
|
UserBurst: 100,
|
||||||
|
RedisClient: c.RedisClient,
|
||||||
|
KeyPrefix: "veza:rate_limit",
|
||||||
|
}
|
||||||
|
c.RateLimiter = middleware.NewRateLimiter(rateLimiterConfig)
|
||||||
|
|
||||||
|
// Simple rate limiter (T0015) - sans dépendance Redis
|
||||||
|
window := time.Duration(c.RateLimitWindow) * time.Second
|
||||||
|
c.SimpleRateLimiter = middleware.NewSimpleRateLimiter(c.RateLimitLimit, window)
|
||||||
|
|
||||||
|
// Rate limiter par endpoint
|
||||||
|
endpointLimiterConfig := &middleware.EndpointLimiterConfig{
|
||||||
|
RedisClient: c.RedisClient,
|
||||||
|
KeyPrefix: "veza:endpoint_limit",
|
||||||
|
}
|
||||||
|
endpointLimits := middleware.DefaultEndpointLimits()
|
||||||
|
c.EndpointLimiter = middleware.NewEndpointLimiter(endpointLimiterConfig, endpointLimits)
|
||||||
|
|
||||||
|
// Middleware d'authentification
|
||||||
|
c.AuthMiddleware = middleware.NewAuthMiddleware(
|
||||||
|
c.SessionService,
|
||||||
|
c.AuditService,
|
||||||
|
c.PermissionService,
|
||||||
|
c.Logger,
|
||||||
|
c.JWTSecret,
|
||||||
|
)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE: Les handlers ne sont plus initialisés dans Config pour éviter les imports cycliques.
|
||||||
|
// Les handlers doivent être créés dans main.go ou dans les routes selon les besoins.
|
||||||
|
//
|
||||||
|
// SetupRoutes a été supprimé pour casser le cycle d'import config <-> api.
|
||||||
|
// Utiliser directement api.SetupRoutes() dans cmd/modern-server/main.go
|
||||||
|
|
||||||
|
// SetupMiddleware configures the global middlewares.
//
// Deprecated: this method is kept for compatibility but is now a no-op.
// Global middlewares are configured in internal/api/router.go via
// APIRouter.Setup().
// TODO: improve the CORS configuration in api/router.go so it uses
// c.CORSOrigins from the config.
func (c *Config) SetupMiddleware(router *gin.Engine) {
	// No-op: middlewares are configured in api/router.go.
	// This method exists only for compatibility with cmd/main.go (legacy),
	// which will be disabled in Chantier 1 - Step 2.
}
|
||||||
|
|
||||||
|
// initRedis initialise la connexion Redis
|
||||||
|
func initRedis(redisURL string) (*redis.Client, error) {
|
||||||
|
opts, err := redis.ParseURL(redisURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
client := redis.NewClient(opts)
|
||||||
|
|
||||||
|
// Test de connexion
|
||||||
|
ctx := context.Background()
|
||||||
|
_, err = client.Ping(ctx).Result()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return client, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// initDatabaseWithRetry opens the database connection, retrying up to
// maxRetries times with retryInterval between attempts.
// Pool sizing (25 open / 10 idle, 5m max lifetime, 1m max idle) mirrors
// the deprecated initDatabase.
func initDatabaseWithRetry(databaseURL string, maxRetries int, retryInterval time.Duration, logger *zap.Logger) (*database.Database, error) {
	dbConfig := &database.Config{
		URL:           databaseURL,
		MaxOpenConns:  25,
		MaxIdleConns:  10,
		MaxLifetime:   5 * time.Minute,
		MaxIdleTime:   1 * time.Minute,
		MaxRetries:    maxRetries,
		RetryInterval: retryInterval,
	}

	// Delegate to the database package's retry-aware constructor.
	return database.NewDatabaseWithRetry(dbConfig, logger)
}
|
||||||
|
|
||||||
|
// initDatabase opens the database connection with a fixed pool configuration.
//
// Deprecated: use initDatabaseWithRetry instead.
func initDatabase(databaseURL string) (*database.Database, error) {
	// Connection-pool configuration (same sizing as initDatabaseWithRetry).
	dbConfig := &database.Config{
		URL:          databaseURL,
		MaxOpenConns: 25,
		MaxIdleConns: 10,
		MaxLifetime:  5 * time.Minute,
		MaxIdleTime:  1 * time.Minute,
	}

	return database.NewDatabase(dbConfig)
}
|
||||||
|
|
||||||
|
// EnvConfig is the base configuration loaded from environment variables.
// It is populated by Load().
type EnvConfig struct {
	AppEnv      string   // deployment environment (e.g. development, production)
	AppPort     int      // HTTP listen port
	DBHost      string   // database host
	DBPort      int      // database port
	DBUser      string   // database user
	DBPassword  string   // database password (required; no default)
	DBName      string   // database name
	JWTSecret   string   // JWT signing secret (required; no default)
	RedisURL    string   // Redis connection URL
	CORSOrigins []string // allowed CORS origins
}
|
||||||
|
|
||||||
|
// Load reads and validates environment variables, applying defaults for
// optional values. It first loads the .env files for the current environment
// (system environment variables take precedence), then builds the EnvConfig.
// It panics (via getEnvRequired) when DB_PASSWORD or JWT_SECRET is missing.
func Load() (*EnvConfig, error) {
	// Determine the environment (T0032).
	env := getEnv("APP_ENV", "development")

	// Load .env files for that environment (T0032), in order: .env.{env}, .env.
	// System environment variables take precedence over file contents.
	if err := LoadEnvFiles(env); err != nil {
		return nil, fmt.Errorf("failed to load environment files: %w", err)
	}

	// Allowed CORS origins, comma-separated; defaults to "*".
	corsOrigins := getEnvStringSlice("CORS_ALLOWED_ORIGINS", []string{"*"})

	config := &EnvConfig{
		// APP_ENV is re-read here rather than reusing `env` — presumably so a
		// value defined in a .env file loaded above is picked up. TODO confirm
		// this double read is intentional.
		AppEnv:      getEnv("APP_ENV", "development"),
		AppPort:     getEnvInt("APP_PORT", 8080),
		DBHost:      getEnv("DB_HOST", "localhost"),
		DBPort:      getEnvInt("DB_PORT", 5432),
		DBUser:      getEnv("DB_USER", "veza"),
		DBPassword:  getEnvRequired("DB_PASSWORD"),
		DBName:      getEnv("DB_NAME", "veza_db"),
		JWTSecret:   getEnvRequired("JWT_SECRET"),
		RedisURL:    getEnv("REDIS_URL", "redis://localhost:6379"),
		CORSOrigins: corsOrigins,
	}

	return config, nil
}
|
||||||
|
|
||||||
|
// getEnv returns the trimmed value of the environment variable key, or
// defaultValue when the variable is unset or empty.
func getEnv(key, defaultValue string) string {
	// NOTE: the previous implementation printed every lookup — raw value
	// included — to stdout via fmt.Printf; that leaked configuration values
	// into logs and has been removed.
	if value := os.Getenv(key); value != "" {
		return strings.TrimSpace(value)
	}
	return defaultValue
}
|
||||||
|
|
||||||
|
// getEnvRequired returns the value of a mandatory environment variable and
// panics when it is unset or empty.
func getEnvRequired(key string) string {
	if value := os.Getenv(key); value != "" {
		return value
	}
	panic(fmt.Sprintf("Required environment variable %s is not set", key))
}
|
||||||
|
|
||||||
|
// getEnvInt returns the environment variable key parsed as an int, or
// defaultValue when the variable is unset or not a valid integer.
func getEnvInt(key string, defaultValue int) int {
	raw := os.Getenv(key)
	if raw == "" {
		return defaultValue
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil {
		return defaultValue
	}
	return parsed
}
|
||||||
|
|
||||||
|
// getEnvBool returns the environment variable key parsed as a bool (per
// strconv.ParseBool), or defaultValue when unset or unparsable.
func getEnvBool(key string, defaultValue bool) bool {
	raw := os.Getenv(key)
	if raw == "" {
		return defaultValue
	}
	parsed, err := strconv.ParseBool(raw)
	if err != nil {
		return defaultValue
	}
	return parsed
}
|
||||||
|
|
||||||
|
// getEnvDuration returns the environment variable key parsed as a
// time.Duration (e.g. "30s", "5m"), or defaultValue when unset or invalid.
func getEnvDuration(key string, defaultValue time.Duration) time.Duration {
	raw := os.Getenv(key)
	if raw == "" {
		return defaultValue
	}
	parsed, err := time.ParseDuration(raw)
	if err != nil {
		return defaultValue
	}
	return parsed
}
|
||||||
|
|
||||||
|
// getEnvStringSlice returns the environment variable key split on commas,
// with each element whitespace-trimmed and empty elements dropped.
// It returns defaultValue when the variable is unset or yields no
// non-empty elements.
func getEnvStringSlice(key string, defaultValue []string) []string {
	raw := os.Getenv(key)
	if raw == "" {
		return defaultValue
	}

	var result []string
	for _, part := range strings.Split(raw, ",") {
		if trimmed := strings.TrimSpace(part); trimmed != "" {
			result = append(result, trimmed)
		}
	}

	if len(result) == 0 {
		return defaultValue
	}
	return result
}
|
||||||
|
|
||||||
|
// Validate valide la configuration (T0031, T0036)
|
||||||
|
// Vérifie que toutes les valeurs de configuration sont valides avant le démarrage de l'application
|
||||||
|
// Utilise ConfigValidator pour une validation stricte selon les règles de schéma (T0036)
|
||||||
|
func (c *Config) Validate() error {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
// Valider le port (1-65535) avec ConfigValidator (T0036)
|
||||||
|
if err := validator.ValidatePort(c.AppPort); err != nil {
|
||||||
|
return fmt.Errorf("APP_PORT validation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider JWT secret (minimum 32 caractères pour sécurité) avec ConfigValidator (T0036)
|
||||||
|
if err := validator.ValidateSecretLength(c.JWTSecret, 32); err != nil {
|
||||||
|
return fmt.Errorf("JWT_SECRET validation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider DatabaseURL (requis) avec ConfigValidator (T0036)
|
||||||
|
if c.DatabaseURL == "" {
|
||||||
|
return errors.New("DATABASE_URL is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider le format de DatabaseURL avec ConfigValidator (T0036)
|
||||||
|
// Support postgres, postgresql, et sqlite
|
||||||
|
if err := validator.ValidateURL(c.DatabaseURL, "postgres"); err != nil {
|
||||||
|
if err2 := validator.ValidateURL(c.DatabaseURL, "postgresql"); err2 != nil {
|
||||||
|
if err3 := validator.ValidateURL(c.DatabaseURL, "sqlite"); err3 != nil {
|
||||||
|
return fmt.Errorf("DATABASE_URL validation failed: must start with postgres://, postgresql://, or sqlite://")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider RedisURL (requis) avec ConfigValidator (T0036)
|
||||||
|
if c.RedisURL == "" {
|
||||||
|
return errors.New("REDIS_URL is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider le format de RedisURL avec ConfigValidator (T0036)
|
||||||
|
// Support redis et rediss (Redis avec SSL)
|
||||||
|
if err := validator.ValidateURL(c.RedisURL, "redis"); err != nil {
|
||||||
|
if err2 := validator.ValidateURL(c.RedisURL, "rediss"); err2 != nil {
|
||||||
|
return fmt.Errorf("REDIS_URL validation failed: must start with redis:// or rediss://")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider LogLevel avec ValidateEnum (T0036)
|
||||||
|
if c.LogLevel != "" {
|
||||||
|
allowedLevels := []string{"DEBUG", "INFO", "WARN", "ERROR"}
|
||||||
|
if err := validator.ValidateEnum(c.LogLevel, allowedLevels); err != nil {
|
||||||
|
return fmt.Errorf("LOG_LEVEL validation failed: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider RateLimitLimit et RateLimitWindow avec ValidatePositiveInt (T0036)
|
||||||
|
if err := validator.ValidatePositiveInt(c.RateLimitLimit, "RATE_LIMIT_LIMIT"); err != nil {
|
||||||
|
return fmt.Errorf("RATE_LIMIT_LIMIT validation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := validator.ValidatePositiveInt(c.RateLimitWindow, "RATE_LIMIT_WINDOW"); err != nil {
|
||||||
|
return fmt.Errorf("RATE_LIMIT_WINDOW validation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// logConfigInitialized logs the effective configuration, masking secret
// values (JWT secret, database and Redis URLs) via MaskConfigValue (T0037).
func (c *Config) logConfigInitialized(logger *zap.Logger) {
	logger.Info("Configuration initialized successfully",
		zap.Int("app_port", c.AppPort),
		// Secrets are masked before logging (T0037).
		zap.String("jwt_secret", MaskConfigValue("JWT_SECRET", c.JWTSecret, c.SecretsProvider)),
		zap.String("database_url", MaskConfigValue("DATABASE_URL", c.DatabaseURL, c.SecretsProvider)),
		zap.String("redis_url", MaskConfigValue("REDIS_URL", c.RedisURL, c.SecretsProvider)),
		zap.Strings("cors_origins", c.CORSOrigins),
		zap.Int("rate_limit_limit", c.RateLimitLimit),
		zap.Int("rate_limit_window", c.RateLimitWindow),
		zap.String("log_level", c.LogLevel),
	)
}
|
||||||
|
|
||||||
|
// Close ferme toutes les connexions (T0040)
|
||||||
|
func (c *Config) Close() error {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
// Arrêter le ConfigWatcher si actif (T0040)
|
||||||
|
if c.ConfigWatcher != nil {
|
||||||
|
if closeErr := c.ConfigWatcher.Stop(); closeErr != nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.RedisClient != nil {
|
||||||
|
if closeErr := c.RedisClient.Close(); closeErr != nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.Database != nil {
|
||||||
|
if closeErr := c.Database.Close(); closeErr != nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.RabbitMQEventBus != nil {
|
||||||
|
if closeErr := c.RabbitMQEventBus.Close(); closeErr != nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.Logger != nil {
|
||||||
|
c.Logger.Sync()
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
284
veza-backend-api/internal/config/config_test.go
Normal file
284
veza-backend-api/internal/config/config_test.go
Normal file
|
|
@ -0,0 +1,284 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestLoad(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalDBPassword := os.Getenv("DB_PASSWORD")
|
||||||
|
originalJWTSecret := os.Getenv("JWT_SECRET")
|
||||||
|
originalAppPort := os.Getenv("APP_PORT")
|
||||||
|
|
||||||
|
// Nettoyer après le test
|
||||||
|
defer func() {
|
||||||
|
if originalDBPassword != "" {
|
||||||
|
os.Setenv("DB_PASSWORD", originalDBPassword)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
}
|
||||||
|
if originalJWTSecret != "" {
|
||||||
|
os.Setenv("JWT_SECRET", originalJWTSecret)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
}
|
||||||
|
if originalAppPort != "" {
|
||||||
|
os.Setenv("APP_PORT", originalAppPort)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("APP_PORT")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Définir les variables requises
|
||||||
|
os.Setenv("DB_PASSWORD", "test_password")
|
||||||
|
os.Setenv("JWT_SECRET", "test_secret")
|
||||||
|
|
||||||
|
config, err := Load()
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, config)
|
||||||
|
|
||||||
|
// Vérifier les valeurs par défaut
|
||||||
|
assert.Equal(t, 8080, config.AppPort)
|
||||||
|
assert.Equal(t, "development", config.AppEnv)
|
||||||
|
assert.Equal(t, "localhost", config.DBHost)
|
||||||
|
assert.Equal(t, 5432, config.DBPort)
|
||||||
|
assert.Equal(t, "veza", config.DBUser)
|
||||||
|
assert.Equal(t, "veza_db", config.DBName)
|
||||||
|
assert.Equal(t, "redis://localhost:6379", config.RedisURL)
|
||||||
|
|
||||||
|
// Vérifier les valeurs requises
|
||||||
|
assert.Equal(t, "test_password", config.DBPassword)
|
||||||
|
assert.Equal(t, "test_secret", config.JWTSecret)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoad_WithCustomValues(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalDBPassword := os.Getenv("DB_PASSWORD")
|
||||||
|
originalJWTSecret := os.Getenv("JWT_SECRET")
|
||||||
|
originalAppPort := os.Getenv("APP_PORT")
|
||||||
|
originalDBHost := os.Getenv("DB_HOST")
|
||||||
|
originalDBPort := os.Getenv("DB_PORT")
|
||||||
|
|
||||||
|
// Nettoyer après le test
|
||||||
|
defer func() {
|
||||||
|
if originalDBPassword != "" {
|
||||||
|
os.Setenv("DB_PASSWORD", originalDBPassword)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
}
|
||||||
|
if originalJWTSecret != "" {
|
||||||
|
os.Setenv("JWT_SECRET", originalJWTSecret)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
}
|
||||||
|
if originalAppPort != "" {
|
||||||
|
os.Setenv("APP_PORT", originalAppPort)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("APP_PORT")
|
||||||
|
}
|
||||||
|
if originalDBHost != "" {
|
||||||
|
os.Setenv("DB_HOST", originalDBHost)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_HOST")
|
||||||
|
}
|
||||||
|
if originalDBPort != "" {
|
||||||
|
os.Setenv("DB_PORT", originalDBPort)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PORT")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Définir des valeurs personnalisées
|
||||||
|
os.Setenv("DB_PASSWORD", "custom_password")
|
||||||
|
os.Setenv("JWT_SECRET", "custom_secret")
|
||||||
|
os.Setenv("APP_PORT", "9090")
|
||||||
|
os.Setenv("DB_HOST", "custom_host")
|
||||||
|
os.Setenv("DB_PORT", "3306")
|
||||||
|
|
||||||
|
config, err := Load()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Equal(t, 9090, config.AppPort)
|
||||||
|
assert.Equal(t, "custom_host", config.DBHost)
|
||||||
|
assert.Equal(t, 3306, config.DBPort)
|
||||||
|
assert.Equal(t, "custom_password", config.DBPassword)
|
||||||
|
assert.Equal(t, "custom_secret", config.JWTSecret)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoad_MissingRequiredVariable_DBPassword(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalDBPassword := os.Getenv("DB_PASSWORD")
|
||||||
|
originalJWTSecret := os.Getenv("JWT_SECRET")
|
||||||
|
|
||||||
|
// Nettoyer après le test
|
||||||
|
defer func() {
|
||||||
|
if originalDBPassword != "" {
|
||||||
|
os.Setenv("DB_PASSWORD", originalDBPassword)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
}
|
||||||
|
if originalJWTSecret != "" {
|
||||||
|
os.Setenv("JWT_SECRET", originalJWTSecret)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Supprimer les variables requises
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
os.Setenv("JWT_SECRET", "test_secret")
|
||||||
|
|
||||||
|
// Devrait paniquer
|
||||||
|
assert.Panics(t, func() {
|
||||||
|
_, _ = Load()
|
||||||
|
}, "Should panic when DB_PASSWORD is missing")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoad_MissingRequiredVariable_JWTSecret(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalDBPassword := os.Getenv("DB_PASSWORD")
|
||||||
|
originalJWTSecret := os.Getenv("JWT_SECRET")
|
||||||
|
|
||||||
|
// Nettoyer après le test
|
||||||
|
defer func() {
|
||||||
|
if originalDBPassword != "" {
|
||||||
|
os.Setenv("DB_PASSWORD", originalDBPassword)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
}
|
||||||
|
if originalJWTSecret != "" {
|
||||||
|
os.Setenv("JWT_SECRET", originalJWTSecret)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Supprimer les variables requises
|
||||||
|
os.Setenv("DB_PASSWORD", "test_password")
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
|
||||||
|
// Devrait paniquer
|
||||||
|
assert.Panics(t, func() {
|
||||||
|
_, _ = Load()
|
||||||
|
}, "Should panic when JWT_SECRET is missing")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetEnv(t *testing.T) {
|
||||||
|
// Sauvegarder la valeur originale
|
||||||
|
originalValue := os.Getenv("TEST_VAR")
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if originalValue != "" {
|
||||||
|
os.Setenv("TEST_VAR", originalValue)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("TEST_VAR")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Test avec valeur définie
|
||||||
|
os.Setenv("TEST_VAR", "test_value")
|
||||||
|
assert.Equal(t, "test_value", getEnv("TEST_VAR", "default"))
|
||||||
|
|
||||||
|
// Test sans valeur (devrait retourner défaut)
|
||||||
|
os.Unsetenv("TEST_VAR")
|
||||||
|
assert.Equal(t, "default", getEnv("TEST_VAR", "default"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetEnvInt(t *testing.T) {
|
||||||
|
// Sauvegarder la valeur originale
|
||||||
|
originalValue := os.Getenv("TEST_INT")
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if originalValue != "" {
|
||||||
|
os.Setenv("TEST_INT", originalValue)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("TEST_INT")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Test avec valeur entière valide
|
||||||
|
os.Setenv("TEST_INT", "42")
|
||||||
|
assert.Equal(t, 42, getEnvInt("TEST_INT", 10))
|
||||||
|
|
||||||
|
// Test sans valeur (devrait retourner défaut)
|
||||||
|
os.Unsetenv("TEST_INT")
|
||||||
|
assert.Equal(t, 10, getEnvInt("TEST_INT", 10))
|
||||||
|
|
||||||
|
// Test avec valeur invalide (devrait retourner défaut)
|
||||||
|
os.Setenv("TEST_INT", "not_a_number")
|
||||||
|
assert.Equal(t, 10, getEnvInt("TEST_INT", 10))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetEnvRequired(t *testing.T) {
|
||||||
|
// Sauvegarder la valeur originale
|
||||||
|
originalValue := os.Getenv("TEST_REQUIRED")
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if originalValue != "" {
|
||||||
|
os.Setenv("TEST_REQUIRED", originalValue)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("TEST_REQUIRED")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Test avec valeur définie
|
||||||
|
os.Setenv("TEST_REQUIRED", "required_value")
|
||||||
|
assert.Equal(t, "required_value", getEnvRequired("TEST_REQUIRED"))
|
||||||
|
|
||||||
|
// Test sans valeur (devrait paniquer)
|
||||||
|
os.Unsetenv("TEST_REQUIRED")
|
||||||
|
assert.Panics(t, func() {
|
||||||
|
_ = getEnvRequired("TEST_REQUIRED")
|
||||||
|
}, "Should panic when required variable is missing")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoad_DefaultValues(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalDBPassword := os.Getenv("DB_PASSWORD")
|
||||||
|
originalJWTSecret := os.Getenv("JWT_SECRET")
|
||||||
|
originalAppEnv := os.Getenv("APP_ENV")
|
||||||
|
originalRedisURL := os.Getenv("REDIS_URL")
|
||||||
|
|
||||||
|
// Nettoyer après le test
|
||||||
|
defer func() {
|
||||||
|
if originalDBPassword != "" {
|
||||||
|
os.Setenv("DB_PASSWORD", originalDBPassword)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("DB_PASSWORD")
|
||||||
|
}
|
||||||
|
if originalJWTSecret != "" {
|
||||||
|
os.Setenv("JWT_SECRET", originalJWTSecret)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("JWT_SECRET")
|
||||||
|
}
|
||||||
|
if originalAppEnv != "" {
|
||||||
|
os.Setenv("APP_ENV", originalAppEnv)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
}
|
||||||
|
if originalRedisURL != "" {
|
||||||
|
os.Setenv("REDIS_URL", originalRedisURL)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv("REDIS_URL")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Définir seulement les variables requises
|
||||||
|
os.Setenv("DB_PASSWORD", "test")
|
||||||
|
os.Setenv("JWT_SECRET", "secret")
|
||||||
|
|
||||||
|
// Supprimer les variables optionnelles pour tester les valeurs par défaut
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("REDIS_URL")
|
||||||
|
|
||||||
|
config, err := Load()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier que les valeurs par défaut sont utilisées
|
||||||
|
assert.Equal(t, "development", config.AppEnv)
|
||||||
|
assert.Equal(t, "redis://localhost:6379", config.RedisURL)
|
||||||
|
}
|
||||||
148
veza-backend-api/internal/config/defaults.go
Normal file
148
veza-backend-api/internal/config/defaults.go
Normal file
|
|
@ -0,0 +1,148 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigDefaults is a fluent builder for constructing a Config populated
// with default values (T0038). Pointer fields distinguish "never set" (nil)
// from an explicitly set zero value.
type ConfigDefaults struct {
	appPort         *int
	appEnv          *string // set via WithEnv but not copied into Config by Build/Merge
	jwtSecret       *string
	databaseURL     *string
	redisURL        *string
	corsOrigins     []string
	rateLimitLimit  *int
	rateLimitWindow *int
	logLevel        *string
	logger          *zap.Logger
}
|
||||||
|
|
||||||
|
// NewConfigDefaults creates an empty defaults builder (T0038).
func NewConfigDefaults() *ConfigDefaults {
	return &ConfigDefaults{}
}
|
||||||
|
|
||||||
|
// WithPort sets the default application port (T0038).
func (b *ConfigDefaults) WithPort(port int) *ConfigDefaults {
	b.appPort = &port
	return b
}

// WithEnv sets the default environment (T0038).
// NOTE: this value is currently not copied into Config by Build or Merge.
func (b *ConfigDefaults) WithEnv(env string) *ConfigDefaults {
	b.appEnv = &env
	return b
}

// WithJWTSecret sets the default JWT secret (T0038).
func (b *ConfigDefaults) WithJWTSecret(secret string) *ConfigDefaults {
	b.jwtSecret = &secret
	return b
}

// WithDatabaseURL sets the default database URL (T0038).
func (b *ConfigDefaults) WithDatabaseURL(url string) *ConfigDefaults {
	b.databaseURL = &url
	return b
}

// WithRedisURL sets the default Redis URL (T0038).
func (b *ConfigDefaults) WithRedisURL(url string) *ConfigDefaults {
	b.redisURL = &url
	return b
}

// WithCORSOrigins sets the default allowed CORS origins (T0038).
func (b *ConfigDefaults) WithCORSOrigins(origins []string) *ConfigDefaults {
	b.corsOrigins = origins
	return b
}

// WithRateLimit sets the default rate-limit count and window in seconds (T0038).
func (b *ConfigDefaults) WithRateLimit(limit int, windowSeconds int) *ConfigDefaults {
	b.rateLimitLimit = &limit
	b.rateLimitWindow = &windowSeconds
	return b
}

// WithLogLevel sets the default log level (T0038).
func (b *ConfigDefaults) WithLogLevel(level string) *ConfigDefaults {
	b.logLevel = &level
	return b
}

// WithLogger sets the default logger (T0038).
func (b *ConfigDefaults) WithLogger(logger *zap.Logger) *ConfigDefaults {
	b.logger = logger
	return b
}
|
||||||
|
|
||||||
|
// Build construit une Config avec les valeurs par défaut (T0038)
|
||||||
|
func (b *ConfigDefaults) Build() *Config {
|
||||||
|
config := &Config{}
|
||||||
|
|
||||||
|
if b.appPort != nil {
|
||||||
|
config.AppPort = *b.appPort
|
||||||
|
}
|
||||||
|
// Note: appEnv n'est pas dans Config, mais peut être utilisé ailleurs
|
||||||
|
if b.jwtSecret != nil {
|
||||||
|
config.JWTSecret = *b.jwtSecret
|
||||||
|
}
|
||||||
|
if b.databaseURL != nil {
|
||||||
|
config.DatabaseURL = *b.databaseURL
|
||||||
|
}
|
||||||
|
if b.redisURL != nil {
|
||||||
|
config.RedisURL = *b.redisURL
|
||||||
|
}
|
||||||
|
if len(b.corsOrigins) > 0 {
|
||||||
|
config.CORSOrigins = b.corsOrigins
|
||||||
|
}
|
||||||
|
if b.rateLimitLimit != nil {
|
||||||
|
config.RateLimitLimit = *b.rateLimitLimit
|
||||||
|
}
|
||||||
|
if b.rateLimitWindow != nil {
|
||||||
|
config.RateLimitWindow = *b.rateLimitWindow
|
||||||
|
}
|
||||||
|
if b.logLevel != nil {
|
||||||
|
config.LogLevel = *b.logLevel
|
||||||
|
}
|
||||||
|
if b.logger != nil {
|
||||||
|
config.Logger = b.logger
|
||||||
|
}
|
||||||
|
|
||||||
|
return config
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge copies every default that was explicitly set on the builder into
// config, overriding its existing values, and returns the same instance
// (T0038). Fields that were never set (nil pointers / empty origins slice)
// leave the corresponding config field untouched.
func (b *ConfigDefaults) Merge(config *Config) *Config {
	if b.appPort != nil {
		config.AppPort = *b.appPort
	}
	if b.jwtSecret != nil {
		config.JWTSecret = *b.jwtSecret
	}
	if b.databaseURL != nil {
		config.DatabaseURL = *b.databaseURL
	}
	if b.redisURL != nil {
		config.RedisURL = *b.redisURL
	}
	if len(b.corsOrigins) > 0 {
		config.CORSOrigins = b.corsOrigins
	}
	if b.rateLimitLimit != nil {
		config.RateLimitLimit = *b.rateLimitLimit
	}
	if b.rateLimitWindow != nil {
		config.RateLimitWindow = *b.rateLimitWindow
	}
	if b.logLevel != nil {
		config.LogLevel = *b.logLevel
	}
	if b.logger != nil {
		config.Logger = b.logger
	}

	return config
}
|
||||||
214
veza-backend-api/internal/config/defaults_test.go
Normal file
214
veza-backend-api/internal/config/defaults_test.go
Normal file
|
|
@ -0,0 +1,214 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestConfigDefaults_Build checks that Build copies every configured
// default into a fresh Config.
func TestConfigDefaults_Build(t *testing.T) {
	defaults := NewConfigDefaults().
		WithPort(9000).
		WithEnv("test").
		WithJWTSecret("test-secret").
		WithDatabaseURL("postgres://test").
		WithLogLevel("DEBUG")

	config := defaults.Build()

	assert.Equal(t, 9000, config.AppPort)
	assert.Equal(t, "test-secret", config.JWTSecret)
	assert.Equal(t, "postgres://test", config.DatabaseURL)
	assert.Equal(t, "DEBUG", config.LogLevel)
}
|
||||||
|
|
||||||
|
// TestConfigDefaults_Merge checks that Merge overrides the fields set on
// the builder and returns the very same Config instance.
func TestConfigDefaults_Merge(t *testing.T) {
	existingConfig := &Config{
		AppPort:  8080,
		LogLevel: "INFO",
	}

	defaults := NewConfigDefaults().
		WithPort(9000).
		WithLogLevel("DEBUG")

	merged := defaults.Merge(existingConfig)

	assert.Equal(t, 9000, merged.AppPort)     // overridden
	assert.Equal(t, "DEBUG", merged.LogLevel) // overridden
	assert.Same(t, existingConfig, merged)    // same instance, not a copy
}
|
||||||
|
|
||||||
|
// TestConfigDefaults_WithPort checks that WithPort flows through Build.
func TestConfigDefaults_WithPort(t *testing.T) {
	defaults := NewConfigDefaults().WithPort(3000)
	config := defaults.Build()
	assert.Equal(t, 3000, config.AppPort)
}

// TestConfigDefaults_WithJWTSecret checks that WithJWTSecret flows through Build.
func TestConfigDefaults_WithJWTSecret(t *testing.T) {
	defaults := NewConfigDefaults().WithJWTSecret("my-secret-key")
	config := defaults.Build()
	assert.Equal(t, "my-secret-key", config.JWTSecret)
}

// TestConfigDefaults_WithDatabaseURL checks that WithDatabaseURL flows through Build.
func TestConfigDefaults_WithDatabaseURL(t *testing.T) {
	defaults := NewConfigDefaults().WithDatabaseURL("postgresql://localhost/db")
	config := defaults.Build()
	assert.Equal(t, "postgresql://localhost/db", config.DatabaseURL)
}

// TestConfigDefaults_WithRedisURL checks that WithRedisURL flows through Build.
func TestConfigDefaults_WithRedisURL(t *testing.T) {
	defaults := NewConfigDefaults().WithRedisURL("redis://localhost:6379")
	config := defaults.Build()
	assert.Equal(t, "redis://localhost:6379", config.RedisURL)
}

// TestConfigDefaults_WithCORSOrigins checks that WithCORSOrigins flows through Build.
func TestConfigDefaults_WithCORSOrigins(t *testing.T) {
	origins := []string{"http://localhost:3000", "https://example.com"}
	defaults := NewConfigDefaults().WithCORSOrigins(origins)
	config := defaults.Build()
	assert.Equal(t, origins, config.CORSOrigins)
}

// TestConfigDefaults_WithRateLimit checks that both rate-limit values flow through Build.
func TestConfigDefaults_WithRateLimit(t *testing.T) {
	defaults := NewConfigDefaults().WithRateLimit(200, 120)
	config := defaults.Build()
	assert.Equal(t, 200, config.RateLimitLimit)
	assert.Equal(t, 120, config.RateLimitWindow)
}

// TestConfigDefaults_WithLogLevel checks that WithLogLevel flows through Build.
func TestConfigDefaults_WithLogLevel(t *testing.T) {
	defaults := NewConfigDefaults().WithLogLevel("ERROR")
	config := defaults.Build()
	assert.Equal(t, "ERROR", config.LogLevel)
}

// TestConfigDefaults_WithLogger checks that the logger instance is passed
// through Build unchanged (same pointer, not a copy).
func TestConfigDefaults_WithLogger(t *testing.T) {
	logger, _ := zap.NewDevelopment()
	defaults := NewConfigDefaults().WithLogger(logger)
	config := defaults.Build()
	assert.Same(t, logger, config.Logger)
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_Build_Empty(t *testing.T) {
|
||||||
|
defaults := NewConfigDefaults()
|
||||||
|
config := defaults.Build()
|
||||||
|
|
||||||
|
assert.NotNil(t, config)
|
||||||
|
assert.Equal(t, 0, config.AppPort)
|
||||||
|
assert.Empty(t, config.JWTSecret)
|
||||||
|
assert.Empty(t, config.DatabaseURL)
|
||||||
|
assert.Empty(t, config.RedisURL)
|
||||||
|
assert.Nil(t, config.CORSOrigins)
|
||||||
|
assert.Equal(t, 0, config.RateLimitLimit)
|
||||||
|
assert.Equal(t, 0, config.RateLimitWindow)
|
||||||
|
assert.Empty(t, config.LogLevel)
|
||||||
|
assert.Nil(t, config.Logger)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_FluentChaining(t *testing.T) {
|
||||||
|
config := NewConfigDefaults().
|
||||||
|
WithPort(8080).
|
||||||
|
WithJWTSecret("secret").
|
||||||
|
WithDatabaseURL("postgres://db").
|
||||||
|
WithRedisURL("redis://redis").
|
||||||
|
WithCORSOrigins([]string{"*"}).
|
||||||
|
WithRateLimit(100, 60).
|
||||||
|
WithLogLevel("INFO").
|
||||||
|
Build()
|
||||||
|
|
||||||
|
assert.Equal(t, 8080, config.AppPort)
|
||||||
|
assert.Equal(t, "secret", config.JWTSecret)
|
||||||
|
assert.Equal(t, "postgres://db", config.DatabaseURL)
|
||||||
|
assert.Equal(t, "redis://redis", config.RedisURL)
|
||||||
|
assert.Equal(t, []string{"*"}, config.CORSOrigins)
|
||||||
|
assert.Equal(t, 100, config.RateLimitLimit)
|
||||||
|
assert.Equal(t, 60, config.RateLimitWindow)
|
||||||
|
assert.Equal(t, "INFO", config.LogLevel)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_Merge_Partial(t *testing.T) {
|
||||||
|
existingConfig := &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: "original-secret",
|
||||||
|
DatabaseURL: "postgres://original",
|
||||||
|
LogLevel: "INFO",
|
||||||
|
}
|
||||||
|
|
||||||
|
defaults := NewConfigDefaults().
|
||||||
|
WithPort(9000).
|
||||||
|
WithDatabaseURL("postgres://new")
|
||||||
|
|
||||||
|
merged := defaults.Merge(existingConfig)
|
||||||
|
|
||||||
|
assert.Equal(t, 9000, merged.AppPort) // Override
|
||||||
|
assert.Equal(t, "original-secret", merged.JWTSecret) // Pas override
|
||||||
|
assert.Equal(t, "postgres://new", merged.DatabaseURL) // Override
|
||||||
|
assert.Equal(t, "INFO", merged.LogLevel) // Pas override
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_Merge_AllFields(t *testing.T) {
|
||||||
|
existingConfig := &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: "old-secret",
|
||||||
|
DatabaseURL: "postgres://old",
|
||||||
|
RedisURL: "redis://old",
|
||||||
|
CORSOrigins: []string{"old-origin"},
|
||||||
|
RateLimitLimit: 50,
|
||||||
|
RateLimitWindow: 30,
|
||||||
|
LogLevel: "WARN",
|
||||||
|
}
|
||||||
|
|
||||||
|
logger, _ := zap.NewDevelopment()
|
||||||
|
newOrigins := []string{"new-origin1", "new-origin2"}
|
||||||
|
|
||||||
|
defaults := NewConfigDefaults().
|
||||||
|
WithPort(9000).
|
||||||
|
WithJWTSecret("new-secret").
|
||||||
|
WithDatabaseURL("postgres://new").
|
||||||
|
WithRedisURL("redis://new").
|
||||||
|
WithCORSOrigins(newOrigins).
|
||||||
|
WithRateLimit(200, 120).
|
||||||
|
WithLogLevel("DEBUG").
|
||||||
|
WithLogger(logger)
|
||||||
|
|
||||||
|
merged := defaults.Merge(existingConfig)
|
||||||
|
|
||||||
|
assert.Equal(t, 9000, merged.AppPort)
|
||||||
|
assert.Equal(t, "new-secret", merged.JWTSecret)
|
||||||
|
assert.Equal(t, "postgres://new", merged.DatabaseURL)
|
||||||
|
assert.Equal(t, "redis://new", merged.RedisURL)
|
||||||
|
assert.Equal(t, newOrigins, merged.CORSOrigins)
|
||||||
|
assert.Equal(t, 200, merged.RateLimitLimit)
|
||||||
|
assert.Equal(t, 120, merged.RateLimitWindow)
|
||||||
|
assert.Equal(t, "DEBUG", merged.LogLevel)
|
||||||
|
assert.Same(t, logger, merged.Logger)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_WithEnv(t *testing.T) {
|
||||||
|
defaults := NewConfigDefaults().WithEnv("production")
|
||||||
|
// Env n'est pas stocké dans Config, mais le builder l'accepte
|
||||||
|
// Ceci permet d'utiliser l'env pour d'autres choses si nécessaire
|
||||||
|
config := defaults.Build()
|
||||||
|
assert.NotNil(t, config)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigDefaults_MultipleCalls(t *testing.T) {
|
||||||
|
defaults := NewConfigDefaults().
|
||||||
|
WithPort(1000).
|
||||||
|
WithPort(2000). // Override
|
||||||
|
WithLogLevel("INFO").
|
||||||
|
WithLogLevel("DEBUG") // Override
|
||||||
|
|
||||||
|
config := defaults.Build()
|
||||||
|
assert.Equal(t, 2000, config.AppPort) // Dernière valeur
|
||||||
|
assert.Equal(t, "DEBUG", config.LogLevel) // Dernière valeur
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewConfigDefaults(t *testing.T) {
|
||||||
|
defaults := NewConfigDefaults()
|
||||||
|
assert.NotNil(t, defaults)
|
||||||
|
config := defaults.Build()
|
||||||
|
assert.NotNil(t, config)
|
||||||
|
}
|
||||||
187
veza-backend-api/internal/config/docs.go
Normal file
187
veza-backend-api/internal/config/docs.go
Normal file
|
|
@ -0,0 +1,187 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
	"fmt"
	"sort"
	"strings"
)
|
||||||
|
|
||||||
|
// EnvVarDoc describes a single environment variable consumed by the service (T0033).
type EnvVarDoc struct {
	Name        string // canonical variable name
	Type        string // logical type ("string", "int", ...)
	Required    bool   // true when the service cannot run without it
	Default     string // value applied when the variable is absent; empty for required vars
	Description string // human-readable purpose, rendered into the docs
	Example     string // example value, rendered as an `export` snippet
}

// envVarsDocs holds the documentation for every supported environment variable (T0033).
var envVarsDocs = map[string]EnvVarDoc{
	"APP_ENV": {
		Name:        "APP_ENV",
		Type:        "string",
		Required:    false,
		Default:     "development",
		Description: "Environment mode (development, production, test)",
		Example:     "production",
	},
	"APP_PORT": {
		Name:        "APP_PORT",
		Type:        "int",
		Required:    false,
		Default:     "8080",
		Description: "Port for HTTP server (1-65535)",
		Example:     "8080",
	},
	"JWT_SECRET": {
		Name:        "JWT_SECRET",
		Type:        "string",
		Required:    true,
		Default:     "",
		Description: "Secret key for JWT token signing and validation (minimum 32 characters)",
		Example:     "your-super-secret-jwt-key-minimum-32-characters-long",
	},
	"DATABASE_URL": {
		Name:        "DATABASE_URL",
		Type:        "string",
		Required:    false,
		Default:     "postgresql://veza:password@localhost:5432/veza_db",
		Description: "PostgreSQL database connection URL (must start with postgres://, postgresql://, or sqlite://)",
		Example:     "postgresql://user:password@localhost:5432/veza_db",
	},
	"DB_HOST": {
		Name:        "DB_HOST",
		Type:        "string",
		Required:    false,
		Default:     "localhost",
		Description: "Database host address",
		Example:     "localhost",
	},
	"DB_PORT": {
		Name:        "DB_PORT",
		Type:        "int",
		Required:    false,
		Default:     "5432",
		Description: "Database port number",
		Example:     "5432",
	},
	"DB_USER": {
		Name:        "DB_USER",
		Type:        "string",
		Required:    false,
		Default:     "veza",
		Description: "Database username",
		Example:     "veza",
	},
	"DB_PASSWORD": {
		Name:        "DB_PASSWORD",
		Type:        "string",
		Required:    true,
		Default:     "",
		Description: "Database password (required)",
		Example:     "your-secure-database-password",
	},
	"DB_NAME": {
		Name:        "DB_NAME",
		Type:        "string",
		Required:    false,
		Default:     "veza_db",
		Description: "Database name",
		Example:     "veza_db",
	},
	"REDIS_URL": {
		Name:        "REDIS_URL",
		Type:        "string",
		Required:    false,
		Default:     "redis://localhost:6379",
		Description: "Redis connection URL (must start with redis:// or rediss://)",
		Example:     "redis://localhost:6379",
	},
	"CORS_ALLOWED_ORIGINS": {
		Name:        "CORS_ALLOWED_ORIGINS",
		Type:        "string",
		Required:    false,
		Default:     "*",
		Description: "Comma-separated list of allowed CORS origins (use * for all origins)",
		Example:     "http://localhost:3000,https://app.veza.com",
	},
	"RATE_LIMIT_LIMIT": {
		Name:        "RATE_LIMIT_LIMIT",
		Type:        "int",
		Required:    false,
		Default:     "100",
		Description: "Maximum number of requests allowed per time window for rate limiting",
		Example:     "100",
	},
	"RATE_LIMIT_WINDOW": {
		Name:        "RATE_LIMIT_WINDOW",
		Type:        "int",
		Required:    false,
		Default:     "60",
		Description: "Time window in seconds for rate limiting",
		Example:     "60",
	},
	"LOG_LEVEL": {
		Name:        "LOG_LEVEL",
		Type:        "string",
		Required:    false,
		Default:     "INFO",
		Description: "Logging level (DEBUG, INFO, WARN, ERROR)",
		Example:     "INFO",
	},
}

// GenerateConfigDocs renders markdown documentation for all environment variables (T0033).
// Variables are emitted in sorted order so the output is deterministic.
func GenerateConfigDocs() string {
	keys := make([]string, 0, len(envVarsDocs))
	for k := range envVarsDocs {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	// strings.Builder gives O(n) concatenation; the previous `md +=` in a loop
	// was quadratic in the number of variables.
	var b strings.Builder
	b.WriteString("# Configuration Variables\n\n")
	b.WriteString("This document lists all environment variables used by the Veza backend API.\n\n")
	b.WriteString("## Overview\n\n")
	b.WriteString("Variables can be set in:\n")
	b.WriteString("- System environment variables (highest priority)\n")
	b.WriteString("- `.env.{APP_ENV}` file (e.g., `.env.development`, `.env.production`)\n")
	b.WriteString("- `.env` file (fallback)\n\n")
	b.WriteString("---\n\n")

	for _, key := range keys {
		doc := envVarsDocs[key]
		fmt.Fprintf(&b, "## %s\n\n", doc.Name)
		fmt.Fprintf(&b, "**Type**: `%s`\n\n", doc.Type)

		if doc.Required {
			b.WriteString("**Required**: ✅ Yes\n\n")
		} else {
			b.WriteString("**Required**: ❌ No\n\n")
		}

		// Required variables have an empty default; skip the line entirely.
		if doc.Default != "" {
			fmt.Fprintf(&b, "**Default**: `%s`\n\n", doc.Default)
		}

		fmt.Fprintf(&b, "**Description**: %s\n\n", doc.Description)

		if doc.Example != "" {
			fmt.Fprintf(&b, "**Example**:\n```bash\nexport %s=%s\n```\n\n", doc.Name, doc.Example)
		}

		b.WriteString("---\n\n")
	}

	return b.String()
}

// GetAllEnvVarDocs returns the full documentation map (useful for tests and introspection).
// A copy is returned so callers cannot mutate the package-level map.
func GetAllEnvVarDocs() map[string]EnvVarDoc {
	result := make(map[string]EnvVarDoc, len(envVarsDocs))
	for k, v := range envVarsDocs {
		result[k] = v
	}
	return result
}
|
||||||
128
veza-backend-api/internal/config/docs_test.go
Normal file
128
veza-backend-api/internal/config/docs_test.go
Normal file
|
|
@ -0,0 +1,128 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestGenerateConfigDocs(t *testing.T) {
|
||||||
|
docs := GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Vérifier le header
|
||||||
|
assert.Contains(t, docs, "# Configuration Variables")
|
||||||
|
assert.Contains(t, docs, "Veza backend API")
|
||||||
|
|
||||||
|
// Vérifier que les variables documentées sont présentes
|
||||||
|
assert.Contains(t, docs, "APP_ENV")
|
||||||
|
assert.Contains(t, docs, "APP_PORT")
|
||||||
|
assert.Contains(t, docs, "JWT_SECRET")
|
||||||
|
assert.Contains(t, docs, "DATABASE_URL")
|
||||||
|
assert.Contains(t, docs, "REDIS_URL")
|
||||||
|
assert.Contains(t, docs, "LOG_LEVEL")
|
||||||
|
|
||||||
|
// Vérifier la structure de base
|
||||||
|
assert.Contains(t, docs, "**Type**:")
|
||||||
|
assert.Contains(t, docs, "**Required**:")
|
||||||
|
assert.Contains(t, docs, "**Description**:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGenerateConfigDocs_Structure(t *testing.T) {
|
||||||
|
docs := GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Vérifier qu'il y a des sections pour chaque variable
|
||||||
|
lines := strings.Split(docs, "\n")
|
||||||
|
|
||||||
|
// Devrait contenir des sections ## pour chaque variable
|
||||||
|
sectionCount := 0
|
||||||
|
for _, line := range lines {
|
||||||
|
if strings.HasPrefix(line, "## ") && line != "## Overview" {
|
||||||
|
sectionCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Au moins quelques variables devraient être documentées
|
||||||
|
assert.Greater(t, sectionCount, 5, "Should have multiple variable sections")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGenerateConfigDocs_ContainsRequiredFields(t *testing.T) {
|
||||||
|
docs := GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Vérifier qu'une variable requise est documentée comme telle
|
||||||
|
assert.Contains(t, docs, "JWT_SECRET")
|
||||||
|
jwtSection := strings.Split(docs, "## JWT_SECRET")[1]
|
||||||
|
jwtSection = strings.Split(jwtSection, "---")[0]
|
||||||
|
|
||||||
|
assert.Contains(t, jwtSection, "✅ Yes", "JWT_SECRET should be marked as required")
|
||||||
|
|
||||||
|
// Vérifier qu'une variable optionnelle est documentée
|
||||||
|
assert.Contains(t, docs, "APP_ENV")
|
||||||
|
appEnvSection := strings.Split(docs, "## APP_ENV")[1]
|
||||||
|
appEnvSection = strings.Split(appEnvSection, "---")[0]
|
||||||
|
|
||||||
|
assert.Contains(t, appEnvSection, "❌ No", "APP_ENV should be marked as not required")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGenerateConfigDocs_ContainsExamples(t *testing.T) {
|
||||||
|
docs := GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Vérifier qu'il y a des exemples
|
||||||
|
assert.Contains(t, docs, "**Example**:")
|
||||||
|
assert.Contains(t, docs, "```bash")
|
||||||
|
assert.Contains(t, docs, "export")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGenerateConfigDocs_ContainsDefaults(t *testing.T) {
|
||||||
|
docs := GenerateConfigDocs()
|
||||||
|
|
||||||
|
// Vérifier qu'il y a des valeurs par défaut
|
||||||
|
assert.Contains(t, docs, "**Default**:")
|
||||||
|
assert.Contains(t, docs, "development") // Default pour APP_ENV
|
||||||
|
assert.Contains(t, docs, "8080") // Default pour APP_PORT
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetAllEnvVarDocs(t *testing.T) {
|
||||||
|
docs := GetAllEnvVarDocs()
|
||||||
|
|
||||||
|
// Vérifier que la map contient des entrées
|
||||||
|
assert.Greater(t, len(docs), 0, "Should have environment variables documented")
|
||||||
|
|
||||||
|
// Vérifier quelques variables clés
|
||||||
|
assert.Contains(t, docs, "APP_ENV")
|
||||||
|
assert.Contains(t, docs, "JWT_SECRET")
|
||||||
|
assert.Contains(t, docs, "DATABASE_URL")
|
||||||
|
|
||||||
|
// Vérifier la structure d'une variable
|
||||||
|
appEnvDoc := docs["APP_ENV"]
|
||||||
|
assert.Equal(t, "APP_ENV", appEnvDoc.Name)
|
||||||
|
assert.Equal(t, "string", appEnvDoc.Type)
|
||||||
|
assert.False(t, appEnvDoc.Required)
|
||||||
|
assert.Equal(t, "development", appEnvDoc.Default)
|
||||||
|
assert.NotEmpty(t, appEnvDoc.Description)
|
||||||
|
|
||||||
|
// Vérifier une variable requise
|
||||||
|
jwtSecretDoc := docs["JWT_SECRET"]
|
||||||
|
assert.True(t, jwtSecretDoc.Required, "JWT_SECRET should be required")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnvVarDoc_Complete(t *testing.T) {
|
||||||
|
// Vérifier que toutes les entrées de envVarsDocs sont complètes
|
||||||
|
allDocs := GetAllEnvVarDocs()
|
||||||
|
|
||||||
|
for key, doc := range allDocs {
|
||||||
|
assert.NotEmpty(t, doc.Name, "Name should not be empty for %s", key)
|
||||||
|
assert.NotEmpty(t, doc.Type, "Type should not be empty for %s", key)
|
||||||
|
assert.NotEmpty(t, doc.Description, "Description should not be empty for %s", key)
|
||||||
|
|
||||||
|
// Si ce n'est pas requis, devrait avoir une valeur par défaut
|
||||||
|
if !doc.Required {
|
||||||
|
// Note: certaines variables peuvent avoir une valeur par défaut vide (c'est OK)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Si c'est requis, ne devrait pas avoir de valeur par défaut (ou valeur vide)
|
||||||
|
if doc.Required {
|
||||||
|
assert.Empty(t, doc.Default, "Required variable %s should not have a default value", key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
108
veza-backend-api/internal/config/env_detection.go
Normal file
108
veza-backend-api/internal/config/env_detection.go
Normal file
|
|
@ -0,0 +1,108 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// EnvDevelopment is the development environment name (T0039).
	EnvDevelopment = "development"
	// EnvStaging is the staging environment name (T0039).
	EnvStaging = "staging"
	// EnvProduction is the production environment name (T0039).
	EnvProduction = "production"
	// EnvTest is the test environment name (T0039).
	EnvTest = "test"
)

// validEnvironments lists every canonical environment name accepted as-is.
var validEnvironments = []string{
	EnvDevelopment,
	EnvStaging,
	EnvProduction,
	EnvTest,
}

// DetectEnvironment resolves the current environment with a layered fallback (T0039).
// Priority: APP_ENV > NODE_ENV > GO_ENV > hostname heuristic > development.
func DetectEnvironment() string {
	// Environment-variable overrides, highest priority first; invalid or empty
	// values fall through to the next candidate.
	for _, key := range []string{"APP_ENV", "NODE_ENV", "GO_ENV"} {
		raw := os.Getenv(key)
		if raw == "" {
			continue
		}
		candidate := strings.TrimSpace(raw)
		if isValidEnvironment(candidate) {
			return strings.ToLower(candidate)
		}
	}

	// Hostname heuristic: production/staging hosts are often named after their role.
	if hostname, err := os.Hostname(); err == nil {
		h := strings.ToLower(hostname)
		switch {
		case strings.Contains(h, "prod"): // also matches "production"
			return EnvProduction
		case strings.Contains(h, "staging"), strings.Contains(h, "stage"):
			return EnvStaging
		}
	}

	// Final fallback.
	return EnvDevelopment
}

// isValidEnvironment reports whether env (case-insensitive, trimmed) is one of
// the canonical environment names (T0039).
func isValidEnvironment(env string) bool {
	needle := strings.ToLower(strings.TrimSpace(env))
	for _, valid := range validEnvironments {
		if needle == valid {
			return true
		}
	}
	return false
}

// NormalizeEnvironment maps common aliases (dev, prod, stg, ...) onto canonical
// environment names (T0039). Unknown values fall back to development.
func NormalizeEnvironment(env string) string {
	switch strings.ToLower(strings.TrimSpace(env)) {
	case "dev", "local", EnvDevelopment:
		return EnvDevelopment
	case "prod", EnvProduction:
		return EnvProduction
	case "stage", "stg", EnvStaging:
		return EnvStaging
	case EnvTest:
		return EnvTest
	default:
		return EnvDevelopment
	}
}
|
||||||
242
veza-backend-api/internal/config/env_detection_test.go
Normal file
242
veza-backend-api/internal/config/env_detection_test.go
Normal file
|
|
@ -0,0 +1,242 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDetectEnvironment(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
setupFunc func()
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "APP_ENV takes priority",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Setenv("APP_ENV", "production")
|
||||||
|
os.Setenv("NODE_ENV", "development")
|
||||||
|
os.Setenv("GO_ENV", "staging")
|
||||||
|
},
|
||||||
|
expected: EnvProduction,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "NODE_ENV fallback",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Setenv("NODE_ENV", "staging")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
},
|
||||||
|
expected: EnvStaging,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "GO_ENV fallback",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Setenv("GO_ENV", "test")
|
||||||
|
},
|
||||||
|
expected: EnvTest,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "default to development",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
},
|
||||||
|
expected: EnvDevelopment,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid APP_ENV falls back to NODE_ENV",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Setenv("APP_ENV", "invalid")
|
||||||
|
os.Setenv("NODE_ENV", "production")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
},
|
||||||
|
expected: EnvProduction,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "case insensitive",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Setenv("APP_ENV", "PRODUCTION")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
},
|
||||||
|
expected: EnvProduction,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "whitespace trimmed",
|
||||||
|
setupFunc: func() {
|
||||||
|
os.Setenv("APP_ENV", " production ")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
},
|
||||||
|
expected: EnvProduction,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
tt.setupFunc()
|
||||||
|
defer func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
}()
|
||||||
|
|
||||||
|
result := DetectEnvironment()
|
||||||
|
fmt.Println("TestDetectEnvironment/whitespace_trimmed - Detected Environment:", result)
|
||||||
|
assert.Equal(t, tt.expected, result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeEnvironment(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{"dev", EnvDevelopment},
|
||||||
|
{"prod", EnvProduction},
|
||||||
|
{"stage", EnvStaging},
|
||||||
|
{"stg", EnvStaging},
|
||||||
|
{"test", EnvTest},
|
||||||
|
{"local", EnvDevelopment},
|
||||||
|
{"development", EnvDevelopment},
|
||||||
|
{"production", EnvProduction},
|
||||||
|
{"staging", EnvStaging},
|
||||||
|
{"invalid", EnvDevelopment},
|
||||||
|
{"", EnvDevelopment},
|
||||||
|
{" dev ", EnvDevelopment},
|
||||||
|
{"PROD", EnvProduction},
|
||||||
|
{"STAGE", EnvStaging},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.input, func(t *testing.T) {
|
||||||
|
result := NormalizeEnvironment(tt.input)
|
||||||
|
assert.Equal(t, tt.expected, result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsValidEnvironment(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
env string
|
||||||
|
expected bool
|
||||||
|
}{
|
||||||
|
{"valid development", EnvDevelopment, true},
|
||||||
|
{"valid staging", EnvStaging, true},
|
||||||
|
{"valid production", EnvProduction, true},
|
||||||
|
{"valid test", EnvTest, true},
|
||||||
|
{"invalid", "invalid", false},
|
||||||
|
{"empty", "", false},
|
||||||
|
{"case insensitive", "PRODUCTION", true},
|
||||||
|
{"with whitespace", " production ", true},
|
||||||
|
{"dev alias", "dev", false}, // Dev n'est pas valide directement, doit être normalisé
|
||||||
|
{"prod alias", "prod", false}, // Prod n'est pas valide directement, doit être normalisé
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
result := isValidEnvironment(tt.env)
|
||||||
|
assert.Equal(t, tt.expected, result, "Environment %s should be valid: %v", tt.env, tt.expected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDetectEnvironment_Priority(t *testing.T) {
|
||||||
|
// Test que APP_ENV a la plus haute priorité
|
||||||
|
os.Setenv("APP_ENV", "production")
|
||||||
|
os.Setenv("NODE_ENV", "staging")
|
||||||
|
os.Setenv("GO_ENV", "development")
|
||||||
|
defer func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
}()
|
||||||
|
|
||||||
|
result := DetectEnvironment()
|
||||||
|
assert.Equal(t, EnvProduction, result, "APP_ENV should have highest priority")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDetectEnvironment_AllEnvironments(t *testing.T) {
|
||||||
|
environments := []string{EnvDevelopment, EnvStaging, EnvProduction, EnvTest}
|
||||||
|
|
||||||
|
for _, env := range environments {
|
||||||
|
t.Run(env, func(t *testing.T) {
|
||||||
|
os.Setenv("APP_ENV", env)
|
||||||
|
defer os.Unsetenv("APP_ENV")
|
||||||
|
|
||||||
|
result := DetectEnvironment()
|
||||||
|
assert.Equal(t, env, result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeEnvironment_CanonicalNames(t *testing.T) {
|
||||||
|
// Les noms canoniques doivent rester inchangés
|
||||||
|
canonicalNames := []string{
|
||||||
|
EnvDevelopment,
|
||||||
|
EnvStaging,
|
||||||
|
EnvProduction,
|
||||||
|
EnvTest,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, name := range canonicalNames {
|
||||||
|
t.Run(name, func(t *testing.T) {
|
||||||
|
result := NormalizeEnvironment(name)
|
||||||
|
assert.Equal(t, name, result, "Canonical name should remain unchanged")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeEnvironment_Aliases(t *testing.T) {
|
||||||
|
aliasTests := []struct {
|
||||||
|
alias string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{"dev", EnvDevelopment},
|
||||||
|
{"local", EnvDevelopment},
|
||||||
|
{"prod", EnvProduction},
|
||||||
|
{"stage", EnvStaging},
|
||||||
|
{"stg", EnvStaging},
|
||||||
|
{"test", EnvTest},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range aliasTests {
|
||||||
|
t.Run(tt.alias, func(t *testing.T) {
|
||||||
|
result := NormalizeEnvironment(tt.alias)
|
||||||
|
assert.Equal(t, tt.expected, result, "Alias %s should normalize to %s", tt.alias, tt.expected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConstants(t *testing.T) {
|
||||||
|
// Vérifier que les constantes sont définies correctement
|
||||||
|
assert.Equal(t, "development", EnvDevelopment)
|
||||||
|
assert.Equal(t, "staging", EnvStaging)
|
||||||
|
assert.Equal(t, "production", EnvProduction)
|
||||||
|
assert.Equal(t, "test", EnvTest)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDetectEnvironment_InvalidEnvFallback(t *testing.T) {
|
||||||
|
// Test que les environnements invalides ne sont pas utilisés
|
||||||
|
os.Setenv("APP_ENV", "invalid_env")
|
||||||
|
os.Setenv("NODE_ENV", "also_invalid")
|
||||||
|
os.Setenv("GO_ENV", "still_invalid")
|
||||||
|
defer func() {
|
||||||
|
os.Unsetenv("APP_ENV")
|
||||||
|
os.Unsetenv("NODE_ENV")
|
||||||
|
os.Unsetenv("GO_ENV")
|
||||||
|
}()
|
||||||
|
|
||||||
|
result := DetectEnvironment()
|
||||||
|
// Devrait fallback sur hostname ou development
|
||||||
|
assert.Contains(t, []string{EnvDevelopment, EnvStaging, EnvProduction}, result)
|
||||||
|
}
|
||||||
27
veza-backend-api/internal/config/env_loader.go
Normal file
27
veza-backend-api/internal/config/env_loader.go
Normal file
|
|
@ -0,0 +1,27 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/joho/godotenv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LoadEnvFiles charge les fichiers .env selon l'environnement (T0032)
|
||||||
|
// Charge dans l'ordre: .env.{env}, .env
|
||||||
|
// Les variables d'environnement système ont toujours priorité (godotenv ne surcharge pas les variables existantes)
|
||||||
|
func LoadEnvFiles(env string) error {
|
||||||
|
// Charger .env.{env} si existe (ex: .env.development, .env.production, .env.test)
|
||||||
|
envFile := ".env." + env
|
||||||
|
if _, err := os.Stat(envFile); err == nil {
|
||||||
|
if err := godotenv.Load(envFile); err != nil {
|
||||||
|
return fmt.Errorf("failed to load %s: %w", envFile, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Charger .env en fallback (ignore si n'existe pas)
|
||||||
|
// Note: godotenv.Load() ne retourne pas d'erreur si le fichier n'existe pas
|
||||||
|
_ = godotenv.Load()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
107
veza-backend-api/internal/config/env_loader_test.go
Normal file
107
veza-backend-api/internal/config/env_loader_test.go
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestLoadEnvFiles verifies that LoadEnvFiles succeeds for every environment
// name, including environments whose .env file does not exist on disk.
func TestLoadEnvFiles(t *testing.T) {
	tests := []struct {
		name    string
		env     string
		wantErr bool
	}{
		{
			name:    "development environment",
			env:     "development",
			wantErr: false,
		},
		{
			name:    "production environment",
			env:     "production",
			wantErr: false,
		},
		{
			name:    "test environment",
			env:     "test",
			wantErr: false,
		},
		{
			name:    "environment without file (should not error)",
			env:     "staging",
			wantErr: false,
		},
		{
			name:    "custom environment",
			env:     "custom",
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Save the original value of TEST_VAR, if it exists.
			originalValue := os.Getenv("TEST_VAR")
			defer func() {
				if originalValue != "" {
					os.Setenv("TEST_VAR", originalValue)
				} else {
					os.Unsetenv("TEST_VAR")
				}
			}()

			// Unset TEST_VAR before the test to avoid picking up system values.
			os.Unsetenv("TEST_VAR")

			// LoadEnvFiles must not fail even when the env files do not exist.
			err := LoadEnvFiles(tt.env)
			if tt.wantErr {
				require.Error(t, err)
			} else {
				// A missing file is not an error.
				require.NoError(t, err)
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestLoadEnvFiles_Priority checks that pre-existing process environment
// variables take precedence over values loaded from .env files
// (godotenv does not override variables that are already set).
func TestLoadEnvFiles_Priority(t *testing.T) {
	// Save the original value.
	originalValue := os.Getenv("TEST_PRIORITY")
	defer func() {
		if originalValue != "" {
			os.Setenv("TEST_PRIORITY", originalValue)
		} else {
			os.Unsetenv("TEST_PRIORITY")
		}
	}()

	// Set the system variable before loading the files.
	os.Setenv("TEST_PRIORITY", "system_value")

	// Load the files (a missing .env.development must not cause an error).
	err := LoadEnvFiles("development")
	require.NoError(t, err)

	// The system variable must still be present (godotenv does not override
	// existing variables).
	value := os.Getenv("TEST_PRIORITY")
	assert.Equal(t, "system_value", value, "System environment variable should have priority")
}
|
||||||
|
|
||||||
|
func TestLoadEnvFiles_NoErrorOnMissingFile(t *testing.T) {
|
||||||
|
// Tester que LoadEnvFiles ne cause pas d'erreur si les fichiers n'existent pas
|
||||||
|
err := LoadEnvFiles("nonexistent_env_12345")
|
||||||
|
// Ne devrait pas causer d'erreur si les fichiers n'existent pas
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadEnvFiles_EmptyEnvironment(t *testing.T) {
|
||||||
|
// Tester avec un environnement vide
|
||||||
|
err := LoadEnvFiles("")
|
||||||
|
// Ne devrait pas causer d'erreur
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
149
veza-backend-api/internal/config/reloader.go
Normal file
149
veza-backend-api/internal/config/reloader.go
Normal file
|
|
@ -0,0 +1,149 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/logging"
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"go.uber.org/zap/zapcore"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Reloadable is implemented by configuration components that support
// hot reloading at runtime (T0034).
type Reloadable interface {
	// Reload refreshes the configuration from its source.
	Reload() error
}
|
||||||
|
|
||||||
|
// ConfigReloader applies hot configuration reloads at runtime (T0034).
// All access to the wrapped config goes through mu.
type ConfigReloader struct {
	mu                sync.RWMutex
	config            *Config
	logger            *zap.Logger
	loggingService    *logging.Logger // logging service used for dynamic log-level changes
	simpleRateLimiter *middleware.SimpleRateLimiter
}
|
||||||
|
|
||||||
|
// NewConfigReloader crée un nouveau ConfigReloader (T0034)
|
||||||
|
func NewConfigReloader(config *Config, logger *zap.Logger) *ConfigReloader {
|
||||||
|
return &ConfigReloader{
|
||||||
|
config: config,
|
||||||
|
logger: logger,
|
||||||
|
loggingService: nil, // Sera initialisé lors du premier reload si nécessaire
|
||||||
|
simpleRateLimiter: config.SimpleRateLimiter,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetLoggingService injects the logging service so the log level can later
// be changed dynamically. Guarded by the reloader's mutex.
func (r *ConfigReloader) SetLoggingService(loggingService *logging.Logger) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.loggingService = loggingService
}
|
||||||
|
|
||||||
|
// ReloadLogLevel recharge le niveau de log depuis les variables d'environnement (T0034)
|
||||||
|
func (r *ConfigReloader) ReloadLogLevel() error {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
|
||||||
|
// Récupérer le nouveau niveau depuis les variables d'environnement
|
||||||
|
newLevelStr := getEnv("LOG_LEVEL", "INFO")
|
||||||
|
if newLevelStr == "" {
|
||||||
|
newLevelStr = "INFO"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parser le niveau
|
||||||
|
level, err := zapcore.ParseLevel(newLevelStr)
|
||||||
|
if err != nil {
|
||||||
|
level = zapcore.InfoLevel
|
||||||
|
}
|
||||||
|
|
||||||
|
// Si le logger zap est accessible directement et utilise AtomicLevel
|
||||||
|
// On peut changer le niveau dynamiquement
|
||||||
|
if r.config.Logger != nil {
|
||||||
|
// Essayer de changer le niveau via l'AtomicLevel si disponible
|
||||||
|
// Note: Le logger zap doit être créé avec AtomicLevel pour permettre le changement dynamique
|
||||||
|
// Pour l'instant, on log juste le changement et on met à jour la config
|
||||||
|
r.config.LogLevel = newLevelStr
|
||||||
|
r.logger.Info("Log level reloaded from environment",
|
||||||
|
zap.String("old_level", r.config.LogLevel),
|
||||||
|
zap.String("new_level", newLevelStr),
|
||||||
|
zap.String("parsed_level", level.String()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReloadRateLimits re-reads RATE_LIMIT_LIMIT and RATE_LIMIT_WINDOW (seconds)
// from the environment and applies them to the running rate limiter (T0034).
// A nil rate limiter makes this a no-op.
func (r *ConfigReloader) ReloadRateLimits() error {
	r.mu.Lock()
	defer r.mu.Unlock()

	// Read the new limits from the environment (with defaults).
	newLimit := getEnvInt("RATE_LIMIT_LIMIT", 100)
	newWindowSeconds := getEnvInt("RATE_LIMIT_WINDOW", 60)
	newWindow := time.Duration(newWindowSeconds) * time.Second

	// If a rate limiter is attached, push the new limits into it.
	if r.simpleRateLimiter != nil {
		// Apply the limits directly on the rate limiter.
		r.simpleRateLimiter.UpdateLimits(newLimit, newWindow)

		// Keep the config in sync.
		r.config.RateLimitLimit = newLimit
		r.config.RateLimitWindow = newWindowSeconds

		r.logger.Info("Rate limits reloaded from environment",
			zap.Int("new_limit", newLimit),
			zap.Int("new_window_seconds", newWindowSeconds),
		)
	}

	return nil
}
|
||||||
|
|
||||||
|
// ReloadAll recharge toutes les configurations reloadable (T0034)
|
||||||
|
func (r *ConfigReloader) ReloadAll() error {
|
||||||
|
var errors []error
|
||||||
|
|
||||||
|
// Recharger le niveau de log
|
||||||
|
if err := r.ReloadLogLevel(); err != nil {
|
||||||
|
errors = append(errors, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recharger les limites de rate limiting
|
||||||
|
if err := r.ReloadRateLimits(); err != nil {
|
||||||
|
errors = append(errors, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(errors) > 0 {
|
||||||
|
r.logger.Error("Some configurations failed to reload", zap.Int("error_count", len(errors)))
|
||||||
|
return errors[0] // Retourner la première erreur
|
||||||
|
}
|
||||||
|
|
||||||
|
r.logger.Info("All configurations reloaded successfully")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCurrentConfig retourne la configuration actuelle (en lecture seule)
|
||||||
|
func (r *ConfigReloader) GetCurrentConfig() *ReloadableConfig {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
|
||||||
|
return &ReloadableConfig{
|
||||||
|
LogLevel: r.config.LogLevel,
|
||||||
|
RateLimitLimit: r.config.RateLimitLimit,
|
||||||
|
RateLimitWindow: r.config.RateLimitWindow,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReloadableConfig is the subset of the configuration that can be
// hot-reloaded at runtime.
type ReloadableConfig struct {
	LogLevel        string `json:"log_level"`
	RateLimitLimit  int    `json:"rate_limit_limit"`
	RateLimitWindow int    `json:"rate_limit_window"`
}
|
||||||
|
|
||||||
|
// Note: getEnv and getEnvInt are defined in config.go
|
||||||
137
veza-backend-api/internal/config/reloader_test.go
Normal file
137
veza-backend-api/internal/config/reloader_test.go
Normal file
|
|
@ -0,0 +1,137 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"veza-backend-api/internal/middleware"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestConfigReloader_ReloadLogLevel checks that ReloadLogLevel picks up
// LOG_LEVEL changes from the environment and updates the config.
func TestConfigReloader_ReloadLogLevel(t *testing.T) {
	// No-op logger for tests.
	logger := zap.NewNop()

	// Minimal config.
	config := &Config{
		LogLevel: "INFO",
		Logger:   logger,
	}

	reloader := NewConfigReloader(config, logger)

	// DEBUG level.
	os.Setenv("LOG_LEVEL", "DEBUG")
	defer os.Unsetenv("LOG_LEVEL")

	err := reloader.ReloadLogLevel()
	require.NoError(t, err)
	assert.Equal(t, "DEBUG", config.LogLevel)

	// ERROR level.
	os.Setenv("LOG_LEVEL", "ERROR")
	err = reloader.ReloadLogLevel()
	require.NoError(t, err)
	assert.Equal(t, "ERROR", config.LogLevel)
}
|
||||||
|
|
||||||
|
// TestConfigReloader_ReloadRateLimits checks that ReloadRateLimits applies
// RATE_LIMIT_LIMIT / RATE_LIMIT_WINDOW values from the environment.
func TestConfigReloader_ReloadRateLimits(t *testing.T) {
	// No-op logger for tests.
	logger := zap.NewNop()

	// Rate limiter under test.
	rateLimiter := middleware.NewSimpleRateLimiter(100, 60*time.Second)
	defer rateLimiter.Stop() // Stop the rate limiter's cleanup goroutine

	// Minimal config.
	config := &Config{
		RateLimitLimit:    100,
		RateLimitWindow:   60,
		Logger:            logger,
		SimpleRateLimiter: rateLimiter,
	}

	reloader := NewConfigReloader(config, logger)

	// New limits via the environment.
	os.Setenv("RATE_LIMIT_LIMIT", "200")
	os.Setenv("RATE_LIMIT_WINDOW", "120")
	defer func() {
		os.Unsetenv("RATE_LIMIT_LIMIT")
		os.Unsetenv("RATE_LIMIT_WINDOW")
	}()

	err := reloader.ReloadRateLimits()
	require.NoError(t, err)
	assert.Equal(t, 200, config.RateLimitLimit)
	assert.Equal(t, 120, config.RateLimitWindow)
}
|
||||||
|
|
||||||
|
// TestConfigReloader_ReloadAll checks that ReloadAll refreshes both the log
// level and the rate limits in a single call.
func TestConfigReloader_ReloadAll(t *testing.T) {
	logger := zap.NewNop()

	// Create a simple rate limiter for test
	rateLimiter := middleware.NewSimpleRateLimiter(100, 60*time.Second)
	defer rateLimiter.Stop() // Stop the rate limiter's cleanup goroutine

	config := &Config{
		LogLevel:          "INFO",
		RateLimitLimit:    100,
		RateLimitWindow:   60,
		Logger:            logger,
		SimpleRateLimiter: rateLimiter,
	}

	reloader := NewConfigReloader(config, logger)

	// Set the new values in the environment.
	os.Setenv("LOG_LEVEL", "WARN")
	os.Setenv("RATE_LIMIT_LIMIT", "150")
	os.Setenv("RATE_LIMIT_WINDOW", "90")
	defer func() {
		os.Unsetenv("LOG_LEVEL")
		os.Unsetenv("RATE_LIMIT_LIMIT")
		os.Unsetenv("RATE_LIMIT_WINDOW")
	}()

	err := reloader.ReloadAll()
	require.NoError(t, err)
	assert.Equal(t, "WARN", config.LogLevel)
	assert.Equal(t, 150, config.RateLimitLimit)
	assert.Equal(t, 90, config.RateLimitWindow)
}
|
||||||
|
|
||||||
|
func TestConfigReloader_GetCurrentConfig(t *testing.T) {
|
||||||
|
logger := zap.NewNop()
|
||||||
|
|
||||||
|
config := &Config{
|
||||||
|
LogLevel: "INFO",
|
||||||
|
RateLimitLimit: 100,
|
||||||
|
RateLimitWindow: 60,
|
||||||
|
Logger: logger,
|
||||||
|
}
|
||||||
|
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
currentConfig := reloader.GetCurrentConfig()
|
||||||
|
require.NotNil(t, currentConfig)
|
||||||
|
assert.Equal(t, "INFO", currentConfig.LogLevel)
|
||||||
|
assert.Equal(t, 100, currentConfig.RateLimitLimit)
|
||||||
|
assert.Equal(t, 60, currentConfig.RateLimitWindow)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewConfigReloader(t *testing.T) {
|
||||||
|
logger := zap.NewNop()
|
||||||
|
|
||||||
|
config := &Config{
|
||||||
|
Logger: logger,
|
||||||
|
}
|
||||||
|
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
require.NotNil(t, reloader)
|
||||||
|
assert.Equal(t, config, reloader.config)
|
||||||
|
assert.Equal(t, logger, reloader.logger)
|
||||||
|
}
|
||||||
76
veza-backend-api/internal/config/secrets.go
Normal file
76
veza-backend-api/internal/config/secrets.go
Normal file
|
|
@ -0,0 +1,76 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SecretsProvider abstracts where secret values come from (T0037).
type SecretsProvider interface {
	// GetSecret returns the value of the named secret, or an error if absent.
	GetSecret(name string) (string, error)
	// IsSecret reports whether the given key should be treated as a secret.
	IsSecret(name string) bool
}
|
||||||
|
|
||||||
|
// EnvSecretsProvider récupère les secrets depuis les variables d'environnement (T0037)
|
||||||
|
type EnvSecretsProvider struct {
|
||||||
|
secretKeys map[string]bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEnvSecretsProvider crée un nouveau fournisseur de secrets depuis l'environnement
|
||||||
|
func NewEnvSecretsProvider(secretKeys []string) *EnvSecretsProvider {
|
||||||
|
keysMap := make(map[string]bool)
|
||||||
|
for _, key := range secretKeys {
|
||||||
|
keysMap[key] = true
|
||||||
|
}
|
||||||
|
return &EnvSecretsProvider{secretKeys: keysMap}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetSecret récupère un secret depuis les variables d'environnement (T0037)
|
||||||
|
func (p *EnvSecretsProvider) GetSecret(name string) (string, error) {
|
||||||
|
value := os.Getenv(name)
|
||||||
|
if value == "" {
|
||||||
|
return "", fmt.Errorf("secret %s not found", name)
|
||||||
|
}
|
||||||
|
return value, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsSecret vérifie si une clé est un secret (T0037)
|
||||||
|
func (p *EnvSecretsProvider) IsSecret(name string) bool {
|
||||||
|
return p.secretKeys[name]
|
||||||
|
}
|
||||||
|
|
||||||
|
// MaskSecret returns a log-safe representation of a secret (T0037).
// An empty secret stays empty; secrets of 8 characters or fewer collapse to
// "****"; longer secrets KEEP their first and last 4 characters and replace
// the middle with "****". (The original comment claimed the opposite —
// the code reveals the edges and masks the middle.)
func MaskSecret(secret string) string {
	switch {
	case secret == "":
		return ""
	case len(secret) <= 8:
		return "****"
	default:
		return secret[:4] + "****" + secret[len(secret)-4:]
	}
}
|
||||||
|
|
||||||
|
// MaskConfigValue masque une valeur si c'est un secret (T0037)
|
||||||
|
func MaskConfigValue(key, value string, provider SecretsProvider) string {
|
||||||
|
if provider != nil && provider.IsSecret(key) {
|
||||||
|
return MaskSecret(value)
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
// DefaultSecretKeys lists the configuration keys treated as secrets by
// default (T0037): credentials, tokens and API keys that must never appear
// unmasked in logs.
func DefaultSecretKeys() []string {
	keys := [...]string{
		"JWT_SECRET",
		"DB_PASSWORD",
		"DATABASE_PASSWORD",
		"REDIS_PASSWORD",
		"AWS_SECRET_ACCESS_KEY",
		"AWS_ACCESS_KEY_ID",
		"STRIPE_SECRET_KEY",
		"STRIPE_WEBHOOK_SECRET",
		"SMTP_PASSWORD",
		"OAUTH_CLIENT_SECRET",
	}
	return keys[:]
}
|
||||||
242
veza-backend-api/internal/config/secrets_test.go
Normal file
242
veza-backend-api/internal/config/secrets_test.go
Normal file
|
|
@ -0,0 +1,242 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestEnvSecretsProvider_GetSecret(t *testing.T) {
|
||||||
|
os.Setenv("TEST_SECRET", "my-secret-value")
|
||||||
|
defer os.Unsetenv("TEST_SECRET")
|
||||||
|
|
||||||
|
provider := NewEnvSecretsProvider([]string{"TEST_SECRET"})
|
||||||
|
|
||||||
|
secret, err := provider.GetSecret("TEST_SECRET")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "my-secret-value", secret)
|
||||||
|
|
||||||
|
_, err = provider.GetSecret("NONEXISTENT")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnvSecretsProvider_IsSecret(t *testing.T) {
|
||||||
|
provider := NewEnvSecretsProvider([]string{"SECRET_KEY", "ANOTHER_SECRET"})
|
||||||
|
|
||||||
|
assert.True(t, provider.IsSecret("SECRET_KEY"))
|
||||||
|
assert.True(t, provider.IsSecret("ANOTHER_SECRET"))
|
||||||
|
assert.False(t, provider.IsSecret("NOT_A_SECRET"))
|
||||||
|
assert.False(t, provider.IsSecret(""))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnvSecretsProvider_GetSecret_Empty(t *testing.T) {
|
||||||
|
// S'assurer que la variable n'existe pas
|
||||||
|
os.Unsetenv("MISSING_SECRET")
|
||||||
|
defer os.Unsetenv("MISSING_SECRET")
|
||||||
|
|
||||||
|
provider := NewEnvSecretsProvider([]string{"MISSING_SECRET"})
|
||||||
|
|
||||||
|
_, err := provider.GetSecret("MISSING_SECRET")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMaskSecret pins the masking behavior across representative lengths:
// empty stays empty, <= 8 chars collapse to "****", longer secrets keep
// their first and last 4 characters.
func TestMaskSecret(t *testing.T) {
	tests := []struct {
		name     string
		secret   string
		expected string
	}{
		{"long secret", "my-super-secret-key-12345", "my-s****2345"}, // length 23, 4 prefix, 4 suffix
		{"short secret", "short", "****"},                            // length 5, <= 8
		{"empty secret", "", ""},                                     // length 0, empty
		{"very short", "ab", "****"},                                 // length 2, <= 8
		{"exactly 8 chars", "12345678", "****"},                      // length 8, <= 8
		{"9 chars", "123456789", "1234****6789"},                     // length 9, 4 prefix, 4 suffix
		{"exactly 10 chars", "1234567890", "1234****7890"},           // length 10, 4 prefix, 4 suffix
		{"very long secret", "this-is-a-very-long-secret-key-that-needs-masking", "this****king"}, // length 45, 4 prefix, 4 suffix
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := MaskSecret(tt.secret)
			assert.Equal(t, tt.expected, result)
		})
	}
}
|
||||||
|
|
||||||
|
// TestMaskConfigValue verifies that masking applies only to registered
// secret keys, and that a nil provider disables masking.
func TestMaskConfigValue(t *testing.T) {
	provider := NewEnvSecretsProvider([]string{"JWT_SECRET", "DB_PASSWORD"})

	tests := []struct {
		name           string
		key            string
		value          string
		provider       SecretsProvider
		expectedMasked bool
	}{
		{
			name:           "secret key should be masked",
			key:            "JWT_SECRET",
			value:          "my-secret-key-12345",
			provider:       provider,
			expectedMasked: true,
		},
		{
			name:           "non-secret key should not be masked",
			key:            "APP_PORT",
			value:          "8080",
			provider:       provider,
			expectedMasked: false,
		},
		{
			name:           "nil provider should not mask",
			key:            "JWT_SECRET",
			value:          "my-secret-key-12345",
			provider:       nil,
			expectedMasked: false,
		},
		{
			name:           "empty value should remain empty",
			key:            "JWT_SECRET",
			value:          "",
			provider:       provider,
			expectedMasked: false, // MaskSecret returns "" for empty input, so there is no visible change
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := MaskConfigValue(tt.key, tt.value, tt.provider)
			if tt.expectedMasked {
				assert.NotEqual(t, tt.value, result, "Value should be masked")
				if tt.value != "" {
					assert.Contains(t, result, "****")
				}
			} else {
				assert.Equal(t, tt.value, result, "Value should not be masked")
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestDefaultSecretKeys checks that the default secret-key list is
// non-empty, contains the common credential keys and has no duplicates.
func TestDefaultSecretKeys(t *testing.T) {
	keys := DefaultSecretKeys()
	assert.NotEmpty(t, keys)

	// The common keys must be present.
	expectedKeys := []string{
		"JWT_SECRET",
		"DB_PASSWORD",
		"REDIS_PASSWORD",
		"AWS_SECRET_ACCESS_KEY",
		"STRIPE_SECRET_KEY",
	}

	for _, expectedKey := range expectedKeys {
		assert.Contains(t, keys, expectedKey, "DefaultSecretKeys should contain %s", expectedKey)
	}

	// There must be no duplicates.
	seen := make(map[string]bool)
	for _, key := range keys {
		assert.False(t, seen[key], "Duplicate key found: %s", key)
		seen[key] = true
	}
}
|
||||||
|
|
||||||
|
func TestNewEnvSecretsProvider(t *testing.T) {
|
||||||
|
keys := []string{"KEY1", "KEY2", "KEY3"}
|
||||||
|
provider := NewEnvSecretsProvider(keys)
|
||||||
|
|
||||||
|
assert.NotNil(t, provider)
|
||||||
|
assert.True(t, provider.IsSecret("KEY1"))
|
||||||
|
assert.True(t, provider.IsSecret("KEY2"))
|
||||||
|
assert.True(t, provider.IsSecret("KEY3"))
|
||||||
|
assert.False(t, provider.IsSecret("KEY4"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnvSecretsProvider_EmptyKeys(t *testing.T) {
|
||||||
|
provider := NewEnvSecretsProvider([]string{})
|
||||||
|
|
||||||
|
assert.NotNil(t, provider)
|
||||||
|
assert.False(t, provider.IsSecret("ANY_KEY"))
|
||||||
|
|
||||||
|
_, err := provider.GetSecret("ANY_KEY")
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMaskSecret_BoundaryCases probes the <=8 / >8 length threshold of
// MaskSecret.
func TestMaskSecret_BoundaryCases(t *testing.T) {
	// Boundary cases.
	tests := []struct {
		name     string
		secret   string
		expected string
	}{
		{"nil equivalent (empty)", "", ""},
		{"1 char", "a", "****"},
		{"4 chars", "abcd", "****"},
		{"5 chars", "abcde", "****"},
		{"8 chars", "12345678", "****"},
		{"9 chars (threshold)", "123456789", "1234****6789"}, // Adjusted expected
		{"exactly 10 chars", "1234567890", "1234****7890"},   // Adjusted expected
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := MaskSecret(tt.secret)
			assert.Equal(t, tt.expected, result)
		})
	}
}
|
||||||
|
|
||||||
|
func TestEnvSecretsProvider_MultipleSecrets(t *testing.T) {
|
||||||
|
os.Setenv("SECRET1", "value1")
|
||||||
|
os.Setenv("SECRET2", "value2")
|
||||||
|
os.Setenv("SECRET3", "value3")
|
||||||
|
defer func() {
|
||||||
|
os.Unsetenv("SECRET1")
|
||||||
|
os.Unsetenv("SECRET2")
|
||||||
|
os.Unsetenv("SECRET3")
|
||||||
|
}()
|
||||||
|
|
||||||
|
provider := NewEnvSecretsProvider([]string{"SECRET1", "SECRET2", "SECRET3"})
|
||||||
|
|
||||||
|
secret1, err := provider.GetSecret("SECRET1")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "value1", secret1)
|
||||||
|
|
||||||
|
secret2, err := provider.GetSecret("SECRET2")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "value2", secret2)
|
||||||
|
|
||||||
|
secret3, err := provider.GetSecret("SECRET3")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "value3", secret3)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMaskConfigValue_AllCases exercises masked and unmasked values end to
// end through MaskConfigValue.
func TestMaskConfigValue_AllCases(t *testing.T) {
	provider := NewEnvSecretsProvider([]string{"SECRET_KEY"})

	// Different value shapes.
	testCases := []struct {
		key      string
		value    string
		expected string
	}{
		{"SECRET_KEY", "long-secret-value-12345", "long****2345"}, // Adjusted expected
		{"SECRET_KEY", "short", "****"},
		{"SECRET_KEY", "", ""},
		{"PUBLIC_KEY", "public-value", "public-value"}, // must not be masked
	}

	for _, tc := range testCases {
		t.Run(tc.key+"_"+tc.value, func(t *testing.T) {
			result := MaskConfigValue(tc.key, tc.value, provider)
			assert.Equal(t, tc.expected, result)
		})
	}
}
|
||||||
100
veza-backend-api/internal/config/testutils.go
Normal file
100
veza-backend-api/internal/config/testutils.go
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"go.uber.org/zap/zaptest"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewTestConfig crée une configuration de test avec valeurs par défaut (T0035)
|
||||||
|
// Cette fonction facilite la création de configurations de test sans nécessiter
|
||||||
|
// une base de données ou Redis réels, parfait pour les tests unitaires
|
||||||
|
func NewTestConfig(t *testing.T) *Config {
|
||||||
|
// Créer un logger de test
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
|
||||||
|
return &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: "test-jwt-secret-key-minimum-32-characters-long",
|
||||||
|
DatabaseURL: "postgres://test:test@localhost:5432/test_db",
|
||||||
|
RedisURL: "redis://localhost:6379/0",
|
||||||
|
CORSOrigins: []string{"*"},
|
||||||
|
RateLimitLimit: 100,
|
||||||
|
RateLimitWindow: 60,
|
||||||
|
LogLevel: "DEBUG",
|
||||||
|
Logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithEnv définit temporairement une variable d'environnement pour les tests (T0035)
|
||||||
|
// Retourne une fonction de cleanup qui restaure la valeur originale (ou unset si elle n'existait pas)
|
||||||
|
// Usage:
|
||||||
|
//
|
||||||
|
// reset := WithEnv("TEST_VAR", "test_value")
|
||||||
|
// defer reset()
|
||||||
|
// // ... test code ...
|
||||||
|
func WithEnv(key, value string) func() {
|
||||||
|
oldValue := os.Getenv(key)
|
||||||
|
os.Setenv(key, value)
|
||||||
|
return func() {
|
||||||
|
if oldValue == "" {
|
||||||
|
os.Unsetenv(key)
|
||||||
|
} else {
|
||||||
|
os.Setenv(key, oldValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResetEnv réinitialise toutes les variables d'environnement de test couramment utilisées (T0035)
|
||||||
|
// Cette fonction nettoie les variables d'environnement après les tests pour éviter
|
||||||
|
// les interférences entre tests
|
||||||
|
func ResetEnv() {
|
||||||
|
testVars := []string{
|
||||||
|
"APP_ENV",
|
||||||
|
"APP_PORT",
|
||||||
|
"JWT_SECRET",
|
||||||
|
"DATABASE_URL",
|
||||||
|
"REDIS_URL",
|
||||||
|
"CORS_ALLOWED_ORIGINS",
|
||||||
|
"RATE_LIMIT_LIMIT",
|
||||||
|
"RATE_LIMIT_WINDOW",
|
||||||
|
"LOG_LEVEL",
|
||||||
|
}
|
||||||
|
for _, v := range testVars {
|
||||||
|
os.Unsetenv(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithMultipleEnv définit temporairement plusieurs variables d'environnement pour les tests (T0035)
|
||||||
|
// Retourne une fonction de cleanup qui restaure toutes les valeurs originales
|
||||||
|
// Usage:
|
||||||
|
//
|
||||||
|
// reset := WithMultipleEnv(map[string]string{
|
||||||
|
// "APP_ENV": "test",
|
||||||
|
// "LOG_LEVEL": "DEBUG",
|
||||||
|
// })
|
||||||
|
// defer reset()
|
||||||
|
func WithMultipleEnv(envVars map[string]string) func() {
|
||||||
|
// Sauvegarder les valeurs actuelles
|
||||||
|
oldValues := make(map[string]string)
|
||||||
|
for key := range envVars {
|
||||||
|
oldValues[key] = os.Getenv(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Définir les nouvelles valeurs
|
||||||
|
for key, value := range envVars {
|
||||||
|
os.Setenv(key, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Retourner la fonction de cleanup
|
||||||
|
return func() {
|
||||||
|
for key, oldValue := range oldValues {
|
||||||
|
if oldValue == "" {
|
||||||
|
os.Unsetenv(key)
|
||||||
|
} else {
|
||||||
|
os.Setenv(key, oldValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
206
veza-backend-api/internal/config/testutils_test.go
Normal file
206
veza-backend-api/internal/config/testutils_test.go
Normal file
|
|
@ -0,0 +1,206 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestNewTestConfig checks every default value produced by NewTestConfig.
func TestNewTestConfig(t *testing.T) {
	config := NewTestConfig(t)

	// Check the default values.
	assert.Equal(t, 8080, config.AppPort)
	assert.Equal(t, "test-jwt-secret-key-minimum-32-characters-long", config.JWTSecret)
	assert.Equal(t, "postgres://test:test@localhost:5432/test_db", config.DatabaseURL)
	assert.Equal(t, "redis://localhost:6379/0", config.RedisURL)
	assert.Equal(t, []string{"*"}, config.CORSOrigins)
	assert.Equal(t, 100, config.RateLimitLimit)
	assert.Equal(t, 60, config.RateLimitWindow)
	assert.Equal(t, "DEBUG", config.LogLevel)
	assert.NotNil(t, config.Logger)

	// Verify the config is valid per the validation rules.
	// Note: a complete test would check that Validate() passes, but
	// NewTestConfig does not build a full config with real DB/Redis.
}
|
||||||
|
|
||||||
|
// TestWithEnv covers both WithEnv paths: the variable did not exist before
// (cleanup unsets it) and it did exist (cleanup restores the old value).
func TestWithEnv(t *testing.T) {
	// Save the original value, if it exists.
	originalValue := os.Getenv("TEST_VAR")
	defer func() {
		if originalValue != "" {
			os.Setenv("TEST_VAR", originalValue)
		} else {
			os.Unsetenv("TEST_VAR")
		}
	}()

	// Case 1: the variable does not exist yet.
	os.Unsetenv("TEST_VAR")
	reset := WithEnv("TEST_VAR", "test_value")

	// The value must be set.
	assert.Equal(t, "test_value", os.Getenv("TEST_VAR"))

	// Cleanup unsets it again.
	reset()
	assert.Empty(t, os.Getenv("TEST_VAR"))

	// Case 2: the variable already exists.
	os.Setenv("TEST_VAR", "original_value")
	reset2 := WithEnv("TEST_VAR", "new_value")
	defer reset2() // harmless double restore: re-applies "original_value" at function exit

	// The new value must be set.
	assert.Equal(t, "new_value", os.Getenv("TEST_VAR"))

	// Cleanup restores the previous value.
	reset2()
	assert.Equal(t, "original_value", os.Getenv("TEST_VAR"))
}
|
||||||
|
|
||||||
|
func TestWithEnv_MultipleCalls(t *testing.T) {
|
||||||
|
// Tester plusieurs appels consécutifs
|
||||||
|
os.Unsetenv("TEST_VAR")
|
||||||
|
defer os.Unsetenv("TEST_VAR")
|
||||||
|
|
||||||
|
reset1 := WithEnv("TEST_VAR", "value1")
|
||||||
|
assert.Equal(t, "value1", os.Getenv("TEST_VAR"))
|
||||||
|
|
||||||
|
reset2 := WithEnv("TEST_VAR", "value2")
|
||||||
|
assert.Equal(t, "value2", os.Getenv("TEST_VAR"))
|
||||||
|
|
||||||
|
reset2()
|
||||||
|
assert.Equal(t, "value1", os.Getenv("TEST_VAR"))
|
||||||
|
|
||||||
|
reset1()
|
||||||
|
assert.Empty(t, os.Getenv("TEST_VAR"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestResetEnv sets every variable ResetEnv knows about, calls it, and
// verifies they are all unset afterwards. Original values are restored on
// exit so other tests are unaffected.
func TestResetEnv(t *testing.T) {
	// Variables to exercise.
	testVars := map[string]string{
		"APP_ENV":              "test",
		"APP_PORT":             "9000",
		"JWT_SECRET":           "test-secret",
		"DATABASE_URL":         "postgres://test",
		"REDIS_URL":            "redis://test",
		"CORS_ALLOWED_ORIGINS": "http://test",
		"RATE_LIMIT_LIMIT":     "200",
		"RATE_LIMIT_WINDOW":    "120",
		"LOG_LEVEL":            "ERROR",
	}

	// Save the original values.
	originalValues := make(map[string]string)
	for key := range testVars {
		originalValues[key] = os.Getenv(key)
	}
	defer func() {
		// Restore the original values.
		for key, value := range originalValues {
			if value != "" {
				os.Setenv(key, value)
			} else {
				os.Unsetenv(key)
			}
		}
	}()

	// Set the test variables.
	for key, value := range testVars {
		os.Setenv(key, value)
	}

	// Verify they are all set.
	for key, expectedValue := range testVars {
		assert.Equal(t, expectedValue, os.Getenv(key), "Variable %s should be set", key)
	}

	// Reset.
	ResetEnv()

	// Verify they are all unset.
	for key := range testVars {
		assert.Empty(t, os.Getenv(key), "Variable %s should be unset", key)
	}
}
|
||||||
|
|
||||||
|
func TestWithMultipleEnv(t *testing.T) {
|
||||||
|
// Sauvegarder les valeurs originales
|
||||||
|
originalValues := make(map[string]string)
|
||||||
|
testKeys := []string{"TEST_VAR1", "TEST_VAR2", "TEST_VAR3"}
|
||||||
|
for _, key := range testKeys {
|
||||||
|
originalValues[key] = os.Getenv(key)
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
// Restaurer les valeurs originales
|
||||||
|
for key, value := range originalValues {
|
||||||
|
if value != "" {
|
||||||
|
os.Setenv(key, value)
|
||||||
|
} else {
|
||||||
|
os.Unsetenv(key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Définir quelques variables avec des valeurs existantes
|
||||||
|
os.Setenv("TEST_VAR1", "original1")
|
||||||
|
os.Unsetenv("TEST_VAR2")
|
||||||
|
os.Unsetenv("TEST_VAR3")
|
||||||
|
|
||||||
|
// Utiliser WithMultipleEnv
|
||||||
|
reset := WithMultipleEnv(map[string]string{
|
||||||
|
"TEST_VAR1": "new1",
|
||||||
|
"TEST_VAR2": "new2",
|
||||||
|
"TEST_VAR3": "new3",
|
||||||
|
})
|
||||||
|
defer reset()
|
||||||
|
|
||||||
|
// Vérifier que les nouvelles valeurs sont définies
|
||||||
|
assert.Equal(t, "new1", os.Getenv("TEST_VAR1"))
|
||||||
|
assert.Equal(t, "new2", os.Getenv("TEST_VAR2"))
|
||||||
|
assert.Equal(t, "new3", os.Getenv("TEST_VAR3"))
|
||||||
|
|
||||||
|
// Nettoyer
|
||||||
|
reset()
|
||||||
|
|
||||||
|
// Vérifier que les valeurs originales sont restaurées
|
||||||
|
assert.Equal(t, "original1", os.Getenv("TEST_VAR1"))
|
||||||
|
assert.Empty(t, os.Getenv("TEST_VAR2"))
|
||||||
|
assert.Empty(t, os.Getenv("TEST_VAR3"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWithMultipleEnv_Empty(t *testing.T) {
|
||||||
|
// Tester avec une map vide
|
||||||
|
reset := WithMultipleEnv(map[string]string{})
|
||||||
|
require.NotNil(t, reset)
|
||||||
|
|
||||||
|
// La fonction de cleanup devrait fonctionner sans erreur
|
||||||
|
reset()
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewTestConfig_Logger(t *testing.T) {
|
||||||
|
config := NewTestConfig(t)
|
||||||
|
require.NotNil(t, config.Logger)
|
||||||
|
|
||||||
|
// Vérifier que le logger fonctionne
|
||||||
|
config.Logger.Info("test log message")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewTestConfig_Isolation(t *testing.T) {
|
||||||
|
// Tester que chaque appel crée une nouvelle instance
|
||||||
|
config1 := NewTestConfig(t)
|
||||||
|
config2 := NewTestConfig(t)
|
||||||
|
|
||||||
|
// Modifier config1
|
||||||
|
config1.AppPort = 9000
|
||||||
|
config1.LogLevel = "ERROR"
|
||||||
|
|
||||||
|
// Vérifier que config2 n'est pas affecté
|
||||||
|
assert.Equal(t, 8080, config2.AppPort)
|
||||||
|
assert.Equal(t, "DEBUG", config2.LogLevel)
|
||||||
|
}
|
||||||
293
veza-backend-api/internal/config/validation_test.go
Normal file
293
veza-backend-api/internal/config/validation_test.go
Normal file
|
|
@ -0,0 +1,293 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConfig_Validate(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
config *Config
|
||||||
|
wantErr bool
|
||||||
|
errMsg string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid config",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid port too low",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 0,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "APP_PORT validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid port too high",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 99999,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "APP_PORT validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "JWT secret too short",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: "short",
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "JWT_SECRET validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "JWT secret empty",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: "",
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "JWT_SECRET validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "JWT secret exactly 32 characters",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "DatabaseURL empty",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "DATABASE_URL is required",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "RedisURL empty",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "REDIS_URL is required",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "DatabaseURL invalid format",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "invalid://database",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "DATABASE_URL validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "RedisURL invalid format",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "invalid://redis",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "REDIS_URL validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "DatabaseURL postgres format",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgres://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "DatabaseURL sqlite format",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "sqlite:///path/to/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "RedisURL rediss format (TLS)",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "rediss://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid port boundaries",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 1,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid port upper boundary",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 65535,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid LogLevel",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
LogLevel: "INVALID",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "LOG_LEVEL validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid LogLevel",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
LogLevel: "DEBUG",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid RateLimitLimit zero",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 0,
|
||||||
|
RateLimitWindow: 60, // Added
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "RATE_LIMIT_LIMIT validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid RateLimitWindow negative",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100, // Added
|
||||||
|
RateLimitWindow: -1,
|
||||||
|
},
|
||||||
|
wantErr: true,
|
||||||
|
errMsg: "RATE_LIMIT_WINDOW validation failed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid RateLimit values",
|
||||||
|
config: &Config{
|
||||||
|
AppPort: 8080,
|
||||||
|
JWTSecret: strings.Repeat("a", 32),
|
||||||
|
DatabaseURL: "postgresql://user:pass@localhost:5432/db",
|
||||||
|
RedisURL: "redis://localhost:6379",
|
||||||
|
RateLimitLimit: 100,
|
||||||
|
RateLimitWindow: 60,
|
||||||
|
},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
// Ajouter un logger minimal si nécessaire pour éviter nil pointer
|
||||||
|
if tt.config.Logger == nil {
|
||||||
|
logger, _ := zap.NewDevelopment()
|
||||||
|
tt.config.Logger = logger
|
||||||
|
}
|
||||||
|
|
||||||
|
err := tt.config.Validate()
|
||||||
|
if tt.wantErr {
|
||||||
|
require.Error(t, err)
|
||||||
|
if tt.errMsg != "" {
|
||||||
|
assert.Contains(t, err.Error(), tt.errMsg)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
67
veza-backend-api/internal/config/validator.go
Normal file
67
veza-backend-api/internal/config/validator.go
Normal file
|
|
@ -0,0 +1,67 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigValidator validates configuration values against strict rules (T0036).
// It is stateless; the zero value is ready to use.
type ConfigValidator struct{}
|
||||||
|
|
||||||
|
// NewConfigValidator crée un nouveau validateur
|
||||||
|
func NewConfigValidator() *ConfigValidator {
|
||||||
|
return &ConfigValidator{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidatePort valide qu'un port est dans la plage valide (1-65535)
|
||||||
|
func (v *ConfigValidator) ValidatePort(port int) error {
|
||||||
|
if port < 1 || port > 65535 {
|
||||||
|
return fmt.Errorf("port must be between 1 and 65535, got %d", port)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateURL valide qu'une URL a le schéma attendu
|
||||||
|
func (v *ConfigValidator) ValidateURL(urlStr, expectedScheme string) error {
|
||||||
|
if urlStr == "" {
|
||||||
|
return fmt.Errorf("URL cannot be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
parsedURL, err := url.Parse(urlStr)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("invalid URL format: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if parsedURL.Scheme != expectedScheme {
|
||||||
|
return fmt.Errorf("URL must have scheme %s, got %s", expectedScheme, parsedURL.Scheme)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateEnum valide qu'une valeur fait partie des valeurs autorisées
|
||||||
|
func (v *ConfigValidator) ValidateEnum(value string, allowed []string) error {
|
||||||
|
for _, allowedValue := range allowed {
|
||||||
|
if value == allowedValue {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return fmt.Errorf("value '%s' is not allowed. Allowed values: %s", value, strings.Join(allowed, ", "))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateSecretLength valide qu'un secret a une longueur minimale
|
||||||
|
func (v *ConfigValidator) ValidateSecretLength(secret string, minLength int) error {
|
||||||
|
if len(secret) < minLength {
|
||||||
|
return fmt.Errorf("secret must be at least %d characters, got %d", minLength, len(secret))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidatePositiveInt valide qu'un entier est positif
|
||||||
|
func (v *ConfigValidator) ValidatePositiveInt(value int, fieldName string) error {
|
||||||
|
if value <= 0 {
|
||||||
|
return fmt.Errorf("%s must be positive, got %d", fieldName, value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
232
veza-backend-api/internal/config/validator_test.go
Normal file
232
veza-backend-api/internal/config/validator_test.go
Normal file
|
|
@ -0,0 +1,232 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidatePort(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
port int
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{"valid port", 8080, false},
|
||||||
|
{"min port", 1, false},
|
||||||
|
{"max port", 65535, false},
|
||||||
|
{"invalid negative", -1, true},
|
||||||
|
{"invalid too high", 65536, true},
|
||||||
|
{"invalid zero", 0, true},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validator.ValidatePort(tt.port)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateURL(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
url string
|
||||||
|
expectedScheme string
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{"valid postgres URL", "postgres://user:pass@localhost:5432/db", "postgres", false},
|
||||||
|
{"valid postgresql URL", "postgresql://user:pass@localhost:5432/db", "postgresql", false},
|
||||||
|
{"valid redis URL", "redis://localhost:6379", "redis", false},
|
||||||
|
{"valid rediss URL", "rediss://localhost:6380", "rediss", false},
|
||||||
|
{"invalid scheme", "http://localhost", "postgres", true},
|
||||||
|
{"empty URL", "", "postgres", true},
|
||||||
|
{"malformed URL", "://invalid", "postgres", true},
|
||||||
|
{"missing scheme", "localhost:5432/db", "postgres", true},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validator.ValidateURL(tt.url, tt.expectedScheme)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateEnum(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
value string
|
||||||
|
allowed []string
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid value in enum",
|
||||||
|
value: "INFO",
|
||||||
|
allowed: []string{"DEBUG", "INFO", "WARN", "ERROR"},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "case sensitive match",
|
||||||
|
value: "info",
|
||||||
|
allowed: []string{"INFO", "WARN"},
|
||||||
|
wantErr: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "value not in enum",
|
||||||
|
value: "TRACE",
|
||||||
|
allowed: []string{"DEBUG", "INFO", "WARN", "ERROR"},
|
||||||
|
wantErr: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty value with empty allowed",
|
||||||
|
value: "",
|
||||||
|
allowed: []string{},
|
||||||
|
wantErr: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty value in allowed",
|
||||||
|
value: "",
|
||||||
|
allowed: []string{"", "value1"},
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validator.ValidateEnum(tt.value, tt.allowed)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "not allowed")
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateSecretLength(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
secret string
|
||||||
|
minLength int
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{"valid secret", "my-super-secret-key-that-is-long-enough", 32, false},
|
||||||
|
{"exact length", strings.Repeat("a", 32), 32, false},
|
||||||
|
{"too short", "short", 32, true},
|
||||||
|
{"empty secret", "", 1, true},
|
||||||
|
{"empty secret with min 0", "", 0, false},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validator.ValidateSecretLength(tt.secret, tt.minLength)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "at least")
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidatePositiveInt(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
value int
|
||||||
|
fieldName string
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{"valid positive", 42, "test_field", false},
|
||||||
|
{"valid one", 1, "test_field", false},
|
||||||
|
{"invalid zero", 0, "test_field", true},
|
||||||
|
{"invalid negative", -1, "test_field", true},
|
||||||
|
{"invalid large negative", -1000, "test_field", true},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validator.ValidatePositiveInt(tt.value, tt.fieldName)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "must be positive")
|
||||||
|
assert.Contains(t, err.Error(), tt.fieldName)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewConfigValidator(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
assert.NotNil(t, validator)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateURL_MultipleSchemes(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
// Test avec différents schémas PostgreSQL
|
||||||
|
err1 := validator.ValidateURL("postgres://localhost/db", "postgres")
|
||||||
|
assert.NoError(t, err1)
|
||||||
|
|
||||||
|
err2 := validator.ValidateURL("postgresql://localhost/db", "postgresql")
|
||||||
|
assert.NoError(t, err2)
|
||||||
|
|
||||||
|
// Test avec schéma Redis
|
||||||
|
err3 := validator.ValidateURL("redis://localhost:6379", "redis")
|
||||||
|
assert.NoError(t, err3)
|
||||||
|
|
||||||
|
err4 := validator.ValidateURL("rediss://localhost:6380", "rediss")
|
||||||
|
assert.NoError(t, err4)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateEnum_ErrorMessages(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
err := validator.ValidateEnum("invalid", []string{"valid1", "valid2", "valid3"})
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "not allowed")
|
||||||
|
assert.Contains(t, err.Error(), "valid1, valid2, valid3")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidateSecretLength_ErrorMessages(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
err := validator.ValidateSecretLength("short", 32)
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "at least 32")
|
||||||
|
assert.Contains(t, err.Error(), "got 5")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigValidator_ValidatePositiveInt_ErrorMessages(t *testing.T) {
|
||||||
|
validator := NewConfigValidator()
|
||||||
|
|
||||||
|
err := validator.ValidatePositiveInt(-5, "rate_limit")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "rate_limit")
|
||||||
|
assert.Contains(t, err.Error(), "must be positive")
|
||||||
|
assert.Contains(t, err.Error(), "got -5")
|
||||||
|
}
|
||||||
136
veza-backend-api/internal/config/watcher.go
Normal file
136
veza-backend-api/internal/config/watcher.go
Normal file
|
|
@ -0,0 +1,136 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"path/filepath"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/fsnotify/fsnotify"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigWatcher watches configuration files for changes (T0040).
// A background goroutine (watchLoop) reacts to filesystem events and asks the
// reloader to reload the configuration after a debounce period.
type ConfigWatcher struct {
	watcher  *fsnotify.Watcher // underlying filesystem notification source
	reloader *ConfigReloader   // ReloadAll is invoked after a debounced change
	logger   *zap.Logger
	stopChan chan struct{} // closed by Stop to terminate watchLoop
	stopOnce sync.Once     // Ensures stopChan is closed only once
	wg       sync.WaitGroup // tracks the watchLoop goroutine for Stop to wait on
	debounce time.Duration  // quiet period before a change triggers a reload
}
|
||||||
|
|
||||||
|
// NewConfigWatcher crée un nouveau watcher de configuration (T0040)
|
||||||
|
func NewConfigWatcher(reloader *ConfigReloader, logger *zap.Logger) (*ConfigWatcher, error) {
|
||||||
|
watcher, err := fsnotify.NewWatcher()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create watcher: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ConfigWatcher{
|
||||||
|
watcher: watcher,
|
||||||
|
reloader: reloader,
|
||||||
|
logger: logger,
|
||||||
|
stopChan: make(chan struct{}),
|
||||||
|
stopOnce: sync.Once{}, // Initialize sync.Once
|
||||||
|
debounce: 500 * time.Millisecond,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Watch surveille les fichiers .env pour changements (T0040)
|
||||||
|
func (w *ConfigWatcher) Watch(envFiles []string) error {
|
||||||
|
// Ajouter les fichiers à surveiller
|
||||||
|
for _, file := range envFiles {
|
||||||
|
// Résoudre le chemin absolu pour éviter les problèmes de chemins relatifs
|
||||||
|
absPath, err := filepath.Abs(file)
|
||||||
|
if err != nil {
|
||||||
|
w.logger.Warn("Failed to resolve absolute path", zap.String("file", file), zap.Error(err))
|
||||||
|
absPath = file
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := w.watcher.Add(absPath); err != nil {
|
||||||
|
w.logger.Warn("Failed to watch file", zap.String("file", absPath), zap.Error(err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
w.logger.Info("Watching config file", zap.String("file", absPath))
|
||||||
|
}
|
||||||
|
|
||||||
|
w.wg.Add(1)
|
||||||
|
go w.watchLoop()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// watchLoop boucle principale de surveillance avec debouncing (T0040)
|
||||||
|
func (w *ConfigWatcher) watchLoop() {
|
||||||
|
defer w.wg.Done()
|
||||||
|
|
||||||
|
var debounceTimer *time.Timer
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case event, ok := <-w.watcher.Events:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ignorer les opérations autres que Write et Create
|
||||||
|
if event.Op&fsnotify.Write == 0 && event.Op&fsnotify.Create == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
w.logger.Debug("Config file changed", zap.String("file", event.Name), zap.String("op", event.Op.String()))
|
||||||
|
|
||||||
|
// Arrêter le timer précédent si existant
|
||||||
|
if debounceTimer != nil {
|
||||||
|
debounceTimer.Stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Démarrer un nouveau timer de debounce
|
||||||
|
debounceTimer = time.NewTimer(w.debounce)
|
||||||
|
|
||||||
|
// Goroutine pour attendre le debounce et relancer
|
||||||
|
go func(fileName string) {
|
||||||
|
<-debounceTimer.C
|
||||||
|
w.logger.Info("Config file changed, reloading", zap.String("file", fileName))
|
||||||
|
if err := w.reloader.ReloadAll(); err != nil {
|
||||||
|
w.logger.Error("Failed to reload config", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
w.logger.Info("Config reloaded successfully")
|
||||||
|
}
|
||||||
|
}(event.Name)
|
||||||
|
|
||||||
|
case err, ok := <-w.watcher.Errors:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.logger.Error("Watcher error", zap.Error(err))
|
||||||
|
|
||||||
|
case <-w.stopChan:
|
||||||
|
// Arrêter le timer si actif
|
||||||
|
if debounceTimer != nil {
|
||||||
|
debounceTimer.Stop()
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop arrête la surveillance proprement (T0040)
|
||||||
|
func (w *ConfigWatcher) Stop() error {
|
||||||
|
w.stopOnce.Do(func() {
|
||||||
|
close(w.stopChan)
|
||||||
|
})
|
||||||
|
err := w.watcher.Close()
|
||||||
|
w.wg.Wait()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetWatchedFiles retourne la liste des fichiers surveillés (T0040)
|
||||||
|
func (w *ConfigWatcher) GetWatchedFiles() []string {
|
||||||
|
if w.watcher == nil {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return w.watcher.WatchList()
|
||||||
|
}
|
||||||
266
veza-backend-api/internal/config/watcher_test.go
Normal file
266
veza-backend-api/internal/config/watcher_test.go
Normal file
|
|
@ -0,0 +1,266 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"go.uber.org/zap/zaptest"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewConfigWatcher(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotNil(t, watcher)
|
||||||
|
defer watcher.Stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigWatcher_Watch(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer watcher.Stop()
|
||||||
|
|
||||||
|
// Créer un fichier temporaire
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
tmpFile := filepath.Join(tmpDir, ".env.test")
|
||||||
|
err = os.WriteFile(tmpFile, []byte("LOG_LEVEL=DEBUG\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
err = watcher.Watch([]string{tmpFile})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier que le fichier est surveillé
|
||||||
|
watchedFiles := watcher.GetWatchedFiles()
|
||||||
|
assert.Contains(t, watchedFiles, tmpFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigWatcher_Stop(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
err = watcher.Stop()
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestConfigWatcher_GetWatchedFiles checks both states of GetWatchedFiles:
// empty before any Watch call, and containing the file after one.
func TestConfigWatcher_GetWatchedFiles(t *testing.T) {
	logger := zaptest.NewLogger(t)
	config := &Config{LogLevel: "INFO"}
	reloader := NewConfigReloader(config, logger)

	watcher, err := NewConfigWatcher(reloader, logger)
	require.NoError(t, err)
	defer watcher.Stop()

	// No files watched initially.
	files := watcher.GetWatchedFiles()
	assert.Empty(t, files)

	// Add one file.
	tmpDir := t.TempDir()
	tmpFile := filepath.Join(tmpDir, ".env.test")
	err = os.WriteFile(tmpFile, []byte("LOG_LEVEL=DEBUG\n"), 0644)
	require.NoError(t, err)

	err = watcher.Watch([]string{tmpFile})
	require.NoError(t, err)

	files = watcher.GetWatchedFiles()
	assert.Contains(t, files, tmpFile)
}
|
||||||
|
|
||||||
|
func TestConfigWatcher_MultipleFiles(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer watcher.Stop()
|
||||||
|
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
file1 := filepath.Join(tmpDir, ".env")
|
||||||
|
file2 := filepath.Join(tmpDir, ".env.production")
|
||||||
|
|
||||||
|
err = os.WriteFile(file1, []byte("LOG_LEVEL=DEBUG\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
err = os.WriteFile(file2, []byte("LOG_LEVEL=ERROR\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
err = watcher.Watch([]string{file1, file2})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
watchedFiles := watcher.GetWatchedFiles()
|
||||||
|
assert.Contains(t, watchedFiles, file1)
|
||||||
|
assert.Contains(t, watchedFiles, file2)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestConfigWatcher_InvalidFile checks that watching a nonexistent path does
// not panic; an error return is tolerated and merely logged.
func TestConfigWatcher_InvalidFile(t *testing.T) {
	logger := zaptest.NewLogger(t)
	config := &Config{LogLevel: "INFO"}
	reloader := NewConfigReloader(config, logger)

	watcher, err := NewConfigWatcher(reloader, logger)
	require.NoError(t, err)
	defer watcher.Stop()

	// Try to watch a file that does not exist.
	// Must not crash; at most it should log a warning or return an error.
	err = watcher.Watch([]string{"/nonexistent/file.env"})
	// The watch may fail, but it must not panic.
	if err != nil {
		t.Logf("Expected error for nonexistent file: %v", err)
	}
}
|
||||||
|
|
||||||
|
// TestConfigWatcher_FileChangeDetection exercises the end-to-end watch path:
// create a file, watch it, modify it, and wait past the debounce window.
// Skipped under -short because of the real sleeps below.
func TestConfigWatcher_FileChangeDetection(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping test that requires file watching in short mode")
	}

	logger := zaptest.NewLogger(t)
	config := &Config{LogLevel: "INFO"}
	reloader := NewConfigReloader(config, logger)

	watcher, err := NewConfigWatcher(reloader, logger)
	require.NoError(t, err)
	defer watcher.Stop()

	// Create a temporary file to watch.
	tmpDir := t.TempDir()
	tmpFile := filepath.Join(tmpDir, ".env.test")
	err = os.WriteFile(tmpFile, []byte("LOG_LEVEL=DEBUG\n"), 0644)
	require.NoError(t, err)

	err = watcher.Watch([]string{tmpFile})
	require.NoError(t, err)

	// Give the watcher time to become ready.
	time.Sleep(100 * time.Millisecond)

	// Modify the file to trigger a change event.
	err = os.WriteFile(tmpFile, []byte("LOG_LEVEL=ERROR\n"), 0644)
	require.NoError(t, err)

	// Wait past the debounce window (500ms debounce + margin).
	time.Sleep(700 * time.Millisecond)

	// The reload should have been triggered by now.
	// Note: the reload may not have changed config.LogLevel if the .env
	// file is not loaded by LoadEnvFiles; we at least verify the watcher
	// still tracks the file after the change.
	watchedFiles := watcher.GetWatchedFiles()
	assert.Contains(t, watchedFiles, tmpFile)
}
|
||||||
|
|
||||||
|
// TestNewConfigWatcher_Error documents the failure mode of NewConfigWatcher:
// construction should only fail if the underlying fsnotify watcher cannot be
// created, which is rare; both outcomes are accepted here.
func TestNewConfigWatcher_Error(t *testing.T) {
	// A no-op logger must not cause an error by itself; only an
	// fsnotify.NewWatcher() failure would. In practice this should not
	// fail on most systems.
	logger := zap.NewNop()
	config := &Config{LogLevel: "INFO"}
	reloader := NewConfigReloader(config, logger)

	watcher, err := NewConfigWatcher(reloader, logger)
	// On most systems this should not fail.
	if err != nil {
		t.Logf("NewConfigWatcher failed (may be expected on some systems): %v", err)
	} else {
		require.NotNil(t, watcher)
		watcher.Stop()
	}
}
|
||||||
|
|
||||||
|
// TestConfigWatcher_StopMultipleTimes verifies that calling Stop twice does
// not panic; the second call may return an error, which is tolerated.
func TestConfigWatcher_StopMultipleTimes(t *testing.T) {
	logger := zaptest.NewLogger(t)
	config := &Config{LogLevel: "INFO"}
	reloader := NewConfigReloader(config, logger)

	watcher, err := NewConfigWatcher(reloader, logger)
	require.NoError(t, err)

	// Stopping more than once must not crash.
	err = watcher.Stop()
	assert.NoError(t, err)

	// Try stopping again.
	err = watcher.Stop()
	// May return an error, but must not panic.
	if err != nil {
		t.Logf("Second Stop() returned error (may be expected): %v", err)
	}
}
|
||||||
|
|
||||||
|
func TestConfigWatcher_EmptyFileList(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer watcher.Stop()
|
||||||
|
|
||||||
|
// Surveiller une liste vide
|
||||||
|
err = watcher.Watch([]string{})
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
files := watcher.GetWatchedFiles()
|
||||||
|
assert.Empty(t, files)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigWatcher_RelativePath(t *testing.T) {
|
||||||
|
logger := zaptest.NewLogger(t)
|
||||||
|
config := &Config{LogLevel: "INFO"}
|
||||||
|
reloader := NewConfigReloader(config, logger)
|
||||||
|
|
||||||
|
watcher, err := NewConfigWatcher(reloader, logger)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer watcher.Stop()
|
||||||
|
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
// Créer le fichier
|
||||||
|
absFile := filepath.Join(tmpDir, ".env.test")
|
||||||
|
err = os.WriteFile(absFile, []byte("LOG_LEVEL=DEBUG\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Changer vers le répertoire temporaire
|
||||||
|
oldDir, err := os.Getwd()
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer os.Chdir(oldDir)
|
||||||
|
|
||||||
|
err = os.Chdir(tmpDir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Essayer de surveiller avec un chemin relatif
|
||||||
|
err = watcher.Watch([]string{".env.test"})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier que le chemin absolu est surveillé
|
||||||
|
watchedFiles := watcher.GetWatchedFiles()
|
||||||
|
assert.NotEmpty(t, watchedFiles)
|
||||||
|
// Le chemin absolu devrait être dans la liste
|
||||||
|
found := false
|
||||||
|
for _, file := range watchedFiles {
|
||||||
|
if filepath.Base(file) == ".env.test" {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.True(t, found, "Relative path should be converted to absolute path")
|
||||||
|
}
|
||||||
301
veza-backend-api/internal/core/auth/handler.go
Normal file
301
veza-backend-api/internal/core/auth/handler.go
Normal file
|
|
@ -0,0 +1,301 @@
|
||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/dto"
|
||||||
|
"veza-backend-api/internal/response"
|
||||||
|
"veza-backend-api/internal/services"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AuthHandler exposes the HTTP endpoints for authentication (register,
// login, refresh, logout, email verification) on top of AuthService.
type AuthHandler struct {
	authService *AuthService
	// sessionService is optional; handlers check it for nil and skip
	// session bookkeeping when it is absent.
	sessionService *services.SessionService
	logger         *zap.Logger
}
|
||||||
|
|
||||||
|
// NewAuthHandler crée une nouvelle instance d'AuthHandler
|
||||||
|
func NewAuthHandler(authService *AuthService, sessionService *services.SessionService, logger *zap.Logger) *AuthHandler { // Changed to *AuthService
|
||||||
|
return &AuthHandler{
|
||||||
|
authService: authService,
|
||||||
|
sessionService: sessionService,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register gère l'inscription d'un nouvel utilisateur
|
||||||
|
func (h *AuthHandler) Register(c *gin.Context) {
|
||||||
|
var req dto.RegisterRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
errorMsg := err.Error()
|
||||||
|
if strings.Contains(errorMsg, "Password") && strings.Contains(errorMsg, "min") {
|
||||||
|
errorMsg = "Le mot de passe doit contenir au moins 12 caractères"
|
||||||
|
} else if strings.Contains(errorMsg, "PasswordConfirm") && strings.Contains(errorMsg, "eqfield") {
|
||||||
|
errorMsg = "Les mots de passe ne correspondent pas"
|
||||||
|
} else if strings.Contains(errorMsg, "Email") && strings.Contains(errorMsg, "email") {
|
||||||
|
errorMsg = "Format d'email invalide"
|
||||||
|
} else if strings.Contains(errorMsg, "required") {
|
||||||
|
if strings.Contains(errorMsg, "Password") {
|
||||||
|
errorMsg = "Le mot de passe est requis"
|
||||||
|
} else if strings.Contains(errorMsg, "Email") {
|
||||||
|
errorMsg = "L'email est requis"
|
||||||
|
} else if strings.Contains(errorMsg, "PasswordConfirm") {
|
||||||
|
errorMsg = "La confirmation du mot de passe est requise"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
h.logger.Warn("Invalid registration request", zap.Error(err), zap.String("error_message", errorMsg))
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": errorMsg})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := h.authService.Register(c.Request.Context(), req.Email, req.Password)
|
||||||
|
if err != nil {
|
||||||
|
if strings.Contains(err.Error(), "already exists") {
|
||||||
|
c.JSON(http.StatusConflict, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.Contains(err.Error(), "validation") || strings.Contains(err.Error(), "invalid") {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create user"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response := dto.RegisterResponse{
|
||||||
|
User: dto.UserResponse{
|
||||||
|
ID: user.ID,
|
||||||
|
Email: user.Email,
|
||||||
|
Username: user.Username,
|
||||||
|
},
|
||||||
|
Token: dto.TokenResponse{},
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Login gère la connexion d'un utilisateur
|
||||||
|
func (h *AuthHandler) Login(c *gin.Context) {
|
||||||
|
var req dto.LoginRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
user, tokens, err := h.authService.Login(c.Request.Context(), req.Email, req.Password, req.RememberMe)
|
||||||
|
if err != nil {
|
||||||
|
if strings.Contains(err.Error(), "email not verified") {
|
||||||
|
c.JSON(http.StatusForbidden, gin.H{
|
||||||
|
"error": err.Error(),
|
||||||
|
"code": "EMAIL_NOT_VERIFIED",
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.Contains(err.Error(), "invalid credentials") {
|
||||||
|
c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid credentials"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to authenticate"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if h.sessionService != nil {
|
||||||
|
ipAddress := c.ClientIP()
|
||||||
|
userAgent := c.GetHeader("User-Agent")
|
||||||
|
if userAgent == "" {
|
||||||
|
userAgent = "Unknown"
|
||||||
|
}
|
||||||
|
|
||||||
|
expiresIn := 30 * 24 * time.Hour
|
||||||
|
if req.RememberMe {
|
||||||
|
expiresIn = 90 * 24 * time.Hour
|
||||||
|
}
|
||||||
|
|
||||||
|
sessionReq := &services.SessionCreateRequest{
|
||||||
|
UserID: user.ID,
|
||||||
|
Token: tokens.AccessToken,
|
||||||
|
IPAddress: ipAddress,
|
||||||
|
UserAgent: userAgent,
|
||||||
|
ExpiresIn: expiresIn,
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := h.sessionService.CreateSession(c.Request.Context(), sessionReq); err != nil {
|
||||||
|
h.logger.Warn("Failed to create session after login",
|
||||||
|
zap.String("user_id", user.ID.String()),
|
||||||
|
zap.String("ip_address", ipAddress),
|
||||||
|
zap.Error(err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := dto.LoginResponse{
|
||||||
|
User: dto.UserResponse{
|
||||||
|
ID: user.ID,
|
||||||
|
Email: user.Email,
|
||||||
|
},
|
||||||
|
Token: dto.TokenResponse{
|
||||||
|
AccessToken: tokens.AccessToken,
|
||||||
|
RefreshToken: tokens.RefreshToken,
|
||||||
|
ExpiresIn: int(h.authService.JWTService.Config.AccessTokenTTL.Seconds()),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Refresh gère le rafraîchissement d'un access token
|
||||||
|
func (h *AuthHandler) Refresh(c *gin.Context) {
|
||||||
|
var req dto.RefreshRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens, err := h.authService.Refresh(c.Request.Context(), req.RefreshToken)
|
||||||
|
if err != nil {
|
||||||
|
if strings.Contains(err.Error(), "invalid refresh token") ||
|
||||||
|
strings.Contains(err.Error(), "not found") ||
|
||||||
|
strings.Contains(err.Error(), "expired") ||
|
||||||
|
strings.Contains(err.Error(), "token version mismatch") {
|
||||||
|
c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid refresh token"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to refresh token"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
response := dto.TokenResponse{
|
||||||
|
AccessToken: tokens.AccessToken,
|
||||||
|
RefreshToken: tokens.RefreshToken,
|
||||||
|
ExpiresIn: 900,
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CheckUsername reports whether the username given as a query parameter is
// available (i.e. no existing user has it).
func (h *AuthHandler) CheckUsername(c *gin.Context) {
	username := c.Query("username")
	if username == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "Username is required"})
		return
	}

	// The username is considered available when the lookup errors.
	// NOTE(review): ANY lookup failure — including a database outage, not
	// just "record not found" — is reported as "available" here; consider
	// distinguishing gorm.ErrRecordNotFound from other errors.
	_, err := h.authService.GetUserByUsername(c.Request.Context(), username)
	available := err != nil

	c.JSON(http.StatusOK, gin.H{
		"available": available,
		"username":  username,
	})
}
|
||||||
|
|
||||||
|
// GetMe retourne les informations de l'utilisateur connecté
|
||||||
|
func (h *AuthHandler) GetMe(c *gin.Context) {
|
||||||
|
userID, exists := c.Get("user_id")
|
||||||
|
if !exists {
|
||||||
|
c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"id": userID,
|
||||||
|
"email": c.GetString("email"),
|
||||||
|
"role": c.GetString("role"),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Logout revokes the caller's refresh token and, when a session service is
// configured, the server-side session tied to the bearer access token.
//
// Requires an authenticated context ("user_id" set by the auth middleware)
// and a JSON body containing the refresh token. Revocation failures are
// logged but never fail the request: the client is logging out regardless.
func (h *AuthHandler) Logout(c *gin.Context) {
	userIDInterface, exists := c.Get("user_id")
	if !exists {
		c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
		return
	}

	userID, ok := userIDInterface.(uuid.UUID)
	if !ok {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Invalid user ID type in context"})
		return
	}

	var req struct {
		RefreshToken string `json:"refresh_token" binding:"required"`
	}

	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "Refresh token is required"})
		return
	}

	// Best-effort refresh-token revocation.
	if err := h.authService.Logout(c.Request.Context(), userID, req.RefreshToken); err != nil {
		h.logger.Error("Failed to logout (revoke token)", zap.Error(err))
	}

	// Best-effort revocation of the server-side session keyed by the
	// bearer access token from the Authorization header.
	if h.sessionService != nil {
		authHeader := c.GetHeader("Authorization")
		if authHeader != "" && strings.HasPrefix(authHeader, "Bearer ") {
			token := strings.TrimPrefix(authHeader, "Bearer ")
			if err := h.sessionService.RevokeSession(c.Request.Context(), token); err != nil {
				h.logger.Warn("Failed to revoke session on logout", zap.Error(err))
			}
		}
	}

	c.JSON(http.StatusOK, gin.H{"message": "Logged out successfully"})
}
|
||||||
|
|
||||||
|
// VerifyEmail gère la vérification de l'email
|
||||||
|
func (h *AuthHandler) VerifyEmail(c *gin.Context) {
|
||||||
|
token := c.Query("token")
|
||||||
|
if token == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "Token required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.authService.VerifyEmail(c.Request.Context(), token); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Email verified successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResendVerification gère la demande de renvoi d'email de vérification
|
||||||
|
func (h *AuthHandler) ResendVerification(c *gin.Context) {
|
||||||
|
var req struct {
|
||||||
|
Email string `json:"email" binding:"required,email"`
|
||||||
|
}
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.authService.ResendVerificationEmail(c.Request.Context(), req.Email); err != nil {
|
||||||
|
if err.Error() == "email already verified" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Verification email sent if account exists"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserByUsername gets a user by username
|
||||||
|
func (h *AuthHandler) GetUserByUsername(c *gin.Context) {
|
||||||
|
username := c.Param("username")
|
||||||
|
user, err := h.authService.GetUserByUsername(c.Request.Context(), username)
|
||||||
|
if err != nil {
|
||||||
|
response.NotFound(c, "User not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
response.Success(c, user)
|
||||||
|
}
|
||||||
437
veza-backend-api/internal/core/auth/service.go
Normal file
437
veza-backend-api/internal/core/auth/service.go
Normal file
|
|
@ -0,0 +1,437 @@
|
||||||
|
package auth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt" // Ajoutez cette ligne
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
"veza-backend-api/internal/services" // Added import for services
|
||||||
|
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"golang.org/x/crypto/bcrypt"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/validators" // Import the validators package
|
||||||
|
)
|
||||||
|
|
||||||
|
// AuthService implements registration, login, token refresh/rotation,
// logout, and email verification on top of GORM persistence and the shared
// JWT / email / refresh-token services.
type AuthService struct {
	db     *gorm.DB
	logger *zap.Logger
	// JWTService is exported so HTTP handlers can read token TTLs from its
	// Config when building responses.
	JWTService               *services.JWTService
	emailVerificationService *services.EmailVerificationService
	refreshTokenService      *services.RefreshTokenService
	emailValidator           *validators.EmailValidator
	passwordValidator        *validators.PasswordValidator
	passwordService          *services.PasswordService
	emailService             *services.EmailService
}
|
||||||
|
|
||||||
|
func NewAuthService(
|
||||||
|
db *gorm.DB,
|
||||||
|
emailValidator *validators.EmailValidator,
|
||||||
|
passwordValidator *validators.PasswordValidator,
|
||||||
|
passwordService *services.PasswordService, // Changed to pointer
|
||||||
|
jwtService *services.JWTService, // Changed to pointer
|
||||||
|
refreshTokenService *services.RefreshTokenService, // Changed to pointer
|
||||||
|
emailVerificationService *services.EmailVerificationService, // Changed to pointer
|
||||||
|
emailService *services.EmailService, // Changed to pointer
|
||||||
|
logger *zap.Logger,
|
||||||
|
) *AuthService {
|
||||||
|
return &AuthService{
|
||||||
|
db: db,
|
||||||
|
logger: logger,
|
||||||
|
JWTService: jwtService,
|
||||||
|
emailVerificationService: emailVerificationService,
|
||||||
|
refreshTokenService: refreshTokenService,
|
||||||
|
emailValidator: emailValidator,
|
||||||
|
passwordValidator: passwordValidator,
|
||||||
|
passwordService: passwordService,
|
||||||
|
emailService: emailService,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserByUsername récupère un utilisateur par son nom d'utilisateur
|
||||||
|
func (s *AuthService) GetUserByUsername(ctx context.Context, username string) (*models.User, error) {
|
||||||
|
var user models.User
|
||||||
|
if err := s.db.WithContext(ctx).Where("username = ?", username).First(&user).Error; err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Refresh is a thin alias for RefreshToken; it exists so callers can use
// the shorter name.
func (s *AuthService) Refresh(ctx context.Context, refreshToken string) (*models.TokenPair, error) {
	return s.RefreshToken(ctx, refreshToken)
}
|
||||||
|
|
||||||
|
func (s *AuthService) Register(ctx context.Context, email, password string) (*models.User, error) {
|
||||||
|
s.logger.Info("Attempting to register new user", zap.String("email", email))
|
||||||
|
|
||||||
|
// Valider l'email
|
||||||
|
if err := s.emailValidator.Validate(email); err != nil {
|
||||||
|
s.logger.Warn("Registration failed: invalid email", zap.String("email", email), zap.Error(err))
|
||||||
|
return nil, errors.New("invalid email: " + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider le mot de passe
|
||||||
|
passwordStrength, err := s.passwordValidator.Validate(password)
|
||||||
|
if err != nil || !passwordStrength.Valid { // Vérifiez également si la force n'est pas suffisante
|
||||||
|
s.logger.Warn("Registration failed: weak password", zap.String("email", email), zap.Error(err))
|
||||||
|
// Si l'erreur est nil mais pas valide, utilisez les détails de la force
|
||||||
|
if err == nil {
|
||||||
|
err = errors.New("weak password: " + strings.Join(passwordStrength.Details, ", "))
|
||||||
|
}
|
||||||
|
return nil, errors.New("weak password: " + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hacher le mot de passe
|
||||||
|
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to hash password", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer l'utilisateur dans la base de données
|
||||||
|
user := &models.User{
|
||||||
|
ID: uuid.New(), // Générer un nouvel UUID
|
||||||
|
Email: email,
|
||||||
|
PasswordHash: string(hashedPassword),
|
||||||
|
// Le nom d'utilisateur sera généré par défaut ou défini plus tard
|
||||||
|
// IsVerified: false par défaut
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.WithContext(ctx).Create(user).Error; err != nil {
|
||||||
|
if strings.Contains(err.Error(), "unique constraint") || strings.Contains(err.Error(), "duplicate key") {
|
||||||
|
s.logger.Warn("Registration failed: email already exists", zap.String("email", email))
|
||||||
|
return nil, errors.New("email already exists")
|
||||||
|
}
|
||||||
|
s.logger.Error("Failed to create user in database", zap.Error(err))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Générer le token de vérification d'email
|
||||||
|
token, err := s.emailVerificationService.GenerateToken()
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to generate email verification token", zap.Error(err))
|
||||||
|
return user, fmt.Errorf("failed to generate verification token: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stocker le token
|
||||||
|
if err := s.emailVerificationService.StoreToken(user.ID, token); err != nil {
|
||||||
|
s.logger.Error("Failed to store email verification token", zap.Error(err))
|
||||||
|
return user, fmt.Errorf("failed to store verification token: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Envoyer l'email de vérification (simulation pour l'instant)
|
||||||
|
s.logger.Info("Sending verification email",
|
||||||
|
zap.String("email", user.Email),
|
||||||
|
zap.String("token", token),
|
||||||
|
zap.String("user_id", user.ID.String()))
|
||||||
|
|
||||||
|
s.logger.Info("User registered successfully", zap.String("user_id", user.ID.String()))
|
||||||
|
return user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Login authenticates a user by email/password and returns the user plus a
// fresh JWT token pair. The refresh token is persisted so it can later be
// validated, rotated, or revoked.
//
// Error contract (matched by substring in the HTTP handler):
//   - "invalid credentials" for unknown email or wrong password
//   - "email not verified" when the account has not confirmed its email
func (s *AuthService) Login(ctx context.Context, email, password string, rememberMe bool) (*models.User, *models.TokenPair, error) {
	s.logger.Info("Attempting login", zap.String("email", email))

	var user models.User
	if err := s.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil {
		if err == gorm.ErrRecordNotFound {
			// Deliberately indistinguishable from a wrong password.
			s.logger.Warn("Login failed: user not found", zap.String("email", email))
			return nil, nil, errors.New("invalid credentials")
		}
		s.logger.Error("Database error during login", zap.Error(err))
		return nil, nil, err
	}

	if err := bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(password)); err != nil {
		s.logger.Warn("Login failed: invalid password", zap.String("email", email))
		return nil, nil, errors.New("invalid credentials")
	}

	if !user.IsVerified {
		s.logger.Warn("Login failed: email not verified", zap.String("email", email))
		return nil, nil, errors.New("email not verified")
	}

	// Issue the JWT pair.
	accessToken, err := s.JWTService.GenerateAccessToken(&user)
	if err != nil {
		s.logger.Error("Failed to generate access token", zap.Error(err))
		return nil, nil, err
	}

	// "Remember me" extends the server-side TTL used when storing the
	// refresh token below.
	// NOTE(review): GenerateRefreshToken does not receive this TTL, so the
	// expiry embedded in the token itself may not reflect rememberMe —
	// confirm against JWTService.
	refreshTokenTTL := s.JWTService.Config.RefreshTokenTTL
	if rememberMe {
		refreshTokenTTL = s.JWTService.Config.RememberMeRefreshTokenTTL
	}
	refreshToken, err := s.JWTService.GenerateRefreshToken(&user)
	if err != nil {
		s.logger.Error("Failed to generate refresh token", zap.Error(err))
		return nil, nil, err
	}

	// Persist the refresh token so Refresh/Logout can validate and revoke it.
	if err := s.refreshTokenService.Store(user.ID, refreshToken, refreshTokenTTL); err != nil {
		s.logger.Error("Failed to store refresh token", zap.Error(err))
		return nil, nil, err
	}

	s.logger.Info("User logged in successfully", zap.String("user_id", user.ID.String()))

	return &user, &models.TokenPair{
		AccessToken:  accessToken,
		RefreshToken: refreshToken,
		ExpiresIn:    int(s.JWTService.Config.AccessTokenTTL.Seconds()),
	}, nil
}
|
||||||
|
|
||||||
|
// RefreshToken exchanges a valid refresh token for a new access/refresh
// pair, rotating the stored refresh token so the old one can no longer be
// reused (single-use refresh tokens).
func (s *AuthService) RefreshToken(ctx context.Context, refreshToken string) (*models.TokenPair, error) {
	claims, err := s.JWTService.ValidateToken(refreshToken)
	if err != nil {
		s.logger.Warn("Invalid refresh token format", zap.Error(err))
		return nil, errors.New("invalid refresh token")
	}

	// Reject access tokens presented as refresh tokens.
	if !claims.IsRefresh {
		s.logger.Warn("Token is not a refresh token")
		return nil, errors.New("invalid token type")
	}

	// Check the token against the stored copy (catches revocation).
	if err := s.refreshTokenService.Validate(claims.UserID, refreshToken); err != nil {
		s.logger.Warn("Refresh token invalid or revoked", zap.Error(err))
		return nil, errors.New("invalid or revoked refresh token")
	}

	var user models.User
	if err := s.db.WithContext(ctx).First(&user, claims.UserID).Error; err != nil {
		s.logger.Error("User not found for refresh token", zap.Error(err))
		return nil, errors.New("user not found")
	}

	newAccessToken, err := s.JWTService.GenerateAccessToken(&user)
	if err != nil {
		s.logger.Error("Failed to generate new access token", zap.Error(err))
		return nil, err
	}

	newRefreshToken, err := s.JWTService.GenerateRefreshToken(&user)
	if err != nil {
		s.logger.Error("Failed to generate new refresh token", zap.Error(err))
		return nil, err
	}

	// Replace the old stored token with the new one.
	if err := s.refreshTokenService.Rotate(user.ID, refreshToken, newRefreshToken, s.JWTService.Config.RefreshTokenTTL); err != nil {
		s.logger.Error("Failed to rotate refresh token", zap.Error(err))
		return nil, err
	}

	return &models.TokenPair{
		AccessToken:  newAccessToken,
		RefreshToken: newRefreshToken,
		ExpiresIn:    int(s.JWTService.Config.AccessTokenTTL.Seconds()),
	}, nil
}
|
||||||
|
|
||||||
|
// VerifyEmail validates an email-verification token, marks the matching
// user as verified, and invalidates any remaining tokens for that user.
// Failure to invalidate old tokens is logged but does not fail the call.
func (s *AuthService) VerifyEmail(ctx context.Context, token string) error {
	userID, err := s.emailVerificationService.VerifyToken(token)
	if err != nil {
		s.logger.Warn("Email verification failed", zap.Error(err))
		return err
	}

	if err := s.db.WithContext(ctx).Model(&models.User{}).Where("id = ?", userID).Update("is_verified", true).Error; err != nil {
		s.logger.Error("Failed to update user verification status", zap.Error(err))
		return err
	}

	// Best-effort cleanup of any other outstanding tokens.
	if err := s.emailVerificationService.InvalidateOldTokens(userID); err != nil {
		s.logger.Warn("Failed to invalidate old verification tokens", zap.Error(err))
	}

	s.logger.Info("Email verified successfully", zap.String("user_id", userID.String()))
	return nil
}
|
||||||
|
|
||||||
|
// ResendVerificationEmail regenerates and stores a fresh verification token
// for the given email address.
//
// An unknown email returns nil (not an error) so callers cannot use this to
// enumerate accounts; an already-verified account returns
// "email already verified". The email send itself is currently simulated by
// logging only.
func (s *AuthService) ResendVerificationEmail(ctx context.Context, email string) error {
	var user models.User
	if err := s.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil {
		if err == gorm.ErrRecordNotFound {
			// Silently succeed for unknown accounts (anti-enumeration).
			return nil
		}
		return err
	}

	if user.IsVerified {
		return errors.New("email already verified")
	}

	// Best-effort invalidation of previous tokens before issuing a new one.
	if err := s.emailVerificationService.InvalidateOldTokens(user.ID); err != nil {
		s.logger.Error("Failed to invalidate old tokens", zap.Error(err))
	}

	token, err := s.emailVerificationService.GenerateToken()
	if err != nil {
		return err
	}

	if err := s.emailVerificationService.StoreToken(user.ID, token); err != nil {
		return err
	}

	// Email delivery is simulated by logging for now.
	s.logger.Info("Resending verification email",
		zap.String("email", user.Email),
		zap.String("token", token),
		zap.String("user_id", user.ID.String()))

	return nil
}
|
||||||
|
|
||||||
|
func (s *AuthService) Logout(ctx context.Context, userID uuid.UUID, refreshToken string) error {
|
||||||
|
// Valider le refresh token
|
||||||
|
claims, err := s.JWTService.ValidateToken(refreshToken)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn("Invalid refresh token during logout", zap.Error(err), zap.String("user_id", userID.String()))
|
||||||
|
return nil // Ne pas retourner d'erreur pour ne pas bloquer le logout côté UI
|
||||||
|
}
|
||||||
|
|
||||||
|
if claims.UserID != userID {
|
||||||
|
s.logger.Warn("User ID mismatch for logout request", zap.String("requested_user_id", userID.String()), zap.String("token_user_id", claims.UserID.String()))
|
||||||
|
return errors.New("user ID mismatch")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.refreshTokenService.Revoke(claims.UserID, refreshToken); err != nil {
|
||||||
|
s.logger.Error("Failed to revoke refresh token during logout", zap.Error(err), zap.String("user_id", userID.String()))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("User logged out successfully", zap.String("user_id", userID.String()))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *AuthService) InvalidateAllUserSessions(ctx context.Context, userID uuid.UUID, sessionService interface {
|
||||||
|
RevokeAllUserSessions(ctx context.Context, userID uuid.UUID) (int64, error)
|
||||||
|
}) error {
|
||||||
|
if err := s.refreshTokenService.RevokeAll(userID); err != nil {
|
||||||
|
s.logger.Error("Failed to revoke all refresh tokens", zap.Error(err))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if sessionService != nil {
|
||||||
|
count, err := sessionService.RevokeAllUserSessions(ctx, userID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to revoke user sessions", zap.Error(err))
|
||||||
|
} else {
|
||||||
|
s.logger.Info("Revoked user sessions", zap.Int64("count", count), zap.String("user_id", userID.String()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("All user sessions invalidated", zap.String("user_id", userID.String()))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MIGRATION UUID: userID migré vers uuid.UUID
|
||||||
|
func (s *AuthService) AdminVerifyUser(ctx context.Context, userID uuid.UUID) error {
|
||||||
|
result := s.db.WithContext(ctx).Model(&models.User{}).Where("id = ?", userID).Update("is_verified", true)
|
||||||
|
if result.Error != nil {
|
||||||
|
return result.Error
|
||||||
|
}
|
||||||
|
if result.RowsAffected == 0 {
|
||||||
|
return errors.New("user not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = s.emailVerificationService.InvalidateOldTokens(userID)
|
||||||
|
|
||||||
|
s.logger.Info("User verified by admin", zap.String("user_id", userID.String()))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AdminBlockUser revokes all refresh tokens of a user on behalf of an
// administrator, forcing re-authentication on every device.
//
// NOTE(review): this only revokes tokens — no "blocked" flag is persisted
// on the user record, so the user can immediately log in again. Confirm
// whether a durable block (e.g. an is_blocked column) is expected here.
// MIGRATION UUID: userID migrated to uuid.UUID
func (s *AuthService) AdminBlockUser(ctx context.Context, userID uuid.UUID) error {
	if err := s.refreshTokenService.RevokeAll(userID); err != nil {
		return err
	}

	s.logger.Info("User blocked by admin", zap.String("user_id", userID.String()))
	return nil
}
|
||||||
|
|
||||||
|
func (s *AuthService) RequestPasswordReset(ctx context.Context, email string) error {
|
||||||
|
var user models.User
|
||||||
|
if err := s.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil {
|
||||||
|
if err == gorm.ErrRecordNotFound {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
token, err := s.emailVerificationService.GenerateToken()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(P2-GO-010): Store reset token - Implémenter table password_reset_tokens selon ORIGIN_DATABASE_SCHEMA
|
||||||
|
s.logger.Info("Password reset requested", zap.String("email", email), zap.String("token_preview", token[:5]+"..."))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResetPassword is a stub for completing a password reset with a token.
//
// NOTE(review): the token parameter is currently unused and the generated
// hash is never written to the database — the function only logs a warning.
// Callers must not rely on this actually changing the password yet.
func (s *AuthService) ResetPassword(ctx context.Context, token, newPassword string) error {
	// TODO(P2-GO-010): Verify reset token - implement token verification per ORIGIN_SECURITY_FRAMEWORK
	// userID := ...
	// For now, assume verification is done or stubbed

	hashedPassword, err := bcrypt.GenerateFromPassword([]byte(newPassword), bcrypt.DefaultCost)
	if err != nil {
		return err
	}

	// Update password in DB (example with stubbed userID)
	// if err := s.db.Model(&models.User{}).Where("id = ?", userID).Update("password_hash", string(hashedPassword)).Error; err != nil { return err }

	// bcrypt hashes are 60 bytes, so the [:10] preview slice is safe here.
	s.logger.Warn("ResetPassword not fully implemented yet - password hash generated but not saved", zap.String("hash_preview", string(hashedPassword)[:10]))

	return nil
}
|
||||||
|
|
||||||
|
// MIGRATION UUID: userID migré vers uuid.UUID
|
||||||
|
func (s *AuthService) ChangePassword(ctx context.Context, userID uuid.UUID, currentPassword, newPassword string) error {
|
||||||
|
var user models.User
|
||||||
|
if err := s.db.WithContext(ctx).First(&user, userID).Error; err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(currentPassword)); err != nil {
|
||||||
|
return errors.New("invalid current password")
|
||||||
|
}
|
||||||
|
|
||||||
|
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(newPassword), bcrypt.DefaultCost)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.WithContext(ctx).Model(&user).Update("password_hash", string(hashedPassword)).Error; err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.refreshTokenService.RevokeAll(userID); err != nil {
|
||||||
|
s.logger.Warn("Failed to revoke refresh tokens after password change", zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Password changed successfully", zap.String("user_id", userID.String()))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateAccessToken parses and validates a JWT access token, returning its
// claims. It delegates entirely to the underlying JWT service.
func (s *AuthService) ValidateAccessToken(tokenString string) (*models.CustomClaims, error) {
	return s.JWTService.ValidateToken(tokenString)
}
|
||||||
|
|
||||||
|
func (s *AuthService) UpdateLastLogin(ctx context.Context, userID uuid.UUID) error {
|
||||||
|
return s.db.WithContext(ctx).Model(&models.User{}).
|
||||||
|
Where("id = ?", userID).
|
||||||
|
Update("last_login_at", time.Now()).Error
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
package collaboration
|
||||||
|
|
||||||
|
// Package collaboration - TO BE IMPLEMENTED
|
||||||
|
// Core collaboration functionality for the application
|
||||||
452
veza-backend-api/internal/core/education/course.go
Normal file
452
veza-backend-api/internal/core/education/course.go
Normal file
|
|
@ -0,0 +1,452 @@
|
||||||
|
package education
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Course represents a training course: metadata, pricing, and its content
// (lessons, exercises) plus certificates issued for it.
//
// NOTE(review): the embedded mutex is never taken by any method visible in
// this file; CourseManager guards courses with its own lock. Confirm whether
// per-course locking is intended.
type Course struct {
	ID           string         `json:"id"`
	Title        string         `json:"title"`
	Description  string         `json:"description"`
	Instructor   string         `json:"instructor"`
	Category     string         `json:"category"`
	Level        CourseLevel    `json:"level"`
	Duration     time.Duration  `json:"duration"`
	Price        float64        `json:"price"`
	Currency     string         `json:"currency"`
	Language     string         `json:"language"`
	Thumbnail    string         `json:"thumbnail"`
	VideoURL     string         `json:"video_url"`
	Lessons      []*Lesson      `json:"lessons"`
	Exercises    []*Exercise    `json:"exercises"`
	Certificates []*Certificate `json:"certificates"`
	Tags         []string       `json:"tags"`
	IsPublished  bool           `json:"is_published"`
	IsFree       bool           `json:"is_free"`
	CreatedAt    time.Time      `json:"created_at"`
	UpdatedAt    time.Time      `json:"updated_at"`
	// mu is currently unused (see NOTE above).
	mu sync.RWMutex
}
|
||||||
|
|
||||||
|
// CourseLevel defines the difficulty level of a course.
type CourseLevel string

// Supported course difficulty levels, from easiest to hardest.
const (
	CourseLevelBeginner     CourseLevel = "beginner"
	CourseLevelIntermediate CourseLevel = "intermediate"
	CourseLevelAdvanced     CourseLevel = "advanced"
	CourseLevelExpert       CourseLevel = "expert"
)
|
||||||
|
|
||||||
|
// Lesson represents one lesson inside a course, ordered by Order.
type Lesson struct {
	ID          string        `json:"id"`
	CourseID    string        `json:"course_id"`
	Title       string        `json:"title"`
	Description string        `json:"description"`
	Content     string        `json:"content"`
	VideoURL    string        `json:"video_url"`
	Duration    time.Duration `json:"duration"`
	// Order is the lesson's position within the course.
	Order     int       `json:"order"`
	IsFree    bool      `json:"is_free"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// Exercise represents a practical exercise attached to a course and,
// optionally, to a specific lesson.
type Exercise struct {
	ID          string        `json:"id"`
	CourseID    string        `json:"course_id"`
	LessonID    string        `json:"lesson_id"`
	Title       string        `json:"title"`
	Description string        `json:"description"`
	Type        ExerciseType  `json:"type"`
	Content     string        `json:"content"`
	Solution    string        `json:"solution"`
	Points      int           `json:"points"`
	TimeLimit   time.Duration `json:"time_limit"`
	IsRequired  bool          `json:"is_required"`
	CreatedAt   time.Time     `json:"created_at"`
	UpdatedAt   time.Time     `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// ExerciseType defines the kind of exercise.
type ExerciseType string

// Supported exercise kinds.
const (
	ExerciseTypeQuiz    ExerciseType = "quiz"
	ExerciseTypeProject ExerciseType = "project"
	ExerciseTypeAudio   ExerciseType = "audio"
	ExerciseTypeCode    ExerciseType = "code"
	ExerciseTypeEssay   ExerciseType = "essay"
)
|
||||||
|
|
||||||
|
// Certificate represents a training certificate awarded to a user for a
// course, with the score achieved and a validity window.
type Certificate struct {
	ID          string    `json:"id"`
	CourseID    string    `json:"course_id"`
	UserID      uuid.UUID `json:"user_id"`
	Title       string    `json:"title"`
	Description string    `json:"description"`
	Score       float64   `json:"score"`
	MaxScore    float64   `json:"max_score"`
	IsPassed    bool      `json:"is_passed"`
	IssuedAt    time.Time `json:"issued_at"`
	ExpiresAt   time.Time `json:"expires_at"`
	CreatedAt   time.Time `json:"created_at"`
}
|
||||||
|
|
||||||
|
// CourseProgress represents a user's progression through a course.
type CourseProgress struct {
	ID       string    `json:"id"`
	UserID   uuid.UUID `json:"user_id"`
	CourseID string    `json:"course_id"`
	// Progress ranges from 0.0 to 1.0; >= 1.0 means the course is completed.
	Progress         float64       `json:"progress"` // 0.0 to 1.0
	CompletedLessons []string      `json:"completed_lessons"`
	CurrentLesson    string        `json:"current_lesson"`
	Score            float64       `json:"score"`
	TimeSpent        time.Duration `json:"time_spent"`
	LastAccessed     time.Time     `json:"last_accessed"`
	IsCompleted      bool          `json:"is_completed"`
	CompletedAt      time.Time     `json:"completed_at"`
	CreatedAt        time.Time     `json:"created_at"`
	UpdatedAt        time.Time     `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// CourseManager is an in-memory registry of courses and per-user course
// progress. All maps are guarded by mu.
type CourseManager struct {
	courses map[string]*Course
	// progress is keyed by "<userID>_<courseID>".
	progress map[string]*CourseProgress
	logger   *zap.Logger
	mu       sync.RWMutex
}
|
||||||
|
|
||||||
|
// NewCourseManager crée un nouveau gestionnaire de cours
|
||||||
|
func NewCourseManager(logger *zap.Logger) *CourseManager {
|
||||||
|
return &CourseManager{
|
||||||
|
courses: make(map[string]*Course),
|
||||||
|
progress: make(map[string]*CourseProgress),
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateCourse crée un nouveau cours
|
||||||
|
func (cm *CourseManager) CreateCourse(ctx context.Context, title, description, instructor, category string, level CourseLevel, duration time.Duration, price float64, language string) (*Course, error) {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
courseID := uuid.New().String()
|
||||||
|
|
||||||
|
course := &Course{
|
||||||
|
ID: courseID,
|
||||||
|
Title: title,
|
||||||
|
Description: description,
|
||||||
|
Instructor: instructor,
|
||||||
|
Category: category,
|
||||||
|
Level: level,
|
||||||
|
Duration: duration,
|
||||||
|
Price: price,
|
||||||
|
Currency: "EUR",
|
||||||
|
Language: language,
|
||||||
|
Lessons: []*Lesson{},
|
||||||
|
Exercises: []*Exercise{},
|
||||||
|
Certificates: []*Certificate{},
|
||||||
|
Tags: []string{},
|
||||||
|
IsPublished: false,
|
||||||
|
IsFree: price == 0,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
cm.courses[courseID] = course
|
||||||
|
|
||||||
|
cm.logger.Info("Cours créé",
|
||||||
|
zap.String("course_id", courseID),
|
||||||
|
zap.String("title", title),
|
||||||
|
zap.String("instructor", instructor))
|
||||||
|
|
||||||
|
return course, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCourse récupère un cours par son ID
|
||||||
|
func (cm *CourseManager) GetCourse(ctx context.Context, courseID string) (*Course, error) {
|
||||||
|
cm.mu.RLock()
|
||||||
|
defer cm.mu.RUnlock()
|
||||||
|
|
||||||
|
course, exists := cm.courses[courseID]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("cours non trouvé: %s", courseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return course, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCourses liste tous les cours disponibles
|
||||||
|
func (cm *CourseManager) ListCourses(ctx context.Context, filters map[string]interface{}) ([]*Course, error) {
|
||||||
|
cm.mu.RLock()
|
||||||
|
defer cm.mu.RUnlock()
|
||||||
|
|
||||||
|
var courses []*Course
|
||||||
|
for _, course := range cm.courses {
|
||||||
|
// Appliquer les filtres si fournis
|
||||||
|
if filters != nil {
|
||||||
|
if category, ok := filters["category"].(string); ok && course.Category != category {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if level, ok := filters["level"].(CourseLevel); ok && course.Level != level {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isPublished, ok := filters["is_published"].(bool); ok && course.IsPublished != isPublished {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isFree, ok := filters["is_free"].(bool); ok && course.IsFree != isFree {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
courses = append(courses, course)
|
||||||
|
}
|
||||||
|
|
||||||
|
return courses, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCourse applies the supplied partial updates to a course and bumps
// its UpdatedAt timestamp. Recognized keys: "title", "description",
// "instructor", "category" (string), "level" (CourseLevel), "duration"
// (time.Duration), "price" (float64 — also recomputes IsFree),
// "is_published" (bool). Wrongly-typed or unknown keys are silently ignored.
//
// NOTE(review): the course is mutated under CourseManager.mu only; the
// per-course mu field is never taken, so callers holding a *Course obtained
// from GetCourse may observe concurrent writes — confirm intended locking.
func (cm *CourseManager) UpdateCourse(ctx context.Context, courseID string, updates map[string]interface{}) (*Course, error) {
	cm.mu.Lock()
	defer cm.mu.Unlock()

	course, exists := cm.courses[courseID]
	if !exists {
		return nil, fmt.Errorf("cours non trouvé: %s", courseID)
	}

	// Apply each recognized update; a failed type assertion skips the key.
	if title, ok := updates["title"].(string); ok {
		course.Title = title
	}
	if description, ok := updates["description"].(string); ok {
		course.Description = description
	}
	if instructor, ok := updates["instructor"].(string); ok {
		course.Instructor = instructor
	}
	if category, ok := updates["category"].(string); ok {
		course.Category = category
	}
	if level, ok := updates["level"].(CourseLevel); ok {
		course.Level = level
	}
	if duration, ok := updates["duration"].(time.Duration); ok {
		course.Duration = duration
	}
	if price, ok := updates["price"].(float64); ok {
		course.Price = price
		// Keep the free flag consistent with the new price.
		course.IsFree = price == 0
	}
	if isPublished, ok := updates["is_published"].(bool); ok {
		course.IsPublished = isPublished
	}

	course.UpdatedAt = time.Now()

	cm.logger.Info("Cours mis à jour",
		zap.String("course_id", courseID),
		zap.String("title", course.Title))

	return course, nil
}
|
||||||
|
|
||||||
|
// DeleteCourse supprime un cours
|
||||||
|
func (cm *CourseManager) DeleteCourse(ctx context.Context, courseID string) error {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
if _, exists := cm.courses[courseID]; !exists {
|
||||||
|
return fmt.Errorf("cours non trouvé: %s", courseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(cm.courses, courseID)
|
||||||
|
|
||||||
|
cm.logger.Info("Cours supprimé",
|
||||||
|
zap.String("course_id", courseID))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddLesson ajoute une leçon à un cours
|
||||||
|
func (cm *CourseManager) AddLesson(ctx context.Context, courseID, title, description, content, videoURL string, duration time.Duration, order int, isFree bool) (*Lesson, error) {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
course, exists := cm.courses[courseID]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("cours non trouvé: %s", courseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
lessonID := uuid.New().String()
|
||||||
|
lesson := &Lesson{
|
||||||
|
ID: lessonID,
|
||||||
|
CourseID: courseID,
|
||||||
|
Title: title,
|
||||||
|
Description: description,
|
||||||
|
Content: content,
|
||||||
|
VideoURL: videoURL,
|
||||||
|
Duration: duration,
|
||||||
|
Order: order,
|
||||||
|
IsFree: isFree,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
course.Lessons = append(course.Lessons, lesson)
|
||||||
|
course.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
cm.logger.Info("Leçon ajoutée",
|
||||||
|
zap.String("course_id", courseID),
|
||||||
|
zap.String("lesson_id", lessonID),
|
||||||
|
zap.String("title", title))
|
||||||
|
|
||||||
|
return lesson, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddExercise ajoute un exercice à un cours
|
||||||
|
func (cm *CourseManager) AddExercise(ctx context.Context, courseID, lessonID, title, description, content, solution string, exerciseType ExerciseType, points int, timeLimit time.Duration, isRequired bool) (*Exercise, error) {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
course, exists := cm.courses[courseID]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("cours non trouvé: %s", courseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
exerciseID := uuid.New().String()
|
||||||
|
exercise := &Exercise{
|
||||||
|
ID: exerciseID,
|
||||||
|
CourseID: courseID,
|
||||||
|
LessonID: lessonID,
|
||||||
|
Title: title,
|
||||||
|
Description: description,
|
||||||
|
Type: exerciseType,
|
||||||
|
Content: content,
|
||||||
|
Solution: solution,
|
||||||
|
Points: points,
|
||||||
|
TimeLimit: timeLimit,
|
||||||
|
IsRequired: isRequired,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
course.Exercises = append(course.Exercises, exercise)
|
||||||
|
course.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
cm.logger.Info("Exercice ajouté",
|
||||||
|
zap.String("course_id", courseID),
|
||||||
|
zap.String("exercise_id", exerciseID),
|
||||||
|
zap.String("title", title))
|
||||||
|
|
||||||
|
return exercise, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserProgress récupère la progression d'un utilisateur dans un cours
|
||||||
|
func (cm *CourseManager) GetUserProgress(ctx context.Context, userID uuid.UUID, courseID string) (*CourseProgress, error) {
|
||||||
|
cm.mu.RLock()
|
||||||
|
defer cm.mu.RUnlock()
|
||||||
|
|
||||||
|
progressKey := fmt.Sprintf("%s_%s", userID.String(), courseID)
|
||||||
|
progress, exists := cm.progress[progressKey]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("progression non trouvée pour l'utilisateur %s dans le cours %s", userID, courseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return progress, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUserProgress met à jour la progression d'un utilisateur
|
||||||
|
func (cm *CourseManager) UpdateUserProgress(ctx context.Context, userID uuid.UUID, courseID string, progress float64, completedLessons []string, currentLesson string, score float64, timeSpent time.Duration) (*CourseProgress, error) {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
progressKey := fmt.Sprintf("%s_%s", userID.String(), courseID)
|
||||||
|
|
||||||
|
userProgress, exists := cm.progress[progressKey]
|
||||||
|
if !exists {
|
||||||
|
userProgress = &CourseProgress{
|
||||||
|
ID: uuid.New().String(),
|
||||||
|
UserID: userID,
|
||||||
|
CourseID: courseID,
|
||||||
|
Progress: progress,
|
||||||
|
CompletedLessons: completedLessons,
|
||||||
|
CurrentLesson: currentLesson,
|
||||||
|
Score: score,
|
||||||
|
TimeSpent: timeSpent,
|
||||||
|
LastAccessed: time.Now(),
|
||||||
|
IsCompleted: progress >= 1.0,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
cm.progress[progressKey] = userProgress
|
||||||
|
} else {
|
||||||
|
userProgress.Progress = progress
|
||||||
|
userProgress.CompletedLessons = completedLessons
|
||||||
|
userProgress.CurrentLesson = currentLesson
|
||||||
|
userProgress.Score = score
|
||||||
|
userProgress.TimeSpent = timeSpent
|
||||||
|
userProgress.LastAccessed = time.Now()
|
||||||
|
userProgress.IsCompleted = progress >= 1.0
|
||||||
|
userProgress.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
if userProgress.IsCompleted && userProgress.CompletedAt.IsZero() {
|
||||||
|
userProgress.CompletedAt = time.Now()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cm.logger.Info("Progression utilisateur mise à jour",
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.String("course_id", courseID),
|
||||||
|
zap.Float64("progress", progress))
|
||||||
|
|
||||||
|
return userProgress, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IssueCertificate émet un certificat pour un utilisateur
|
||||||
|
func (cm *CourseManager) IssueCertificate(ctx context.Context, courseID string, userID uuid.UUID, title, description string, score, maxScore float64) (*Certificate, error) {
|
||||||
|
cm.mu.Lock()
|
||||||
|
defer cm.mu.Unlock()
|
||||||
|
|
||||||
|
certificateID := uuid.New().String()
|
||||||
|
isPassed := score >= maxScore*0.7 // 70% pour réussir
|
||||||
|
|
||||||
|
certificate := &Certificate{
|
||||||
|
ID: certificateID,
|
||||||
|
CourseID: courseID,
|
||||||
|
UserID: userID,
|
||||||
|
Title: title,
|
||||||
|
Description: description,
|
||||||
|
Score: score,
|
||||||
|
MaxScore: maxScore,
|
||||||
|
IsPassed: isPassed,
|
||||||
|
IssuedAt: time.Now(),
|
||||||
|
ExpiresAt: time.Now().AddDate(2, 0, 0), // Valide 2 ans
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ajouter le certificat au cours
|
||||||
|
if course, exists := cm.courses[courseID]; exists {
|
||||||
|
course.Certificates = append(course.Certificates, certificate)
|
||||||
|
course.UpdatedAt = time.Now()
|
||||||
|
}
|
||||||
|
|
||||||
|
cm.logger.Info("Certificat émis",
|
||||||
|
zap.String("certificate_id", certificateID),
|
||||||
|
zap.String("course_id", courseID),
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.Bool("is_passed", isPassed))
|
||||||
|
|
||||||
|
return certificate, nil
|
||||||
|
}
|
||||||
479
veza-backend-api/internal/core/education/tutorial.go
Normal file
479
veza-backend-api/internal/core/education/tutorial.go
Normal file
|
|
@ -0,0 +1,479 @@
|
||||||
|
package education
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Tutorial represents a video tutorial with its metadata, engagement
// counters (views/likes/dislikes) and an aggregate rating.
//
// NOTE(review): the embedded mutex is never taken by any method visible in
// this file; TutorialManager guards tutorials with its own lock. Confirm
// whether per-tutorial locking is intended.
type Tutorial struct {
	ID          string        `json:"id"`
	Title       string        `json:"title"`
	Description string        `json:"description"`
	Author      string        `json:"author"`
	Category    string        `json:"category"`
	Tags        []string      `json:"tags"`
	VideoURL    string        `json:"video_url"`
	Thumbnail   string        `json:"thumbnail"`
	Duration    time.Duration `json:"duration"`
	Quality     VideoQuality  `json:"quality"`
	Language    string        `json:"language"`
	IsFree      bool          `json:"is_free"`
	IsPublished bool          `json:"is_published"`
	Views       int64         `json:"views"`
	Likes       int64         `json:"likes"`
	Dislikes    int64         `json:"dislikes"`
	Rating      float64       `json:"rating"`
	CreatedAt   time.Time     `json:"created_at"`
	UpdatedAt   time.Time     `json:"updated_at"`
	// mu is currently unused (see NOTE above).
	mu sync.RWMutex
}
|
||||||
|
|
||||||
|
// VideoQuality defines the video quality of a tutorial.
type VideoQuality string

// Supported video qualities.
const (
	VideoQualityHD VideoQuality = "hd"
	VideoQuality4K VideoQuality = "4k"
	VideoQuality8K VideoQuality = "8k"
)
|
||||||
|
|
||||||
|
// TutorialStep represents one step within a tutorial, ordered by Order and
// anchored at a timestamp inside the video.
type TutorialStep struct {
	ID          string `json:"id"`
	TutorialID  string `json:"tutorial_id"`
	Title       string `json:"title"`
	Description string `json:"description"`
	Content     string `json:"content"`
	// Order is the step's position within the tutorial.
	Order int `json:"order"`
	// Timestamp is the offset into the video where this step starts.
	Timestamp time.Duration `json:"timestamp"`
	IsFree    bool          `json:"is_free"`
	CreatedAt time.Time     `json:"created_at"`
	UpdatedAt time.Time     `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// TutorialComment represents a user comment on a tutorial, with a star
// rating and a helpfulness flag.
type TutorialComment struct {
	ID         string `json:"id"`
	TutorialID string `json:"tutorial_id"`
	UserID     string `json:"user_id"`
	Username   string `json:"username"`
	Content    string `json:"content"`
	// Rating is on a 1-5 star scale.
	Rating    int       `json:"rating"` // 1-5 stars
	IsHelpful bool      `json:"is_helpful"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// TutorialManager is an in-memory registry of video tutorials plus their
// steps and comments (both keyed by tutorial ID). All maps are guarded by mu.
type TutorialManager struct {
	tutorials map[string]*Tutorial
	steps     map[string][]*TutorialStep
	comments  map[string][]*TutorialComment
	logger    *zap.Logger
	mu        sync.RWMutex
}
|
||||||
|
|
||||||
|
// NewTutorialManager crée un nouveau gestionnaire de tutoriels
|
||||||
|
func NewTutorialManager(logger *zap.Logger) *TutorialManager {
|
||||||
|
return &TutorialManager{
|
||||||
|
tutorials: make(map[string]*Tutorial),
|
||||||
|
steps: make(map[string][]*TutorialStep),
|
||||||
|
comments: make(map[string][]*TutorialComment),
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateTutorial crée un nouveau tutoriel
|
||||||
|
func (tm *TutorialManager) CreateTutorial(ctx context.Context, title, description, author, category, videoURL, thumbnail, language string, duration time.Duration, quality VideoQuality, isFree bool, tags []string) (*Tutorial, error) {
|
||||||
|
tm.mu.Lock()
|
||||||
|
defer tm.mu.Unlock()
|
||||||
|
|
||||||
|
tutorialID := uuid.New().String()
|
||||||
|
|
||||||
|
tutorial := &Tutorial{
|
||||||
|
ID: tutorialID,
|
||||||
|
Title: title,
|
||||||
|
Description: description,
|
||||||
|
Author: author,
|
||||||
|
Category: category,
|
||||||
|
Tags: tags,
|
||||||
|
VideoURL: videoURL,
|
||||||
|
Thumbnail: thumbnail,
|
||||||
|
Duration: duration,
|
||||||
|
Quality: quality,
|
||||||
|
Language: language,
|
||||||
|
IsFree: isFree,
|
||||||
|
IsPublished: false,
|
||||||
|
Views: 0,
|
||||||
|
Likes: 0,
|
||||||
|
Dislikes: 0,
|
||||||
|
Rating: 0.0,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
tm.tutorials[tutorialID] = tutorial
|
||||||
|
|
||||||
|
tm.logger.Info("Tutoriel créé",
|
||||||
|
zap.String("tutorial_id", tutorialID),
|
||||||
|
zap.String("title", title),
|
||||||
|
zap.String("author", author))
|
||||||
|
|
||||||
|
return tutorial, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTutorial récupère un tutoriel par son ID
|
||||||
|
func (tm *TutorialManager) GetTutorial(ctx context.Context, tutorialID string) (*Tutorial, error) {
|
||||||
|
tm.mu.RLock()
|
||||||
|
defer tm.mu.RUnlock()
|
||||||
|
|
||||||
|
tutorial, exists := tm.tutorials[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tutorial, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListTutorials liste tous les tutoriels disponibles
|
||||||
|
func (tm *TutorialManager) ListTutorials(ctx context.Context, filters map[string]interface{}) ([]*Tutorial, error) {
|
||||||
|
tm.mu.RLock()
|
||||||
|
defer tm.mu.RUnlock()
|
||||||
|
|
||||||
|
var tutorials []*Tutorial
|
||||||
|
for _, tutorial := range tm.tutorials {
|
||||||
|
// Appliquer les filtres si fournis
|
||||||
|
if filters != nil {
|
||||||
|
if category, ok := filters["category"].(string); ok && tutorial.Category != category {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isPublished, ok := filters["is_published"].(bool); ok && tutorial.IsPublished != isPublished {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isFree, ok := filters["is_free"].(bool); ok && tutorial.IsFree != isFree {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if language, ok := filters["language"].(string); ok && tutorial.Language != language {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if author, ok := filters["author"].(string); ok && tutorial.Author != author {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tutorials = append(tutorials, tutorial)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tutorials, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTutorial applies the supplied partial updates to a tutorial and
// bumps its UpdatedAt timestamp. Recognized keys: "title", "description",
// "author", "category", "video_url", "thumbnail" (string), "duration"
// (time.Duration), "quality" (VideoQuality), "is_published" (bool),
// "tags" ([]string). Wrongly-typed or unknown keys are silently ignored.
//
// NOTE(review): the tutorial is mutated under TutorialManager.mu only; the
// per-tutorial mu field is never taken, so callers holding a *Tutorial from
// GetTutorial may observe concurrent writes — confirm intended locking.
func (tm *TutorialManager) UpdateTutorial(ctx context.Context, tutorialID string, updates map[string]interface{}) (*Tutorial, error) {
	tm.mu.Lock()
	defer tm.mu.Unlock()

	tutorial, exists := tm.tutorials[tutorialID]
	if !exists {
		return nil, fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
	}

	// Apply each recognized update; a failed type assertion skips the key.
	if title, ok := updates["title"].(string); ok {
		tutorial.Title = title
	}
	if description, ok := updates["description"].(string); ok {
		tutorial.Description = description
	}
	if author, ok := updates["author"].(string); ok {
		tutorial.Author = author
	}
	if category, ok := updates["category"].(string); ok {
		tutorial.Category = category
	}
	if videoURL, ok := updates["video_url"].(string); ok {
		tutorial.VideoURL = videoURL
	}
	if thumbnail, ok := updates["thumbnail"].(string); ok {
		tutorial.Thumbnail = thumbnail
	}
	if duration, ok := updates["duration"].(time.Duration); ok {
		tutorial.Duration = duration
	}
	if quality, ok := updates["quality"].(VideoQuality); ok {
		tutorial.Quality = quality
	}
	if isPublished, ok := updates["is_published"].(bool); ok {
		tutorial.IsPublished = isPublished
	}
	if tags, ok := updates["tags"].([]string); ok {
		tutorial.Tags = tags
	}

	tutorial.UpdatedAt = time.Now()

	tm.logger.Info("Tutoriel mis à jour",
		zap.String("tutorial_id", tutorialID),
		zap.String("title", tutorial.Title))

	return tutorial, nil
}
|
||||||
|
|
||||||
|
// DeleteTutorial supprime un tutoriel
|
||||||
|
func (tm *TutorialManager) DeleteTutorial(ctx context.Context, tutorialID string) error {
|
||||||
|
tm.mu.Lock()
|
||||||
|
defer tm.mu.Unlock()
|
||||||
|
|
||||||
|
if _, exists := tm.tutorials[tutorialID]; !exists {
|
||||||
|
return fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(tm.tutorials, tutorialID)
|
||||||
|
delete(tm.steps, tutorialID)
|
||||||
|
delete(tm.comments, tutorialID)
|
||||||
|
|
||||||
|
tm.logger.Info("Tutoriel supprimé",
|
||||||
|
zap.String("tutorial_id", tutorialID))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTutorialStep appends a new step to the tutorial identified by
// tutorialID and returns it. The step gets a fresh UUID and current
// CreatedAt/UpdatedAt timestamps.
//
// NOTE(review): the tutorialID is not validated against tm.tutorials, so a
// step can be attached to a non-existent tutorial — confirm this is intended.
func (tm *TutorialManager) AddTutorialStep(ctx context.Context, tutorialID, title, description, content string, order int, timestamp time.Duration, isFree bool) (*TutorialStep, error) {
	tm.mu.Lock()
	defer tm.mu.Unlock()

	stepID := uuid.New().String()
	step := &TutorialStep{
		ID:          stepID,
		TutorialID:  tutorialID,
		Title:       title,
		Description: description,
		Content:     content,
		Order:       order,
		Timestamp:   timestamp, // position of the step within the video
		IsFree:      isFree,
		CreatedAt:   time.Now(),
		UpdatedAt:   time.Now(),
	}

	tm.steps[tutorialID] = append(tm.steps[tutorialID], step)

	tm.logger.Info("Étape de tutoriel ajoutée",
		zap.String("tutorial_id", tutorialID),
		zap.String("step_id", stepID),
		zap.String("title", title))

	return step, nil
}
|
||||||
|
|
||||||
|
// GetTutorialSteps récupère toutes les étapes d'un tutoriel
|
||||||
|
func (tm *TutorialManager) GetTutorialSteps(ctx context.Context, tutorialID string) ([]*TutorialStep, error) {
|
||||||
|
tm.mu.RLock()
|
||||||
|
defer tm.mu.RUnlock()
|
||||||
|
|
||||||
|
steps, exists := tm.steps[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return []*TutorialStep{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return steps, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTutorialComment stores a new comment on the tutorial identified by
// tutorialID and refreshes the tutorial's average rating. The comment gets a
// fresh UUID and current timestamps; IsHelpful starts false.
//
// NOTE(review): rating is not range-checked here; updateTutorialRating only
// counts ratings > 0 — confirm whether out-of-range values should be rejected.
func (tm *TutorialManager) AddTutorialComment(ctx context.Context, tutorialID, userID, username, content string, rating int) (*TutorialComment, error) {
	tm.mu.Lock()
	defer tm.mu.Unlock()

	commentID := uuid.New().String()
	comment := &TutorialComment{
		ID:         commentID,
		TutorialID: tutorialID,
		UserID:     userID,
		Username:   username,
		Content:    content,
		Rating:     rating,
		IsHelpful:  false,
		CreatedAt:  time.Now(),
		UpdatedAt:  time.Now(),
	}

	tm.comments[tutorialID] = append(tm.comments[tutorialID], comment)

	// Recompute the tutorial's average rating (caller already holds tm.mu).
	tm.updateTutorialRating(tutorialID)

	tm.logger.Info("Commentaire ajouté",
		zap.String("tutorial_id", tutorialID),
		zap.String("comment_id", commentID),
		zap.String("username", username))

	return comment, nil
}
|
||||||
|
|
||||||
|
// GetTutorialComments récupère tous les commentaires d'un tutoriel
|
||||||
|
func (tm *TutorialManager) GetTutorialComments(ctx context.Context, tutorialID string) ([]*TutorialComment, error) {
|
||||||
|
tm.mu.RLock()
|
||||||
|
defer tm.mu.RUnlock()
|
||||||
|
|
||||||
|
comments, exists := tm.comments[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return []*TutorialComment{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return comments, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IncrementViews incrémente le nombre de vues d'un tutoriel
|
||||||
|
func (tm *TutorialManager) IncrementViews(ctx context.Context, tutorialID string) error {
|
||||||
|
tm.mu.Lock()
|
||||||
|
defer tm.mu.Unlock()
|
||||||
|
|
||||||
|
tutorial, exists := tm.tutorials[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial.Views++
|
||||||
|
tutorial.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
tm.logger.Debug("Vues incrémentées",
|
||||||
|
zap.String("tutorial_id", tutorialID),
|
||||||
|
zap.Int64("views", tutorial.Views))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// LikeTutorial ajoute un like à un tutoriel
|
||||||
|
func (tm *TutorialManager) LikeTutorial(ctx context.Context, tutorialID string) error {
|
||||||
|
tm.mu.Lock()
|
||||||
|
defer tm.mu.Unlock()
|
||||||
|
|
||||||
|
tutorial, exists := tm.tutorials[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial.Likes++
|
||||||
|
tutorial.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
tm.logger.Debug("Like ajouté",
|
||||||
|
zap.String("tutorial_id", tutorialID),
|
||||||
|
zap.Int64("likes", tutorial.Likes))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DislikeTutorial ajoute un dislike à un tutoriel
|
||||||
|
func (tm *TutorialManager) DislikeTutorial(ctx context.Context, tutorialID string) error {
|
||||||
|
tm.mu.Lock()
|
||||||
|
defer tm.mu.Unlock()
|
||||||
|
|
||||||
|
tutorial, exists := tm.tutorials[tutorialID]
|
||||||
|
if !exists {
|
||||||
|
return fmt.Errorf("tutoriel non trouvé: %s", tutorialID)
|
||||||
|
}
|
||||||
|
|
||||||
|
tutorial.Dislikes++
|
||||||
|
tutorial.UpdatedAt = time.Now()
|
||||||
|
|
||||||
|
tm.logger.Debug("Dislike ajouté",
|
||||||
|
zap.String("tutorial_id", tutorialID),
|
||||||
|
zap.Int64("dislikes", tutorial.Dislikes))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// updateTutorialRating recomputes the tutorial's average rating from its
// comments, counting only comments with Rating > 0. No-op when the tutorial
// has no comments or no rated comments.
//
// Callers must hold tm.mu (write lock); AddTutorialComment calls this while
// locked, so no locking is done here.
func (tm *TutorialManager) updateTutorialRating(tutorialID string) {
	comments, exists := tm.comments[tutorialID]
	if !exists || len(comments) == 0 {
		return
	}

	var totalRating int
	var ratedComments int

	for _, comment := range comments {
		if comment.Rating > 0 {
			totalRating += comment.Rating
			ratedComments++
		}
	}

	if ratedComments > 0 {
		tutorial, exists := tm.tutorials[tutorialID]
		if exists {
			tutorial.Rating = float64(totalRating) / float64(ratedComments)
			tutorial.UpdatedAt = time.Now()
		}
	}
}
|
||||||
|
|
||||||
|
// SearchTutorials recherche des tutoriels par mots-clés
|
||||||
|
func (tm *TutorialManager) SearchTutorials(ctx context.Context, query string, filters map[string]interface{}) ([]*Tutorial, error) {
|
||||||
|
tm.mu.RLock()
|
||||||
|
defer tm.mu.RUnlock()
|
||||||
|
|
||||||
|
var results []*Tutorial
|
||||||
|
query = fmt.Sprintf("%%%s%%", query) // Recherche LIKE
|
||||||
|
|
||||||
|
for _, tutorial := range tm.tutorials {
|
||||||
|
// Vérifier si le tutoriel correspond à la recherche
|
||||||
|
matches := false
|
||||||
|
if contains(tutorial.Title, query) || contains(tutorial.Description, query) || contains(tutorial.Author, query) {
|
||||||
|
matches = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier les tags
|
||||||
|
for _, tag := range tutorial.Tags {
|
||||||
|
if contains(tag, query) {
|
||||||
|
matches = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !matches {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer les filtres si fournis
|
||||||
|
if filters != nil {
|
||||||
|
if category, ok := filters["category"].(string); ok && tutorial.Category != category {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isPublished, ok := filters["is_published"].(bool); ok && tutorial.IsPublished != isPublished {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if isFree, ok := filters["is_free"].(bool); ok && tutorial.IsFree != isFree {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
results = append(results, tutorial)
|
||||||
|
}
|
||||||
|
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains reports whether s contains substr, ignoring ASCII case.
//
// Bug fix: the original doc comment promised case-insensitive matching
// ("insensible à la casse") but the implementation compared bytes exactly.
// The implementation now case-folds ASCII letters so it matches its
// documented contract.
func contains(s, substr string) bool {
	if len(substr) == 0 {
		return true // every string contains the empty string
	}
	if len(substr) > len(s) {
		return false
	}
	for i := 0; i+len(substr) <= len(s); i++ {
		if equalFoldASCII(s[i:i+len(substr)], substr) {
			return true
		}
	}
	return false
}

// equalFoldASCII reports whether a and b are byte-equal after folding
// ASCII upper-case letters to lower case. Non-ASCII bytes compare exactly.
func equalFoldASCII(a, b string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := 0; i < len(a); i++ {
		ca, cb := a[i], b[i]
		if 'A' <= ca && ca <= 'Z' {
			ca += 'a' - 'A'
		}
		if 'A' <= cb && cb <= 'Z' {
			cb += 'a' - 'A'
		}
		if ca != cb {
			return false
		}
	}
	return true
}
|
||||||
|
|
||||||
|
// containsSubstring reports whether substr occurs anywhere inside s
// (naive sliding-window comparison, case-sensitive).
func containsSubstring(s, substr string) bool {
	limit := len(s) - len(substr)
	for start := 0; start <= limit; start++ {
		window := s[start : start+len(substr)]
		if window == substr {
			return true
		}
	}
	return false
}
|
||||||
85
veza-backend-api/internal/core/marketplace/models.go
Normal file
85
veza-backend-api/internal/core/marketplace/models.go
Normal file
|
|
@ -0,0 +1,85 @@
|
||||||
|
package marketplace
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LicenseType identifies the kind of license sold with a product
// (basic, premium or exclusive).
type LicenseType string

// Supported license types.
const (
	LicenseBasic     LicenseType = "basic"
	LicensePremium   LicenseType = "premium"
	LicenseExclusive LicenseType = "exclusive"
)
|
||||||
|
|
||||||
|
// ProductStatus is the lifecycle state of a marketplace product
// (draft, active or archived).
type ProductStatus string

// Supported product statuses.
const (
	ProductStatusDraft    ProductStatus = "draft"
	ProductStatusActive   ProductStatus = "active"
	ProductStatusArchived ProductStatus = "archived"
)
|
||||||
|
|
||||||
|
// Product represents an item sellable on the marketplace (track, sample
// pack or service). Persisted with GORM; soft-deleted via DeletedAt.
type Product struct {
	ID          uuid.UUID     `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	SellerID    uuid.UUID     `gorm:"type:uuid;not null" json:"seller_id"`
	Title       string        `gorm:"not null;size:255" json:"title"`
	Description string        `gorm:"type:text" json:"description"`
	Price       float64       `gorm:"not null;type:decimal(10,2)" json:"price"`
	Currency    string        `gorm:"default:'EUR';size:3" json:"currency"` // ISO 4217 code, defaults to EUR
	Status      ProductStatus `gorm:"default:'draft'" json:"status"`
	ProductType string        `gorm:"not null" json:"product_type"` // "track", "pack", "service"

	// Optional link to a Track (only when ProductType == "track").
	TrackID     *uuid.UUID  `gorm:"type:uuid" json:"track_id,omitempty"`
	LicenseType LicenseType `gorm:"size:50" json:"license_type,omitempty"`

	CreatedAt time.Time      `gorm:"autoCreateTime" json:"created_at"`
	UpdatedAt time.Time      `gorm:"autoUpdateTime" json:"updated_at"`
	DeletedAt gorm.DeletedAt `gorm:"index" json:"-"` // soft delete marker, hidden from JSON
}
|
||||||
|
|
||||||
|
// License represents a license purchased by a user for a track, created when
// an order containing a track product is fulfilled.
type License struct {
	ID        uuid.UUID `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	BuyerID   uuid.UUID `gorm:"type:uuid;not null" json:"buyer_id"`
	TrackID   uuid.UUID `gorm:"type:uuid;not null" json:"track_id"`
	ProductID uuid.UUID `gorm:"type:uuid;not null" json:"product_id"`
	OrderID   uuid.UUID `gorm:"type:uuid;not null" json:"order_id"` // order that produced this license

	Type          LicenseType `gorm:"not null" json:"type"`
	Rights        string      `gorm:"type:jsonb" json:"rights"` // granted rights as a JSON document
	DownloadsLeft int         `gorm:"default:3" json:"downloads_left"` // remaining downloads (default 3)

	CreatedAt time.Time  `gorm:"autoCreateTime" json:"created_at"`
	ExpiresAt *time.Time `json:"expires_at,omitempty"` // nil means the license never expires
}
|
||||||
|
|
||||||
|
// Order represents a purchase transaction grouping one or more OrderItems.
type Order struct {
	ID            uuid.UUID `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	BuyerID       uuid.UUID `gorm:"type:uuid;not null" json:"buyer_id"`
	TotalAmount   float64   `gorm:"not null;type:decimal(10,2)" json:"total_amount"` // sum of item prices
	Currency      string    `gorm:"default:'EUR'" json:"currency"`
	Status        string    `gorm:"default:'pending'" json:"status"` // pending, paid, failed, refunded
	PaymentIntent string    `json:"payment_intent,omitempty"` // Stripe PaymentIntent ID

	// Line items; loaded via the OrderID foreign key.
	Items []OrderItem `gorm:"foreignKey:OrderID" json:"items"`

	CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"`
	UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"`
}
|
||||||
|
|
||||||
|
// OrderItem is a single line of an Order: one product at the price it was
// sold for (captured at purchase time, independent of later price changes).
type OrderItem struct {
	ID        uuid.UUID `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	OrderID   uuid.UUID `gorm:"type:uuid;not null" json:"order_id"`
	ProductID uuid.UUID `gorm:"type:uuid;not null" json:"product_id"`
	Price     float64   `gorm:"not null;type:decimal(10,2)" json:"price"`
}
|
||||||
263
veza-backend-api/internal/core/marketplace/service.go
Normal file
263
veza-backend-api/internal/core/marketplace/service.go
Normal file
|
|
@ -0,0 +1,263 @@
|
||||||
|
package marketplace
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Sentinel errors returned by the marketplace service.
// Compare with errors.Is rather than ==  against wrapped errors.
var (
	ErrProductNotFound   = errors.New("product not found")
	ErrInsufficientFunds = errors.New("insufficient funds")
	ErrOrderFailed       = errors.New("order failed processing")
	ErrInvalidSeller     = errors.New("seller does not own the track")
	ErrTrackNotFound     = errors.New("track not found")
	ErrNoLicense         = errors.New("no valid license found")
)
|
||||||
|
|
||||||
|
// NewOrderItem identifies a product to include when creating an order
// (input to CreateOrder; the price is looked up server-side).
type NewOrderItem struct {
	ProductID uuid.UUID
}
|
||||||
|
|
||||||
|
// StorageService abstracts file retrieval for purchased assets.
type StorageService interface {
	// GetDownloadURL returns a signed URL or relative path for the file
	// identified by filePath.
	GetDownloadURL(ctx context.Context, filePath string) (string, error)
}
|
||||||
|
|
||||||
|
// MarketplaceService defines the marketplace operations: product management,
// purchasing and fulfillment. Implemented by Service.
type MarketplaceService interface {
	// Product Management
	CreateProduct(ctx context.Context, product *Product) error
	GetProduct(ctx context.Context, id uuid.UUID) (*Product, error)
	ListProducts(ctx context.Context, filters map[string]interface{}) ([]Product, error)

	// Purchasing
	CreateOrder(ctx context.Context, buyerID uuid.UUID, items []NewOrderItem) (*Order, error)
	ProcessPaymentWebhook(ctx context.Context, payload []byte) error

	// Fulfillment
	GetDownloadURL(ctx context.Context, buyerID uuid.UUID, productID uuid.UUID) (string, error)
	GetUserLicenses(ctx context.Context, userID uuid.UUID) ([]License, error)
}
|
||||||
|
|
||||||
|
// Service implements MarketplaceService backed by a GORM database, a zap
// logger and a StorageService for asset delivery.
type Service struct {
	db      *gorm.DB
	logger  *zap.Logger
	storage StorageService
}
|
||||||
|
|
||||||
|
// NewService creates a new Marketplace service instance
|
||||||
|
func NewService(db *gorm.DB, logger *zap.Logger, storage StorageService) *Service {
|
||||||
|
return &Service{
|
||||||
|
db: db,
|
||||||
|
logger: logger,
|
||||||
|
storage: storage,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateProduct creates a new product listing inside a transaction.
// When the product is a track (ProductType == "track" with a TrackID), the
// linked track must exist and belong to the seller; otherwise
// ErrTrackNotFound / ErrInvalidSeller is returned and nothing is written.
func (s *Service) CreateProduct(ctx context.Context, product *Product) error {
	return s.db.Transaction(func(tx *gorm.DB) error {
		// 1. Validate track existence and ownership when the product links one.
		if product.ProductType == "track" && product.TrackID != nil {
			var track models.Track
			if err := tx.First(&track, "id = ?", product.TrackID).Error; err != nil {
				if errors.Is(err, gorm.ErrRecordNotFound) {
					return ErrTrackNotFound
				}
				return err
			}

			// The seller must own the track it is listing.
			if track.UserID != product.SellerID {
				return ErrInvalidSeller
			}
		}

		// 2. Persist the product; returning an error rolls back the transaction.
		if err := tx.Create(product).Error; err != nil {
			s.logger.Error("Failed to create product", zap.Error(err))
			return err
		}

		s.logger.Info("Product created successfully",
			zap.String("product_id", product.ID.String()),
			zap.String("seller_id", product.SellerID.String()))

		return nil
	})
}
|
||||||
|
|
||||||
|
// GetProduct retrieves a product by ID
|
||||||
|
func (s *Service) GetProduct(ctx context.Context, id uuid.UUID) (*Product, error) {
|
||||||
|
var product Product
|
||||||
|
if err := s.db.First(&product, "id = ?", id).Error; err != nil {
|
||||||
|
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||||
|
return nil, ErrProductNotFound
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &product, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListProducts retrieves products matching the given filters.
// Supported filter keys: "status" (defaults to ProductStatusActive when
// absent) and "seller_id". Values are passed straight into the WHERE clause.
func (s *Service) ListProducts(ctx context.Context, filters map[string]interface{}) ([]Product, error) {
	var products []Product
	query := s.db.Model(&Product{})

	// Default to active products so drafts/archived items stay hidden.
	if status, ok := filters["status"]; ok {
		query = query.Where("status = ?", status)
	} else {
		query = query.Where("status = ?", ProductStatusActive)
	}

	if sellerID, ok := filters["seller_id"]; ok {
		query = query.Where("seller_id = ?", sellerID)
	}

	if err := query.Find(&products).Error; err != nil {
		return nil, err
	}
	return products, nil
}
|
||||||
|
|
||||||
|
// CreateOrder initiates a purchase for buyerID covering the given items.
// The whole flow runs in one transaction: validate products → create the
// pending order → simulate payment (MVP) → generate licenses for track
// products. Any failure rolls everything back.
//
// NOTE(review): the order status is set to "completed", but the Order model
// documents the status set as pending/paid/failed/refunded — confirm which
// value downstream code expects.
func (s *Service) CreateOrder(ctx context.Context, buyerID uuid.UUID, items []NewOrderItem) (*Order, error) {
	var order *Order

	err := s.db.Transaction(func(tx *gorm.DB) error {
		totalAmount := 0.0
		var orderItems []OrderItem
		var productsToLicense []*Product

		// 1. Validate every product and accumulate the total price.
		for _, item := range items {
			var product Product
			if err := tx.First(&product, "id = ?", item.ProductID).Error; err != nil {
				if errors.Is(err, gorm.ErrRecordNotFound) {
					return fmt.Errorf("product %s not found", item.ProductID)
				}
				return err
			}

			// Only active listings can be purchased.
			if product.Status != ProductStatusActive {
				return fmt.Errorf("product %s is not active", item.ProductID)
			}

			totalAmount += product.Price
			orderItems = append(orderItems, OrderItem{
				ProductID: product.ID,
				Price:     product.Price, // capture the price at purchase time
			})
			productsToLicense = append(productsToLicense, &product)
		}

		// 2. Create the order in the pending state.
		order = &Order{
			BuyerID:     buyerID,
			TotalAmount: totalAmount,
			Currency:    "EUR", // Default for MVP
			Status:      "pending",
			Items:       orderItems,
		}

		if err := tx.Create(order).Error; err != nil {
			return err
		}

		// 3. Simulate payment (immediate success for the MVP).
		// A real implementation would pause here or call Stripe.
		order.Status = "completed"
		order.PaymentIntent = "simulated_payment_" + uuid.New().String()
		if err := tx.Save(order).Error; err != nil {
			return err
		}

		// 4. Generate a license for each purchased track product.
		for _, prod := range productsToLicense {
			if prod.ProductType == "track" && prod.TrackID != nil {
				license := License{
					BuyerID:       buyerID,
					TrackID:       *prod.TrackID,
					ProductID:     prod.ID,
					OrderID:       order.ID,
					Type:          prod.LicenseType,
					Rights:        `{"streaming": true, "download": true}`, // Default rights
					DownloadsLeft: 3,                                       // Default limit
				}
				if err := tx.Create(&license).Error; err != nil {
					return err
				}
			}
		}

		return nil
	})

	if err != nil {
		s.logger.Error("Failed to create order", zap.Error(err), zap.String("buyer_id", buyerID.String()))
		return nil, err
	}

	s.logger.Info("Order created and processed successfully", zap.String("order_id", order.ID.String()))
	return order, nil
}
|
||||||
|
|
||||||
|
// ProcessPaymentWebhook handles payment-provider confirmation callbacks.
// MVP: intentionally a no-op — payments are simulated in CreateOrder; this
// stub always succeeds.
func (s *Service) ProcessPaymentWebhook(ctx context.Context, payload []byte) error {
	// MVP: Not implemented yet
	return nil
}
|
||||||
|
|
||||||
|
// GetDownloadURL checks license and returns signed URL for the asset
|
||||||
|
func (s *Service) GetDownloadURL(ctx context.Context, buyerID uuid.UUID, productID uuid.UUID) (string, error) {
|
||||||
|
// 1. Check for valid license
|
||||||
|
var license License
|
||||||
|
err := s.db.Where("buyer_id = ? AND product_id = ? AND downloads_left > 0", buyerID, productID).
|
||||||
|
First(&license).Error
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||||
|
return "", ErrNoLicense
|
||||||
|
}
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Get Track info
|
||||||
|
var track models.Track
|
||||||
|
if err := s.db.First(&track, "id = ?", license.TrackID).Error; err != nil {
|
||||||
|
return "", ErrTrackNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Generate URL
|
||||||
|
url, err := s.storage.GetDownloadURL(ctx, track.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Decrement downloads left (Optional based on business rules)
|
||||||
|
// In strict mode we might want to decrement here
|
||||||
|
// s.db.Model(&license).Update("downloads_left", gorm.Expr("downloads_left - 1"))
|
||||||
|
|
||||||
|
return url, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserLicenses returns all licenses owned by a user
|
||||||
|
func (s *Service) GetUserLicenses(ctx context.Context, userID uuid.UUID) ([]License, error) {
|
||||||
|
var licenses []License
|
||||||
|
if err := s.db.Where("buyer_id = ?", userID).Find(&licenses).Error; err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return licenses, nil
|
||||||
|
}
|
||||||
86
veza-backend-api/internal/core/social/models.go
Normal file
86
veza-backend-api/internal/core/social/models.go
Normal file
|
|
@ -0,0 +1,86 @@
|
||||||
|
package social
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PostType identifies the kind of social post.
type PostType string

// Supported post types.
const (
	PostTypeStatus   PostType = "status"
	PostTypeShare    PostType = "share"
	PostTypeRelease  PostType = "release"
	PostTypeActivity PostType = "activity" // automatic activity posts (e.g. a purchase)
)
|
||||||
|
|
||||||
|
// Post represents a user's social publication. Persisted with GORM;
// soft-deleted via DeletedAt.
type Post struct {
	ID      uuid.UUID `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	UserID  uuid.UUID `gorm:"type:uuid;not null;index" json:"user_id"`
	Content string    `gorm:"type:text" json:"content"`
	Type    PostType  `gorm:"default:'status'" json:"type"`

	// Optional attachments — at most one of these is expected to be set.
	TrackID    *uuid.UUID `gorm:"type:uuid" json:"track_id,omitempty"`
	PlaylistID *uuid.UUID `gorm:"type:uuid" json:"playlist_id,omitempty"`

	// Cached engagement metrics (denormalized counters).
	LikeCount    int `gorm:"default:0" json:"like_count"`
	CommentCount int `gorm:"default:0" json:"comment_count"`

	CreatedAt time.Time      `gorm:"autoCreateTime;index" json:"created_at"`
	UpdatedAt time.Time      `gorm:"autoUpdateTime" json:"updated_at"`
	DeletedAt gorm.DeletedAt `gorm:"index" json:"-"` // soft delete marker
}
|
||||||
|
|
||||||
|
// Like represents a "like" interaction on an arbitrary target.
// Polymorphic association via TargetType + TargetID.
type Like struct {
	ID         uuid.UUID `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	UserID     uuid.UUID `gorm:"type:uuid;not null;index" json:"user_id"`
	TargetID   uuid.UUID `gorm:"type:uuid;not null;index" json:"target_id"`
	TargetType string    `gorm:"not null" json:"target_type"` // "post", "track", "playlist"
	CreatedAt  time.Time `gorm:"autoCreateTime" json:"created_at"`
}
|
||||||
|
|
||||||
|
// Comment represents a user comment on an arbitrary target
// (polymorphic via TargetType + TargetID). Soft-deleted via DeletedAt.
type Comment struct {
	ID         uuid.UUID      `gorm:"type:uuid;primaryKey;default:gen_random_uuid()" json:"id"`
	UserID     uuid.UUID      `gorm:"type:uuid;not null;index" json:"user_id"`
	TargetID   uuid.UUID      `gorm:"type:uuid;not null;index" json:"target_id"`
	TargetType string         `gorm:"not null" json:"target_type"` // "post", "track", "playlist"
	Content    string         `gorm:"type:text;not null" json:"content"`
	CreatedAt  time.Time      `gorm:"autoCreateTime" json:"created_at"`
	UpdatedAt  time.Time      `gorm:"autoUpdateTime" json:"updated_at"`
	DeletedAt  gorm.DeletedAt `gorm:"index" json:"-"` // soft delete marker
}
|
||||||
|
|
||||||
|
// ActivityType identifies the kind of activity shown in a feed.
type ActivityType string

// Supported activity types.
const (
	ActivityPost     ActivityType = "post"
	ActivityLike     ActivityType = "like"
	ActivityComment  ActivityType = "comment"
	ActivityFollow   ActivityType = "follow"
	ActivityPurchase ActivityType = "purchase" // Nouveau
)
|
||||||
|
|
||||||
|
// FeedItem is an aggregated, denormalized entry of the activity feed
// (not persisted — built on the fly from posts and activities).
type FeedItem struct {
	ID         string       `json:"id"` // e.g. "post:<uuid>"
	Type       ActivityType `json:"type"`
	ActorID    uuid.UUID    `json:"actor_id"`
	TargetID   uuid.UUID    `json:"target_id"`
	TargetType string       `json:"target_type"`
	Content    string       `json:"content,omitempty"`
	CreatedAt  time.Time    `json:"created_at"`

	// Embedded actor details for display, filled when available.
	ActorName   string `json:"actor_name,omitempty"`
	ActorAvatar string `json:"actor_avatar,omitempty"`
}
|
||||||
205
veza-backend-api/internal/core/social/service.go
Normal file
205
veza-backend-api/internal/core/social/service.go
Normal file
|
|
@ -0,0 +1,205 @@
|
||||||
|
package social
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SocialService manages social interactions: posts, feeds, likes and
// comments. Implemented by Service.
type SocialService interface {
	CreatePost(ctx context.Context, userID uuid.UUID, content string, attachments map[string]uuid.UUID) (*Post, error)
	GetGlobalFeed(ctx context.Context, limit, offset int) ([]FeedItem, error)
	GetUserFeed(ctx context.Context, userID uuid.UUID, limit, offset int) ([]FeedItem, error)

	// Interactions
	ToggleLike(ctx context.Context, userID uuid.UUID, targetID uuid.UUID, targetType string) (bool, error)
	AddComment(ctx context.Context, userID uuid.UUID, targetID uuid.UUID, targetType string, content string) (*Comment, error)

	// Internal: used by other services to publish automatic activity posts.
	CreateActivityPost(ctx context.Context, userID uuid.UUID, content string, meta map[string]interface{}) error
}
|
||||||
|
|
||||||
|
// Service implements SocialService backed by a GORM database and a zap logger.
type Service struct {
	db     *gorm.DB
	logger *zap.Logger
}
|
||||||
|
|
||||||
|
// NewService crée une nouvelle instance du service social
|
||||||
|
func NewService(db *gorm.DB, logger *zap.Logger) *Service {
|
||||||
|
return &Service{
|
||||||
|
db: db,
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatePost crée une nouvelle publication
|
||||||
|
func (s *Service) CreatePost(ctx context.Context, userID uuid.UUID, content string, attachments map[string]uuid.UUID) (*Post, error) {
|
||||||
|
post := &Post{
|
||||||
|
UserID: userID,
|
||||||
|
Content: content,
|
||||||
|
Type: PostTypeStatus,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle attachments
|
||||||
|
if trackID, ok := attachments["track_id"]; ok {
|
||||||
|
post.TrackID = &trackID
|
||||||
|
post.Type = PostTypeShare
|
||||||
|
}
|
||||||
|
if playlistID, ok := attachments["playlist_id"]; ok {
|
||||||
|
post.PlaylistID = &playlistID
|
||||||
|
post.Type = PostTypeShare
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.Create(post).Error; err != nil {
|
||||||
|
s.logger.Error("Failed to create post", zap.Error(err), zap.String("user_id", userID.String()))
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return post, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetGlobalFeed récupère un flux d'activité global
|
||||||
|
func (s *Service) GetGlobalFeed(ctx context.Context, limit, offset int) ([]FeedItem, error) {
|
||||||
|
var posts []Post
|
||||||
|
if err := s.db.Order("created_at desc").Limit(limit).Offset(offset).Find(&posts).Error; err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var feed []FeedItem
|
||||||
|
for _, p := range posts {
|
||||||
|
targetType := "none"
|
||||||
|
targetID := uuid.Nil
|
||||||
|
|
||||||
|
if p.TrackID != nil {
|
||||||
|
targetType = "track"
|
||||||
|
targetID = *p.TrackID
|
||||||
|
} else if p.PlaylistID != nil {
|
||||||
|
targetType = "playlist"
|
||||||
|
targetID = *p.PlaylistID
|
||||||
|
}
|
||||||
|
|
||||||
|
item := FeedItem{
|
||||||
|
ID: fmt.Sprintf("post:%s", p.ID.String()),
|
||||||
|
Type: ActivityPost,
|
||||||
|
ActorID: p.UserID,
|
||||||
|
TargetID: targetID,
|
||||||
|
TargetType: targetType,
|
||||||
|
Content: p.Content,
|
||||||
|
CreatedAt: p.CreatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spécial pour les activités automatiques
|
||||||
|
if p.Type == PostTypeActivity {
|
||||||
|
item.Type = ActivityPurchase // Ou autre logique plus fine
|
||||||
|
}
|
||||||
|
|
||||||
|
feed = append(feed, item)
|
||||||
|
}
|
||||||
|
|
||||||
|
return feed, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserFeed récupère le flux d'un utilisateur
|
||||||
|
func (s *Service) GetUserFeed(ctx context.Context, userID uuid.UUID, limit, offset int) ([]FeedItem, error) {
|
||||||
|
var posts []Post
|
||||||
|
if err := s.db.Where("user_id = ?", userID).Order("created_at desc").Limit(limit).Offset(offset).Find(&posts).Error; err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var feed []FeedItem
|
||||||
|
for _, p := range posts {
|
||||||
|
item := FeedItem{
|
||||||
|
ID: fmt.Sprintf("post:%s", p.ID.String()),
|
||||||
|
Type: ActivityPost,
|
||||||
|
ActorID: p.UserID,
|
||||||
|
Content: p.Content,
|
||||||
|
CreatedAt: p.CreatedAt,
|
||||||
|
TargetType: "user_wall",
|
||||||
|
}
|
||||||
|
feed = append(feed, item)
|
||||||
|
}
|
||||||
|
|
||||||
|
return feed, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToggleLike ajoute ou supprime un like
|
||||||
|
func (s *Service) ToggleLike(ctx context.Context, userID uuid.UUID, targetID uuid.UUID, targetType string) (bool, error) {
|
||||||
|
var like Like
|
||||||
|
err := s.db.Where("user_id = ? AND target_id = ? AND target_type = ?", userID, targetID, targetType).First(&like).Error
|
||||||
|
|
||||||
|
if err == nil {
|
||||||
|
// Like existe, on le supprime (Unlike)
|
||||||
|
if err := s.db.Delete(&like).Error; err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Décrémenter le compteur si c'est un post
|
||||||
|
if targetType == "post" {
|
||||||
|
s.db.Model(&Post{}).Where("id = ?", targetID).Update("like_count", gorm.Expr("like_count - 1"))
|
||||||
|
}
|
||||||
|
|
||||||
|
return false, nil // Liked = false
|
||||||
|
} else if err == gorm.ErrRecordNotFound {
|
||||||
|
// Like n'existe pas, on le crée
|
||||||
|
like = Like{
|
||||||
|
UserID: userID,
|
||||||
|
TargetID: targetID,
|
||||||
|
TargetType: targetType,
|
||||||
|
}
|
||||||
|
if err := s.db.Create(&like).Error; err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Incrémenter le compteur si c'est un post
|
||||||
|
if targetType == "post" {
|
||||||
|
s.db.Model(&Post{}).Where("id = ?", targetID).Update("like_count", gorm.Expr("like_count + 1"))
|
||||||
|
}
|
||||||
|
|
||||||
|
return true, nil // Liked = true
|
||||||
|
} else {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddComment ajoute un commentaire
|
||||||
|
func (s *Service) AddComment(ctx context.Context, userID uuid.UUID, targetID uuid.UUID, targetType string, content string) (*Comment, error) {
|
||||||
|
comment := &Comment{
|
||||||
|
UserID: userID,
|
||||||
|
TargetID: targetID,
|
||||||
|
TargetType: targetType,
|
||||||
|
Content: content,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.Create(comment).Error; err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Incrémenter le compteur si c'est un post
|
||||||
|
if targetType == "post" {
|
||||||
|
s.db.Model(&Post{}).Where("id = ?", targetID).Update("comment_count", gorm.Expr("comment_count + 1"))
|
||||||
|
}
|
||||||
|
|
||||||
|
return comment, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateActivityPost crée un post automatique pour une activité (ex: Achat)
|
||||||
|
func (s *Service) CreateActivityPost(ctx context.Context, userID uuid.UUID, content string, meta map[string]interface{}) error {
|
||||||
|
post := &Post{
|
||||||
|
UserID: userID,
|
||||||
|
Content: content,
|
||||||
|
Type: PostTypeActivity,
|
||||||
|
}
|
||||||
|
|
||||||
|
if trackIDStr, ok := meta["track_id"].(string); ok {
|
||||||
|
if trackID, err := uuid.Parse(trackIDStr); err == nil {
|
||||||
|
post.TrackID = &trackID
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return s.db.Create(post).Error
|
||||||
|
}
|
||||||
1403
veza-backend-api/internal/core/track/handler.go
Normal file
1403
veza-backend-api/internal/core/track/handler.go
Normal file
File diff suppressed because it is too large
Load diff
933
veza-backend-api/internal/core/track/service.go
Normal file
933
veza-backend-api/internal/core/track/service.go
Normal file
|
|
@ -0,0 +1,933 @@
|
||||||
|
package track
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"mime/multipart"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings" // Removed strconv
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
"veza-backend-api/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Per-user quota limits enforced at upload time (see CheckUserQuota).
const (
	MaxTracksPerUser  = 1000                       // maximum number of tracks a single user may own
	MaxStoragePerUser = 100 * 1024 * 1024 * 1024 // 100GB of total track storage per user
)
|
||||||
|
|
||||||
|
// Sentinel errors for track operations. Callers should match them with
// errors.Is, since the service wraps them with contextual detail.
var (
	// ErrInvalidTrackFormat is returned when the file's extension or magic
	// number does not match a supported audio format.
	ErrInvalidTrackFormat = errors.New("invalid track format")
	// ErrTrackTooLarge is returned when the file exceeds the maximum size.
	ErrTrackTooLarge = errors.New("track file too large")
	// ErrTrackQuotaExceeded is returned when the user has reached the
	// per-user track-count quota.
	ErrTrackQuotaExceeded = errors.New("track quota exceeded")
	// ErrStorageQuotaExceeded is returned when the user has reached the
	// per-user storage quota.
	ErrStorageQuotaExceeded = errors.New("storage quota exceeded")
	// ErrTrackNotFound is returned when a track does not exist.
	ErrTrackNotFound = errors.New("track not found")
	// ErrNetworkError is returned on network failures (timeout, dropped
	// connection) while receiving an upload.
	ErrNetworkError = errors.New("network error")
	// ErrStorageError is returned on local storage failures.
	ErrStorageError = errors.New("storage error")
	// ErrForbidden is returned when the user lacks permission for the action.
	ErrForbidden = errors.New("forbidden")
)
|
||||||
|
|
||||||
|
// TrackService handles track operations: upload, validation, quota
// enforcement, listing, metadata updates and deletion.
type TrackService struct {
	db          *gorm.DB    // database handle for track persistence
	logger      *zap.Logger // structured logger
	uploadDir   string      // directory where uploaded audio files are stored
	maxFileSize int64       // maximum accepted upload size in bytes
}
|
||||||
|
|
||||||
|
// NewTrackService crée un nouveau service de tracks
|
||||||
|
func NewTrackService(db *gorm.DB, logger *zap.Logger, uploadDir string) *TrackService {
|
||||||
|
if uploadDir == "" {
|
||||||
|
uploadDir = "uploads/tracks"
|
||||||
|
}
|
||||||
|
return &TrackService{
|
||||||
|
db: db,
|
||||||
|
logger: logger,
|
||||||
|
uploadDir: uploadDir,
|
||||||
|
maxFileSize: 100 * 1024 * 1024, // 100MB
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateTrackFile valide le format et la taille d'un fichier audio
|
||||||
|
func (s *TrackService) ValidateTrackFile(fileHeader *multipart.FileHeader) error {
|
||||||
|
// Valider la taille
|
||||||
|
if fileHeader.Size > s.maxFileSize {
|
||||||
|
return fmt.Errorf("%w: file size exceeds maximum allowed size of 100MB", ErrTrackTooLarge)
|
||||||
|
}
|
||||||
|
|
||||||
|
if fileHeader.Size == 0 {
|
||||||
|
return fmt.Errorf("%w: file is empty", ErrInvalidTrackFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider l'extension
|
||||||
|
ext := strings.ToLower(filepath.Ext(fileHeader.Filename))
|
||||||
|
allowedExtensions := []string{".mp3", ".flac", ".wav", ".ogg", ".m4a", ".aac"}
|
||||||
|
isValidExt := false
|
||||||
|
for _, allowedExt := range allowedExtensions {
|
||||||
|
if ext == allowedExt {
|
||||||
|
isValidExt = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !isValidExt {
|
||||||
|
return fmt.Errorf("%w: invalid file format. Allowed formats: MP3, FLAC, WAV, OGG", ErrInvalidTrackFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider le type MIME en ouvrant le fichier
|
||||||
|
file, err := fileHeader.Open()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to open file: %w", err)
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
// Lire les premiers bytes pour vérifier le magic number
|
||||||
|
header := make([]byte, 12)
|
||||||
|
n, err := file.Read(header)
|
||||||
|
if err != nil && err != io.EOF {
|
||||||
|
return fmt.Errorf("failed to read file header: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if n < 4 {
|
||||||
|
return fmt.Errorf("file too small to validate")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier les magic numbers pour les formats audio
|
||||||
|
isValidFormat := false
|
||||||
|
headerStr := string(header[:n])
|
||||||
|
|
||||||
|
// MP3: ID3v2 (starts with "ID3") or MPEG frame sync (0xFF 0xFB/E/F)
|
||||||
|
if strings.HasPrefix(headerStr, "ID3") || (header[0] == 0xFF && (header[1]&0xE0) == 0xE0) {
|
||||||
|
isValidFormat = true
|
||||||
|
}
|
||||||
|
// FLAC: "fLaC"
|
||||||
|
if strings.HasPrefix(headerStr, "fLaC") {
|
||||||
|
isValidFormat = true
|
||||||
|
}
|
||||||
|
// WAV: "RIFF" followed by "WAVE"
|
||||||
|
if strings.HasPrefix(headerStr, "RIFF") && len(headerStr) >= 12 && string(header[8:12]) == "WAVE" {
|
||||||
|
isValidFormat = true
|
||||||
|
}
|
||||||
|
// OGG: "OggS"
|
||||||
|
if strings.HasPrefix(headerStr, "OggS") {
|
||||||
|
isValidFormat = true
|
||||||
|
}
|
||||||
|
// M4A/AAC: "ftyp" avec "M4A" ou "mp4"
|
||||||
|
if strings.Contains(headerStr, "ftyp") && (strings.Contains(headerStr, "M4A") || strings.Contains(headerStr, "mp4")) {
|
||||||
|
isValidFormat = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if !isValidFormat {
|
||||||
|
return fmt.Errorf("%w: invalid audio file format", ErrInvalidTrackFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UploadTrack upload un fichier audio et crée un enregistrement Track en base
|
||||||
|
func (s *TrackService) UploadTrack(ctx context.Context, userID uuid.UUID, fileHeader *multipart.FileHeader) (*models.Track, error) {
|
||||||
|
// Vérifier le quota utilisateur
|
||||||
|
if err := s.CheckUserQuota(ctx, userID, fileHeader.Size); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider le fichier
|
||||||
|
if err := s.ValidateTrackFile(fileHeader); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer le répertoire d'upload s'il n'existe pas
|
||||||
|
if err := os.MkdirAll(s.uploadDir, 0755); err != nil {
|
||||||
|
return nil, fmt.Errorf("%w: failed to create upload directory: %w", ErrStorageError, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Générer un nom de fichier unique
|
||||||
|
timestamp := uuid.New()
|
||||||
|
ext := filepath.Ext(fileHeader.Filename)
|
||||||
|
filename := fmt.Sprintf("%d_%d%s", userID, timestamp, ext)
|
||||||
|
filePath := filepath.Join(s.uploadDir, filename)
|
||||||
|
|
||||||
|
// Ouvrir le fichier source
|
||||||
|
src, err := fileHeader.Open()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("%w: failed to open uploaded file: %w", ErrNetworkError, err)
|
||||||
|
}
|
||||||
|
defer src.Close()
|
||||||
|
|
||||||
|
// Créer le fichier de destination
|
||||||
|
dst, err := os.Create(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create destination file: %w", err)
|
||||||
|
}
|
||||||
|
defer dst.Close()
|
||||||
|
|
||||||
|
// Copier le fichier avec gestion d'erreur réseau
|
||||||
|
if _, err := io.Copy(dst, src); err != nil {
|
||||||
|
os.Remove(filePath) // Nettoyer en cas d'erreur
|
||||||
|
// Vérifier si c'est une erreur réseau (timeout, connexion fermée, etc.)
|
||||||
|
if strings.Contains(err.Error(), "timeout") || strings.Contains(err.Error(), "connection") {
|
||||||
|
return nil, fmt.Errorf("%w: failed to save file: %w", ErrNetworkError, err)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("%w: failed to save file: %w", ErrStorageError, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Déterminer le format depuis l'extension
|
||||||
|
format := strings.TrimPrefix(strings.ToUpper(ext), ".")
|
||||||
|
if format == "M4A" {
|
||||||
|
format = "AAC"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extraire le titre depuis le nom de fichier (sans extension)
|
||||||
|
title := strings.TrimSuffix(fileHeader.Filename, ext)
|
||||||
|
|
||||||
|
// Créer l'enregistrement Track en base
|
||||||
|
track := &models.Track{
|
||||||
|
UserID: userID,
|
||||||
|
Title: title,
|
||||||
|
FilePath: filePath,
|
||||||
|
FileSize: fileHeader.Size,
|
||||||
|
Format: format,
|
||||||
|
Duration: 0, // Sera mis à jour lors du traitement asynchrone
|
||||||
|
IsPublic: true,
|
||||||
|
Status: models.TrackStatusUploading,
|
||||||
|
StatusMessage: "Upload started",
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.WithContext(ctx).Create(track).Error; err != nil {
|
||||||
|
os.Remove(filePath) // Nettoyer en cas d'erreur
|
||||||
|
return nil, fmt.Errorf("failed to create track record: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track uploaded successfully",
|
||||||
|
zap.String("track_id", track.ID.String()),
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.String("filename", filename),
|
||||||
|
zap.Int64("file_size", fileHeader.Size),
|
||||||
|
)
|
||||||
|
|
||||||
|
// TODO(P2-GO-018): Enqueue job pour traitement asynchrone (metadata, waveform, etc.) selon ORIGIN_ASYNC_PROCESSING
|
||||||
|
// jobService.EnqueueTrackProcessing(ctx, track.ID, filePath)
|
||||||
|
|
||||||
|
return track, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateTrackFromPath crée un track à partir d'un fichier déjà sauvegardé
|
||||||
|
func (s *TrackService) CreateTrackFromPath(ctx context.Context, userID uuid.UUID, filePath, filename string, fileSize int64, format string) (*models.Track, error) {
|
||||||
|
ext := filepath.Ext(filename)
|
||||||
|
title := strings.TrimSuffix(filename, ext)
|
||||||
|
|
||||||
|
track := &models.Track{
|
||||||
|
UserID: userID,
|
||||||
|
Title: title,
|
||||||
|
FilePath: filePath,
|
||||||
|
FileSize: fileSize,
|
||||||
|
Format: format,
|
||||||
|
Duration: 0, // Sera mis à jour lors du traitement asynchrone
|
||||||
|
IsPublic: true,
|
||||||
|
Status: models.TrackStatusUploading,
|
||||||
|
StatusMessage: "Upload completed",
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.WithContext(ctx).Create(track).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create track record: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track created from path",
|
||||||
|
zap.String("track_id", track.ID.String()),
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.String("file_path", filePath),
|
||||||
|
zap.Int64("file_size", fileSize),
|
||||||
|
)
|
||||||
|
|
||||||
|
return track, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UserQuota reports a user's current usage against the per-user limits
// (see MaxTracksPerUser / MaxStoragePerUser).
type UserQuota struct {
	TracksCount  int64 `json:"tracks_count"`  // number of tracks the user currently owns
	TracksLimit  int64 `json:"tracks_limit"`  // maximum allowed number of tracks
	StorageUsed  int64 `json:"storage_used"`  // bytes of storage currently used
	StorageLimit int64 `json:"storage_limit"` // maximum allowed storage in bytes
}
|
||||||
|
|
||||||
|
// CheckUserQuota vérifie si l'utilisateur peut uploader un fichier selon son quota
|
||||||
|
func (s *TrackService) CheckUserQuota(ctx context.Context, userID uuid.UUID, fileSize int64) error {
|
||||||
|
var trackCount int64
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.Track{}).Where("user_id = ?", userID).Count(&trackCount).Error; err != nil {
|
||||||
|
return fmt.Errorf("failed to check track count: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if trackCount >= MaxTracksPerUser {
|
||||||
|
return ErrTrackQuotaExceeded
|
||||||
|
}
|
||||||
|
|
||||||
|
var totalSize int64
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.Track{}).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Select("COALESCE(SUM(file_size), 0)").
|
||||||
|
Scan(&totalSize).Error; err != nil {
|
||||||
|
return fmt.Errorf("failed to check storage usage: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if totalSize+fileSize > MaxStoragePerUser {
|
||||||
|
return ErrStorageQuotaExceeded
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserQuota récupère les informations de quota d'un utilisateur
|
||||||
|
func (s *TrackService) GetUserQuota(ctx context.Context, userID uuid.UUID) (*UserQuota, error) {
|
||||||
|
var trackCount int64
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.Track{}).Where("user_id = ?", userID).Count(&trackCount).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get track count: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var totalSize int64
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.Track{}).
|
||||||
|
Where("user_id = ?", userID).
|
||||||
|
Select("COALESCE(SUM(file_size), 0)").
|
||||||
|
Scan(&totalSize).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get storage usage: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &UserQuota{
|
||||||
|
TracksCount: trackCount,
|
||||||
|
TracksLimit: MaxTracksPerUser,
|
||||||
|
StorageUsed: totalSize,
|
||||||
|
StorageLimit: MaxStoragePerUser,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TrackListParams carries pagination, filtering and sorting options for
// ListTracks. Nil pointer filters are ignored.
type TrackListParams struct {
	Page      int         // 1-based page number; values <= 0 default to 1
	Limit     int         // page size; <= 0 defaults to 20, capped at 100
	UserID    *uuid.UUID  // optional: restrict to tracks owned by this user
	Genre     *string     // optional: restrict to this genre
	Format    *string     // optional: restrict to this audio format
	SortBy    string      // "created_at", "title", "popularity"; others fall back to "created_at"
	SortOrder string      // "asc" for ascending; anything else sorts descending
}
|
||||||
|
|
||||||
|
// ListTracks récupère une liste de tracks avec pagination, filtres et tri
|
||||||
|
func (s *TrackService) ListTracks(ctx context.Context, params TrackListParams) ([]*models.Track, int64, error) {
|
||||||
|
// Créer la requête de base avec filtre sur le statut
|
||||||
|
query := s.db.WithContext(ctx).Model(&models.Track{}).Where("status = ?", models.TrackStatusCompleted)
|
||||||
|
|
||||||
|
// Appliquer les filtres
|
||||||
|
if params.UserID != nil {
|
||||||
|
query = query.Where("user_id = ?", *params.UserID)
|
||||||
|
}
|
||||||
|
if params.Genre != nil && *params.Genre != "" {
|
||||||
|
query = query.Where("genre = ?", *params.Genre)
|
||||||
|
}
|
||||||
|
if params.Format != nil && *params.Format != "" {
|
||||||
|
query = query.Where("format = ?", *params.Format)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compter le total avant pagination
|
||||||
|
var total int64
|
||||||
|
if err := query.Count(&total).Error; err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to count tracks: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer le tri
|
||||||
|
sortOrder := "DESC"
|
||||||
|
if params.SortOrder == "asc" {
|
||||||
|
sortOrder = "ASC"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider et appliquer SortBy
|
||||||
|
sortBy := params.SortBy
|
||||||
|
if sortBy == "" {
|
||||||
|
sortBy = "created_at"
|
||||||
|
}
|
||||||
|
// Sécurité: valider que sortBy est un champ valide
|
||||||
|
validSortFields := map[string]bool{
|
||||||
|
"created_at": true,
|
||||||
|
"title": true,
|
||||||
|
"popularity": true,
|
||||||
|
}
|
||||||
|
if !validSortFields[sortBy] {
|
||||||
|
sortBy = "created_at"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pour "popularity", on utilise play_count + like_count
|
||||||
|
if sortBy == "popularity" {
|
||||||
|
query = query.Order(fmt.Sprintf("(play_count + like_count) %s", sortOrder))
|
||||||
|
} else {
|
||||||
|
query = query.Order(fmt.Sprintf("%s %s", sortBy, sortOrder))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer la pagination
|
||||||
|
if params.Limit <= 0 {
|
||||||
|
params.Limit = 20 // Par défaut
|
||||||
|
}
|
||||||
|
if params.Limit > 100 {
|
||||||
|
params.Limit = 100 // Maximum
|
||||||
|
}
|
||||||
|
if params.Page <= 0 {
|
||||||
|
params.Page = 1
|
||||||
|
}
|
||||||
|
offset := (params.Page - 1) * params.Limit
|
||||||
|
query = query.Offset(offset).Limit(params.Limit)
|
||||||
|
|
||||||
|
// Exécuter la requête
|
||||||
|
var tracks []*models.Track
|
||||||
|
if err := query.Find(&tracks).Error; err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to list tracks: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tracks, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTrackByID récupère un track par son ID
|
||||||
|
func (s *TrackService) GetTrackByID(ctx context.Context, trackID uuid.UUID) (*models.Track, error) { // Changed trackID to uuid.UUID
|
||||||
|
var track models.Track
|
||||||
|
if err := s.db.WithContext(ctx).First(&track, "id = ?", trackID).Error; err != nil { // Updated query
|
||||||
|
if err == gorm.ErrRecordNotFound {
|
||||||
|
return nil, ErrTrackNotFound
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get track: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &track, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTrackParams carries the optional metadata changes for UpdateTrack.
// Nil fields are left untouched; non-nil fields overwrite the stored value.
type UpdateTrackParams struct {
	Title    *string `json:"title"`     // must be non-empty when set
	Artist   *string `json:"artist"`
	Album    *string `json:"album"`
	Genre    *string `json:"genre"`
	Year     *int    `json:"year"`      // must be >= 0 when set
	IsPublic *bool   `json:"is_public"`
}
|
||||||
|
|
||||||
|
// UpdateTrack met à jour les métadonnées d'un track
|
||||||
|
func (s *TrackService) UpdateTrack(ctx context.Context, trackID uuid.UUID, userID uuid.UUID, params UpdateTrackParams) (*models.Track, error) { // Changed trackID to uuid.UUID
|
||||||
|
// Récupérer le track existant
|
||||||
|
track, err := s.GetTrackByID(ctx, trackID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier que l'utilisateur est propriétaire du track
|
||||||
|
if track.UserID != userID {
|
||||||
|
return nil, ErrForbidden
|
||||||
|
}
|
||||||
|
|
||||||
|
// Construire les mises à jour
|
||||||
|
updates := make(map[string]interface{})
|
||||||
|
if params.Title != nil {
|
||||||
|
if *params.Title == "" {
|
||||||
|
return nil, fmt.Errorf("title cannot be empty")
|
||||||
|
}
|
||||||
|
updates["title"] = *params.Title
|
||||||
|
}
|
||||||
|
if params.Artist != nil {
|
||||||
|
updates["artist"] = *params.Artist
|
||||||
|
}
|
||||||
|
if params.Album != nil {
|
||||||
|
updates["album"] = *params.Album
|
||||||
|
}
|
||||||
|
if params.Genre != nil {
|
||||||
|
updates["genre"] = *params.Genre
|
||||||
|
}
|
||||||
|
if params.Year != nil {
|
||||||
|
if *params.Year < 0 {
|
||||||
|
return nil, fmt.Errorf("year cannot be negative")
|
||||||
|
}
|
||||||
|
updates["year"] = *params.Year
|
||||||
|
}
|
||||||
|
if params.IsPublic != nil {
|
||||||
|
updates["is_public"] = *params.IsPublic
|
||||||
|
}
|
||||||
|
|
||||||
|
// Si aucune mise à jour n'est demandée
|
||||||
|
if len(updates) == 0 {
|
||||||
|
return track, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer les mises à jour
|
||||||
|
if err := s.db.WithContext(ctx).Model(track).Updates(updates).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to update track: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recharger le track pour obtenir les valeurs mises à jour
|
||||||
|
updatedTrack, err := s.GetTrackByID(ctx, trackID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track updated",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.Any("updates", updates),
|
||||||
|
)
|
||||||
|
|
||||||
|
return updatedTrack, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteTrack supprime un track et son fichier physique
|
||||||
|
func (s *TrackService) DeleteTrack(ctx context.Context, trackID uuid.UUID, userID uuid.UUID) error { // Changed trackID to uuid.UUID
|
||||||
|
// Récupérer le track existant
|
||||||
|
track, err := s.GetTrackByID(ctx, trackID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier que l'utilisateur est propriétaire du track
|
||||||
|
if track.UserID != userID {
|
||||||
|
return ErrForbidden
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer le fichier physique
|
||||||
|
if track.FilePath != "" {
|
||||||
|
if err := os.Remove(track.FilePath); err != nil && !os.IsNotExist(err) {
|
||||||
|
s.logger.Warn("Failed to delete track file",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("file_path", track.FilePath),
|
||||||
|
zap.Error(err),
|
||||||
|
)
|
||||||
|
// On continue même si la suppression du fichier échoue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer les fichiers associés (waveform, cover art)
|
||||||
|
if track.WaveformPath != "" {
|
||||||
|
if err := os.Remove(track.WaveformPath); err != nil && !os.IsNotExist(err) {
|
||||||
|
s.logger.Warn("Failed to delete waveform file",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("waveform_path", track.WaveformPath),
|
||||||
|
zap.Error(err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if track.CoverArtPath != "" {
|
||||||
|
if err := os.Remove(track.CoverArtPath); err != nil && !os.IsNotExist(err) {
|
||||||
|
s.logger.Warn("Failed to delete cover art file",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("cover_art_path", track.CoverArtPath),
|
||||||
|
zap.Error(err),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer de la base de données
|
||||||
|
// GORM gérera automatiquement les relations en cascade grâce aux contraintes OnDelete:CASCADE
|
||||||
|
if err := s.db.WithContext(ctx).Delete(track).Error; err != nil {
|
||||||
|
return fmt.Errorf("failed to delete track: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track deleted",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.String("file_path", track.FilePath),
|
||||||
|
)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateStreamStatus updates the stream status and manifest URL of a track
|
||||||
|
func (s *TrackService) UpdateStreamStatus(ctx context.Context, trackID uuid.UUID, status string, manifestURL string) error { // Changed trackID to uuid.UUID
|
||||||
|
updates := map[string]interface{}{
|
||||||
|
"stream_status": status,
|
||||||
|
}
|
||||||
|
if manifestURL != "" {
|
||||||
|
updates["stream_manifest_url"] = manifestURL
|
||||||
|
}
|
||||||
|
|
||||||
|
if status == "ready" {
|
||||||
|
updates["status"] = models.TrackStatusCompleted
|
||||||
|
updates["status_message"] = "Ready for streaming"
|
||||||
|
} else if status == "error" {
|
||||||
|
updates["status"] = models.TrackStatusFailed
|
||||||
|
updates["status_message"] = "Transcoding failed"
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.Track{}).Where("id = ?", trackID).Updates(updates).Error; err != nil {
|
||||||
|
return fmt.Errorf("failed to update stream status: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track stream status updated",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("status", status),
|
||||||
|
zap.String("manifest_url", manifestURL),
|
||||||
|
)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// TrackStats describes aggregate engagement statistics for a track.
// NOTE(review): GetTrackStats below returns types.TrackStats, not this
// type — this local definition may be dead code; confirm before removing.
type TrackStats struct {
	Views         int64 `json:"views"`           // number of recorded plays
	Likes         int64 `json:"likes"`           // number of likes
	Comments      int64 `json:"comments"`        // number of comments
	TotalPlayTime int64 `json:"total_play_time"` // accumulated play duration in seconds
	Downloads     int64 `json:"downloads"`       // accesses via share links that permit download
}
|
||||||
|
|
||||||
|
// GetTrackStats récupère les statistiques d'un track
|
||||||
|
func (s *TrackService) GetTrackStats(ctx context.Context, trackID uuid.UUID) (*types.TrackStats, error) { // Changed trackID to uuid.UUID
|
||||||
|
// Vérifier que le track existe
|
||||||
|
var track models.Track
|
||||||
|
if err := s.db.WithContext(ctx).First(&track, "id = ?", trackID).Error; err != nil { // Updated query
|
||||||
|
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||||
|
return nil, ErrTrackNotFound
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get track: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var stats types.TrackStats
|
||||||
|
|
||||||
|
// Count likes
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.TrackLike{}).
|
||||||
|
Where("track_id = ?", trackID).
|
||||||
|
Count(&stats.Likes).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to count likes: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count comments (excluding soft-deleted)
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.TrackComment{}).
|
||||||
|
Where("track_id = ?", trackID).
|
||||||
|
Count(&stats.Comments).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to count comments: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count views (total plays) and sum total play time
|
||||||
|
type PlayStats struct {
|
||||||
|
Views int64
|
||||||
|
TotalPlayTime int64
|
||||||
|
}
|
||||||
|
var playStats PlayStats
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.TrackPlay{}).
|
||||||
|
Where("track_id = ?", trackID).
|
||||||
|
Select("COUNT(*) as views, COALESCE(SUM(duration), 0) as total_play_time").
|
||||||
|
Scan(&playStats).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get play statistics: %w", err)
|
||||||
|
}
|
||||||
|
stats.Views = playStats.Views
|
||||||
|
stats.TotalPlayTime = playStats.TotalPlayTime
|
||||||
|
|
||||||
|
// Count downloads (sum of access_count from track_shares where permissions include 'download')
|
||||||
|
// Note: access_count is incremented when a share link with download permission is accessed
|
||||||
|
if err := s.db.WithContext(ctx).Model(&models.TrackShare{}).
|
||||||
|
Where("track_id = ? AND permissions LIKE ?", trackID, "%download%").
|
||||||
|
Select("COALESCE(SUM(access_count), 0)").
|
||||||
|
Scan(&stats.Downloads).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to count downloads: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Track stats retrieved",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.Int64("views", stats.Views),
|
||||||
|
zap.Int64("likes", stats.Likes),
|
||||||
|
zap.Int64("comments", stats.Comments),
|
||||||
|
zap.Int64("total_play_time", stats.TotalPlayTime),
|
||||||
|
zap.Int64("downloads", stats.Downloads),
|
||||||
|
)
|
||||||
|
|
||||||
|
return &stats, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// BatchDeleteResult is the outcome of a batch track deletion: the IDs that
// were successfully removed, plus one entry per track that could not be
// deleted with a human-readable reason.
type BatchDeleteResult struct {
	Deleted []uuid.UUID        `json:"deleted"` // IDs removed from the database
	Failed  []BatchDeleteError `json:"failed"`  // per-track failures (not found, forbidden, DB error)
}
|
||||||
|
|
||||||
|
// BatchDeleteError describes why a single track in a batch delete request
// could not be removed.
type BatchDeleteError struct {
	TrackID uuid.UUID `json:"track_id"` // track that failed to delete
	Error   string    `json:"error"`    // human-readable failure reason
}
|
||||||
|
|
||||||
|
// BatchDeleteTracks supprime plusieurs tracks en une seule requête
|
||||||
|
func (s *TrackService) BatchDeleteTracks(ctx context.Context, trackIDs []uuid.UUID, userID uuid.UUID) (*BatchDeleteResult, error) { // Changed trackIDs to []uuid.UUID
|
||||||
|
if len(trackIDs) == 0 {
|
||||||
|
return &BatchDeleteResult{
|
||||||
|
Deleted: []uuid.UUID{},
|
||||||
|
Failed: []BatchDeleteError{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limiter le nombre de tracks à supprimer en une seule fois pour éviter les surcharges
|
||||||
|
const maxBatchSize = 100
|
||||||
|
if len(trackIDs) > maxBatchSize {
|
||||||
|
return nil, fmt.Errorf("batch size exceeds maximum of %d tracks", maxBatchSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
result := &BatchDeleteResult{
|
||||||
|
Deleted: []uuid.UUID{},
|
||||||
|
Failed: []BatchDeleteError{},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer tous les tracks en une seule requête
|
||||||
|
var tracks []models.Track
|
||||||
|
if err := s.db.WithContext(ctx).Where("id IN ?", trackIDs).Find(&tracks).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to fetch tracks: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer un map pour un accès rapide
|
||||||
|
trackMap := make(map[uuid.UUID]*models.Track) // Changed to uuid.UUID
|
||||||
|
for i := range tracks {
|
||||||
|
trackMap[tracks[i].ID] = &tracks[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traiter chaque track
|
||||||
|
for _, trackID := range trackIDs {
|
||||||
|
track, exists := trackMap[trackID]
|
||||||
|
if !exists {
|
||||||
|
result.Failed = append(result.Failed, BatchDeleteError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: "track not found",
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier l'ownership
|
||||||
|
if track.UserID != userID {
|
||||||
|
result.Failed = append(result.Failed, BatchDeleteError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: "forbidden: track does not belong to user",
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer le track (réutiliser la logique de DeleteTrack)
|
||||||
|
if err := s.deleteTrackFiles(ctx, track); err != nil {
|
||||||
|
s.logger.Warn("Failed to delete track files",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.Error(err),
|
||||||
|
)
|
||||||
|
// On continue même si la suppression des fichiers échoue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer de la base de données
|
||||||
|
if err := s.db.WithContext(ctx).Delete(track).Error; err != nil {
|
||||||
|
result.Failed = append(result.Failed, BatchDeleteError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: fmt.Sprintf("failed to delete from database: %v", err),
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
result.Deleted = append(result.Deleted, trackID)
|
||||||
|
|
||||||
|
s.logger.Info("Track deleted in batch",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// deleteTrackFiles supprime les fichiers physiques d'un track (logique extraite de DeleteTrack)
|
||||||
|
func (s *TrackService) deleteTrackFiles(ctx context.Context, track *models.Track) error {
|
||||||
|
var errors []error
|
||||||
|
|
||||||
|
// Supprimer le fichier principal
|
||||||
|
if track.FilePath != "" {
|
||||||
|
if err := os.Remove(track.FilePath); err != nil && !os.IsNotExist(err) {
|
||||||
|
errors = append(errors, fmt.Errorf("failed to delete track file %s: %w", track.FilePath, err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer le fichier waveform
|
||||||
|
if track.WaveformPath != "" {
|
||||||
|
if err := os.Remove(track.WaveformPath); err != nil && !os.IsNotExist(err) {
|
||||||
|
errors = append(errors, fmt.Errorf("failed to delete waveform file %s: %w", track.WaveformPath, err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Supprimer le fichier cover art
|
||||||
|
if track.CoverArtPath != "" {
|
||||||
|
if err := os.Remove(track.CoverArtPath); err != nil && !os.IsNotExist(err) {
|
||||||
|
errors = append(errors, fmt.Errorf("failed to delete cover art file %s: %w", track.CoverArtPath, err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Retourner la première erreur si il y en a, sinon nil
|
||||||
|
if len(errors) > 0 {
|
||||||
|
return errors[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// BatchUpdateResult is the outcome of a batch track update: the IDs that
// were successfully updated, plus one entry per track that could not be
// updated with a human-readable reason.
type BatchUpdateResult struct {
	Updated []uuid.UUID        `json:"updated"` // IDs whose fields were changed
	Failed  []BatchUpdateError `json:"failed"`  // per-track failures (not found, forbidden, DB error)
}
|
||||||
|
|
||||||
|
// BatchUpdateError describes why a single track in a batch update request
// could not be modified.
type BatchUpdateError struct {
	TrackID uuid.UUID `json:"track_id"` // track that failed to update
	Error   string    `json:"error"`    // human-readable failure reason
}
|
||||||
|
|
||||||
|
// BatchUpdateTracks met à jour plusieurs tracks en une seule requête
|
||||||
|
func (s *TrackService) BatchUpdateTracks(ctx context.Context, trackIDs []uuid.UUID, userID uuid.UUID, updates map[string]interface{}) (*BatchUpdateResult, error) { // Changed trackIDs to []uuid.UUID
|
||||||
|
if len(trackIDs) == 0 {
|
||||||
|
return &BatchUpdateResult{
|
||||||
|
Updated: []uuid.UUID{},
|
||||||
|
Failed: []BatchUpdateError{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limiter le nombre de tracks à mettre à jour en une seule fois
|
||||||
|
const maxBatchSize = 100
|
||||||
|
if len(trackIDs) > maxBatchSize {
|
||||||
|
return nil, fmt.Errorf("batch size exceeds maximum of %d tracks", maxBatchSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valider que les updates ne sont pas vides
|
||||||
|
if len(updates) == 0 {
|
||||||
|
return nil, fmt.Errorf("no valid fields to update")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Liste des champs autorisés pour la mise à jour en lot
|
||||||
|
allowedFields := map[string]bool{
|
||||||
|
"is_public": true,
|
||||||
|
"title": true,
|
||||||
|
"artist": true,
|
||||||
|
"album": true,
|
||||||
|
"genre": true,
|
||||||
|
"year": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filtrer les champs autorisés et valider les valeurs
|
||||||
|
filteredUpdates := make(map[string]interface{})
|
||||||
|
for key, value := range updates {
|
||||||
|
if !allowedFields[key] {
|
||||||
|
continue // Ignorer les champs non autorisés
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validation spécifique selon le champ
|
||||||
|
switch key {
|
||||||
|
case "is_public":
|
||||||
|
if _, ok := value.(bool); !ok {
|
||||||
|
return nil, fmt.Errorf("invalid value for is_public: must be boolean")
|
||||||
|
}
|
||||||
|
case "title":
|
||||||
|
if str, ok := value.(string); ok {
|
||||||
|
if len(str) == 0 {
|
||||||
|
return nil, fmt.Errorf("title cannot be empty")
|
||||||
|
}
|
||||||
|
if len(str) > 255 {
|
||||||
|
return nil, fmt.Errorf("title exceeds maximum length of 255 characters")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("invalid value for title: must be string")
|
||||||
|
}
|
||||||
|
case "artist", "album", "genre":
|
||||||
|
if str, ok := value.(string); ok {
|
||||||
|
if key == "genre" && len(str) > 100 {
|
||||||
|
return nil, fmt.Errorf("genre exceeds maximum length of 100 characters")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("invalid value for %s: must be string", key)
|
||||||
|
}
|
||||||
|
case "year":
|
||||||
|
if num, ok := value.(float64); ok {
|
||||||
|
year := int(num)
|
||||||
|
if year < 1900 || year > 2100 {
|
||||||
|
return nil, fmt.Errorf("year must be between 1900 and 2100")
|
||||||
|
}
|
||||||
|
filteredUpdates[key] = year
|
||||||
|
continue
|
||||||
|
} else if num, ok := value.(int); ok {
|
||||||
|
if num < 1900 || num > 2100 {
|
||||||
|
return nil, fmt.Errorf("year must be between 1900 and 2100")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("invalid value for year: must be integer")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filteredUpdates[key] = value
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(filteredUpdates) == 0 {
|
||||||
|
return nil, fmt.Errorf("no valid fields to update")
|
||||||
|
}
|
||||||
|
|
||||||
|
result := &BatchUpdateResult{
|
||||||
|
Updated: []uuid.UUID{},
|
||||||
|
Failed: []BatchUpdateError{},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Récupérer tous les tracks en une seule requête
|
||||||
|
var tracks []models.Track
|
||||||
|
if err := s.db.WithContext(ctx).Where("id IN ?", trackIDs).Find(&tracks).Error; err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to fetch tracks: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Créer un map pour un accès rapide
|
||||||
|
trackMap := make(map[uuid.UUID]*models.Track) // Changed to uuid.UUID
|
||||||
|
for i := range tracks {
|
||||||
|
trackMap[tracks[i].ID] = &tracks[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traiter chaque track
|
||||||
|
for _, trackID := range trackIDs {
|
||||||
|
track, exists := trackMap[trackID]
|
||||||
|
if !exists {
|
||||||
|
result.Failed = append(result.Failed, BatchUpdateError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: "track not found",
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier l'ownership
|
||||||
|
if track.UserID != userID {
|
||||||
|
result.Failed = append(result.Failed, BatchUpdateError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: "forbidden: track does not belong to user",
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appliquer les mises à jour
|
||||||
|
if err := s.db.WithContext(ctx).Model(track).Updates(filteredUpdates).Error; err != nil {
|
||||||
|
result.Failed = append(result.Failed, BatchUpdateError{
|
||||||
|
TrackID: trackID,
|
||||||
|
Error: fmt.Sprintf("failed to update: %v", err),
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
result.Updated = append(result.Updated, trackID)
|
||||||
|
|
||||||
|
s.logger.Info("Track updated in batch",
|
||||||
|
zap.Any("track_id", trackID), // Changed to zap.Any for uuid.UUID
|
||||||
|
zap.String("user_id", userID.String()),
|
||||||
|
zap.Any("updates", filteredUpdates),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateStreamStatus updates the stream status and manifest URL of a track
|
||||||
342
veza-backend-api/internal/database/chat_repository.go
Normal file
342
veza-backend-api/internal/database/chat_repository.go
Normal file
|
|
@ -0,0 +1,342 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ChatRepository provides access to chat data (messages, reactions, rooms
// and room membership) through raw SQL queries.
type ChatRepository struct {
	db *DB // shared database handle
}
|
||||||
|
|
||||||
|
// NewChatRepository creates a new chat repository
|
||||||
|
func NewChatRepository(db *DB) *ChatRepository {
|
||||||
|
return &ChatRepository{db: db}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateMessage creates a new message
|
||||||
|
func (r *ChatRepository) CreateMessage(ctx context.Context, message *Message) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO messages (room_id, user_id, content, type, parent_id, is_edited, is_deleted, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||||
|
RETURNING id
|
||||||
|
`
|
||||||
|
|
||||||
|
err := r.db.QueryRowContext(ctx, query,
|
||||||
|
message.RoomID,
|
||||||
|
message.UserID,
|
||||||
|
message.Content,
|
||||||
|
message.Type,
|
||||||
|
message.ParentID,
|
||||||
|
message.IsEdited,
|
||||||
|
message.IsDeleted,
|
||||||
|
message.CreatedAt,
|
||||||
|
message.UpdatedAt,
|
||||||
|
).Scan(&message.ID)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessages retrieves messages for a room with pagination
|
||||||
|
func (r *ChatRepository) GetMessages(ctx context.Context, roomID uuid.UUID, page, limit int, beforeID *uuid.UUID) ([]*Message, error) {
|
||||||
|
var query string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
if beforeID != nil {
|
||||||
|
query = `
|
||||||
|
SELECT id, room_id, user_id, content, type, parent_id, is_edited, is_deleted, created_at, updated_at
|
||||||
|
FROM messages
|
||||||
|
WHERE room_id = $1 AND id < $2 AND is_deleted = false
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $3 OFFSET $4
|
||||||
|
`
|
||||||
|
args = []interface{}{roomID, *beforeID, limit, (page - 1) * limit}
|
||||||
|
} else {
|
||||||
|
query = `
|
||||||
|
SELECT id, room_id, user_id, content, type, parent_id, is_edited, is_deleted, created_at, updated_at
|
||||||
|
FROM messages
|
||||||
|
WHERE room_id = $1 AND is_deleted = false
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $2 OFFSET $3
|
||||||
|
`
|
||||||
|
args = []interface{}{roomID, limit, (page - 1) * limit}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := r.db.QueryContext(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var messages []*Message
|
||||||
|
for rows.Next() {
|
||||||
|
msg := &Message{}
|
||||||
|
err := rows.Scan(
|
||||||
|
&msg.ID,
|
||||||
|
&msg.RoomID,
|
||||||
|
&msg.UserID,
|
||||||
|
&msg.Content,
|
||||||
|
&msg.Type,
|
||||||
|
&msg.ParentID,
|
||||||
|
&msg.IsEdited,
|
||||||
|
&msg.IsDeleted,
|
||||||
|
&msg.CreatedAt,
|
||||||
|
&msg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
messages = append(messages, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return messages, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessageByID retrieves a message by ID
|
||||||
|
func (r *ChatRepository) GetMessageByID(ctx context.Context, messageID uuid.UUID) (*Message, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, room_id, user_id, content, type, parent_id, is_edited, is_deleted, created_at, updated_at
|
||||||
|
FROM messages
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
msg := &Message{}
|
||||||
|
err := r.db.QueryRowContext(ctx, query, messageID).Scan(
|
||||||
|
&msg.ID,
|
||||||
|
&msg.RoomID,
|
||||||
|
&msg.UserID,
|
||||||
|
&msg.Content,
|
||||||
|
&msg.Type,
|
||||||
|
&msg.ParentID,
|
||||||
|
&msg.IsEdited,
|
||||||
|
&msg.IsDeleted,
|
||||||
|
&msg.CreatedAt,
|
||||||
|
&msg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return msg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateMessage updates a message
|
||||||
|
func (r *ChatRepository) UpdateMessage(ctx context.Context, message *Message) error {
|
||||||
|
query := `
|
||||||
|
UPDATE messages
|
||||||
|
SET content = $2, is_edited = $3, is_deleted = $4, updated_at = $5
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err := r.db.ExecContext(ctx, query,
|
||||||
|
message.ID,
|
||||||
|
message.Content,
|
||||||
|
message.IsEdited,
|
||||||
|
message.IsDeleted,
|
||||||
|
message.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateReaction creates a new reaction
|
||||||
|
func (r *ChatRepository) CreateReaction(ctx context.Context, reaction *Reaction) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO reactions (message_id, user_id, emoji, created_at)
|
||||||
|
VALUES ($1, $2, $3, $4)
|
||||||
|
RETURNING id
|
||||||
|
`
|
||||||
|
|
||||||
|
err := r.db.QueryRowContext(ctx, query,
|
||||||
|
reaction.MessageID,
|
||||||
|
reaction.UserID,
|
||||||
|
reaction.Emoji,
|
||||||
|
reaction.CreatedAt,
|
||||||
|
).Scan(&reaction.ID)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteReaction removes a reaction
|
||||||
|
func (r *ChatRepository) DeleteReaction(ctx context.Context, messageID, userID uuid.UUID, emoji string) error {
|
||||||
|
query := `DELETE FROM reactions WHERE message_id = $1 AND user_id = $2 AND emoji = $3`
|
||||||
|
_, err := r.db.ExecContext(ctx, query, messageID, userID, emoji)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateRoom creates a new room
|
||||||
|
func (r *ChatRepository) CreateRoom(ctx context.Context, room *Room) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO rooms (name, description, type, is_private, created_by, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||||
|
RETURNING id
|
||||||
|
`
|
||||||
|
|
||||||
|
err := r.db.QueryRowContext(ctx, query,
|
||||||
|
room.Name,
|
||||||
|
room.Description,
|
||||||
|
room.Type,
|
||||||
|
room.IsPrivate,
|
||||||
|
room.CreatedBy,
|
||||||
|
room.CreatedAt,
|
||||||
|
room.UpdatedAt,
|
||||||
|
).Scan(&room.ID)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRooms retrieves available rooms for a user
|
||||||
|
func (r *ChatRepository) GetRooms(ctx context.Context, userID uuid.UUID, includePrivate bool) ([]*Room, error) {
|
||||||
|
var query string
|
||||||
|
if includePrivate {
|
||||||
|
query = `
|
||||||
|
SELECT DISTINCT r.id, r.name, r.description, r.type, r.is_private, r.created_by, r.created_at, r.updated_at
|
||||||
|
FROM rooms r
|
||||||
|
LEFT JOIN room_members rm ON r.id = rm.room_id
|
||||||
|
WHERE r.is_private = false OR rm.user_id = $1
|
||||||
|
ORDER BY r.created_at DESC
|
||||||
|
`
|
||||||
|
} else {
|
||||||
|
query = `
|
||||||
|
SELECT id, name, description, type, is_private, created_by, created_at, updated_at
|
||||||
|
FROM rooms
|
||||||
|
WHERE is_private = false
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
var rows *sql.Rows
|
||||||
|
var err error
|
||||||
|
if includePrivate {
|
||||||
|
rows, err = r.db.QueryContext(ctx, query, userID)
|
||||||
|
} else {
|
||||||
|
rows, err = r.db.QueryContext(ctx, query)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var rooms []*Room
|
||||||
|
for rows.Next() {
|
||||||
|
room := &Room{}
|
||||||
|
err := rows.Scan(
|
||||||
|
&room.ID,
|
||||||
|
&room.Name,
|
||||||
|
&room.Description,
|
||||||
|
&room.Type,
|
||||||
|
&room.IsPrivate,
|
||||||
|
&room.CreatedBy,
|
||||||
|
&room.CreatedAt,
|
||||||
|
&room.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
rooms = append(rooms, room)
|
||||||
|
}
|
||||||
|
|
||||||
|
return rooms, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDirectMessageRoom looks up the existing direct-message room shared by
// two users. Despite the old comment, it does NOT create one: when no DM
// room exists, Scan's error (sql.ErrNoRows) is passed through to the caller.
func (r *ChatRepository) GetDirectMessageRoom(ctx context.Context, userID1, userID2 uuid.UUID) (*Room, error) {
	// Both users must appear in room_members of the same room of type 'dm'.
	query := `
		SELECT r.id, r.name, r.description, r.type, r.is_private, r.created_by, r.created_at, r.updated_at
		FROM rooms r
		JOIN room_members rm1 ON r.id = rm1.room_id
		JOIN room_members rm2 ON r.id = rm2.room_id
		WHERE r.type = 'dm'
		AND rm1.user_id = $1 AND rm2.user_id = $2
		LIMIT 1
	`

	room := &Room{}
	err := r.db.QueryRowContext(ctx, query, userID1, userID2).Scan(
		&room.ID,
		&room.Name,
		&room.Description,
		&room.Type,
		&room.IsPrivate,
		&room.CreatedBy,
		&room.CreatedAt,
		&room.UpdatedAt,
	)
	if err != nil {
		return nil, err
	}

	return room, nil
}
|
||||||
|
|
||||||
|
// AddUserToRoom adds a user to a room
|
||||||
|
func (r *ChatRepository) AddUserToRoom(ctx context.Context, roomID, userID uuid.UUID) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO room_members (room_id, user_id, joined_at)
|
||||||
|
VALUES ($1, $2, $3)
|
||||||
|
ON CONFLICT (room_id, user_id) DO NOTHING
|
||||||
|
`
|
||||||
|
|
||||||
|
_, err := r.db.ExecContext(ctx, query, roomID, userID, time.Now())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveUserFromRoom removes a user from a room
|
||||||
|
func (r *ChatRepository) RemoveUserFromRoom(ctx context.Context, roomID, userID uuid.UUID) error {
|
||||||
|
query := `DELETE FROM room_members WHERE room_id = $1 AND user_id = $2`
|
||||||
|
_, err := r.db.ExecContext(ctx, query, roomID, userID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRoomUserCount gets the number of users in a room
|
||||||
|
func (r *ChatRepository) GetRoomUserCount(ctx context.Context, roomID uuid.UUID) (int, error) {
|
||||||
|
query := `SELECT COUNT(*) FROM room_members WHERE room_id = $1`
|
||||||
|
var count int
|
||||||
|
err := r.db.QueryRowContext(ctx, query, roomID).Scan(&count)
|
||||||
|
return count, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchMessages searches for messages in a room
|
||||||
|
func (r *ChatRepository) SearchMessages(ctx context.Context, roomID uuid.UUID, query string, limit int) ([]*Message, error) {
|
||||||
|
sqlQuery := `
|
||||||
|
SELECT id, room_id, user_id, content, type, parent_id, is_edited, is_deleted, created_at, updated_at
|
||||||
|
FROM messages
|
||||||
|
WHERE room_id = $1 AND is_deleted = false AND content ILIKE $2
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $3
|
||||||
|
`
|
||||||
|
|
||||||
|
searchPattern := "%" + query + "%"
|
||||||
|
rows, err := r.db.QueryContext(ctx, sqlQuery, roomID, searchPattern, limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var messages []*Message
|
||||||
|
for rows.Next() {
|
||||||
|
msg := &Message{}
|
||||||
|
err := rows.Scan(
|
||||||
|
&msg.ID,
|
||||||
|
&msg.RoomID,
|
||||||
|
&msg.UserID,
|
||||||
|
&msg.Content,
|
||||||
|
&msg.Type,
|
||||||
|
&msg.ParentID,
|
||||||
|
&msg.IsEdited,
|
||||||
|
&msg.IsDeleted,
|
||||||
|
&msg.CreatedAt,
|
||||||
|
&msg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
messages = append(messages, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return messages, nil
|
||||||
|
}
|
||||||
523
veza-backend-api/internal/database/database.go
Normal file
523
veza-backend-api/internal/database/database.go
Normal file
|
|
@ -0,0 +1,523 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
"gorm.io/driver/postgres"
|
||||||
|
"gorm.io/driver/sqlite" // Added sqlite driver
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config holds the database connection configuration. When URL is set it is
// used verbatim as the DSN and takes precedence over the discrete
// Host/Port/credential fields.
type Config struct {
	URL           string        // full DSN; overrides the discrete fields when non-empty
	Host          string        // server host
	Port          string        // server port
	Username      string        // database user
	Password      string        // database password
	Database      string        // database name
	SSLMode       string        // e.g. "disable", "require"
	MaxOpenConns  int           // pool cap (sql.DB.SetMaxOpenConns)
	MaxIdleConns  int           // idle connections kept in the pool
	MaxLifetime   time.Duration // max age of a pooled connection
	MaxIdleTime   time.Duration // max idle time before a connection is closed
	MaxRetries    int           // maximum number of connection attempts
	RetryInterval time.Duration // delay between connection attempts
}
|
||||||
|
|
||||||
|
// Database is the main database handle. It embeds the raw *sql.DB pool
// (promoting Exec/Query/QueryRow/...) and also carries a GORM session that
// was opened on top of the same underlying connection pool.
type Database struct {
	*sql.DB             // raw connection pool
	GormDB *gorm.DB     // GORM session sharing the pool above
	config *Config      // configuration used to open the connection
	Logger *zap.Logger  // structured logger for DB lifecycle events
}
|
||||||
|
|
||||||
|
// DB is a thin wrapper around sql.DB handed to the repositories, so they
// depend only on the connection pool rather than on the full Database type.
type DB struct {
	*sql.DB
}
|
||||||
|
|
||||||
|
// NewDatabaseWithRetry crée une nouvelle connexion à la base de données avec des tentatives de retry
|
||||||
|
func NewDatabaseWithRetry(cfg *Config, logger *zap.Logger) (*Database, error) {
|
||||||
|
if cfg.MaxRetries == 0 {
|
||||||
|
cfg.MaxRetries = 1 // Au moins une tentative
|
||||||
|
}
|
||||||
|
if cfg.RetryInterval == 0 {
|
||||||
|
cfg.RetryInterval = 5 * time.Second // 5 secondes par défaut
|
||||||
|
}
|
||||||
|
|
||||||
|
var db *Database
|
||||||
|
var err error
|
||||||
|
|
||||||
|
for i := 0; i < cfg.MaxRetries; i++ {
|
||||||
|
logger.Info("🔌 Tentative de connexion à la base de données PostgreSQL",
|
||||||
|
zap.Int("attempt", i+1),
|
||||||
|
zap.Int("max_attempts", cfg.MaxRetries),
|
||||||
|
zap.String("host", cfg.Host),
|
||||||
|
zap.String("port", cfg.Port),
|
||||||
|
zap.String("database", cfg.Database))
|
||||||
|
|
||||||
|
db, err = NewDatabase(cfg)
|
||||||
|
if err == nil {
|
||||||
|
logger.Info("✅ Connexion à la base de données établie avec succès après tentatives")
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Warn("❌ Échec de connexion à la base de données",
|
||||||
|
zap.Error(err),
|
||||||
|
zap.Int("attempt", i+1),
|
||||||
|
zap.Int("max_attempts", cfg.MaxRetries))
|
||||||
|
|
||||||
|
if i < cfg.MaxRetries-1 {
|
||||||
|
logger.Info("🔄 Nouvelle tentative dans quelques secondes...",
|
||||||
|
zap.Duration("interval", cfg.RetryInterval))
|
||||||
|
time.Sleep(cfg.RetryInterval)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("échec de connexion à la base de données après %d tentatives: %w", cfg.MaxRetries, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDatabase crée une nouvelle connexion à la base de données avec configuration
|
||||||
|
func NewDatabase(cfg *Config) (*Database, error) {
|
||||||
|
logger, _ := zap.NewProduction()
|
||||||
|
|
||||||
|
// Construire l'URL de connexion
|
||||||
|
var dsn string
|
||||||
|
if cfg.URL != "" {
|
||||||
|
dsn = cfg.URL
|
||||||
|
} else {
|
||||||
|
dsn = fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s",
|
||||||
|
cfg.Host, cfg.Port, cfg.Username, cfg.Password, cfg.Database, cfg.SSLMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ouvrir la connexion
|
||||||
|
db, err := sql.Open("postgres", dsn)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configurer le pool de connexions optimisé
|
||||||
|
db.SetMaxOpenConns(cfg.MaxOpenConns)
|
||||||
|
db.SetMaxIdleConns(cfg.MaxIdleConns)
|
||||||
|
db.SetConnMaxLifetime(cfg.MaxLifetime)
|
||||||
|
db.SetConnMaxIdleTime(cfg.MaxIdleTime)
|
||||||
|
|
||||||
|
// Tester la connexion
|
||||||
|
if err := db.Ping(); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to ping database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialiser GORM avec la même connexion
|
||||||
|
gormDB, err := gorm.Open(postgres.New(postgres.Config{
|
||||||
|
Conn: db,
|
||||||
|
}), &gorm.Config{
|
||||||
|
// Logger désactivé pour éviter les conflits avec zap
|
||||||
|
// On peut activer le logger GORM plus tard si nécessaire
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to initialize GORM: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("✅ Connexion à la base de données établie avec succès (connexion initiale)",
|
||||||
|
zap.Int("max_open_conns", cfg.MaxOpenConns),
|
||||||
|
zap.Int("max_idle_conns", cfg.MaxIdleConns),
|
||||||
|
zap.Duration("max_lifetime", cfg.MaxLifetime))
|
||||||
|
|
||||||
|
return &Database{
|
||||||
|
DB: db,
|
||||||
|
GormDB: gormDB,
|
||||||
|
config: cfg,
|
||||||
|
Logger: logger,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize prepares the database for use: it runs all pending migrations
// and then performs a best-effort integrity check. Migration failures are
// fatal; integrity problems are only logged and do not abort startup.
func (d *Database) Initialize() error {
	d.Logger.Info("🔧 Initialisation de la base de données...")

	// Apply pending migrations; a failure here is fatal.
	if err := d.RunMigrations(); err != nil {
		return fmt.Errorf("failed to run migrations: %w", err)
	}

	// Best-effort data integrity verification (warn only).
	if err := d.VerifyIntegrity(); err != nil {
		d.Logger.Warn("⚠️ Problèmes d'intégrité détectés", zap.Error(err))
	}

	d.Logger.Info("✅ Base de données initialisée avec succès")
	return nil
}
|
||||||
|
|
||||||
|
// RunMigrations applies every pending migration, in order.
//
// Strategy: the SQL files listed below are the source of truth for the
// schema. They are executed FIRST and tracked in the schema_migrations
// table; GORM migrations run afterwards and are used only for additional
// indexes, never to create or alter tables.
func (d *Database) RunMigrations() error {
	d.Logger.Info("📦 Exécution des migrations...")

	// 100% SQL strategy: SQL migrations run first; GORM no longer
	// creates/alters tables.
	d.Logger.Info("📦 Exécution des migrations SQL...")

	// Migrations to execute, in dependency order.
	migrations := []string{
		// === BASE TABLES ===
		"001_create_users.sql", // users table - MUST come first
		"003_email_verification.sql",
		"004_oauth_accounts.sql",
		"005_user_profiles.sql",
		"008_playlists.sql",
		"009_follows.sql",
		"013_notifications.sql",
		"016_analytics.sql",
		"017_admin_logs.sql",
		"018_create_email_verification_tokens.sql",
		"019_create_password_reset_tokens.sql",
		"020_create_sessions.sql",
		"021_add_profile_privacy.sql",
		"022_add_profile_slug.sql",
		"023_create_roles_permissions.sql",
		"024_seed_permissions.sql",
		"025_create_tracks.sql",
		"026_add_track_status.sql",
		"027_create_track_likes.sql",
		"028_create_track_comments.sql",
		"029_create_track_plays.sql",
		"030_create_playlists.sql",
		"031_create_playlist_collaborators.sql",
		"031_create_track_shares.sql",
		"032_create_playlist_follows.sql",
		"032_create_track_versions.sql",
		"033_create_track_history.sql",
		"034_create_hls_streams_table.sql",
		"035_create_hls_transcode_queue.sql",
		"036_create_bitrate_adaptation_logs.sql",
		"037_create_playback_analytics.sql",
		"038_add_playback_analytics_indexes.sql",
		"040_create_refresh_tokens.sql",
		"041_create_rooms.sql",
		"042_create_room_members.sql",
		"043_create_messages.sql",
		"044_add_sessions_revoked_at.sql",
		"045_create_user_sessions.sql",
		"046_add_playlists_missing_columns.sql", // adds follower_count and deleted_at
		"add_sessions_table.sql",
		"add_totp_tables.sql",
		"add_audit_logs.sql",
		"add_performance_indexes.sql",
	}

	// Create the bookkeeping table if it does not exist yet.
	createMigrationsTable := `
		CREATE TABLE IF NOT EXISTS schema_migrations (
			id SERIAL PRIMARY KEY,
			version VARCHAR(50) NOT NULL UNIQUE,
			applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
		)
	`
	if _, err := d.Exec(createMigrationsTable); err != nil {
		return fmt.Errorf("failed to create migrations table: %w", err)
	}

	// Execute each migration that has not been applied yet.
	for _, migration := range migrations {
		// Skip migrations already recorded in schema_migrations.
		var exists bool
		checkQuery := "SELECT EXISTS(SELECT 1 FROM schema_migrations WHERE version = $1)"
		if err := d.QueryRow(checkQuery, migration).Scan(&exists); err != nil && err != sql.ErrNoRows {
			return fmt.Errorf("failed to check migration status: %w", err)
		}

		if exists {
			d.Logger.Info("Migration déjà appliquée", zap.String("migration", migration))
			continue
		}

		// Read the migration file (path relative to the working directory).
		migrationPath := fmt.Sprintf("migrations/%s", migration)
		content, err := os.ReadFile(migrationPath)
		if err != nil {
			// NOTE(review): a missing file is skipped silently and never
			// recorded, so it is retried on every start — confirm this is
			// intended rather than a hard error.
			d.Logger.Warn("Migration non trouvée, skip", zap.String("migration", migration))
			continue
		}

		// Execute the migration SQL.
		if _, err := d.Exec(string(content)); err != nil {
			return fmt.Errorf("failed to execute migration %s: %w", migration, err)
		}

		// Record the migration as applied.
		_, err = d.Exec("INSERT INTO schema_migrations (version) VALUES ($1)", migration)
		if err != nil {
			return fmt.Errorf("failed to record migration: %w", err)
		}

		d.Logger.Info("Migration appliquée", zap.String("migration", migration))
	}

	d.Logger.Info("✅ Toutes les migrations SQL ont été appliquées")

	// Run GORM migrations AFTER the SQL ones (only for additional indexes
	// on users, not to create/alter tables).
	if d.GormDB != nil {
		if err := RunMigrations(d.GormDB); err != nil {
			return fmt.Errorf("failed to run GORM migrations: %w", err)
		}
		d.Logger.Info("✅ Migrations GORM appliquées (indexes additionnels)")
	}

	return nil
}
|
||||||
|
|
||||||
|
// VerifyIntegrity vérifie l'intégrité de base de la base de données
|
||||||
|
func (d *Database) VerifyIntegrity() error {
|
||||||
|
d.Logger.Info("🔍 Vérification de l'intégrité de la base de données...")
|
||||||
|
|
||||||
|
// Vérifier que les tables principales existent
|
||||||
|
tables := []string{"users", "user_sessions", "tracks", "rooms", "messages"}
|
||||||
|
for _, table := range tables {
|
||||||
|
var exists bool
|
||||||
|
query := `SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public' AND table_name = $1
|
||||||
|
)`
|
||||||
|
|
||||||
|
if err := d.QueryRow(query, table).Scan(&exists); err != nil {
|
||||||
|
return fmt.Errorf("failed to check table %s: %w", table, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
return fmt.Errorf("required table %s does not exist", table)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier quelques contraintes importantes
|
||||||
|
constraints := map[string]string{
|
||||||
|
"users_username_key": "users",
|
||||||
|
"users_email_key": "users",
|
||||||
|
"user_sessions_pkey": "user_sessions",
|
||||||
|
"tracks_pkey": "tracks",
|
||||||
|
"rooms_pkey": "rooms",
|
||||||
|
"messages_pkey": "messages",
|
||||||
|
}
|
||||||
|
|
||||||
|
for constraint, table := range constraints {
|
||||||
|
var exists bool
|
||||||
|
query := `SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_name = $1 AND constraint_name = $2
|
||||||
|
)`
|
||||||
|
|
||||||
|
if err := d.QueryRow(query, table, constraint).Scan(&exists); err != nil {
|
||||||
|
d.Logger.Warn("Impossible de vérifier la contrainte",
|
||||||
|
zap.String("constraint", constraint),
|
||||||
|
zap.Error(err))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
d.Logger.Warn("Contrainte manquante",
|
||||||
|
zap.String("constraint", constraint),
|
||||||
|
zap.String("table", table))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
d.Logger.Info("✅ Vérification d'intégrité terminée")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close ferme la connexion à la base de données de manière gracieuse
|
||||||
|
func (d *Database) Close() error {
|
||||||
|
d.Logger.Info("🔌 Fermeture de la connexion à la base de données")
|
||||||
|
|
||||||
|
// Fermeture gracieuse : attendre que les requêtes en cours se terminent
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// Fermer GORM d'abord
|
||||||
|
if d.GormDB != nil {
|
||||||
|
// GORM ferme automatiquement via sql.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fermer le pool de connexions
|
||||||
|
if err := d.DB.Close(); err != nil {
|
||||||
|
d.Logger.Error("Erreur lors de la fermeture de la base de données", zap.Error(err))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier que la fermeture a réussi en utilisant le contexte
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
d.Logger.Warn("Timeout lors de la fermeture de la base de données")
|
||||||
|
return ctx.Err()
|
||||||
|
default:
|
||||||
|
d.Logger.Info("✅ Connexion à la base de données fermée avec succès")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Health reports whether the database connection is alive by pinging it
// with a 5-second timeout. Returns the ping error, or nil if healthy.
func (d *Database) Health() error {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	return d.PingContext(ctx)
}
|
||||||
|
|
||||||
|
// Stats returns the connection-pool statistics of the underlying sql.DB
// (open/idle connections, wait counts, etc.).
func (d *Database) Stats() sql.DBStats {
	return d.DB.Stats()
}
|
||||||
|
|
||||||
|
// GetUserByOAuthID fetches a user by OAuth ID and provider name.
// Currently a stub: always returns a "not implemented" error.
func (d *Database) GetUserByOAuthID(oauthID, provider string) (*models.User, error) {
	// TODO: implement OAuth user lookup
	return nil, fmt.Errorf("not implemented")
}

// CreateUser inserts a new user record.
// Currently a stub: always returns a "not implemented" error.
func (d *Database) CreateUser(user *models.User) error {
	// TODO: implement against the real DB
	return fmt.Errorf("not implemented")
}

// UpdateUser updates an existing user record.
// Currently a stub: always returns a "not implemented" error.
func (d *Database) UpdateUser(user *models.User) error {
	// TODO: implement against the real DB
	return fmt.Errorf("not implemented")
}

// GetUserByID fetches a user by primary key.
// Currently a stub: always returns a "not implemented" error.
func (d *Database) GetUserByID(userID int64) (*models.User, error) {
	// TODO: implement against the real DB
	return nil, fmt.Errorf("not implemented")
}
|
||||||
|
|
||||||
|
// Chat methods - using interfaces to avoid import cycles

// Message is the chat-message row shape used by the delegation methods
// below (declared here rather than in a models package to avoid cycles).
type Message struct {
	ID        uuid.UUID  `json:"id"`
	RoomID    uuid.UUID  `json:"room_id"`
	UserID    uuid.UUID  `json:"user_id"`
	Content   string     `json:"content"`
	Type      string     `json:"type"` // message kind; exact values defined by the chat repository — confirm
	ParentID  *uuid.UUID `json:"parent_id,omitempty"` // optional parent for threaded replies
	IsEdited  bool       `json:"is_edited"`
	IsDeleted bool       `json:"is_deleted"` // soft-delete flag
	CreatedAt time.Time  `json:"created_at"`
	UpdatedAt time.Time  `json:"updated_at"`
}

// Reaction is a single emoji reaction attached to a message by a user.
type Reaction struct {
	ID        uuid.UUID `json:"id"`
	MessageID uuid.UUID `json:"message_id"`
	UserID    uuid.UUID `json:"user_id"`
	Emoji     string    `json:"emoji"`
	CreatedAt time.Time `json:"created_at"`
}

// Room is a chat room. Type values are defined by the chat repository
// (direct-message rooms exist — see GetDirectMessageRoom) — confirm.
type Room struct {
	ID          uuid.UUID `json:"id"`
	Name        string    `json:"name"`
	Description string    `json:"description"`
	Type        string    `json:"type"`
	IsPrivate   bool      `json:"is_private"`
	CreatedBy   uuid.UUID `json:"created_by"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// The chat methods below are thin delegates: each wraps the shared *sql.DB
// in a ChatRepository and forwards the call unchanged.
// NOTE(review): a new repository is constructed on every call — presumably
// cheap since it only wraps the DB handle, but consider caching one; confirm.

// CreateMessage persists a new chat message.
func (d *Database) CreateMessage(ctx context.Context, message *Message) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.CreateMessage(ctx, message)
}

// GetMessages returns a page of messages for a room, optionally only those
// before a given message ID (cursor pagination).
func (d *Database) GetMessages(ctx context.Context, roomID uuid.UUID, page, limit int, beforeID *uuid.UUID) ([]*Message, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.GetMessages(ctx, roomID, page, limit, beforeID)
}

// GetMessageByID returns a single message by its ID.
func (d *Database) GetMessageByID(ctx context.Context, messageID uuid.UUID) (*Message, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.GetMessageByID(ctx, messageID)
}

// UpdateMessage saves changes to an existing message.
func (d *Database) UpdateMessage(ctx context.Context, message *Message) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.UpdateMessage(ctx, message)
}

// CreateReaction attaches an emoji reaction to a message.
func (d *Database) CreateReaction(ctx context.Context, reaction *Reaction) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.CreateReaction(ctx, reaction)
}

// DeleteReaction removes a user's emoji reaction from a message.
func (d *Database) DeleteReaction(ctx context.Context, messageID, userID uuid.UUID, emoji string) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.DeleteReaction(ctx, messageID, userID, emoji)
}

// CreateRoom persists a new chat room.
func (d *Database) CreateRoom(ctx context.Context, room *Room) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.CreateRoom(ctx, room)
}

// GetRooms lists rooms visible to a user; includePrivate also returns
// private rooms (membership rules enforced by the repository — confirm).
func (d *Database) GetRooms(ctx context.Context, userID uuid.UUID, includePrivate bool) ([]*Room, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.GetRooms(ctx, userID, includePrivate)
}

// GetDirectMessageRoom returns the DM room between two users.
func (d *Database) GetDirectMessageRoom(ctx context.Context, userID1, userID2 uuid.UUID) (*Room, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.GetDirectMessageRoom(ctx, userID1, userID2)
}

// AddUserToRoom adds a user as a member of a room.
func (d *Database) AddUserToRoom(ctx context.Context, roomID, userID uuid.UUID) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.AddUserToRoom(ctx, roomID, userID)
}

// RemoveUserFromRoom removes a user's membership from a room.
func (d *Database) RemoveUserFromRoom(ctx context.Context, roomID, userID uuid.UUID) error {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.RemoveUserFromRoom(ctx, roomID, userID)
}

// GetRoomUserCount returns the number of members in a room.
func (d *Database) GetRoomUserCount(ctx context.Context, roomID uuid.UUID) (int, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.GetRoomUserCount(ctx, roomID)
}

// SearchMessages performs a text search over a room's messages,
// returning at most limit results.
func (d *Database) SearchMessages(ctx context.Context, roomID uuid.UUID, query string, limit int) ([]*Message, error) {
	repo := NewChatRepository(&DB{DB: d.DB})
	return repo.SearchMessages(ctx, roomID, query, limit)
}
|
||||||
|
|
||||||
|
// NewSQLiteTestDB opens a shared in-memory SQLite database for tests.
// For integration tests we deliberately do NOT run AutoMigrate, to avoid
// PostgreSQL-specific DDL failing on SQLite. Tests must mock database
// interactions where needed, or exercise handlers that do not touch the
// database directly.
func NewSQLiteTestDB() (*Database, error) {
	logger, _ := zap.NewProduction() // or a silent test logger; init error deliberately ignored

	// Open a GORM connection backed by shared in-memory SQLite.
	gormDB, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		return nil, fmt.Errorf("failed to open sqlite test database: %w", err)
	}

	// No AutoMigrate here (PostgreSQL DDL would fail).
	// Tests needing data must insert it manually, or the handlers must be
	// mocked / tested without real DB interaction.

	// NOTE(review): the raw *sql.DB field is left nil — callers must not use
	// the raw-SQL helpers on this instance; confirm.
	return &Database{
		GormDB: gormDB,
		Logger: logger,
	}, nil
}
|
||||||
58
veza-backend-api/internal/database/migrations.go
Normal file
58
veza-backend-api/internal/database/migrations.go
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"gorm.io/gorm"
|
||||||
|
// models n'est plus importé car AutoMigrate n'est plus utilisé (stratégie 100% SQL)
|
||||||
|
)
|
||||||
|
|
||||||
|
// RunMigrations runs the automatic GORM migrations and adds any missing
// custom indexes. With the 100%-SQL schema strategy, both steps are
// currently intentional no-ops (see modelsToMigrate and addIndexes).
func RunMigrations(db *gorm.DB) error {
	// PostgreSQL enforces foreign keys by default; no PRAGMA needed.

	// Auto-migrate all models.
	// 100%-SQL STRATEGY: the schema is managed exclusively by SQL migrations.
	// GORM is only used to map Go models onto existing tables. No complex
	// model is listed in AutoMigrate, to avoid GORM + Postgres + soft-delete
	// + index bugs.
	modelsToMigrate := []interface{}{
		// All models are handled by SQL migrations:
		// - users: existing SQL migrations
		// - tracks: 025_create_tracks.sql + 026_add_track_status.sql
		// - playlists: 030_create_playlists.sql
		// - playlist_tracks: 030_create_playlists.sql
		// - rooms: 041_create_rooms.sql
		// - room_members: 042_create_room_members.sql
		// - messages: 043_create_messages.sql
	}

	// Loop is a deliberate no-op while modelsToMigrate is empty; kept so
	// models can be re-added without restructuring.
	for _, model := range modelsToMigrate {
		if err := db.AutoMigrate(model); err != nil {
			return fmt.Errorf("failed to migrate %T: %w", model, err)
		}
	}

	// Add custom indexes (currently a no-op — see addIndexes).
	if err := addIndexes(db); err != nil {
		return fmt.Errorf("failed to add indexes: %w", err)
	}

	return nil
}
|
||||||
|
|
||||||
|
// addIndexes adds missing indexes on foreign keys and frequently used
// columns. NOTE: with the 100%-SQL strategy all indexes live in the SQL
// migrations; this function is kept for compatibility and does nothing.
func addIndexes(db *gorm.DB) error {
	// All indexes are now managed by the SQL migrations:
	// - 001_create_users.sql: idx_users_email, idx_users_username, idx_users_slug
	// - 025_create_tracks.sql: idx_tracks_user_id, idx_tracks_is_public, idx_tracks_created_at
	// - 030_create_playlists.sql: idx_playlists_user_id, idx_playlist_tracks_*
	// - 041_create_rooms.sql: idx_rooms_*
	// - 042_create_room_members.sql: idx_room_members_*
	// - 043_create_messages.sql: idx_messages_*

	// Nothing left to do here — every index is in the SQL migrations.
	return nil
}
|
||||||
|
|
@ -0,0 +1,212 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"gorm.io/driver/sqlite"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestPasswordResetTokensTable_Creation verifies the password_reset_tokens
// table (as the SQL migration would create it) can be created with its
// indexes on an in-memory SQLite database.
func TestPasswordResetTokensTable_Creation(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err, "Failed to open test database")

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err, "Failed to migrate users table")

	// Create password_reset_tokens manually (simulates the SQL migration).
	// Note: SQLite stores UUIDs as TEXT; user_id is now a UUID.
	err = db.Exec(`
		CREATE TABLE password_reset_tokens (
			id TEXT PRIMARY KEY,
			user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token TEXT NOT NULL UNIQUE,
			expires_at TIMESTAMP NOT NULL,
			used INTEGER NOT NULL DEFAULT 0,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err, "Failed to create password_reset_tokens table")

	// Create the indexes the migration defines.
	err = db.Exec("CREATE INDEX idx_password_reset_tokens_token ON password_reset_tokens(token)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_password_reset_tokens_user_id ON password_reset_tokens(user_id)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_password_reset_tokens_expires_at ON password_reset_tokens(expires_at)").Error
	require.NoError(t, err)

	// Table must be visible to the migrator.
	hasTable := db.Migrator().HasTable("password_reset_tokens")
	assert.True(t, hasTable, "password_reset_tokens table should exist")
}
|
||||||
|
|
||||||
|
// TestPasswordResetTokensTable_Columns verifies all expected columns are
// present by performing a full insert and reading the row back.
func TestPasswordResetTokensTable_Columns(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// Test user to satisfy the FK.
	user := &models.User{
		Email:    "test@example.com",
		Username: "testuser",
		Role:     "user",
		IsActive: true,
	}
	err = db.Create(user).Error
	require.NoError(t, err)

	// password_reset_tokens table (INTEGER ids in this variant).
	err = db.Exec(`
		CREATE TABLE password_reset_tokens (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token TEXT NOT NULL UNIQUE,
			expires_at TIMESTAMP NOT NULL,
			used INTEGER NOT NULL DEFAULT 0,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Inserting a token exercises every column at once.
	expiresAt := time.Now().Add(1 * time.Hour)
	err = db.Exec(`
		INSERT INTO password_reset_tokens (user_id, token, expires_at, used, created_at)
		VALUES (?, ?, ?, ?, ?)
	`, user.ID, "test-token-123", expiresAt, false, time.Now()).Error
	require.NoError(t, err, "Should be able to insert a password reset token")

	// Confirm the row landed.
	var count int64
	err = db.Raw("SELECT COUNT(*) FROM password_reset_tokens WHERE token = ?", "test-token-123").Scan(&count).Error
	require.NoError(t, err)
	assert.Equal(t, int64(1), count, "Token should be inserted")
}
|
||||||
|
|
||||||
|
// TestPasswordResetTokensTable_ForeignKey verifies the user_id foreign key:
// inserts for existing users succeed, inserts for non-existent users fail,
// and deleting a user cascades to its tokens.
func TestPasswordResetTokensTable_ForeignKey(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// Enable foreign keys for SQLite (required for FK checks and CASCADE DELETE).
	// NOTE(review): PRAGMA applies per connection; with a pooled connection
	// this may not cover every query — confirm it is reliable here.
	err = db.Exec("PRAGMA foreign_keys = ON").Error
	require.NoError(t, err)

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// password_reset_tokens table.
	// NOTE(review): user_id is INTEGER here while the fake ID below is a UUID
	// string — SQLite's dynamic typing still makes the FK check fail, but the
	// types look inconsistent; confirm intended.
	err = db.Exec(`
		CREATE TABLE password_reset_tokens (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token TEXT NOT NULL UNIQUE,
			expires_at TIMESTAMP NOT NULL,
			used INTEGER NOT NULL DEFAULT 0,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Create a user to reference.
	user := &models.User{
		Email:    "test@example.com",
		Username: "testuser",
		Role:     "user",
		IsActive: true,
	}
	err = db.Create(user).Error
	require.NoError(t, err)

	// A token for an existing user must insert fine.
	expiresAt := time.Now().Add(1 * time.Hour)
	err = db.Exec(`
		INSERT INTO password_reset_tokens (user_id, token, expires_at, used, created_at)
		VALUES (?, ?, ?, ?, ?)
	`, user.ID, "valid-token", expiresAt, false, time.Now()).Error
	require.NoError(t, err, "Should be able to insert token for existing user")

	// A token referencing a non-existent user must be rejected.
	// Use a well-formed but non-existent UUID.
	fakeUserID := "00000000-0000-0000-0000-000000000999"
	err = db.Exec(`
		INSERT INTO password_reset_tokens (user_id, token, expires_at, used, created_at)
		VALUES (?, ?, ?, ?, ?)
	`, fakeUserID, "invalid-token", expiresAt, false, time.Now()).Error
	assert.Error(t, err, "Should not be able to insert token with non-existent user_id")

	// CASCADE DELETE: removing the user should remove its tokens.
	// Unscoped() forces a real delete (bypasses GORM soft delete).
	err = db.Unscoped().Delete(user).Error
	require.NoError(t, err)

	// The token must be gone.
	var count int64
	err = db.Raw("SELECT COUNT(*) FROM password_reset_tokens WHERE token = ?", "valid-token").Scan(&count).Error
	require.NoError(t, err)
	assert.Equal(t, int64(0), count, "Token should be deleted when user is deleted")
}
|
||||||
|
|
||||||
|
// TestPasswordResetTokensTable_UniqueToken verifies the UNIQUE constraint on
// the token column: inserting the same token twice must fail.
func TestPasswordResetTokensTable_UniqueToken(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// password_reset_tokens table with UNIQUE token.
	err = db.Exec(`
		CREATE TABLE password_reset_tokens (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token TEXT NOT NULL UNIQUE,
			expires_at TIMESTAMP NOT NULL,
			used INTEGER NOT NULL DEFAULT 0,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Create a user to reference.
	user := &models.User{
		Email:    "test@example.com",
		Username: "testuser",
		Role:     "user",
		IsActive: true,
	}
	err = db.Create(user).Error
	require.NoError(t, err)

	// First insert succeeds.
	expiresAt := time.Now().Add(1 * time.Hour)
	err = db.Exec(`
		INSERT INTO password_reset_tokens (user_id, token, expires_at, used, created_at)
		VALUES (?, ?, ?, ?, ?)
	`, user.ID, "unique-token", expiresAt, false, time.Now()).Error
	require.NoError(t, err, "Should be able to insert first token")

	// Duplicate token value must be rejected by the UNIQUE constraint.
	err = db.Exec(`
		INSERT INTO password_reset_tokens (user_id, token, expires_at, used, created_at)
		VALUES (?, ?, ?, ?, ?)
	`, user.ID, "unique-token", expiresAt, false, time.Now()).Error
	assert.Error(t, err, "Should not be able to insert duplicate token")
}
|
||||||
293
veza-backend-api/internal/database/migrations_sessions_test.go
Normal file
293
veza-backend-api/internal/database/migrations_sessions_test.go
Normal file
|
|
@ -0,0 +1,293 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"gorm.io/driver/sqlite"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestSessionsTableMigration verifies the sessions migration file exists,
// is readable, and contains the expected DDL fragments.
// NOTE(review): depends on the test working directory containing a
// migrations/ folder — confirm against the CI layout.
func TestSessionsTableMigration(t *testing.T) {
	migrationPath := "migrations/020_create_sessions.sql"

	// The file must exist and be readable.
	content, err := os.ReadFile(migrationPath)
	require.NoError(t, err, "Migration file should exist and be readable")

	// And non-empty.
	assert.NotEmpty(t, content, "Migration file should not be empty")

	// Check the essential schema elements are declared.
	contentStr := string(content)
	assert.Contains(t, contentStr, "CREATE TABLE sessions", "Should create sessions table")
	// Note: user_id is BIGINT in migration 020, but migrated to UUID in 049.
	assert.Contains(t, contentStr, "user_id", "Should have user_id column")
	assert.Contains(t, contentStr, "token_hash VARCHAR(255)", "Should have token_hash column")
	assert.Contains(t, contentStr, "ip_address VARCHAR(45)", "Should have ip_address column")
	assert.Contains(t, contentStr, "user_agent TEXT", "Should have user_agent column")
	assert.Contains(t, contentStr, "expires_at TIMESTAMP", "Should have expires_at column")
	assert.Contains(t, contentStr, "last_activity TIMESTAMP", "Should have last_activity column")
	assert.Contains(t, contentStr, "created_at TIMESTAMP", "Should have created_at column")
	assert.Contains(t, contentStr, "REFERENCES users(id) ON DELETE CASCADE", "Should have foreign key constraint")
	assert.Contains(t, contentStr, "idx_sessions_user_id", "Should have index on user_id")
	assert.Contains(t, contentStr, "idx_sessions_token_hash", "Should have index on token_hash")
	assert.Contains(t, contentStr, "idx_sessions_expires_at", "Should have index on expires_at")
}
|
||||||
|
|
||||||
|
// TestSessionsTable_Creation verifies the sessions table (as the SQL
// migration would create it) can be created with its indexes on an
// in-memory SQLite database.
func TestSessionsTable_Creation(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err, "Failed to open test database")

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err, "Failed to migrate users table")

	// Create sessions manually (simulates the SQL migration).
	// Note: SQLite stores UUIDs as TEXT; user_id is UUID since migration 049.
	err = db.Exec(`
		CREATE TABLE sessions (
			id TEXT PRIMARY KEY,
			user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token_hash TEXT NOT NULL UNIQUE,
			ip_address TEXT,
			user_agent TEXT,
			expires_at TIMESTAMP NOT NULL,
			last_activity TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err, "Failed to create sessions table")

	// Create the indexes the migration defines.
	err = db.Exec("CREATE INDEX idx_sessions_user_id ON sessions(user_id)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_sessions_token_hash ON sessions(token_hash)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_sessions_expires_at ON sessions(expires_at)").Error
	require.NoError(t, err)

	// Table must be visible to the migrator.
	hasTable := db.Migrator().HasTable("sessions")
	assert.True(t, hasTable, "sessions table should exist")
}
|
||||||
|
|
||||||
|
// TestSessionsTable_Columns verifies all expected sessions columns are
// present by performing a full insert and reading the row back.
func TestSessionsTable_Columns(t *testing.T) {
	// In-memory database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// users table first (foreign-key target).
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// Test user to satisfy the FK.
	user := &models.User{
		Email:    "test@example.com",
		Username: "testuser",
		Role:     "user",
		IsActive: true,
	}
	err = db.Create(user).Error
	require.NoError(t, err)

	// sessions table (INTEGER ids in this variant).
	err = db.Exec(`
		CREATE TABLE sessions (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token_hash TEXT NOT NULL UNIQUE,
			ip_address TEXT,
			user_agent TEXT,
			expires_at TIMESTAMP NOT NULL,
			last_activity TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Inserting a session exercises every column at once.
	expiresAt := time.Now().Add(1 * time.Hour)
	err = db.Exec(`
		INSERT INTO sessions (user_id, token_hash, ip_address, user_agent, expires_at, last_activity, created_at)
		VALUES (?, ?, ?, ?, ?, ?, ?)
	`, user.ID, "test-token-hash-123", "192.168.1.1", "Mozilla/5.0", expiresAt, time.Now(), time.Now()).Error
	require.NoError(t, err, "Should be able to insert a session")

	// Confirm the row landed.
	var count int64
	err = db.Raw("SELECT COUNT(*) FROM sessions WHERE token_hash = ?", "test-token-hash-123").Scan(&count).Error
	require.NoError(t, err)
	assert.Equal(t, int64(1), count, "Session should be inserted")
}
|
||||||
|
|
||||||
|
// TestSessionsTable_ForeignKey teste que la foreign key fonctionne correctement
|
||||||
|
func TestSessionsTable_ForeignKey(t *testing.T) {
|
||||||
|
// Créer une base de données en mémoire
|
||||||
|
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Activer les foreign keys pour SQLite (requis pour CASCADE DELETE et validation FK)
|
||||||
|
err = db.Exec("PRAGMA foreign_keys = ON").Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Créer la table users
|
||||||
|
err = db.AutoMigrate(&models.User{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Créer la table sessions
|
||||||
|
err = db.Exec(`
|
||||||
|
CREATE TABLE sessions (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
token_hash TEXT NOT NULL UNIQUE,
|
||||||
|
ip_address TEXT,
|
||||||
|
user_agent TEXT,
|
||||||
|
expires_at TIMESTAMP NOT NULL,
|
||||||
|
last_activity TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
`).Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Créer un utilisateur
|
||||||
|
user := &models.User{
|
||||||
|
Email: "test@example.com",
|
||||||
|
Username: "testuser",
|
||||||
|
Role: "user",
|
||||||
|
IsActive: true,
|
||||||
|
}
|
||||||
|
err = db.Create(user).Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Insérer une session valide
|
||||||
|
expiresAt := time.Now().Add(1 * time.Hour)
|
||||||
|
err = db.Exec(`
|
||||||
|
INSERT INTO sessions (user_id, token_hash, ip_address, user_agent, expires_at, last_activity, created_at)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
`, user.ID, "valid-token-hash", "192.168.1.1", "Mozilla/5.0", expiresAt, time.Now(), time.Now()).Error
|
||||||
|
require.NoError(t, err, "Should be able to insert session for existing user")
|
||||||
|
|
||||||
|
// Tenter d'insérer une session avec un user_id inexistant (devrait échouer)
|
||||||
|
// Utiliser un UUID valide mais inexistant
|
||||||
|
fakeUserID := "00000000-0000-0000-0000-000000000999"
|
||||||
|
err = db.Exec(`
|
||||||
|
INSERT INTO sessions (user_id, token_hash, ip_address, user_agent, expires_at, last_activity, created_at)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
`, fakeUserID, "invalid-token-hash", "192.168.1.1", "Mozilla/5.0", expiresAt, time.Now(), time.Now()).Error
|
||||||
|
assert.Error(t, err, "Should not be able to insert session with non-existent user_id")
|
||||||
|
|
||||||
|
// Vérifier que le CASCADE DELETE fonctionne
|
||||||
|
// Utiliser Unscoped() pour forcer la suppression réelle (pas soft delete)
|
||||||
|
err = db.Unscoped().Delete(user).Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier que la session a été supprimée automatiquement
|
||||||
|
var count int64
|
||||||
|
err = db.Raw("SELECT COUNT(*) FROM sessions WHERE token_hash = ?", "valid-token-hash").Scan(&count).Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, int64(0), count, "Session should be deleted when user is deleted")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestSessionsTable_UniqueTokenHash verifies that sessions.token_hash
// carries a UNIQUE constraint: a second insert with the same hash must fail.
func TestSessionsTable_UniqueTokenHash(t *testing.T) {
	// In-memory SQLite keeps the test hermetic and fast.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// Create the users table via GORM auto-migration.
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// Create the sessions table with the UNIQUE constraint under test.
	err = db.Exec(`
		CREATE TABLE sessions (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token_hash TEXT NOT NULL UNIQUE,
			ip_address TEXT,
			user_agent TEXT,
			expires_at TIMESTAMP NOT NULL,
			last_activity TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Seed one user to satisfy the foreign key.
	user := &models.User{
		Email:    "test@example.com",
		Username: "testuser",
		Role:     "user",
		IsActive: true,
	}
	err = db.Create(user).Error
	require.NoError(t, err)

	// First insert with the token hash succeeds.
	expiresAt := time.Now().Add(1 * time.Hour)
	err = db.Exec(`
		INSERT INTO sessions (user_id, token_hash, ip_address, user_agent, expires_at, last_activity, created_at)
		VALUES (?, ?, ?, ?, ?, ?, ?)
	`, user.ID, "unique-token-hash", "192.168.1.1", "Mozilla/5.0", expiresAt, time.Now(), time.Now()).Error
	require.NoError(t, err, "Should be able to insert first session")

	// Second insert reusing the same token_hash must violate UNIQUE,
	// even though every other column differs.
	err = db.Exec(`
		INSERT INTO sessions (user_id, token_hash, ip_address, user_agent, expires_at, last_activity, created_at)
		VALUES (?, ?, ?, ?, ?, ?, ?)
	`, user.ID, "unique-token-hash", "192.168.1.2", "Chrome", expiresAt, time.Now(), time.Now()).Error
	assert.Error(t, err, "Should not be able to insert duplicate token_hash")
}
|
||||||
|
|
||||||
|
// TestSessionsTable_Indexes verifies that the three sessions indexes can
// be created and are recorded in SQLite's catalog (sqlite_master).
func TestSessionsTable_Indexes(t *testing.T) {
	// In-memory SQLite database.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)

	// Create the users table.
	err = db.AutoMigrate(&models.User{})
	require.NoError(t, err)

	// Create the sessions table.
	err = db.Exec(`
		CREATE TABLE sessions (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
			token_hash TEXT NOT NULL UNIQUE,
			ip_address TEXT,
			user_agent TEXT,
			expires_at TIMESTAMP NOT NULL,
			last_activity TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
			created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
		)
	`).Error
	require.NoError(t, err)

	// Create the three indexes under test.
	err = db.Exec("CREATE INDEX idx_sessions_user_id ON sessions(user_id)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_sessions_token_hash ON sessions(token_hash)").Error
	require.NoError(t, err)
	err = db.Exec("CREATE INDEX idx_sessions_expires_at ON sessions(expires_at)").Error
	require.NoError(t, err)

	// SQLite stores index metadata in sqlite_master; all three names
	// must be present.
	var indexCount int64
	err = db.Raw(`
		SELECT COUNT(*) FROM sqlite_master
		WHERE type='index'
		AND name IN ('idx_sessions_user_id', 'idx_sessions_token_hash', 'idx_sessions_expires_at')
	`).Scan(&indexCount).Error
	require.NoError(t, err)
	assert.Equal(t, int64(3), indexCount, "All three indexes should exist")
}
|
||||||
283
veza-backend-api/internal/database/migrations_test.go
Normal file
283
veza-backend-api/internal/database/migrations_test.go
Normal file
|
|
@ -0,0 +1,283 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"veza-backend-api/internal/models"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"gorm.io/driver/sqlite"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupTestDB crée une base de données de test en mémoire
|
||||||
|
func setupTestDB(t *testing.T) *gorm.DB {
|
||||||
|
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||||
|
require.NoError(t, err, "Failed to open test database")
|
||||||
|
return db
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestRunMigrations verifies that RunMigrations creates every expected
// table on a fresh database.
func TestRunMigrations(t *testing.T) {
	db := setupTestDB(t)

	err := RunMigrations(db)
	assert.NoError(t, err, "RunMigrations should not return an error")

	// Each model's table must exist after migration.
	assert.True(t, db.Migrator().HasTable(&models.User{}), "Users table should exist")
	assert.True(t, db.Migrator().HasTable(&models.RefreshToken{}), "RefreshTokens table should exist")
	assert.True(t, db.Migrator().HasTable(&models.Track{}), "Tracks table should exist")
	assert.True(t, db.Migrator().HasTable(&models.Playlist{}), "Playlists table should exist")
	assert.True(t, db.Migrator().HasTable(&models.PlaylistTrack{}), "PlaylistTracks table should exist")
	assert.True(t, db.Migrator().HasTable(&models.Message{}), "Messages table should exist")
	assert.True(t, db.Migrator().HasTable(&models.Room{}), "Rooms table should exist")
	assert.True(t, db.Migrator().HasTable(&models.RoomMember{}), "RoomMembers table should exist")
}
|
||||||
|
|
||||||
|
// TestRunMigrations_Idempotent teste que les migrations sont idempotentes
|
||||||
|
func TestRunMigrations_Idempotent(t *testing.T) {
|
||||||
|
db := setupTestDB(t)
|
||||||
|
|
||||||
|
// Exécuter les migrations deux fois
|
||||||
|
err := RunMigrations(db)
|
||||||
|
assert.NoError(t, err, "First RunMigrations should not return an error")
|
||||||
|
|
||||||
|
err = RunMigrations(db)
|
||||||
|
assert.NoError(t, err, "Second RunMigrations should not return an error")
|
||||||
|
|
||||||
|
// Vérifier que les tables existent toujours
|
||||||
|
assert.True(t, db.Migrator().HasTable(&models.User{}))
|
||||||
|
assert.True(t, db.Migrator().HasTable(&models.Track{}))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAddIndexes teste la création des indexes
|
||||||
|
func TestAddIndexes(t *testing.T) {
|
||||||
|
db := setupTestDB(t)
|
||||||
|
|
||||||
|
// Exécuter les migrations (qui incluent addIndexes)
|
||||||
|
err := RunMigrations(db)
|
||||||
|
require.NoError(t, err, "RunMigrations should succeed")
|
||||||
|
|
||||||
|
// Pour SQLite, vérifier que les indexes existent en vérifiant les migrations
|
||||||
|
// Note: SQLite stocke les indexes différemment de PostgreSQL
|
||||||
|
// On vérifie plutôt que les migrations n'ont pas d'erreur
|
||||||
|
// et que les tables peuvent être créées avec les indexes
|
||||||
|
|
||||||
|
// Vérifier que les tables ont bien les colonnes indexées
|
||||||
|
var user models.User
|
||||||
|
// Vérifier que l'index existe (HasIndex retourne un bool, pas une erreur)
|
||||||
|
hasIndex := db.Migrator().HasIndex(&user, "idx_users_email")
|
||||||
|
// SQLite peut avoir un comportement différent, donc on accepte les deux cas
|
||||||
|
// L'important est que la migration fonctionne sans erreur
|
||||||
|
_ = hasIndex
|
||||||
|
|
||||||
|
// Vérifier qu'on peut créer un utilisateur (ce qui teste les contraintes)
|
||||||
|
user = models.User{
|
||||||
|
Username: "testuser",
|
||||||
|
Email: "test@example.com",
|
||||||
|
Role: "user",
|
||||||
|
}
|
||||||
|
err = db.Create(&user).Error
|
||||||
|
assert.NoError(t, err, "Should be able to create a user")
|
||||||
|
|
||||||
|
// Vérifier qu'on ne peut pas créer un utilisateur avec un email dupliqué
|
||||||
|
user2 := models.User{
|
||||||
|
Username: "testuser2",
|
||||||
|
Email: "test@example.com",
|
||||||
|
Role: "user",
|
||||||
|
}
|
||||||
|
err = db.Create(&user2).Error
|
||||||
|
assert.Error(t, err, "Should not be able to create user with duplicate email")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMigrations_UserRelations verifies the User -> RefreshToken relation:
// a refresh token can be created for a user and retrieved with the same
// UserID.
func TestMigrations_UserRelations(t *testing.T) {
	db := setupTestDB(t)

	err := RunMigrations(db)
	require.NoError(t, err)

	// Create a user to own the token.
	user := models.User{
		Username: "testuser",
		Email:    "test@example.com",
		Role:     "user",
	}
	err = db.Create(&user).Error
	require.NoError(t, err)

	// Create a refresh token for that user, expiring in 7 days.
	// db.NowFunc() keeps the timestamp source consistent with GORM's.
	refreshToken := models.RefreshToken{
		UserID:    user.ID,
		TokenHash: "hash123",
		ExpiresAt: db.NowFunc().AddDate(0, 0, 7),
	}
	err = db.Create(&refreshToken).Error
	assert.NoError(t, err, "Should be able to create refresh token")

	// The token must be retrievable and point back at the user.
	var retrievedToken models.RefreshToken
	err = db.First(&retrievedToken, refreshToken.ID).Error
	assert.NoError(t, err)
	assert.Equal(t, user.ID, retrievedToken.UserID)
}
|
||||||
|
|
||||||
|
// TestMigrations_TrackRelations teste les relations entre Track et User
|
||||||
|
func TestMigrations_TrackRelations(t *testing.T) {
|
||||||
|
db := setupTestDB(t)
|
||||||
|
|
||||||
|
err := RunMigrations(db)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Créer un utilisateur
|
||||||
|
user := models.User{
|
||||||
|
Username: "creator",
|
||||||
|
Email: "creator@example.com",
|
||||||
|
Role: "user",
|
||||||
|
}
|
||||||
|
err = db.Create(&user).Error
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Créer une track pour cet utilisateur
|
||||||
|
track := models.Track{
|
||||||
|
UserID: user.ID,
|
||||||
|
Title: "Test Track",
|
||||||
|
Duration: 180,
|
||||||
|
}
|
||||||
|
err = db.Create(&track).Error
|
||||||
|
assert.NoError(t, err, "Should be able to create track")
|
||||||
|
|
||||||
|
// Vérifier que la relation fonctionne
|
||||||
|
var retrievedTrack models.Track
|
||||||
|
err = db.First(&retrievedTrack, track.ID).Error
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, user.ID, retrievedTrack.UserID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestMigrations_PlaylistRelations verifies the Playlist <-> Track
// association through the PlaylistTrack join table, including Preload.
func TestMigrations_PlaylistRelations(t *testing.T) {
	db := setupTestDB(t)

	err := RunMigrations(db)
	require.NoError(t, err)

	// Owner of both the playlist and the track.
	user := models.User{
		Username: "playlist_owner",
		Email:    "owner@example.com",
		Role:     "user",
	}
	err = db.Create(&user).Error
	require.NoError(t, err)

	// Playlist owned by that user.
	playlist := models.Playlist{
		UserID: user.ID,
		Title:  "My Playlist",
	}
	err = db.Create(&playlist).Error
	require.NoError(t, err)

	// A track to add to the playlist.
	track := models.Track{
		UserID:   user.ID,
		Title:    "Track 1",
		Duration: 200,
	}
	err = db.Create(&track).Error
	require.NoError(t, err)

	// Link the track to the playlist at position 1 via the join table.
	playlistTrack := models.PlaylistTrack{
		PlaylistID: playlist.ID,
		TrackID:    track.ID,
		Position:   1,
	}
	err = db.Create(&playlistTrack).Error
	assert.NoError(t, err, "Should be able to add track to playlist")

	// Preloading Tracks must surface the single associated track.
	var retrievedPlaylist models.Playlist
	err = db.Preload("Tracks").First(&retrievedPlaylist, playlist.ID).Error
	assert.NoError(t, err)
	assert.Len(t, retrievedPlaylist.Tracks, 1)
}
|
||||||
|
|
||||||
|
// TestMigrations_RoomRelations verifies the Room relations: membership
// (RoomMember) and messages (Message), including Preload of both.
func TestMigrations_RoomRelations(t *testing.T) {
	db := setupTestDB(t)

	err := RunMigrations(db)
	require.NoError(t, err)

	// User who creates and joins the room.
	user := models.User{
		Username: "room_creator",
		Email:    "creator@example.com",
		Role:     "user",
	}
	err = db.Create(&user).Error
	require.NoError(t, err)

	// Public room created by that user.
	room := models.Room{
		Name:      "Test Room",
		Type:      "public",
		CreatedBy: user.ID,
	}
	err = db.Create(&room).Error
	require.NoError(t, err)

	// Add the user to the room as its owner.
	roomMember := models.RoomMember{
		RoomID: room.ID,
		UserID: user.ID,
		Role:   "owner",
	}
	err = db.Create(&roomMember).Error
	assert.NoError(t, err, "Should be able to add user to room")

	// Post one text message in the room.
	message := models.Message{
		RoomID:  room.ID,
		UserID:  user.ID,
		Content: "Hello, world!",
		Type:    "text",
	}
	err = db.Create(&message).Error
	assert.NoError(t, err, "Should be able to create message")

	// Preloading Members and Messages must surface one of each.
	var retrievedRoom models.Room
	err = db.Preload("Members").Preload("Messages").First(&retrievedRoom, room.ID).Error
	assert.NoError(t, err)
	assert.Len(t, retrievedRoom.Members, 1)
	assert.Len(t, retrievedRoom.Messages, 1)
}
|
||||||
|
|
||||||
|
// TestEmailVerificationTokensMigration teste que la migration pour la table email_verification_tokens existe et peut être lue
|
||||||
|
func TestEmailVerificationTokensMigration(t *testing.T) {
|
||||||
|
migrationPath := "migrations/018_create_email_verification_tokens.sql"
|
||||||
|
|
||||||
|
// Vérifier que le fichier existe
|
||||||
|
content, err := os.ReadFile(migrationPath)
|
||||||
|
require.NoError(t, err, "Migration file should exist and be readable")
|
||||||
|
|
||||||
|
// Vérifier que le contenu n'est pas vide
|
||||||
|
assert.NotEmpty(t, content, "Migration file should not be empty")
|
||||||
|
|
||||||
|
// Vérifier que le contenu contient les éléments essentiels
|
||||||
|
contentStr := string(content)
|
||||||
|
assert.Contains(t, contentStr, "CREATE TABLE email_verification_tokens", "Should create email_verification_tokens table")
|
||||||
|
assert.Contains(t, contentStr, "user_id BIGINT", "Should have user_id column")
|
||||||
|
assert.Contains(t, contentStr, "token VARCHAR(255)", "Should have token column")
|
||||||
|
assert.Contains(t, contentStr, "expires_at TIMESTAMP", "Should have expires_at column")
|
||||||
|
assert.Contains(t, contentStr, "used BOOLEAN", "Should have used column")
|
||||||
|
assert.Contains(t, contentStr, "REFERENCES users(id) ON DELETE CASCADE", "Should have foreign key constraint")
|
||||||
|
assert.Contains(t, contentStr, "idx_email_verification_tokens_token", "Should have index on token")
|
||||||
|
assert.Contains(t, contentStr, "idx_email_verification_tokens_user_id", "Should have index on user_id")
|
||||||
|
assert.Contains(t, contentStr, "idx_email_verification_tokens_expires_at", "Should have index on expires_at")
|
||||||
|
}
|
||||||
140
veza-backend-api/internal/database/pool.go
Normal file
140
veza-backend-api/internal/database/pool.go
Normal file
|
|
@ -0,0 +1,140 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"time"

	"veza-backend-api/internal/metrics"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)
|
||||||
|
|
||||||
|
// NewDB crée une nouvelle connexion GORM avec pool de connexions optimisé
|
||||||
|
// Prend les paramètres de connexion individuels pour plus de flexibilité
|
||||||
|
func NewDB(host string, port int, user, password, dbname string) (*gorm.DB, error) {
|
||||||
|
dsn := fmt.Sprintf(
|
||||||
|
"host=%s user=%s password=%s dbname=%s port=%d sslmode=disable",
|
||||||
|
host, user, password, dbname, port,
|
||||||
|
)
|
||||||
|
|
||||||
|
db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get underlying sql.DB: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration optimale du pool de connexions
|
||||||
|
// MaxOpenConns: Nombre maximum de connexions ouvertes (25 recommandé pour PostgreSQL)
|
||||||
|
sqlDB.SetMaxOpenConns(25)
|
||||||
|
|
||||||
|
// MaxIdleConns: Nombre maximum de connexions inactives (5 recommandé)
|
||||||
|
sqlDB.SetMaxIdleConns(5)
|
||||||
|
|
||||||
|
// ConnMaxLifetime: Durée maximale de vie d'une connexion (5 minutes)
|
||||||
|
// Cela permet de recycler les connexions et éviter les problèmes de timeout
|
||||||
|
sqlDB.SetConnMaxLifetime(5 * time.Minute)
|
||||||
|
|
||||||
|
// ConnMaxIdleTime: Durée maximale d'inactivité d'une connexion avant fermeture (1 minute)
|
||||||
|
sqlDB.SetConnMaxIdleTime(1 * time.Minute)
|
||||||
|
|
||||||
|
// Test de la connexion
|
||||||
|
if err := sqlDB.Ping(); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to ping database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return db, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDBFromEnvConfig crée une nouvelle connexion GORM à partir d'un EnvConfig
|
||||||
|
// Cette fonction facilite l'intégration avec le package config
|
||||||
|
func NewDBFromEnvConfig(host string, port int, user, password, dbname string) (*gorm.DB, error) {
|
||||||
|
return NewDB(host, port, user, password, dbname)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloseDB ferme proprement la connexion à la base de données
|
||||||
|
func CloseDB(db *gorm.DB) error {
|
||||||
|
if db == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get underlying sql.DB: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fermeture gracieuse de toutes les connexions
|
||||||
|
return sqlDB.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPoolStats returns the connection-pool statistics for db and, as a
// side effect, pushes the current open/idle/in-use counts to the
// Prometheus gauges (T0023). Returns an error for a nil handle or when
// the underlying sql.DB cannot be obtained.
func GetPoolStats(db *gorm.DB) (sql.DBStats, error) {
	if db == nil {
		return sql.DBStats{}, fmt.Errorf("database connection is nil")
	}

	sqlDB, err := db.DB()
	if err != nil {
		return sql.DBStats{}, fmt.Errorf("failed to get underlying sql.DB: %w", err)
	}

	stats := sqlDB.Stats()

	// Update the Prometheus metrics (T0023):
	//   open:   total open connections
	//   idle:   open connections not currently in use (Open - InUse)
	//   in_use: connections currently serving queries
	open := stats.OpenConnections
	idle := open - stats.InUse
	inUse := stats.InUse
	metrics.UpdateDBConnections(open, idle, inUse)

	return stats, nil
}
|
||||||
|
|
||||||
|
// MeasureQuery mesure la durée d'une requête DB et l'enregistre dans Prometheus
|
||||||
|
// Cette fonction helper peut être utilisée pour wrapper les opérations DB
|
||||||
|
// operation: type d'opération (SELECT, INSERT, UPDATE, DELETE, etc.)
|
||||||
|
// table: nom de la table (ou "unknown" si non disponible)
|
||||||
|
// fn: fonction à exécuter et mesurer
|
||||||
|
func MeasureQuery(operation, table string, fn func() error) error {
|
||||||
|
start := time.Now()
|
||||||
|
err := fn()
|
||||||
|
duration := time.Since(start)
|
||||||
|
|
||||||
|
// Enregistrer la métrique indépendamment de l'erreur
|
||||||
|
metrics.RecordDBQuery(operation, table, duration)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsConnectionHealthy vérifie si la connexion à la base de données est saine
|
||||||
|
func IsConnectionHealthy(db *gorm.DB, timeout time.Duration) error {
|
||||||
|
if db == nil {
|
||||||
|
return fmt.Errorf("database connection is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get underlying sql.DB: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Utiliser Ping avec un timeout personnalisé
|
||||||
|
pingChan := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
pingChan <- sqlDB.Ping()
|
||||||
|
}()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case err := <-pingChan:
|
||||||
|
return err
|
||||||
|
case <-time.After(timeout):
|
||||||
|
return fmt.Errorf("database ping timeout after %v", timeout)
|
||||||
|
}
|
||||||
|
}
|
||||||
311
veza-backend-api/internal/database/pool_test.go
Normal file
311
veza-backend-api/internal/database/pool_test.go
Normal file
|
|
@ -0,0 +1,311 @@
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"gorm.io/driver/postgres"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupPoolTestDB opens a real PostgreSQL connection for the pool tests,
// configured with a deliberately small pool. It calls t.Skipf and returns
// nil when the database is unreachable, so callers must nil-check the
// result. Requires a running PostgreSQL instance.
func setupPoolTestDB(t *testing.T) *gorm.DB {
	// Connection parameters from the environment, with local defaults.
	host := getEnv("DB_HOST", "localhost")
	port := getEnvInt("DB_PORT", 5432)
	user := getEnv("DB_USER", "veza")
	password := getEnv("DB_PASSWORD", "password")
	dbname := getEnv("DB_NAME", "veza_db_test")

	dsn := buildDSN(host, port, user, password, dbname)

	db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
	if err != nil {
		t.Skipf("Skipping test: cannot connect to database: %v", err)
		return nil
	}

	// Configure the pool; skip rather than fail when unavailable.
	sqlDB, err := db.DB()
	if err != nil {
		t.Skipf("Skipping test: cannot get underlying sql.DB: %v", err)
		return nil
	}

	sqlDB.SetMaxOpenConns(5) // fewer connections than production for tests
	sqlDB.SetMaxIdleConns(2)
	sqlDB.SetConnMaxLifetime(1 * time.Minute)
	sqlDB.SetConnMaxIdleTime(30 * time.Second)

	// Verify the connection actually works before handing it out.
	if err := sqlDB.Ping(); err != nil {
		t.Skipf("Skipping test: cannot ping database: %v", err)
		return nil
	}

	return db
}
|
||||||
|
|
||||||
|
// Helper functions
|
||||||
|
func getEnv(key, defaultValue string) string {
|
||||||
|
if value := os.Getenv(key); value != "" {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
return defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
func getEnvInt(key string, defaultValue int) int {
|
||||||
|
value := os.Getenv(key)
|
||||||
|
if value != "" {
|
||||||
|
if intValue, err := strconv.Atoi(value); err == nil {
|
||||||
|
return intValue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildDSN assembles a PostgreSQL connection string (sslmode disabled,
// matching NewDB) from the individual connection parameters.
func buildDSN(host string, port int, user, password, dbname string) string {
	return "host=" + host +
		" user=" + user +
		" password=" + password +
		" dbname=" + dbname +
		" port=" + strconv.Itoa(port) +
		" sslmode=disable"
}
|
||||||
|
|
||||||
|
// TestNewDB is an integration test: it opens a real PostgreSQL connection
// via NewDB and checks the configured pool limits. Skipped with -short or
// when no database is reachable.
func TestNewDB(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Connection parameters from the environment, with local defaults.
	host := getEnv("DB_HOST", "localhost")
	port := getEnvInt("DB_PORT", 5432)
	user := getEnv("DB_USER", "veza")
	password := getEnv("DB_PASSWORD", "password")
	dbname := getEnv("DB_NAME", "veza_db_test")

	// Open the connection; skip (not fail) when the DB is unavailable.
	db, err := NewDB(host, port, user, password, dbname)
	if err != nil {
		t.Skipf("Skipping test: cannot connect to database: %v", err)
		return
	}
	require.NotNil(t, db)
	defer CloseDB(db)

	// The underlying sql.DB must be obtainable.
	sqlDB, err := db.DB()
	require.NoError(t, err)
	require.NotNil(t, sqlDB)

	// NewDB configures the pool with 25 max open connections.
	stats := sqlDB.Stats()
	assert.Equal(t, 25, stats.MaxOpenConnections, "MaxOpenConns should be 25")
}
|
||||||
|
|
||||||
|
// TestNewDB_InvalidCredentials verifies that NewDB surfaces a wrapped
// "failed to open database" error for bad credentials. Integration test;
// skipped with -short.
func TestNewDB_InvalidCredentials(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Deliberately invalid credentials against the default host/port.
	_, err := NewDB("localhost", 5432, "invalid_user", "invalid_password", "invalid_db")
	require.Error(t, err)
	assert.Contains(t, err.Error(), "failed to open database")
}
|
||||||
|
|
||||||
|
// TestCloseDB verifies that CloseDB closes the underlying pool: a
// subsequent Ping on the same handle must fail. Integration test;
// skipped with -short or without a reachable database.
func TestCloseDB(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	db := setupPoolTestDB(t)
	if db == nil {
		return
	}

	// Close the connection under test.
	err := CloseDB(db)
	assert.NoError(t, err)

	// The GORM handle still exists, but the pool underneath is closed...
	sqlDB, err := db.DB()
	require.NoError(t, err)

	// ...so pinging it must now fail.
	err = sqlDB.Ping()
	assert.Error(t, err, "Connection should be closed")
}
|
||||||
|
|
||||||
|
// TestCloseDB_NilDB verifies that closing a nil handle is a harmless no-op.
func TestCloseDB_NilDB(t *testing.T) {
	// CloseDB treats nil as already-closed.
	err := CloseDB(nil)
	assert.NoError(t, err, "Closing nil DB should not return error")
}
|
||||||
|
|
||||||
|
// TestGetPoolStats verifies that GetPoolStats succeeds on a live
// connection and returns coherent (non-negative) counters. Integration
// test; skipped with -short or without a reachable database.
func TestGetPoolStats(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	db := setupPoolTestDB(t)
	if db == nil {
		return
	}
	defer CloseDB(db)

	stats, err := GetPoolStats(db)
	require.NoError(t, err)
	// NOTE(review): sql.DBStats is a struct value, so NotNil here is
	// always satisfied; kept as-is to avoid changing behavior.
	require.NotNil(t, stats)

	// All pool counters must be non-negative.
	assert.GreaterOrEqual(t, stats.MaxOpenConnections, 0)
	assert.GreaterOrEqual(t, stats.OpenConnections, 0)
	assert.GreaterOrEqual(t, stats.InUse, 0)
	assert.GreaterOrEqual(t, stats.Idle, 0)
}
|
||||||
|
|
||||||
|
// TestGetPoolStats_NilDB verifies the explicit nil-handle error path.
func TestGetPoolStats_NilDB(t *testing.T) {
	_, err := GetPoolStats(nil)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "database connection is nil")
}
|
||||||
|
|
||||||
|
// TestIsConnectionHealthy verifies that a live connection passes the
// health check within a generous timeout. Integration test; skipped with
// -short or without a reachable database.
func TestIsConnectionHealthy(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	db := setupPoolTestDB(t)
	if db == nil {
		return
	}
	defer CloseDB(db)

	// 5 s is ample for a local ping.
	err := IsConnectionHealthy(db, 5*time.Second)
	assert.NoError(t, err, "Healthy connection should not return error")
}
|
||||||
|
|
||||||
|
// TestIsConnectionHealthy_Timeout exercises the timeout path with a 1 ns
// deadline. The ping may still win the race, so a nil error is tolerated;
// when an error does occur it must be the timeout one. Integration test;
// skipped with -short or without a reachable database.
func TestIsConnectionHealthy_Timeout(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	db := setupPoolTestDB(t)
	if db == nil {
		return
	}
	defer CloseDB(db)

	// A 1 ns timeout should normally trip the deadline first.
	// Note: this check is flaky by design — it only asserts the shape of
	// the error when a timeout actually occurs.
	err := IsConnectionHealthy(db, 1*time.Nanosecond)
	// The ping can complete before the deadline on a very fast link,
	// so accept either a timeout error or no error at all.
	if err != nil {
		assert.Contains(t, err.Error(), "timeout")
	}
}
|
||||||
|
|
||||||
|
func TestIsConnectionHealthy_NilDB(t *testing.T) {
|
||||||
|
err := IsConnectionHealthy(nil, 5*time.Second)
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "database connection is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDBPool_ConnectionPooling(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("Skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
db := setupPoolTestDB(t)
|
||||||
|
if db == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer CloseDB(db)
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier les paramètres du pool
|
||||||
|
stats := sqlDB.Stats()
|
||||||
|
_ = stats.OpenConnections // Vérification que le pool fonctionne
|
||||||
|
|
||||||
|
// Simuler plusieurs requêtes pour utiliser le pool
|
||||||
|
for i := 0; i < 10; i++ {
|
||||||
|
var result int
|
||||||
|
err := sqlDB.QueryRow("SELECT 1").Scan(&result)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, 1, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vérifier que les connexions sont réutilisées (le nombre ne devrait pas augmenter significativement)
|
||||||
|
stats = sqlDB.Stats()
|
||||||
|
// Le nombre de connexions ouvertes ne devrait pas dépasser MaxOpenConns
|
||||||
|
assert.LessOrEqual(t, stats.OpenConnections, 25, "Open connections should not exceed MaxOpenConns")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDBPool_MaxConnections(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("Skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
db := setupPoolTestDB(t)
|
||||||
|
if db == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer CloseDB(db)
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Vérifier que MaxOpenConns est configuré
|
||||||
|
stats := sqlDB.Stats()
|
||||||
|
assert.Equal(t, 25, stats.MaxOpenConnections, "MaxOpenConns should be 25")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test de performance: vérifier que le pool peut gérer 100+ connexions simultanées
|
||||||
|
func TestDBPool_Performance(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("Skipping performance test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
db := setupPoolTestDB(t)
|
||||||
|
if db == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer CloseDB(db)
|
||||||
|
|
||||||
|
sqlDB, err := db.DB()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Simuler 100 requêtes simultanées
|
||||||
|
const numRequests = 100
|
||||||
|
results := make(chan error, numRequests)
|
||||||
|
|
||||||
|
for i := 0; i < numRequests; i++ {
|
||||||
|
go func() {
|
||||||
|
var result int
|
||||||
|
err := sqlDB.QueryRow("SELECT $1", 1).Scan(&result)
|
||||||
|
results <- err
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collecter tous les résultats
|
||||||
|
var errors int
|
||||||
|
for i := 0; i < numRequests; i++ {
|
||||||
|
if err := <-results; err != nil {
|
||||||
|
errors++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Toutes les requêtes devraient réussir
|
||||||
|
assert.Equal(t, 0, errors, "All requests should succeed")
|
||||||
|
|
||||||
|
// Vérifier les statistiques du pool
|
||||||
|
stats := sqlDB.Stats()
|
||||||
|
assert.LessOrEqual(t, stats.OpenConnections, stats.MaxOpenConnections,
|
||||||
|
"Open connections should not exceed MaxOpenConns")
|
||||||
|
}
|
||||||
375
veza-backend-api/internal/database/prepared_statements.go
Normal file
375
veza-backend-api/internal/database/prepared_statements.go
Normal file
|
|
@ -0,0 +1,375 @@
|
||||||
|
//! Gestionnaire de requêtes préparées pour optimiser les performances
|
||||||
|
//!
|
||||||
|
//! Ce module implémente un cache de requêtes préparées pour améliorer
|
||||||
|
//! les performances et la sécurité des requêtes SQL fréquentes.
|
||||||
|
|
||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PreparedStatement bundles a prepared *sql.Stmt with the name it is
// cached under and the SQL text it was built from. The query text is kept
// so the statement can be re-prepared after a reconnect (see Refresh*).
type PreparedStatement struct {
	Name  string    // cache key used by PreparedStatementManager
	Query string    // original SQL text, reused when refreshing
	Stmt  *sql.Stmt // live prepared-statement handle
}
|
||||||
|
|
||||||
|
// PreparedStatementManager caches prepared statements by name on top of a
// *sql.DB, so frequent queries are parsed/planned once and executed many
// times. All cache access is guarded by mutex, making the manager safe
// for concurrent use.
type PreparedStatementManager struct {
	db         *sql.DB                       // connection pool statements are prepared against
	statements map[string]*PreparedStatement // name -> cached statement
	mutex      sync.RWMutex                  // guards statements (RLock for reads, Lock for mutation)
	logger     *zap.Logger                   // structured logger for prepare/close diagnostics
}
|
||||||
|
|
||||||
|
// NewPreparedStatementManager crée un nouveau gestionnaire de requêtes préparées
|
||||||
|
func NewPreparedStatementManager(db *sql.DB, logger *zap.Logger) *PreparedStatementManager {
|
||||||
|
return &PreparedStatementManager{
|
||||||
|
db: db,
|
||||||
|
statements: make(map[string]*PreparedStatement),
|
||||||
|
logger: logger,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare prépare une requête SQL et la met en cache
|
||||||
|
func (psm *PreparedStatementManager) Prepare(ctx context.Context, name, query string) error {
|
||||||
|
psm.mutex.Lock()
|
||||||
|
defer psm.mutex.Unlock()
|
||||||
|
|
||||||
|
// Vérifier si la requête est déjà préparée
|
||||||
|
if _, exists := psm.statements[name]; exists {
|
||||||
|
psm.logger.Debug("Statement already prepared", zap.String("name", name))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Préparer la requête
|
||||||
|
stmt, err := psm.db.PrepareContext(ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
psm.logger.Error("Failed to prepare statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.String("query", query),
|
||||||
|
zap.Error(err))
|
||||||
|
return fmt.Errorf("failed to prepare statement %s: %w", name, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mettre en cache
|
||||||
|
psm.statements[name] = &PreparedStatement{
|
||||||
|
Name: name,
|
||||||
|
Query: query,
|
||||||
|
Stmt: stmt,
|
||||||
|
}
|
||||||
|
|
||||||
|
psm.logger.Debug("Statement prepared successfully",
|
||||||
|
zap.String("name", name))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStatement récupère une requête préparée depuis le cache
|
||||||
|
func (psm *PreparedStatementManager) GetStatement(name string) (*sql.Stmt, error) {
|
||||||
|
psm.mutex.RLock()
|
||||||
|
defer psm.mutex.RUnlock()
|
||||||
|
|
||||||
|
stmt, exists := psm.statements[name]
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("statement %s not found", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
return stmt.Stmt, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute exécute une requête préparée avec des arguments
|
||||||
|
func (psm *PreparedStatementManager) Execute(ctx context.Context, name string, args ...interface{}) (sql.Result, error) {
|
||||||
|
stmt, err := psm.GetStatement(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return stmt.ExecContext(ctx, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query exécute une requête préparée et retourne des lignes
|
||||||
|
func (psm *PreparedStatementManager) Query(ctx context.Context, name string, args ...interface{}) (*sql.Rows, error) {
|
||||||
|
stmt, err := psm.GetStatement(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return stmt.QueryContext(ctx, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryRow exécute une requête préparée et retourne une ligne
|
||||||
|
func (psm *PreparedStatementManager) QueryRow(ctx context.Context, name string, args ...interface{}) *sql.Row {
|
||||||
|
stmt, err := psm.GetStatement(name)
|
||||||
|
if err != nil {
|
||||||
|
// Retourner une erreur dans le Row
|
||||||
|
return &sql.Row{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return stmt.QueryRowContext(ctx, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize prépare toutes les requêtes fréquemment utilisées
|
||||||
|
func (psm *PreparedStatementManager) Initialize(ctx context.Context) error {
|
||||||
|
psm.logger.Info("Initializing prepared statements...")
|
||||||
|
|
||||||
|
// Requêtes utilisateur
|
||||||
|
statements := map[string]string{
|
||||||
|
"get_user_by_id": `
|
||||||
|
SELECT id, username, email, password_hash, created_at, updated_at, deleted_at
|
||||||
|
FROM users WHERE id = $1 AND deleted_at IS NULL`,
|
||||||
|
|
||||||
|
"get_user_by_email": `
|
||||||
|
SELECT id, username, email, password_hash, created_at, updated_at, deleted_at
|
||||||
|
FROM users WHERE email = $1 AND deleted_at IS NULL`,
|
||||||
|
|
||||||
|
"get_user_by_username": `
|
||||||
|
SELECT id, username, email, password_hash, created_at, updated_at, deleted_at
|
||||||
|
FROM users WHERE username = $1 AND deleted_at IS NULL`,
|
||||||
|
|
||||||
|
"create_user": `
|
||||||
|
INSERT INTO users (username, email, password_hash, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5) RETURNING id`,
|
||||||
|
|
||||||
|
"update_user": `
|
||||||
|
UPDATE users SET username = $2, email = $3, updated_at = $4
|
||||||
|
WHERE id = $1 AND deleted_at IS NULL`,
|
||||||
|
|
||||||
|
"delete_user": `
|
||||||
|
UPDATE users SET deleted_at = $2 WHERE id = $1`,
|
||||||
|
|
||||||
|
// Requêtes de session
|
||||||
|
"get_session_by_token": `
|
||||||
|
SELECT id, user_id, token, created_at, expires_at, ip_address, user_agent, is_valid
|
||||||
|
FROM sessions WHERE token = $1 AND expires_at > $2 AND is_valid = true`,
|
||||||
|
|
||||||
|
"create_session": `
|
||||||
|
INSERT INTO sessions (user_id, token, created_at, expires_at, ip_address, user_agent)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6) RETURNING id`,
|
||||||
|
|
||||||
|
"revoke_session": `
|
||||||
|
UPDATE sessions SET is_valid = false, revoked_at = $2 WHERE token = $1`,
|
||||||
|
|
||||||
|
"revoke_user_sessions": `
|
||||||
|
UPDATE sessions SET is_valid = false, revoked_at = $2
|
||||||
|
WHERE user_id = $1 AND is_valid = true`,
|
||||||
|
|
||||||
|
"cleanup_expired_sessions": `
|
||||||
|
DELETE FROM sessions WHERE expires_at < $1`,
|
||||||
|
|
||||||
|
// Requêtes de messages
|
||||||
|
"get_messages_by_room": `
|
||||||
|
SELECT m.id, m.room_id, m.user_id, m.content, m.type, m.parent_id,
|
||||||
|
m.is_edited, m.is_deleted, m.created_at, m.updated_at,
|
||||||
|
u.username, u.email
|
||||||
|
FROM messages m
|
||||||
|
JOIN users u ON m.user_id = u.id
|
||||||
|
WHERE m.room_id = $1 AND m.created_at < $2
|
||||||
|
ORDER BY m.created_at DESC LIMIT $3`,
|
||||||
|
|
||||||
|
"create_message": `
|
||||||
|
INSERT INTO messages (room_id, user_id, content, type, parent_id, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id`,
|
||||||
|
|
||||||
|
"update_message": `
|
||||||
|
UPDATE messages SET content = $2, is_edited = true, updated_at = $3
|
||||||
|
WHERE id = $1 AND user_id = $4`,
|
||||||
|
|
||||||
|
"delete_message": `
|
||||||
|
UPDATE messages SET is_deleted = true, updated_at = $2 WHERE id = $1`,
|
||||||
|
|
||||||
|
// Requêtes de tracks
|
||||||
|
"get_track_by_id": `
|
||||||
|
SELECT id, user_id, title, artist, duration, file_path, file_size,
|
||||||
|
mime_type, status, created_at, updated_at
|
||||||
|
FROM tracks WHERE id = $1 AND status = 'active'`,
|
||||||
|
|
||||||
|
"get_user_tracks": `
|
||||||
|
SELECT id, user_id, title, artist, duration, file_path, file_size,
|
||||||
|
mime_type, status, created_at, updated_at
|
||||||
|
FROM tracks WHERE user_id = $1 AND created_at < $2 AND status = 'active'
|
||||||
|
ORDER BY created_at DESC LIMIT $3`,
|
||||||
|
|
||||||
|
"create_track": `
|
||||||
|
INSERT INTO tracks (user_id, title, artist, duration, file_path, file_size, mime_type, status, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id`,
|
||||||
|
|
||||||
|
"update_track": `
|
||||||
|
UPDATE tracks SET title = $2, artist = $3, updated_at = $4
|
||||||
|
WHERE id = $1 AND user_id = $5`,
|
||||||
|
|
||||||
|
"delete_track": `
|
||||||
|
UPDATE tracks SET status = 'deleted', updated_at = $2 WHERE id = $1`,
|
||||||
|
|
||||||
|
// Requêtes de rooms
|
||||||
|
"get_room_by_id": `
|
||||||
|
SELECT id, name, description, type, is_private, created_by, created_at, updated_at
|
||||||
|
FROM rooms WHERE id = $1`,
|
||||||
|
|
||||||
|
"get_user_rooms": `
|
||||||
|
SELECT r.id, r.name, r.description, r.type, r.is_private, r.created_by, r.created_at, r.updated_at
|
||||||
|
FROM rooms r
|
||||||
|
JOIN room_users ru ON r.id = ru.room_id
|
||||||
|
WHERE ru.user_id = $1 AND r.created_at < $2
|
||||||
|
ORDER BY r.created_at DESC LIMIT $3`,
|
||||||
|
|
||||||
|
"create_room": `
|
||||||
|
INSERT INTO rooms (name, description, type, is_private, created_by, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id`,
|
||||||
|
|
||||||
|
"add_user_to_room": `
|
||||||
|
INSERT INTO room_users (room_id, user_id, created_at)
|
||||||
|
VALUES ($1, $2, $3) ON CONFLICT (room_id, user_id) DO NOTHING`,
|
||||||
|
|
||||||
|
"remove_user_from_room": `
|
||||||
|
DELETE FROM room_users WHERE room_id = $1 AND user_id = $2`,
|
||||||
|
|
||||||
|
// Requêtes d'audit
|
||||||
|
"create_audit_log": `
|
||||||
|
INSERT INTO audit_logs (user_id, action, entity_type, entity_id, ip_address, user_agent, details, created_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id`,
|
||||||
|
|
||||||
|
"get_audit_logs": `
|
||||||
|
SELECT id, user_id, action, entity_type, entity_id, ip_address, user_agent, details, created_at
|
||||||
|
FROM audit_logs WHERE user_id = $1 AND created_at < $2
|
||||||
|
ORDER BY created_at DESC LIMIT $3`,
|
||||||
|
|
||||||
|
// Requêtes de recherche
|
||||||
|
"search_tracks": `
|
||||||
|
SELECT id, user_id, title, artist, duration, file_path, file_size,
|
||||||
|
mime_type, status, created_at, updated_at,
|
||||||
|
ts_rank(to_tsvector('english', title || ' ' || artist), plainto_tsquery('english', $1)) as rank
|
||||||
|
FROM tracks WHERE status = 'active' AND to_tsvector('english', title || ' ' || artist) @@ plainto_tsquery('english', $1)
|
||||||
|
ORDER BY rank DESC, created_at DESC LIMIT $2`,
|
||||||
|
|
||||||
|
"search_messages": `
|
||||||
|
SELECT m.id, m.room_id, m.user_id, m.content, m.type, m.created_at,
|
||||||
|
u.username, u.email,
|
||||||
|
ts_rank(to_tsvector('english', m.content), plainto_tsquery('english', $1)) as rank
|
||||||
|
FROM messages m
|
||||||
|
JOIN users u ON m.user_id = u.id
|
||||||
|
WHERE m.room_id = $2 AND to_tsvector('english', m.content) @@ plainto_tsquery('english', $1)
|
||||||
|
ORDER BY rank DESC, m.created_at DESC LIMIT $3`,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Préparer toutes les requêtes
|
||||||
|
for name, query := range statements {
|
||||||
|
if err := psm.Prepare(ctx, name, query); err != nil {
|
||||||
|
psm.logger.Error("Failed to prepare statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.Error(err))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
psm.logger.Info("All prepared statements initialized successfully",
|
||||||
|
zap.Int("count", len(statements)))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close ferme toutes les requêtes préparées
|
||||||
|
func (psm *PreparedStatementManager) Close() error {
|
||||||
|
psm.mutex.Lock()
|
||||||
|
defer psm.mutex.Unlock()
|
||||||
|
|
||||||
|
var lastErr error
|
||||||
|
for name, stmt := range psm.statements {
|
||||||
|
if err := stmt.Stmt.Close(); err != nil {
|
||||||
|
psm.logger.Error("Failed to close statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.Error(err))
|
||||||
|
lastErr = err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vider le cache
|
||||||
|
psm.statements = make(map[string]*PreparedStatement)
|
||||||
|
|
||||||
|
psm.logger.Info("All prepared statements closed")
|
||||||
|
return lastErr
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStats retourne les statistiques des requêtes préparées
|
||||||
|
func (psm *PreparedStatementManager) GetStats() map[string]interface{} {
|
||||||
|
psm.mutex.RLock()
|
||||||
|
defer psm.mutex.RUnlock()
|
||||||
|
|
||||||
|
stats := map[string]interface{}{
|
||||||
|
"total_statements": len(psm.statements),
|
||||||
|
"statements": make([]string, 0, len(psm.statements)),
|
||||||
|
}
|
||||||
|
|
||||||
|
for name := range psm.statements {
|
||||||
|
stats["statements"] = append(stats["statements"].([]string), name)
|
||||||
|
}
|
||||||
|
|
||||||
|
return stats
|
||||||
|
}
|
||||||
|
|
||||||
|
// RefreshStatement rafraîchit une requête préparée (utile après reconnexion DB)
|
||||||
|
func (psm *PreparedStatementManager) RefreshStatement(ctx context.Context, name string) error {
|
||||||
|
psm.mutex.Lock()
|
||||||
|
defer psm.mutex.Unlock()
|
||||||
|
|
||||||
|
stmt, exists := psm.statements[name]
|
||||||
|
if !exists {
|
||||||
|
return fmt.Errorf("statement %s not found", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fermer l'ancienne requête
|
||||||
|
if err := stmt.Stmt.Close(); err != nil {
|
||||||
|
psm.logger.Warn("Failed to close old statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Préparer la nouvelle requête
|
||||||
|
newStmt, err := psm.db.PrepareContext(ctx, stmt.Query)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to refresh statement %s: %w", name, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
stmt.Stmt = newStmt
|
||||||
|
psm.logger.Debug("Statement refreshed", zap.String("name", name))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RefreshAllStatements rafraîchit toutes les requêtes préparées
|
||||||
|
func (psm *PreparedStatementManager) RefreshAllStatements(ctx context.Context) error {
|
||||||
|
psm.mutex.Lock()
|
||||||
|
defer psm.mutex.Unlock()
|
||||||
|
|
||||||
|
var lastErr error
|
||||||
|
for name, stmt := range psm.statements {
|
||||||
|
// Fermer l'ancienne requête
|
||||||
|
if err := stmt.Stmt.Close(); err != nil {
|
||||||
|
psm.logger.Warn("Failed to close old statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.Error(err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Préparer la nouvelle requête
|
||||||
|
newStmt, err := psm.db.PrepareContext(ctx, stmt.Query)
|
||||||
|
if err != nil {
|
||||||
|
psm.logger.Error("Failed to refresh statement",
|
||||||
|
zap.String("name", name),
|
||||||
|
zap.Error(err))
|
||||||
|
lastErr = err
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
stmt.Stmt = newStmt
|
||||||
|
}
|
||||||
|
|
||||||
|
psm.logger.Info("All statements refreshed")
|
||||||
|
return lastErr
|
||||||
|
}
|
||||||
12
veza-backend-api/internal/dto/login_request.go
Normal file
12
veza-backend-api/internal/dto/login_request.go
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
package dto
|
||||||
|
|
||||||
|
// LoginRequest is the JSON payload of the login endpoint. Email and
// password are mandatory (Gin binding tags); RememberMe asks for an
// extended session lifetime.
type LoginRequest struct {
	Email      string `json:"email" binding:"required,email"`
	Password   string `json:"password" binding:"required"`
	RememberMe bool   `json:"remember_me"`
}

// LoginResponse is returned on successful authentication: the public user
// profile plus the issued token pair.
type LoginResponse struct {
	User  UserResponse  `json:"user"`
	Token TokenResponse `json:"token"`
}
|
||||||
7
veza-backend-api/internal/dto/refresh_request.go
Normal file
7
veza-backend-api/internal/dto/refresh_request.go
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
package dto
|
||||||
|
|
||||||
|
// RefreshRequest is the JSON payload of the token-refresh endpoint
// (ticket T0172): the client exchanges a valid refresh token for a new
// access token.
type RefreshRequest struct {
	RefreshToken string `json:"refresh_token" binding:"required"`
}
|
||||||
29
veza-backend-api/internal/dto/register_request.go
Normal file
29
veza-backend-api/internal/dto/register_request.go
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
package dto
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RegisterRequest is the JSON payload of the account-creation endpoint.
// Username is optional but length-constrained when present; the password
// must be at least 12 characters and PasswordConfirm must match it
// (eqfield binding).
type RegisterRequest struct {
	Username        string `json:"username" binding:"omitempty,min=3,max=50"`
	Email           string `json:"email" binding:"required,email"`
	Password        string `json:"password" binding:"required,min=12"`
	PasswordConfirm string `json:"password_confirm" binding:"required,eqfield=Password"`
}

// RegisterResponse is returned on successful registration: the created
// user's public profile plus an initial token pair.
type RegisterResponse struct {
	User  UserResponse  `json:"user"`
	Token TokenResponse `json:"token"`
}

// UserResponse is the public, safe-to-expose view of a user (no password
// hash or internal fields).
type UserResponse struct {
	ID       uuid.UUID `json:"id"`
	Email    string    `json:"email"`
	Username string    `json:"username,omitempty"`
}

// TokenResponse carries an issued access/refresh token pair.
// ExpiresIn is the access-token lifetime; presumably in seconds — verify
// against the token issuer.
type TokenResponse struct {
	AccessToken  string `json:"access_token"`
	RefreshToken string `json:"refresh_token"`
	ExpiresIn    int    `json:"expires_in"`
}
|
||||||
|
|
@ -0,0 +1,5 @@
|
||||||
|
package dto
|
||||||
|
|
||||||
|
// ResendVerificationRequest is the JSON payload asking for a new
// email-verification message to be sent to the given address.
type ResendVerificationRequest struct {
	Email string `json:"email" binding:"required,email"`
}
|
||||||
15
veza-backend-api/internal/dto/validation.go
Normal file
15
veza-backend-api/internal/dto/validation.go
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
package dto
|
||||||
|
|
||||||
|
// ValidationError describes a single field-level validation failure.
// GO-013: declared in the shared dto package to avoid import cycles
// between handlers and the errors package.
type ValidationError struct {
	Field   string `json:"field"`           // name of the offending field
	Message string `json:"message"`         // human-readable reason
	Value   string `json:"value,omitempty"` // rejected value, omitted when empty
}

// ValidationErrors is the JSON envelope aggregating several
// ValidationError entries for one request.
type ValidationErrors struct {
	Errors []ValidationError `json:"errors"`
}
|
||||||
|
|
||||||
32
veza-backend-api/internal/errors/codes.go
Normal file
32
veza-backend-api/internal/errors/codes.go
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
// Standardized application error codes, grouped by thousand-range so the
// category is readable from the numeric value alone. Ranges are sparse on
// purpose to leave room for future codes (e.g. 4005, 5000-5099).
const (
	// Authentication & Authorization (1000-1999)
	ErrCodeInvalidCredentials ErrorCode = 1000
	ErrCodeTokenExpired       ErrorCode = 1001
	ErrCodeTokenInvalid       ErrorCode = 1002
	ErrCodeForbidden          ErrorCode = 1003
	ErrCodeUnauthorized       ErrorCode = 1004

	// Validation (2000-2999)
	ErrCodeValidation    ErrorCode = 2000
	ErrCodeRequiredField ErrorCode = 2001
	ErrCodeInvalidFormat ErrorCode = 2002
	ErrCodeOutOfRange    ErrorCode = 2003

	// Resource (3000-3999)
	ErrCodeNotFound      ErrorCode = 3000
	ErrCodeAlreadyExists ErrorCode = 3001
	ErrCodeConflict      ErrorCode = 3002

	// Business Logic (4000-4999)
	ErrCodeOperationNotAllowed ErrorCode = 4000
	ErrCodeQuotaExceeded       ErrorCode = 4005

	// Rate Limiting (5000-5099)
	ErrCodeRateLimitExceeded ErrorCode = 5000

	// Internal (9000-9999)
	ErrCodeInternal ErrorCode = 9000
	ErrCodeDatabase ErrorCode = 9001
)
|
||||||
69
veza-backend-api/internal/errors/errors.go
Normal file
69
veza-backend-api/internal/errors/errors.go
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
|
// ErrorCode is a stable numeric identifier for an application error
// category; concrete values live in codes.go, grouped by thousand-range
// (1000s auth, 2000s validation, 3000s resources, ...).
type ErrorCode int

// AppError is the application-wide error type: a standardized code, a
// human-readable message, an optional wrapped cause, optional per-field
// validation details, and free-form context for correlation.
type AppError struct {
	Code    ErrorCode              // standardized code, see codes.go
	Message string                 // human-readable summary
	Err     error                  // wrapped cause, nil when none
	Details []ErrorDetail          // per-field details for validation errors
	Context map[string]interface{} // additional context (request_id, user_id, etc.); nil by default
}

// ErrorDetail is one field-level entry attached to a validation AppError.
type ErrorDetail struct {
	Field   string `json:"field,omitempty"`
	Message string `json:"message"`
}

// Error implements the error interface, formatting as "[code] message"
// and appending the wrapped cause when present.
func (e *AppError) Error() string {
	if e.Err != nil {
		return fmt.Sprintf("[%d] %s: %v", e.Code, e.Message, e.Err)
	}
	return fmt.Sprintf("[%d] %s", e.Code, e.Message)
}

// Unwrap returns the wrapped cause (possibly nil) so errors.Is/errors.As
// can traverse the chain.
func (e *AppError) Unwrap() error {
	return e.Err
}

// New creates an AppError with a code and message and no wrapped cause.
func New(code ErrorCode, message string) *AppError {
	return &AppError{Code: code, Message: message}
}

// Wrap creates an AppError that records err as its cause, preserving it
// for Unwrap/errors.Is.
func Wrap(code ErrorCode, message string, err error) *AppError {
	return &AppError{Code: code, Message: message, Err: err}
}

// NewValidationError creates an ErrCodeValidation AppError carrying the
// given per-field details (may be empty).
func NewValidationError(message string, details ...ErrorDetail) *AppError {
	return &AppError{
		Code:    ErrCodeValidation,
		Message: message,
		Details: details,
	}
}

// NewNotFoundError creates an ErrCodeNotFound AppError whose message is
// "<resource> not found".
func NewNotFoundError(resource string) *AppError {
	return &AppError{
		Code:    ErrCodeNotFound,
		Message: fmt.Sprintf("%s not found", resource),
	}
}

// NewUnauthorizedError creates an ErrCodeUnauthorized AppError with the
// given message.
func NewUnauthorizedError(message string) *AppError {
	return &AppError{
		Code:    ErrCodeUnauthorized,
		Message: message,
	}
}
|
||||||
82
veza-backend-api/internal/errors/errors_context_test.go
Normal file
82
veza-backend-api/internal/errors/errors_context_test.go
Normal file
|
|
@ -0,0 +1,82 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestAppError_WithContext verifies that Context can be attached after
// construction and read back per key.
func TestAppError_WithContext(t *testing.T) {
	err := New(ErrCodeValidation, "Invalid input")
	err.Context = map[string]interface{}{
		"request_id": "abc123",
		"user_id":    42,
	}

	assert.NotNil(t, err.Context)
	assert.Equal(t, "abc123", err.Context["request_id"])
	assert.Equal(t, 42, err.Context["user_id"])
}

// TestAppError_ContextNil documents that Context defaults to nil.
func TestAppError_ContextNil(t *testing.T) {
	err := New(ErrCodeValidation, "Invalid input")
	assert.Nil(t, err.Context)
}

// TestAppError_ContextEmpty distinguishes an explicitly empty map from nil.
func TestAppError_ContextEmpty(t *testing.T) {
	err := New(ErrCodeValidation, "Invalid input")
	err.Context = make(map[string]interface{})
	assert.NotNil(t, err.Context)
	assert.Equal(t, 0, len(err.Context))
}

// TestAppError_ContextWithMultipleFields checks that heterogeneous value
// types survive storage and retrieval.
func TestAppError_ContextWithMultipleFields(t *testing.T) {
	err := New(ErrCodeInternal, "Internal error")
	err.Context = map[string]interface{}{
		"request_id": "req-123",
		"user_id":    int64(100),
		"ip_address": "192.168.1.1",
		"path":       "/api/test",
	}

	assert.Equal(t, "req-123", err.Context["request_id"])
	assert.Equal(t, int64(100), err.Context["user_id"])
	assert.Equal(t, "192.168.1.1", err.Context["ip_address"])
	assert.Equal(t, "/api/test", err.Context["path"])
}

// TestNewValidationError_Context: the validation constructor leaves
// Context nil, but it can be populated afterwards.
func TestNewValidationError_Context(t *testing.T) {
	err := NewValidationError("Validation failed",
		ErrorDetail{Field: "email", Message: "Invalid format"},
	)

	// Context should be nil by default
	assert.Nil(t, err.Context)

	// But it can be added afterwards
	err.Context = map[string]interface{}{
		"request_id": "xyz789",
	}
	assert.Equal(t, "xyz789", err.Context["request_id"])
}

// TestNewNotFoundError_Context: same nil-by-default contract for the
// not-found constructor.
func TestNewNotFoundError_Context(t *testing.T) {
	err := NewNotFoundError("User")

	assert.Nil(t, err.Context)
	err.Context = map[string]interface{}{
		"resource_id": 123,
	}
	assert.Equal(t, 123, err.Context["resource_id"])
}

// TestWrap_Context: wrapping an error does not synthesize a Context.
func TestWrap_Context(t *testing.T) {
	originalErr := New(ErrCodeInternal, "Original error")
	wrappedErr := Wrap(ErrCodeValidation, "Wrapped error", originalErr)

	assert.Nil(t, wrappedErr.Context)
	wrappedErr.Context = map[string]interface{}{
		"wrapped": true,
	}
	assert.Equal(t, true, wrappedErr.Context["wrapped"])
}
|
||||||
106
veza-backend-api/internal/errors/errors_test.go
Normal file
106
veza-backend-api/internal/errors/errors_test.go
Normal file
|
|
@ -0,0 +1,106 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestAppError_Error verifies the "[code] message[: cause]" formatting of
// AppError.Error, with and without a wrapped cause.
func TestAppError_Error(t *testing.T) {
	tests := []struct {
		name     string
		err      *AppError
		expected string
	}{
		{
			name:     "error without wrapped error",
			err:      New(ErrCodeValidation, "Invalid input"),
			expected: "[2000] Invalid input",
		},
		{
			name:     "error with wrapped error",
			err:      Wrap(ErrCodeDatabase, "Database query failed", assert.AnError),
			expected: "[9001] Database query failed: assert.AnError general error for testing",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.err.Error()
			assert.Contains(t, result, tt.expected)
		})
	}
}

// TestAppError_Unwrap verifies Unwrap returns the cause, or nil when
// there is none.
func TestAppError_Unwrap(t *testing.T) {
	wrappedErr := assert.AnError
	err := Wrap(ErrCodeDatabase, "Database error", wrappedErr)

	assert.Equal(t, wrappedErr, err.Unwrap())
	assert.Nil(t, New(ErrCodeValidation, "Test").Unwrap())
}

// TestNew verifies the plain constructor sets code and message and leaves
// cause/details empty.
func TestNew(t *testing.T) {
	err := New(ErrCodeValidation, "Test message")

	assert.Equal(t, ErrCodeValidation, err.Code)
	assert.Equal(t, "Test message", err.Message)
	assert.Nil(t, err.Err)
	assert.Empty(t, err.Details)
}

// TestWrap verifies the wrapping constructor records the cause.
func TestWrap(t *testing.T) {
	wrappedErr := assert.AnError
	err := Wrap(ErrCodeInternal, "Internal error", wrappedErr)

	assert.Equal(t, ErrCodeInternal, err.Code)
	assert.Equal(t, "Internal error", err.Message)
	assert.Equal(t, wrappedErr, err.Err)
}

// TestNewValidationError verifies details are stored in order with their
// field/message pairs intact.
func TestNewValidationError(t *testing.T) {
	details := []ErrorDetail{
		{Field: "email", Message: "Invalid format"},
		{Field: "password", Message: "Too short"},
	}

	err := NewValidationError("Validation failed", details...)

	assert.Equal(t, ErrCodeValidation, err.Code)
	assert.Equal(t, "Validation failed", err.Message)
	assert.Len(t, err.Details, 2)
	assert.Equal(t, "email", err.Details[0].Field)
	assert.Equal(t, "Invalid format", err.Details[0].Message)
	assert.Equal(t, "password", err.Details[1].Field)
	assert.Equal(t, "Too short", err.Details[1].Message)
}

// TestNewValidationError_NoDetails verifies the constructor works with an
// empty detail list.
func TestNewValidationError_NoDetails(t *testing.T) {
	err := NewValidationError("Validation failed")

	assert.Equal(t, ErrCodeValidation, err.Code)
	assert.Equal(t, "Validation failed", err.Message)
	assert.Empty(t, err.Details)
}

// TestNewNotFoundError verifies the "<resource> not found" message shape.
func TestNewNotFoundError(t *testing.T) {
	err := NewNotFoundError("User")

	assert.Equal(t, ErrCodeNotFound, err.Code)
	assert.Equal(t, "User not found", err.Message)
}

// TestNewUnauthorizedError verifies code and message of the unauthorized
// constructor.
func TestNewUnauthorizedError(t *testing.T) {
	err := NewUnauthorizedError("Invalid token")

	assert.Equal(t, ErrCodeUnauthorized, err.Code)
	assert.Equal(t, "Invalid token", err.Message)
}
|
||||||
63
veza-backend-api/internal/errors/validation.go
Normal file
63
veza-backend-api/internal/errors/validation.go
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import (
	stderrors "errors"

	"github.com/go-playground/validator/v10"
)
|
||||||
|
|
||||||
|
// FromValidatorError convertit une erreur de validation en AppError
|
||||||
|
func FromValidatorError(err error) *AppError {
|
||||||
|
if validationErrors, ok := err.(validator.ValidationErrors); ok {
|
||||||
|
details := make([]ErrorDetail, 0, len(validationErrors))
|
||||||
|
|
||||||
|
for _, fieldError := range validationErrors {
|
||||||
|
details = append(details, ErrorDetail{
|
||||||
|
Field: fieldError.Field(),
|
||||||
|
Message: getValidationMessage(fieldError),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return &AppError{
|
||||||
|
Code: ErrCodeValidation,
|
||||||
|
Message: "Validation failed",
|
||||||
|
Details: details,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return New(ErrCodeValidation, err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
// getValidationMessage génère un message d'erreur lisible à partir d'une FieldError
|
||||||
|
func getValidationMessage(fieldError validator.FieldError) string {
|
||||||
|
switch fieldError.Tag() {
|
||||||
|
case "required":
|
||||||
|
return fieldError.Field() + " is required"
|
||||||
|
case "email":
|
||||||
|
return fieldError.Field() + " must be a valid email"
|
||||||
|
case "min":
|
||||||
|
return fieldError.Field() + " must be at least " + fieldError.Param()
|
||||||
|
case "max":
|
||||||
|
return fieldError.Field() + " must be at most " + fieldError.Param()
|
||||||
|
case "len":
|
||||||
|
return fieldError.Field() + " must be exactly " + fieldError.Param() + " characters"
|
||||||
|
case "gte":
|
||||||
|
return fieldError.Field() + " must be greater than or equal to " + fieldError.Param()
|
||||||
|
case "lte":
|
||||||
|
return fieldError.Field() + " must be less than or equal to " + fieldError.Param()
|
||||||
|
case "gt":
|
||||||
|
return fieldError.Field() + " must be greater than " + fieldError.Param()
|
||||||
|
case "lt":
|
||||||
|
return fieldError.Field() + " must be less than " + fieldError.Param()
|
||||||
|
case "url":
|
||||||
|
return fieldError.Field() + " must be a valid URL"
|
||||||
|
case "alphanum":
|
||||||
|
return fieldError.Field() + " must contain only alphanumeric characters"
|
||||||
|
case "alpha":
|
||||||
|
return fieldError.Field() + " must contain only alphabetic characters"
|
||||||
|
case "numeric":
|
||||||
|
return fieldError.Field() + " must be numeric"
|
||||||
|
case "oneof":
|
||||||
|
return fieldError.Field() + " must be one of: " + fieldError.Param()
|
||||||
|
default:
|
||||||
|
return fieldError.Field() + " is invalid"
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue