2025-12-03 19:29:37 +00:00
|
|
|
package handlers
|
|
|
|
|
|
|
|
|
|
import (
|
2025-12-29 18:23:23 +00:00
|
|
|
"context"
|
2025-12-03 19:29:37 +00:00
|
|
|
"net/http"
|
|
|
|
|
"strconv"
|
|
|
|
|
"time"
|
|
|
|
|
|
2025-12-16 19:07:36 +00:00
|
|
|
apperrors "veza-backend-api/internal/errors"
|
2025-12-16 18:34:08 +00:00
|
|
|
"veza-backend-api/internal/services"
|
2025-12-29 18:23:23 +00:00
|
|
|
"veza-backend-api/internal/types"
|
2025-12-16 18:34:08 +00:00
|
|
|
|
2025-12-03 19:29:37 +00:00
|
|
|
"github.com/gin-gonic/gin"
|
P0: stabilisation backend/chat/stream + nouvelle base migrations v1
Backend Go:
- Remplacement complet des anciennes migrations par la base V1 alignée sur ORIGIN.
- Durcissement global du parsing JSON (BindAndValidateJSON + RespondWithAppError).
- Sécurisation de config.go, CORS, statuts de santé et monitoring.
- Implémentation des transactions P0 (RBAC, duplication de playlists, social toggles).
- Ajout d’un job worker structuré (emails, analytics, thumbnails) + tests associés.
- Nouvelle doc backend : AUDIT_CONFIG, BACKEND_CONFIG, AUTH_PASSWORD_RESET, JOB_WORKER_*.
Chat server (Rust):
- Refonte du pipeline JWT + sécurité, audit et rate limiting avancé.
- Implémentation complète du cycle de message (read receipts, delivered, edit/delete, typing).
- Nettoyage des panics, gestion d’erreurs robuste, logs structurés.
- Migrations chat alignées sur le schéma UUID et nouvelles features.
Stream server (Rust):
- Refonte du moteur de streaming (encoding pipeline + HLS) et des modules core.
- Transactions P0 pour les jobs et segments, garanties d’atomicité.
- Documentation détaillée de la pipeline (AUDIT_STREAM_*, DESIGN_STREAM_PIPELINE, TRANSACTIONS_P0_IMPLEMENTATION).
Documentation & audits:
- TRIAGE.md et AUDIT_STABILITY.md à jour avec l’état réel des 3 services.
- Cartographie complète des migrations et des transactions (DB_MIGRATIONS_*, DB_TRANSACTION_PLAN, AUDIT_DB_TRANSACTIONS, TRANSACTION_TESTS_PHASE3).
- Scripts de reset et de cleanup pour la lab DB et la V1.
Ce commit fige l’ensemble du travail de stabilisation P0 (UUID, backend, chat et stream) avant les phases suivantes (Coherence Guardian, WS hardening, etc.).
2025-12-06 10:14:38 +00:00
|
|
|
"github.com/google/uuid"
|
|
|
|
|
"go.uber.org/zap"
|
2025-12-03 19:29:37 +00:00
|
|
|
)
|
|
|
|
|
|
2025-12-29 18:23:23 +00:00
|
|
|
// AnalyticsServiceInterface defines the contract the analytics handlers need
// from the analytics service layer. It mirrors the methods of
// *services.AnalyticsService so tests can substitute a mock implementation.
type AnalyticsServiceInterface interface {
	// RecordPlay persists a single play event for a track. userID may be nil
	// for anonymous plays. duration is an int listen length (presumably
	// seconds — TODO confirm against the service implementation).
	RecordPlay(ctx context.Context, trackID uuid.UUID, userID *uuid.UUID, duration int, device, ipAddress string) error
	// GetTrackStats returns aggregated statistics for a single track.
	GetTrackStats(ctx context.Context, trackID uuid.UUID) (*types.TrackStats, error)
	// GetTopTracks returns up to limit tracks ranked by plays, optionally
	// restricted to a date window (nil start/end means unbounded).
	GetTopTracks(ctx context.Context, limit int, startDate, endDate *time.Time) ([]services.TopTrack, error)
	// GetPlaysOverTime returns play counts for a track bucketed by interval
	// ("hour", "day", "week" or "month" — see GetPlaysOverTime handler)
	// over the [startDate, endDate] range.
	GetPlaysOverTime(ctx context.Context, trackID uuid.UUID, startDate, endDate time.Time, interval string) ([]services.PlayTimePoint, error)
	// GetUserStats returns aggregated statistics for a single user.
	GetUserStats(ctx context.Context, userID uuid.UUID) (*types.UserStats, error)
}
|
|
|
|
|
|
|
|
|
|
// AnalyticsJobWorkerInterface defines the subset of the JobWorker used by the
// analytics handlers: asynchronous enqueueing of custom analytics events.
type AnalyticsJobWorkerInterface interface {
	// EnqueueAnalyticsJob queues a custom analytics event for background
	// processing. userID may be nil for anonymous events. The call has no
	// error return; delivery is best-effort from the handler's perspective.
	EnqueueAnalyticsJob(eventName string, userID *uuid.UUID, payload map[string]interface{})
}
|
|
|
|
|
|
2025-12-03 19:29:37 +00:00
|
|
|
// AnalyticsHandler serves the track/user play-analytics HTTP endpoints.
type AnalyticsHandler struct {
	// analyticsService provides persistence and aggregation; interface-typed
	// so tests can inject a mock (see NewAnalyticsHandlerWithInterface).
	analyticsService AnalyticsServiceInterface
	// jobWorker is optional and set via SetJobWorker; it is only required by
	// RecordEvent, which fails with an internal error when it is nil.
	jobWorker AnalyticsJobWorkerInterface
	// commonHandler supplies shared helpers (JSON binding/validation, logger).
	commonHandler *CommonHandler
}
|
|
|
|
|
|
|
|
|
|
// NewAnalyticsHandler crée un nouveau handler d'analytics
|
P0: stabilisation backend/chat/stream + nouvelle base migrations v1
Backend Go:
- Remplacement complet des anciennes migrations par la base V1 alignée sur ORIGIN.
- Durcissement global du parsing JSON (BindAndValidateJSON + RespondWithAppError).
- Sécurisation de config.go, CORS, statuts de santé et monitoring.
- Implémentation des transactions P0 (RBAC, duplication de playlists, social toggles).
- Ajout d’un job worker structuré (emails, analytics, thumbnails) + tests associés.
- Nouvelle doc backend : AUDIT_CONFIG, BACKEND_CONFIG, AUTH_PASSWORD_RESET, JOB_WORKER_*.
Chat server (Rust):
- Refonte du pipeline JWT + sécurité, audit et rate limiting avancé.
- Implémentation complète du cycle de message (read receipts, delivered, edit/delete, typing).
- Nettoyage des panics, gestion d’erreurs robuste, logs structurés.
- Migrations chat alignées sur le schéma UUID et nouvelles features.
Stream server (Rust):
- Refonte du moteur de streaming (encoding pipeline + HLS) et des modules core.
- Transactions P0 pour les jobs et segments, garanties d’atomicité.
- Documentation détaillée de la pipeline (AUDIT_STREAM_*, DESIGN_STREAM_PIPELINE, TRANSACTIONS_P0_IMPLEMENTATION).
Documentation & audits:
- TRIAGE.md et AUDIT_STABILITY.md à jour avec l’état réel des 3 services.
- Cartographie complète des migrations et des transactions (DB_MIGRATIONS_*, DB_TRANSACTION_PLAN, AUDIT_DB_TRANSACTIONS, TRANSACTION_TESTS_PHASE3).
- Scripts de reset et de cleanup pour la lab DB et la V1.
Ce commit fige l’ensemble du travail de stabilisation P0 (UUID, backend, chat et stream) avant les phases suivantes (Coherence Guardian, WS hardening, etc.).
2025-12-06 10:14:38 +00:00
|
|
|
func NewAnalyticsHandler(analyticsService *services.AnalyticsService, logger *zap.Logger) *AnalyticsHandler {
|
|
|
|
|
return &AnalyticsHandler{
|
|
|
|
|
analyticsService: analyticsService,
|
|
|
|
|
commonHandler: NewCommonHandler(logger),
|
|
|
|
|
}
|
2025-12-03 19:29:37 +00:00
|
|
|
}
|
|
|
|
|
|
2025-12-29 18:23:23 +00:00
|
|
|
// NewAnalyticsHandlerWithInterface creates a new analytics handler with interfaces for testing
|
|
|
|
|
func NewAnalyticsHandlerWithInterface(analyticsService AnalyticsServiceInterface, logger *zap.Logger) *AnalyticsHandler {
|
|
|
|
|
return &AnalyticsHandler{
|
|
|
|
|
analyticsService: analyticsService,
|
|
|
|
|
commonHandler: NewCommonHandler(logger),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-24 13:47:12 +00:00
|
|
|
// SetJobWorker wires in the JobWorker used by RecordEvent to enqueue custom
// analytics events. It is optional: while unset, RecordEvent responds with an
// internal error. Intended to be called once during startup, before the
// handler starts serving requests (the field write is not synchronized).
func (h *AnalyticsHandler) SetJobWorker(jobWorker AnalyticsJobWorkerInterface) {
	h.jobWorker = jobWorker
}
|
|
|
|
|
|
2025-12-03 19:29:37 +00:00
|
|
|
// RecordPlayRequest is the JSON body for POST /tracks/{id}/play.
// MOD-P1-001: validate tags added for systematic validation.
type RecordPlayRequest struct {
	// Duration of the listen; required and must be >= 1 (unit presumably
	// seconds — TODO confirm against the analytics service).
	Duration int `json:"duration" binding:"required,min=1" validate:"required,min=1"`
	// Device is an optional client-supplied device label (max 100 chars);
	// RecordPlay falls back to the User-Agent header when it is empty.
	Device string `json:"device,omitempty" validate:"omitempty,max=100"`
}
|
|
|
|
|
|
|
|
|
|
// RecordPlay handles recording a play event for a track. The caller may be
// anonymous: when the auth middleware has stored a "user_id" in the gin
// context it is attached to the play, otherwise the play is recorded without
// a user.
// @Summary Record play
// @Description Record a play event for a track. Can be called anonymously or with authentication.
// @Tags Analytics
// @Accept json
// @Produce json
// @Param id path string true "Track ID (UUID)"
// @Param request body handlers.RecordPlayRequest true "Play event data"
// @Success 200 {object} handlers.APIResponse{data=object{message=string}}
// @Failure 400 {object} handlers.APIResponse "Validation error"
// @Failure 404 {object} handlers.APIResponse "Track not found"
// @Failure 500 {object} handlers.APIResponse "Internal server error"
// @Router /tracks/{id}/play [post]
func (h *AnalyticsHandler) RecordPlay(c *gin.Context) {
	trackIDStr := c.Param("id")
	if trackIDStr == "" {
		RespondWithAppError(c, apperrors.NewValidationError("track id is required"))
		return
	}

	// Track IDs are UUIDs; reject anything that does not parse.
	trackID, err := uuid.Parse(trackIDStr)
	if err != nil {
		RespondWithAppError(c, apperrors.NewValidationError("invalid track id"))
		return
	}

	var req RecordPlayRequest
	if appErr := h.commonHandler.BindAndValidateJSON(c, &req); appErr != nil {
		RespondWithAppError(c, appErr)
		return
	}

	// Attach user_id when authenticated (optional: anonymous analytics are
	// allowed, so a missing or wrongly-typed context value is ignored).
	var userID *uuid.UUID
	if uid, ok := c.Get("user_id"); ok {
		if uidUUID, ok := uid.(uuid.UUID); ok {
			userID = &uidUUID
		}
	}

	// Collect client IP and device; fall back to the User-Agent header when
	// the request body did not specify a device.
	ipAddress := c.ClientIP()
	device := req.Device
	if device == "" {
		device = c.GetHeader("User-Agent")
	}

	err = h.analyticsService.RecordPlay(c.Request.Context(), trackID, userID, req.Duration, device, ipAddress)
	if err != nil {
		// NOTE(review): matching on the error string is fragile; a sentinel
		// error exported by the services package would be safer — TODO.
		if err.Error() == "track not found" {
			RespondWithAppError(c, apperrors.NewNotFoundError("track"))
			return
		}
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to record play", err))
		return
	}

	RespondSuccess(c, http.StatusOK, gin.H{"message": "play recorded"})
}
|
|
|
|
|
|
|
|
|
|
// GetTrackStats gère la récupération des statistiques d'un track
|
2026-01-07 18:39:21 +00:00
|
|
|
// @Summary Get track statistics
|
|
|
|
|
// @Description Get statistics for a track (plays, likes, etc.)
|
|
|
|
|
// @Tags Analytics
|
|
|
|
|
// @Accept json
|
|
|
|
|
// @Produce json
|
|
|
|
|
// @Param id path string true "Track ID (UUID)"
|
|
|
|
|
// @Success 200 {object} handlers.APIResponse{data=object{stats=object}}
|
|
|
|
|
// @Failure 400 {object} handlers.APIResponse "Validation error"
|
|
|
|
|
// @Failure 404 {object} handlers.APIResponse "Track not found"
|
|
|
|
|
// @Failure 500 {object} handlers.APIResponse "Internal server error"
|
|
|
|
|
// @Router /tracks/{id}/stats [get]
|
2025-12-03 19:29:37 +00:00
|
|
|
func (h *AnalyticsHandler) GetTrackStats(c *gin.Context) {
|
|
|
|
|
trackIDStr := c.Param("id")
|
|
|
|
|
if trackIDStr == "" {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewValidationError("track id is required"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
trackID, err := uuid.Parse(trackIDStr) // Changed to uuid.Parse
|
|
|
|
|
if err != nil {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewValidationError("invalid track id"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
stats, err := h.analyticsService.GetTrackStats(c.Request.Context(), trackID)
|
|
|
|
|
if err != nil {
|
|
|
|
|
if err.Error() == "track not found" {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewNotFoundError("track"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get track stats", err))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-06 16:21:59 +00:00
|
|
|
RespondSuccess(c, http.StatusOK, gin.H{"stats": stats})
|
2025-12-03 19:29:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// GetTopTracks returns the most-played tracks, optionally filtered by a date
// range.
// @Summary Get top tracks
// @Description Get list of top tracks by play count, optionally filtered by date range
// @Tags Analytics
// @Accept json
// @Produce json
// @Param limit query int false "Number of tracks to return" default(10) minimum(1) maximum(100)
// @Param start_date query string false "Start date filter (RFC3339 format)"
// @Param end_date query string false "End date filter (RFC3339 format)"
// @Success 200 {object} handlers.APIResponse{data=object{tracks=array}}
// @Failure 400 {object} handlers.APIResponse "Validation error"
// @Failure 500 {object} handlers.APIResponse "Internal server error"
// @Router /analytics/tracks/top [get]
func (h *AnalyticsHandler) GetTopTracks(c *gin.Context) {
	// Parse limit: defaults to 10; an explicit value must be in [1, 100].
	limit := 10
	if limitStr := c.Query("limit"); limitStr != "" {
		if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 {
			limit = l
		} else {
			RespondWithAppError(c, apperrors.NewValidationError("invalid limit (must be between 1 and 100)"))
			return
		}
	}

	// Parse start_date (optional; nil means no lower bound).
	var startDate *time.Time
	if startDateStr := c.Query("start_date"); startDateStr != "" {
		parsed, err := time.Parse(time.RFC3339, startDateStr)
		if err != nil {
			RespondWithAppError(c, apperrors.NewValidationError("invalid start_date format (use RFC3339)"))
			return
		}
		startDate = &parsed
	}

	// Parse end_date (optional; nil means no upper bound).
	var endDate *time.Time
	if endDateStr := c.Query("end_date"); endDateStr != "" {
		parsed, err := time.Parse(time.RFC3339, endDateStr)
		if err != nil {
			RespondWithAppError(c, apperrors.NewValidationError("invalid end_date format (use RFC3339)"))
			return
		}
		endDate = &parsed
	}

	topTracks, err := h.analyticsService.GetTopTracks(c.Request.Context(), limit, startDate, endDate)
	if err != nil {
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get top tracks", err))
		return
	}

	RespondSuccess(c, http.StatusOK, gin.H{"tracks": topTracks})
}
|
|
|
|
|
|
|
|
|
|
// GetPlaysOverTime returns play counts for a track bucketed over a time range.
// @Summary Get plays over time
// @Description Get play statistics over time for a track, grouped by time period
// @Tags Analytics
// @Accept json
// @Produce json
// @Param id path string true "Track ID (UUID)"
// @Param start_date query string false "Start date (RFC3339 format)"
// @Param end_date query string false "End date (RFC3339 format)"
// @Param interval query string false "Time period grouping (hour, day, week, month)" default(day)
// @Success 200 {object} handlers.APIResponse{data=object{points=array}}
// @Failure 400 {object} handlers.APIResponse "Validation error"
// @Failure 404 {object} handlers.APIResponse "Track not found"
// @Failure 500 {object} handlers.APIResponse "Internal server error"
// @Router /tracks/{id}/analytics/plays [get]
func (h *AnalyticsHandler) GetPlaysOverTime(c *gin.Context) {
	trackIDStr := c.Param("id")
	if trackIDStr == "" {
		RespondWithAppError(c, apperrors.NewValidationError("track id is required"))
		return
	}

	// Track IDs are UUIDs; reject anything that does not parse.
	trackID, err := uuid.Parse(trackIDStr)
	if err != nil {
		RespondWithAppError(c, apperrors.NewValidationError("invalid track id"))
		return
	}

	// Parse start_date (optional; defaults to 30 days ago).
	startDate := time.Now().AddDate(0, 0, -30)
	if startDateStr := c.Query("start_date"); startDateStr != "" {
		parsed, err := time.Parse(time.RFC3339, startDateStr)
		if err != nil {
			RespondWithAppError(c, apperrors.NewValidationError("invalid start_date format (use RFC3339)"))
			return
		}
		startDate = parsed
	}

	// Parse end_date (optional; defaults to now). Note: no check that
	// startDate precedes endDate — the service receives the range as given.
	endDate := time.Now()
	if endDateStr := c.Query("end_date"); endDateStr != "" {
		parsed, err := time.Parse(time.RFC3339, endDateStr)
		if err != nil {
			RespondWithAppError(c, apperrors.NewValidationError("invalid end_date format (use RFC3339)"))
			return
		}
		endDate = parsed
	}

	// Parse interval (optional; defaults to "day"); only the four listed
	// bucket sizes are accepted.
	interval := c.DefaultQuery("interval", "day")
	validIntervals := map[string]bool{"hour": true, "day": true, "week": true, "month": true}
	if !validIntervals[interval] {
		RespondWithAppError(c, apperrors.NewValidationError("invalid interval (must be: hour, day, week, month)"))
		return
	}

	points, err := h.analyticsService.GetPlaysOverTime(c.Request.Context(), trackID, startDate, endDate, interval)
	if err != nil {
		// NOTE(review): error-string matching is fragile; a sentinel error
		// from the services package would be safer — TODO.
		if err.Error() == "track not found" {
			RespondWithAppError(c, apperrors.NewNotFoundError("track"))
			return
		}
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get plays over time", err))
		return
	}

	RespondSuccess(c, http.StatusOK, gin.H{"points": points})
}
|
|
|
|
|
|
|
|
|
|
// GetUserStats gère la récupération des statistiques d'un utilisateur
|
2026-01-07 18:39:21 +00:00
|
|
|
// @Summary Get user statistics
|
|
|
|
|
// @Description Get analytics statistics for a user (total plays, tracks, etc.)
|
|
|
|
|
// @Tags Analytics
|
|
|
|
|
// @Accept json
|
|
|
|
|
// @Produce json
|
|
|
|
|
// @Security BearerAuth
|
|
|
|
|
// @Param id path string true "User ID (UUID)"
|
|
|
|
|
// @Success 200 {object} handlers.APIResponse{data=object{stats=object}}
|
|
|
|
|
// @Failure 400 {object} handlers.APIResponse "Validation error"
|
|
|
|
|
// @Failure 401 {object} handlers.APIResponse "Unauthorized"
|
|
|
|
|
// @Failure 403 {object} handlers.APIResponse "Forbidden - can only view own stats"
|
|
|
|
|
// @Failure 404 {object} handlers.APIResponse "User not found"
|
|
|
|
|
// @Failure 500 {object} handlers.APIResponse "Internal server error"
|
|
|
|
|
// @Router /users/{id}/analytics/stats [get]
|
2025-12-03 19:29:37 +00:00
|
|
|
func (h *AnalyticsHandler) GetUserStats(c *gin.Context) {
|
|
|
|
|
userIDStr := c.Param("id")
|
|
|
|
|
if userIDStr == "" {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewValidationError("user id is required"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
userID, err := uuid.Parse(userIDStr)
|
|
|
|
|
if err != nil {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewValidationError("invalid user id"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Vérifier que l'utilisateur peut accéder à ses propres stats
|
|
|
|
|
var authenticatedUserID *uuid.UUID
|
|
|
|
|
if uid, ok := c.Get("user_id"); ok {
|
|
|
|
|
if uidUUID, ok := uid.(uuid.UUID); ok {
|
|
|
|
|
authenticatedUserID = &uidUUID
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if authenticatedUserID != nil && *authenticatedUserID != userID {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewForbiddenError("cannot access other user's stats"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
stats, err := h.analyticsService.GetUserStats(c.Request.Context(), userID)
|
|
|
|
|
if err != nil {
|
|
|
|
|
if err.Error() == "user not found" {
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.NewNotFoundError("user"))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
2025-12-16 19:07:36 +00:00
|
|
|
RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get user stats", err))
|
2025-12-03 19:29:37 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-06 16:21:59 +00:00
|
|
|
RespondSuccess(c, http.StatusOK, gin.H{"stats": stats})
|
2025-12-03 19:29:37 +00:00
|
|
|
}
|
2025-12-24 13:47:12 +00:00
|
|
|
|
2025-12-24 13:48:28 +00:00
|
|
|
// GetTrackAnalyticsDashboard returns a combined analytics dashboard for a
// track: aggregate stats plus a 30-day daily play series.
// BE-API-036: GET /api/v1/analytics/tracks/:id returns comprehensive track analytics
// @Summary Get Track Analytics Dashboard
// @Description Get comprehensive analytics dashboard for a track
// @Tags Analytics
// @Accept json
// @Produce json
// @Security BearerAuth
// @Param id path string true "Track ID"
// @Success 200 {object} APIResponse{data=object{dashboard=object}}
// @Failure 400 {object} APIResponse "Validation Error"
// @Failure 404 {object} APIResponse "Track not found"
// @Failure 500 {object} APIResponse "Internal Error"
// @Router /analytics/tracks/{id} [get]
func (h *AnalyticsHandler) GetTrackAnalyticsDashboard(c *gin.Context) {
	trackIDStr := c.Param("id")
	if trackIDStr == "" {
		RespondWithAppError(c, apperrors.NewValidationError("track id is required"))
		return
	}

	trackID, err := uuid.Parse(trackIDStr)
	if err != nil {
		RespondWithAppError(c, apperrors.NewValidationError("invalid track id"))
		return
	}

	// Fetch the base aggregate statistics; "track not found" maps to 404.
	stats, err := h.analyticsService.GetTrackStats(c.Request.Context(), trackID)
	if err != nil {
		if err.Error() == "track not found" {
			RespondWithAppError(c, apperrors.NewNotFoundError("track"))
			return
		}
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get track stats", err))
		return
	}

	// Fetch plays over the last 30 days, bucketed by day.
	startDate := time.Now().AddDate(0, 0, -30)
	endDate := time.Now()
	playsOverTime, err := h.analyticsService.GetPlaysOverTime(c.Request.Context(), trackID, startDate, endDate, "day")
	if err != nil {
		// Deliberately best-effort: the dashboard still renders with an empty
		// time series when the temporal data cannot be fetched.
		playsOverTime = []services.PlayTimePoint{}
	}

	// Assemble the full dashboard payload.
	dashboard := gin.H{
		"track_id": trackID.String(),
		"stats": gin.H{
			"total_plays":      stats.TotalPlays,
			"unique_listeners": stats.UniqueListeners,
			"average_duration": stats.AverageDuration,
			"completion_rate":  stats.CompletionRate,
		},
		"plays_over_time": playsOverTime,
		"period": gin.H{
			"start_date": startDate.Format(time.RFC3339),
			"end_date":   endDate.Format(time.RFC3339),
			"days":       30,
		},
	}

	RespondSuccess(c, http.StatusOK, gin.H{
		"dashboard": dashboard,
	})
}
|
|
|
|
|
|
2025-12-24 13:47:12 +00:00
|
|
|
// RecordEventRequest is the JSON body for POST /analytics/events, used to
// record a custom analytics event.
// BE-API-035: POST /api/v1/analytics/events to record custom analytics events
type RecordEventRequest struct {
	// EventName identifies the custom event; required, 1-100 characters.
	EventName string `json:"event_name" binding:"required,min=1,max=100" validate:"required,min=1,max=100"`
	// Payload is an optional free-form bag of event attributes, forwarded
	// as-is to the job worker.
	Payload map[string]interface{} `json:"payload,omitempty" validate:"omitempty"`
}
|
|
|
|
|
|
|
|
|
|
// RecordEvent records a custom analytics event by enqueueing it on the job
// worker for asynchronous processing. Delivery is fire-and-forget: a 200
// response means the event was queued, not that it was persisted.
// BE-API-035: Implement analytics events endpoint
// @Summary Record Analytics Event
// @Description Record a custom analytics event
// @Tags Analytics
// @Accept json
// @Produce json
// @Security BearerAuth
// @Param request body RecordEventRequest true "Event Data"
// @Success 200 {object} APIResponse{data=object{message=string}}
// @Failure 400 {object} APIResponse "Validation Error"
// @Failure 401 {object} APIResponse "Unauthorized"
// @Failure 500 {object} APIResponse "Internal Error"
// @Router /analytics/events [post]
func (h *AnalyticsHandler) RecordEvent(c *gin.Context) {
	var req RecordEventRequest
	if appErr := h.commonHandler.BindAndValidateJSON(c, &req); appErr != nil {
		RespondWithAppError(c, appErr)
		return
	}

	// Attach user_id when authenticated (optional: anonymous events are
	// allowed, so a missing or wrongly-typed context value is ignored).
	var userID *uuid.UUID
	if uid, ok := c.Get("user_id"); ok {
		if uidUUID, ok := uid.(uuid.UUID); ok {
			userID = &uidUUID
		}
	}

	// The JobWorker is injected via SetJobWorker and may be absent; without
	// it there is nowhere to enqueue the event.
	if h.jobWorker == nil {
		RespondWithAppError(c, apperrors.New(apperrors.ErrCodeInternal, "analytics service not available"))
		return
	}

	// Queue the event for background processing (no error return).
	h.jobWorker.EnqueueAnalyticsJob(req.EventName, userID, req.Payload)

	RespondSuccess(c, http.StatusOK, gin.H{
		"message":    "event recorded",
		"event_name": req.EventName,
	})
}
|
2026-01-07 18:39:21 +00:00
|
|
|
|
|
|
|
|
// GetAnalytics gère la récupération des analytics agrégées pour l'utilisateur
|
|
|
|
|
// BE-API-037: GET /api/v1/analytics endpoint for aggregated analytics
|
|
|
|
|
// @Summary Get Analytics Data
|
|
|
|
|
// @Description Get aggregated analytics data for tracks and playlists
|
|
|
|
|
// @Tags Analytics
|
|
|
|
|
// @Accept json
|
|
|
|
|
// @Produce json
|
|
|
|
|
// @Security BearerAuth
|
|
|
|
|
// @Param days query int false "Number of days (default: 30)"
|
|
|
|
|
// @Param start_date query string false "Start date (ISO 8601)"
|
|
|
|
|
// @Param end_date query string false "End date (ISO 8601)"
|
|
|
|
|
// @Success 200 {object} APIResponse{data=object{tracks=object,playlists=object,period=object}}
|
|
|
|
|
// @Failure 401 {object} APIResponse "Unauthorized"
|
|
|
|
|
// @Failure 500 {object} APIResponse "Internal Error"
|
|
|
|
|
// @Router /analytics [get]
|
|
|
|
|
func (h *AnalyticsHandler) GetAnalytics(c *gin.Context) {
|
|
|
|
|
// Récupérer l'utilisateur authentifié
|
|
|
|
|
userIDInterface, exists := c.Get("user_id")
|
|
|
|
|
if !exists {
|
|
|
|
|
RespondWithAppError(c, apperrors.New(apperrors.ErrCodeUnauthorized, "authentication required"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
userID, ok := userIDInterface.(uuid.UUID)
|
|
|
|
|
if !ok {
|
|
|
|
|
RespondWithAppError(c, apperrors.New(apperrors.ErrCodeInternal, "invalid user id"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Parser les paramètres de date
|
|
|
|
|
daysStr := c.DefaultQuery("days", "30")
|
|
|
|
|
days, err := strconv.Atoi(daysStr)
|
|
|
|
|
if err != nil || days < 1 {
|
|
|
|
|
days = 30
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var startDate, endDate *time.Time
|
|
|
|
|
if startDateStr := c.Query("start_date"); startDateStr != "" {
|
|
|
|
|
if parsed, err := time.Parse(time.RFC3339, startDateStr); err == nil {
|
|
|
|
|
startDate = &parsed
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if endDateStr := c.Query("end_date"); endDateStr != "" {
|
|
|
|
|
if parsed, err := time.Parse(time.RFC3339, endDateStr); err == nil {
|
|
|
|
|
endDate = &parsed
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Si les dates ne sont pas fournies, calculer depuis days
|
|
|
|
|
if startDate == nil || endDate == nil {
|
|
|
|
|
now := time.Now()
|
|
|
|
|
if endDate == nil {
|
|
|
|
|
endDate = &now
|
|
|
|
|
}
|
|
|
|
|
if startDate == nil {
|
|
|
|
|
calculatedStart := endDate.AddDate(0, 0, -days)
|
|
|
|
|
startDate = &calculatedStart
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ctx := c.Request.Context()
|
|
|
|
|
|
|
|
|
|
// Accéder à la DB via le service (nécessite un cast)
|
|
|
|
|
analyticsSvc, ok := h.analyticsService.(*services.AnalyticsService)
|
|
|
|
|
if !ok {
|
|
|
|
|
RespondWithAppError(c, apperrors.New(apperrors.ErrCodeInternal, "analytics service type error"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Récupérer les tracks de l'utilisateur avec leurs stats
|
|
|
|
|
var tracks []struct {
|
|
|
|
|
ID uuid.UUID `gorm:"column:id"`
|
|
|
|
|
Title string `gorm:"column:title"`
|
|
|
|
|
PlayCount int64 `gorm:"column:play_count"`
|
|
|
|
|
LikeCount int64 `gorm:"column:like_count"`
|
|
|
|
|
DownloadCount int64 `gorm:"column:download_count"`
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if err := analyticsSvc.GetDB().WithContext(ctx).
|
|
|
|
|
Table("tracks").
|
|
|
|
|
Select("id, title, play_count, like_count, COALESCE(download_count, 0) as download_count").
|
|
|
|
|
Where("creator_id = ?", userID).
|
|
|
|
|
Find(&tracks).Error; err != nil {
|
|
|
|
|
h.commonHandler.logger.Error("Failed to fetch user tracks", zap.Error(err))
|
|
|
|
|
RespondWithAppError(c, apperrors.New(apperrors.ErrCodeInternal, "failed to fetch tracks"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Calculer les stats des tracks
|
|
|
|
|
totalTracks := len(tracks)
|
|
|
|
|
var totalPlays, totalLikes, totalDownloads int64
|
|
|
|
|
for _, track := range tracks {
|
|
|
|
|
totalPlays += track.PlayCount
|
|
|
|
|
totalLikes += track.LikeCount
|
|
|
|
|
totalDownloads += track.DownloadCount
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
avgPlayCount := float64(0)
|
|
|
|
|
if totalTracks > 0 {
|
|
|
|
|
avgPlayCount = float64(totalPlays) / float64(totalTracks)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Top 5 tracks
|
|
|
|
|
topTracks := make([]gin.H, 0, 5)
|
|
|
|
|
sortedTracks := make([]struct {
|
2026-01-13 18:47:57 +00:00
|
|
|
ID uuid.UUID
|
|
|
|
|
Title string
|
|
|
|
|
PlayCount int64
|
|
|
|
|
LikeCount int64
|
2026-01-07 18:39:21 +00:00
|
|
|
}, len(tracks))
|
|
|
|
|
for i, t := range tracks {
|
|
|
|
|
sortedTracks[i] = struct {
|
2026-01-13 18:47:57 +00:00
|
|
|
ID uuid.UUID
|
|
|
|
|
Title string
|
|
|
|
|
PlayCount int64
|
|
|
|
|
LikeCount int64
|
2026-01-07 18:39:21 +00:00
|
|
|
}{t.ID, t.Title, t.PlayCount, t.LikeCount}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Trier par play_count
|
|
|
|
|
for i := 0; i < len(sortedTracks)-1; i++ {
|
|
|
|
|
for j := i + 1; j < len(sortedTracks); j++ {
|
|
|
|
|
if sortedTracks[i].PlayCount < sortedTracks[j].PlayCount {
|
|
|
|
|
sortedTracks[i], sortedTracks[j] = sortedTracks[j], sortedTracks[i]
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for i := 0; i < 5 && i < len(sortedTracks); i++ {
|
|
|
|
|
topTracks = append(topTracks, gin.H{
|
|
|
|
|
"id": sortedTracks[i].ID.String(),
|
|
|
|
|
"title": sortedTracks[i].Title,
|
|
|
|
|
"play_count": sortedTracks[i].PlayCount,
|
2026-01-13 18:47:57 +00:00
|
|
|
"like_count": sortedTracks[i].LikeCount,
|
2026-01-07 18:39:21 +00:00
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Récupérer les playlists de l'utilisateur
|
|
|
|
|
// CRITIQUE FIX #15: Gérer gracieusement l'erreur si la table playlists n'existe pas ou est vide
|
|
|
|
|
var playlists []struct {
|
|
|
|
|
ID uuid.UUID `gorm:"column:id"`
|
|
|
|
|
Name string `gorm:"column:title"`
|
|
|
|
|
PlayCount int64 `gorm:"column:play_count"`
|
|
|
|
|
LikeCount int64 `gorm:"column:like_count"`
|
|
|
|
|
ShareCount int64 `gorm:"column:share_count"`
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
playlistsError := analyticsSvc.GetDB().WithContext(ctx).
|
|
|
|
|
Table("playlists").
|
|
|
|
|
Select("id, title as name, COALESCE(play_count, 0) as play_count, COALESCE(like_count, 0) as like_count, COALESCE(share_count, 0) as share_count").
|
|
|
|
|
Where("user_id = ?", userID).
|
|
|
|
|
Find(&playlists).Error
|
2026-01-13 18:47:57 +00:00
|
|
|
|
2026-01-07 18:39:21 +00:00
|
|
|
// Si erreur lors de la récupération des playlists, logger mais continuer avec des données vides
|
|
|
|
|
// Cela permet de retourner les analytics des tracks même si les playlists ne sont pas disponibles
|
|
|
|
|
if playlistsError != nil {
|
|
|
|
|
h.commonHandler.logger.Warn("Failed to fetch user playlists, continuing with empty playlists data", zap.Error(playlistsError))
|
|
|
|
|
playlists = []struct {
|
|
|
|
|
ID uuid.UUID `gorm:"column:id"`
|
|
|
|
|
Name string `gorm:"column:title"`
|
|
|
|
|
PlayCount int64 `gorm:"column:play_count"`
|
|
|
|
|
LikeCount int64 `gorm:"column:like_count"`
|
|
|
|
|
ShareCount int64 `gorm:"column:share_count"`
|
|
|
|
|
}{}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Calculer les stats des playlists
|
|
|
|
|
totalPlaylists := len(playlists)
|
|
|
|
|
var playlistPlays, playlistLikes, playlistShares int64
|
|
|
|
|
for _, playlist := range playlists {
|
|
|
|
|
playlistPlays += playlist.PlayCount
|
|
|
|
|
playlistLikes += playlist.LikeCount
|
|
|
|
|
playlistShares += playlist.ShareCount
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
avgPlaylistPlayCount := float64(0)
|
|
|
|
|
if totalPlaylists > 0 {
|
|
|
|
|
avgPlaylistPlayCount = float64(playlistPlays) / float64(totalPlaylists)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Top 5 playlists
|
|
|
|
|
topPlaylists := make([]gin.H, 0, 5)
|
|
|
|
|
sortedPlaylists := make([]struct {
|
|
|
|
|
ID uuid.UUID
|
|
|
|
|
Name string
|
|
|
|
|
PlayCount int64
|
|
|
|
|
LikeCount int64
|
|
|
|
|
}, len(playlists))
|
|
|
|
|
for i, p := range playlists {
|
|
|
|
|
sortedPlaylists[i] = struct {
|
|
|
|
|
ID uuid.UUID
|
|
|
|
|
Name string
|
|
|
|
|
PlayCount int64
|
|
|
|
|
LikeCount int64
|
|
|
|
|
}{p.ID, p.Name, p.PlayCount, p.LikeCount}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Trier par play_count
|
|
|
|
|
for i := 0; i < len(sortedPlaylists)-1; i++ {
|
|
|
|
|
for j := i + 1; j < len(sortedPlaylists); j++ {
|
|
|
|
|
if sortedPlaylists[i].PlayCount < sortedPlaylists[j].PlayCount {
|
|
|
|
|
sortedPlaylists[i], sortedPlaylists[j] = sortedPlaylists[j], sortedPlaylists[i]
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for i := 0; i < 5 && i < len(sortedPlaylists); i++ {
|
|
|
|
|
topPlaylists = append(topPlaylists, gin.H{
|
|
|
|
|
"id": sortedPlaylists[i].ID.String(),
|
|
|
|
|
"name": sortedPlaylists[i].Name,
|
|
|
|
|
"play_count": sortedPlaylists[i].PlayCount,
|
2026-01-13 18:47:57 +00:00
|
|
|
"like_count": sortedPlaylists[i].LikeCount,
|
2026-01-07 18:39:21 +00:00
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Construire la réponse
|
|
|
|
|
analyticsData := gin.H{
|
|
|
|
|
"tracks": gin.H{
|
2026-01-13 18:47:57 +00:00
|
|
|
"total_tracks": totalTracks,
|
|
|
|
|
"total_plays": totalPlays,
|
|
|
|
|
"total_likes": totalLikes,
|
|
|
|
|
"total_downloads": totalDownloads,
|
|
|
|
|
"average_play_count": avgPlayCount,
|
|
|
|
|
"top_tracks": topTracks,
|
2026-01-07 18:39:21 +00:00
|
|
|
},
|
|
|
|
|
"playlists": gin.H{
|
2026-01-13 18:47:57 +00:00
|
|
|
"total_playlists": totalPlaylists,
|
|
|
|
|
"total_plays": playlistPlays,
|
|
|
|
|
"total_likes": playlistLikes,
|
|
|
|
|
"total_shares": playlistShares,
|
|
|
|
|
"average_play_count": avgPlaylistPlayCount,
|
|
|
|
|
"top_playlists": topPlaylists,
|
2026-01-07 18:39:21 +00:00
|
|
|
},
|
|
|
|
|
"period": gin.H{
|
|
|
|
|
"start_date": startDate.Format(time.RFC3339),
|
|
|
|
|
"end_date": endDate.Format(time.RFC3339),
|
|
|
|
|
"days": days,
|
|
|
|
|
},
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
RespondSuccess(c, http.StatusOK, analyticsData)
|
|
|
|
|
}
|