diff --git a/veza-backend-api/internal/api/routes_analytics.go b/veza-backend-api/internal/api/routes_analytics.go index aa1e69bc1..ebe654d39 100644 --- a/veza-backend-api/internal/api/routes_analytics.go +++ b/veza-backend-api/internal/api/routes_analytics.go @@ -55,5 +55,18 @@ func (r *APIRouter) setupAnalyticsRoutes(router *gin.RouterGroup) { creatorGroup.GET("/live/:streamId", creatorHandler.GetLiveMetrics) // F385 creatorGroup.GET("/tracks", creatorHandler.GetTracks) // F381 creatorGroup.GET("/export", creatorHandler.ExportAnalytics) // F383 + + // v0.11.1: Advanced Analytics (F396-F399) + advancedService := services.NewAdvancedAnalyticsService(r.db.GormDB, r.logger) + advancedHandler := analytics.NewAdvancedAnalyticsHandler(advancedService, r.logger) + + creatorGroup.GET("/heatmap/:trackId", advancedHandler.GetTrackHeatmap) // F396 + creatorGroup.GET("/compare", advancedHandler.ComparePeriods) // F397 + creatorGroup.GET("/marketplace", advancedHandler.GetMarketplaceAnalytics) // F398 + creatorGroup.GET("/alerts", advancedHandler.GetMetricAlerts) // F399 + creatorGroup.POST("/alerts", advancedHandler.CreateAlert) // F399 + creatorGroup.PUT("/alerts/preferences", advancedHandler.UpdateAlertPreference) // F399 + creatorGroup.DELETE("/alerts/:alertId", advancedHandler.DeleteAlert) // F399 + creatorGroup.POST("/alerts/check", advancedHandler.CheckAlerts) // F399 } } diff --git a/veza-backend-api/internal/core/analytics/advanced_handler.go b/veza-backend-api/internal/core/analytics/advanced_handler.go new file mode 100644 index 000000000..d62e20feb --- /dev/null +++ b/veza-backend-api/internal/core/analytics/advanced_handler.go @@ -0,0 +1,298 @@ +package analytics + +import ( + "net/http" + "strings" + "time" + + apperrors "veza-backend-api/internal/errors" + "veza-backend-api/internal/handlers" + "veza-backend-api/internal/services" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "go.uber.org/zap" +) + +// AdvancedAnalyticsHandler handles advanced 
analytics HTTP requests (F396-F399) +type AdvancedAnalyticsHandler struct { + service *services.AdvancedAnalyticsService + logger *zap.Logger +} + +// NewAdvancedAnalyticsHandler creates a new advanced analytics handler +func NewAdvancedAnalyticsHandler(service *services.AdvancedAnalyticsService, logger *zap.Logger) *AdvancedAnalyticsHandler { + if logger == nil { + logger = zap.NewNop() + } + return &AdvancedAnalyticsHandler{service: service, logger: logger} +} + +// getCreatorID extracts and validates the creator user ID from context +func (h *AdvancedAnalyticsHandler) getCreatorID(c *gin.Context) (uuid.UUID, bool) { + userIDInterface, exists := c.Get("user_id") + if !exists { + handlers.RespondWithAppError(c, apperrors.New(apperrors.ErrCodeUnauthorized, "authentication required")) + return uuid.Nil, false + } + userID, ok := userIDInterface.(uuid.UUID) + if !ok { + handlers.RespondWithAppError(c, apperrors.New(apperrors.ErrCodeInternal, "invalid user id")) + return uuid.Nil, false + } + return userID, true +} + +// GetTrackHeatmap handles GET /api/v1/creator/analytics/heatmap/:trackId (F396) +func (h *AdvancedAnalyticsHandler) GetTrackHeatmap(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + trackIDStr := c.Param("trackId") + trackID, err := uuid.Parse(trackIDStr) + if err != nil { + handlers.RespondWithAppError(c, apperrors.NewValidationError("invalid track id")) + return + } + + startDate, endDate := parseDateRange(c) + + heatmap, err := h.service.GetTrackHeatmap(c.Request.Context(), creatorID, trackID, startDate, endDate) + if err != nil { + if strings.Contains(err.Error(), "not found") || strings.Contains(err.Error(), "not owned") { + handlers.RespondWithAppError(c, apperrors.NewNotFoundError("track")) + return + } + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to get heatmap", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "heatmap": heatmap, + "period": gin.H{ 
+ "start_date": startDate.Format(time.RFC3339), + "end_date": endDate.Format(time.RFC3339), + }, + }) +} + +// ComparePeriods handles GET /api/v1/creator/analytics/compare (F397) +func (h *AdvancedAnalyticsHandler) ComparePeriods(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + // Parse current period + now := time.Now() + preset := c.DefaultQuery("preset", "week") // week, month, quarter + var currentStart, currentEnd, previousStart, previousEnd time.Time + + switch preset { + case "month": + currentEnd = now + currentStart = now.AddDate(0, -1, 0) + previousEnd = currentStart + previousStart = currentStart.AddDate(0, -1, 0) + case "quarter": + currentEnd = now + currentStart = now.AddDate(0, -3, 0) + previousEnd = currentStart + previousStart = currentStart.AddDate(0, -3, 0) + default: // week + currentEnd = now + currentStart = now.AddDate(0, 0, -7) + previousEnd = currentStart + previousStart = currentStart.AddDate(0, 0, -7) + } + + // Allow custom date overrides + if s := c.Query("current_start"); s != "" { + if parsed, err := time.Parse(time.RFC3339, s); err == nil { + currentStart = parsed + } + } + if e := c.Query("current_end"); e != "" { + if parsed, err := time.Parse(time.RFC3339, e); err == nil { + currentEnd = parsed + } + } + if s := c.Query("previous_start"); s != "" { + if parsed, err := time.Parse(time.RFC3339, s); err == nil { + previousStart = parsed + } + } + if e := c.Query("previous_end"); e != "" { + if parsed, err := time.Parse(time.RFC3339, e); err == nil { + previousEnd = parsed + } + } + + comparison, err := h.service.ComparePeriods(c.Request.Context(), creatorID, currentStart, currentEnd, previousStart, previousEnd) + if err != nil { + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to compare periods", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "comparison": comparison, + }) +} + +// GetMarketplaceAnalytics handles GET 
/api/v1/creator/analytics/marketplace (F398) +func (h *AdvancedAnalyticsHandler) GetMarketplaceAnalytics(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + startDate, endDate := parseDateRange(c) + + analytics, err := h.service.GetMarketplaceAnalytics(c.Request.Context(), creatorID, startDate, endDate) + if err != nil { + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to get marketplace analytics", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "marketplace": analytics, + "period": gin.H{ + "start_date": startDate.Format(time.RFC3339), + "end_date": endDate.Format(time.RFC3339), + }, + }) +} + +// GetMetricAlerts handles GET /api/v1/creator/analytics/alerts (F399) +func (h *AdvancedAnalyticsHandler) GetMetricAlerts(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + summary, err := h.service.GetMetricAlerts(c.Request.Context(), creatorID) + if err != nil { + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to get alerts", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "alerts": summary, + }) +} + +// updateAlertPreferenceRequest is the request body for updating alert preferences +type updateAlertPreferenceRequest struct { + MetricType string `json:"metric_type" binding:"required"` + Enabled bool `json:"enabled"` +} + +// UpdateAlertPreference handles PUT /api/v1/creator/analytics/alerts/preferences (F399) +func (h *AdvancedAnalyticsHandler) UpdateAlertPreference(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + var req updateAlertPreferenceRequest + if err := c.ShouldBindJSON(&req); err != nil { + handlers.RespondWithAppError(c, apperrors.NewValidationError("invalid request body")) + return + } + + if err := h.service.UpdateAlertPreference(c.Request.Context(), creatorID, req.MetricType, req.Enabled); err != nil { + if strings.Contains(err.Error(), 
"invalid metric type") { + handlers.RespondWithAppError(c, apperrors.NewValidationError(err.Error())) + return + } + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to update preference", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "status": "updated", + }) +} + +// createAlertRequest is the request body for creating a metric alert +type createAlertRequest struct { + MetricType string `json:"metric_type" binding:"required"` + Threshold int64 `json:"threshold" binding:"required,min=1"` +} + +// CreateAlert handles POST /api/v1/creator/analytics/alerts (F399) +func (h *AdvancedAnalyticsHandler) CreateAlert(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + var req createAlertRequest + if err := c.ShouldBindJSON(&req); err != nil { + handlers.RespondWithAppError(c, apperrors.NewValidationError("invalid request body")) + return + } + + if err := h.service.CreateAlertThreshold(c.Request.Context(), creatorID, req.MetricType, req.Threshold); err != nil { + if strings.Contains(err.Error(), "invalid metric type") || strings.Contains(err.Error(), "threshold must be positive") { + handlers.RespondWithAppError(c, apperrors.NewValidationError(err.Error())) + return + } + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to create alert", err)) + return + } + + handlers.RespondSuccess(c, http.StatusCreated, gin.H{ + "status": "created", + }) +} + +// DeleteAlert handles DELETE /api/v1/creator/analytics/alerts/:alertId (F399) +func (h *AdvancedAnalyticsHandler) DeleteAlert(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + alertIDStr := c.Param("alertId") + alertID, err := uuid.Parse(alertIDStr) + if err != nil { + handlers.RespondWithAppError(c, apperrors.NewValidationError("invalid alert id")) + return + } + + if err := h.service.DeleteAlertThreshold(c.Request.Context(), creatorID, alertID); err != nil { + if 
strings.Contains(err.Error(), "not found") { + handlers.RespondWithAppError(c, apperrors.NewNotFoundError("alert")) + return + } + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to delete alert", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "status": "deleted", + }) +} + +// CheckAlerts handles POST /api/v1/creator/analytics/alerts/check (F399) +// Triggers alert checking for the current user +func (h *AdvancedAnalyticsHandler) CheckAlerts(c *gin.Context) { + creatorID, ok := h.getCreatorID(c) + if !ok { + return + } + + triggered, err := h.service.CheckAndTriggerAlerts(c.Request.Context(), creatorID) + if err != nil { + handlers.RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "failed to check alerts", err)) + return + } + + handlers.RespondSuccess(c, http.StatusOK, gin.H{ + "triggered": triggered, + "count": len(triggered), + }) +} diff --git a/veza-backend-api/internal/core/analytics/advanced_handler_test.go b/veza-backend-api/internal/core/analytics/advanced_handler_test.go new file mode 100644 index 000000000..097ec7e04 --- /dev/null +++ b/veza-backend-api/internal/core/analytics/advanced_handler_test.go @@ -0,0 +1,155 @@ +package analytics + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "go.uber.org/zap" +) + +func TestAdvancedHandler_GetTrackHeatmap_NoAuth(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodGet, "/creator/analytics/heatmap/"+uuid.New().String(), nil) + + handler.GetTrackHeatmap(c) + + if w.Code != http.StatusUnauthorized { + t.Errorf("expected status %d, got %d", http.StatusUnauthorized, w.Code) + } +} + +func TestAdvancedHandler_GetTrackHeatmap_InvalidTrackID(t *testing.T) { + 
gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodGet, "/creator/analytics/heatmap/invalid-uuid", nil) + c.Set("user_id", uuid.New()) + c.Params = gin.Params{{Key: "trackId", Value: "invalid-uuid"}} + + handler.GetTrackHeatmap(c) + + if w.Code != http.StatusBadRequest { + t.Errorf("expected status %d, got %d", http.StatusBadRequest, w.Code) + } +} + +func TestAdvancedHandler_ComparePeriods_NoAuth(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodGet, "/creator/analytics/compare?preset=week", nil) + + handler.ComparePeriods(c) + + if w.Code != http.StatusUnauthorized { + t.Errorf("expected status %d, got %d", http.StatusUnauthorized, w.Code) + } +} + +func TestAdvancedHandler_GetMarketplaceAnalytics_NoAuth(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodGet, "/creator/analytics/marketplace", nil) + + handler.GetMarketplaceAnalytics(c) + + if w.Code != http.StatusUnauthorized { + t.Errorf("expected status %d, got %d", http.StatusUnauthorized, w.Code) + } +} + +func TestAdvancedHandler_GetMetricAlerts_NoAuth(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodGet, "/creator/analytics/alerts", nil) + + handler.GetMetricAlerts(c) + + if w.Code != http.StatusUnauthorized { + t.Errorf("expected status %d, got %d", http.StatusUnauthorized, w.Code) + } +} + +func TestAdvancedHandler_CreateAlert_InvalidBody(t *testing.T) { + 
gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodPost, "/creator/analytics/alerts", strings.NewReader(`{"invalid": true}`)) + c.Request.Header.Set("Content-Type", "application/json") + c.Set("user_id", uuid.New()) + + handler.CreateAlert(c) + + if w.Code != http.StatusBadRequest { + t.Errorf("expected status %d, got %d", http.StatusBadRequest, w.Code) + } +} + +func TestAdvancedHandler_DeleteAlert_InvalidID(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodDelete, "/creator/analytics/alerts/bad-id", nil) + c.Set("user_id", uuid.New()) + c.Params = gin.Params{{Key: "alertId", Value: "bad-id"}} + + handler.DeleteAlert(c) + + if w.Code != http.StatusBadRequest { + t.Errorf("expected status %d, got %d", http.StatusBadRequest, w.Code) + } +} + +func TestAdvancedHandler_UpdateAlertPreference_InvalidBody(t *testing.T) { + gin.SetMode(gin.TestMode) + + handler := NewAdvancedAnalyticsHandler(nil, zap.NewNop()) + + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = httptest.NewRequest(http.MethodPut, "/creator/analytics/alerts/preferences", strings.NewReader(`{}`)) + c.Request.Header.Set("Content-Type", "application/json") + c.Set("user_id", uuid.New()) + + handler.UpdateAlertPreference(c) + + if w.Code != http.StatusBadRequest { + t.Errorf("expected status %d, got %d", http.StatusBadRequest, w.Code) + } + + var response map[string]interface{} + json.Unmarshal(w.Body.Bytes(), &response) + if response["error"] == nil { + t.Error("expected error in response") + } +} diff --git a/veza-backend-api/internal/services/advanced_analytics_service.go b/veza-backend-api/internal/services/advanced_analytics_service.go new file mode 100644 index 
000000000..9f76c2b7d --- /dev/null +++ b/veza-backend-api/internal/services/advanced_analytics_service.go @@ -0,0 +1,645 @@ +package services + +import ( + "context" + "fmt" + "time" + + "github.com/google/uuid" + "go.uber.org/zap" + "gorm.io/gorm" +) + +// AdvancedAnalyticsService provides advanced analytics for creators (F396-F399) +// All data is private to the creator — never exposed publicly. +type AdvancedAnalyticsService struct { + db *gorm.DB + logger *zap.Logger +} + +// NewAdvancedAnalyticsService creates a new advanced analytics service +func NewAdvancedAnalyticsService(db *gorm.DB, logger *zap.Logger) *AdvancedAnalyticsService { + if logger == nil { + logger = zap.NewNop() + } + return &AdvancedAnalyticsService{db: db, logger: logger} +} + +// --- F396: Heatmap d'écoute --- + +// TrackSegmentStat represents a segment of a track with aggregated listening data +type TrackSegmentStat struct { + SegmentIndex int `json:"segment_index"` + SegmentStartMs int64 `json:"segment_start_ms"` + SegmentEndMs int64 `json:"segment_end_ms"` + ListenCount int64 `json:"listen_count"` + DropOffCount int64 `json:"drop_off_count"` + ReplayCount int64 `json:"replay_count"` + Intensity float64 `json:"intensity"` // normalized 0-1 +} + +// TrackHeatmap represents the full heatmap for a track +type TrackHeatmap struct { + TrackID string `json:"track_id"` + TotalSegments int `json:"total_segments"` + SegmentDurationMs int64 `json:"segment_duration_ms"` + Segments []TrackSegmentStat `json:"segments"` + MaxListens int64 `json:"max_listens"` + AvgDropOff float64 `json:"avg_drop_off"` +} + +// GetTrackHeatmap returns aggregated heatmap data for a track (F396) +func (s *AdvancedAnalyticsService) GetTrackHeatmap(ctx context.Context, creatorID uuid.UUID, trackID uuid.UUID, startDate, endDate time.Time) (*TrackHeatmap, error) { + // Verify the track belongs to the creator + var trackCount int64 + if err := s.db.WithContext(ctx).Table("tracks"). + Where("id = ? AND creator_id = ? 
AND deleted_at IS NULL", trackID, creatorID). + Count(&trackCount).Error; err != nil { + return nil, fmt.Errorf("failed to verify track ownership: %w", err) + } + if trackCount == 0 { + return nil, fmt.Errorf("track not found or not owned by creator") + } + + var rows []struct { + SegmentIndex int `gorm:"column:segment_index"` + SegmentStartMs int64 `gorm:"column:segment_start_ms"` + SegmentEndMs int64 `gorm:"column:segment_end_ms"` + ListenCount int64 `gorm:"column:listen_count"` + DropOffCount int64 `gorm:"column:drop_off_count"` + ReplayCount int64 `gorm:"column:replay_count"` + } + + if err := s.db.WithContext(ctx).Raw(` + SELECT segment_index, segment_start_ms, segment_end_ms, + SUM(listen_count) AS listen_count, + SUM(drop_off_count) AS drop_off_count, + SUM(replay_count) AS replay_count + FROM track_segment_stats + WHERE track_id = ? AND date >= ? AND date <= ? + GROUP BY segment_index, segment_start_ms, segment_end_ms + ORDER BY segment_index ASC + `, trackID, startDate.Format("2006-01-02"), endDate.Format("2006-01-02")).Scan(&rows).Error; err != nil { + return nil, fmt.Errorf("failed to get heatmap data: %w", err) + } + + // Find max listen count for normalization + var maxListens int64 + for _, r := range rows { + if r.ListenCount > maxListens { + maxListens = r.ListenCount + } + } + + var totalDropOff int64 + segments := make([]TrackSegmentStat, len(rows)) + for i, r := range rows { + intensity := float64(0) + if maxListens > 0 { + intensity = float64(r.ListenCount) / float64(maxListens) + } + segments[i] = TrackSegmentStat{ + SegmentIndex: r.SegmentIndex, + SegmentStartMs: r.SegmentStartMs, + SegmentEndMs: r.SegmentEndMs, + ListenCount: r.ListenCount, + DropOffCount: r.DropOffCount, + ReplayCount: r.ReplayCount, + Intensity: intensity, + } + totalDropOff += r.DropOffCount + } + + avgDropOff := float64(0) + if len(segments) > 0 { + avgDropOff = float64(totalDropOff) / float64(len(segments)) + } + + segmentDuration := int64(0) + if len(rows) > 0 { + 
segmentDuration = rows[0].SegmentEndMs - rows[0].SegmentStartMs + } + + return &TrackHeatmap{ + TrackID: trackID.String(), + TotalSegments: len(segments), + SegmentDurationMs: segmentDuration, + Segments: segments, + MaxListens: maxListens, + AvgDropOff: avgDropOff, + }, nil +} + +// --- F397: Comparaison de périodes --- + +// PeriodComparison holds the comparison between two time periods +type PeriodComparison struct { + CurrentPeriod PeriodStats `json:"current_period"` + PreviousPeriod PeriodStats `json:"previous_period"` + Changes PeriodChanges `json:"changes"` +} + +// PeriodStats holds stats for a single period +type PeriodStats struct { + StartDate string `json:"start_date"` + EndDate string `json:"end_date"` + TotalPlays int64 `json:"total_plays"` + UniqueListeners int64 `json:"unique_listeners"` + CompleteListens int64 `json:"complete_listens"` + TotalPlayTime int64 `json:"total_play_time"` + AvgCompletion float64 `json:"avg_completion"` + TotalRevenue float64 `json:"total_revenue"` + NewFollowers int64 `json:"new_followers"` +} + +// PeriodChanges holds the percentage changes between periods +type PeriodChanges struct { + PlaysChange float64 `json:"plays_change"` // percentage + ListenersChange float64 `json:"listeners_change"` // percentage + CompletionChange float64 `json:"completion_change"` // percentage + RevenueChange float64 `json:"revenue_change"` // percentage + FollowersChange float64 `json:"followers_change"` // percentage + PlayTimeChange float64 `json:"play_time_change"` // percentage +} + +// ComparePeriods compares analytics between two time periods (F397) +func (s *AdvancedAnalyticsService) ComparePeriods(ctx context.Context, creatorID uuid.UUID, currentStart, currentEnd, previousStart, previousEnd time.Time) (*PeriodComparison, error) { + current, err := s.getPeriodStats(ctx, creatorID, currentStart, currentEnd) + if err != nil { + return nil, fmt.Errorf("failed to get current period: %w", err) + } + current.StartDate = 
currentStart.Format(time.RFC3339) + current.EndDate = currentEnd.Format(time.RFC3339) + + previous, err := s.getPeriodStats(ctx, creatorID, previousStart, previousEnd) + if err != nil { + return nil, fmt.Errorf("failed to get previous period: %w", err) + } + previous.StartDate = previousStart.Format(time.RFC3339) + previous.EndDate = previousEnd.Format(time.RFC3339) + + changes := PeriodChanges{ + PlaysChange: calcPercentChange(previous.TotalPlays, current.TotalPlays), + ListenersChange: calcPercentChange(previous.UniqueListeners, current.UniqueListeners), + CompletionChange: calcFloatPercentChange(previous.AvgCompletion, current.AvgCompletion), + RevenueChange: calcFloatPercentChange(previous.TotalRevenue, current.TotalRevenue), + FollowersChange: calcPercentChange(previous.NewFollowers, current.NewFollowers), + PlayTimeChange: calcPercentChange(previous.TotalPlayTime, current.TotalPlayTime), + } + + return &PeriodComparison{ + CurrentPeriod: *current, + PreviousPeriod: *previous, + Changes: changes, + }, nil +} + +func (s *AdvancedAnalyticsService) getPeriodStats(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) (*PeriodStats, error) { + stats := &PeriodStats{} + + var playbackAgg struct { + TotalPlays int64 `gorm:"column:total_plays"` + UniqueListeners int64 `gorm:"column:unique_listeners"` + CompleteListens int64 `gorm:"column:complete_listens"` + TotalPlayTime int64 `gorm:"column:total_play_time"` + AvgCompletion float64 `gorm:"column:avg_completion"` + } + if err := s.db.WithContext(ctx).Raw(` + SELECT + COUNT(pa.id) AS total_plays, + COUNT(DISTINCT pa.user_id) AS unique_listeners, + COUNT(CASE WHEN pa.completion_rate >= 90 THEN 1 END) AS complete_listens, + COALESCE(SUM(pa.play_time), 0) AS total_play_time, + COALESCE(AVG(pa.completion_rate), 0) AS avg_completion + FROM playback_analytics pa + JOIN tracks t ON t.id = pa.track_id + WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ? 
+	`, creatorID, startDate, endDate).Scan(&playbackAgg).Error; err != nil {
+		return nil, fmt.Errorf("failed to get playback stats: %w", err)
+	}
+
+	stats.TotalPlays = playbackAgg.TotalPlays
+	stats.UniqueListeners = playbackAgg.UniqueListeners
+	stats.CompleteListens = playbackAgg.CompleteListens
+	stats.TotalPlayTime = playbackAgg.TotalPlayTime
+	stats.AvgCompletion = playbackAgg.AvgCompletion
+
+	// Revenue
+	if err := s.db.WithContext(ctx).Raw(`
+		SELECT COALESCE(SUM(oi.price), 0)
+		FROM order_items oi
+		JOIN orders o ON o.id = oi.order_id
+		JOIN products p ON p.id = oi.product_id
+		WHERE p.seller_id = ? AND o.status IN ('completed', 'paid')
+		  AND o.created_at >= ? AND o.created_at <= ?
+	`, creatorID, startDate, endDate).Scan(&stats.TotalRevenue).Error; err != nil {
+		s.logger.Warn("Failed to get revenue", zap.Error(err))
+	}
+
+	// New followers
+	if err := s.db.WithContext(ctx).Raw(`
+		SELECT COUNT(*) FROM follows
+		WHERE followed_id = ? AND created_at >= ? AND created_at <= ?
+	`, creatorID, startDate, endDate).Scan(&stats.NewFollowers).Error; err != nil {
+		s.logger.Warn("Failed to get new followers", zap.Error(err))
+	}
+
+	return stats, nil
+}
+
+// calcPercentChange returns the percentage change from previous to current
+// for integer metrics. It delegates to calcFloatPercentChange so the
+// zero-previous convention (100% when a metric first appears, 0% when both
+// periods are zero) is defined in exactly one place and the two helpers
+// cannot drift apart.
+func calcPercentChange(previous, current int64) float64 {
+	return calcFloatPercentChange(float64(previous), float64(current))
+}
+
+// calcFloatPercentChange returns the percentage change from previous to
+// current. Convention: previous == 0 yields 100.0 if current > 0, else 0.0.
+func calcFloatPercentChange(previous, current float64) float64 {
+	if previous == 0 {
+		if current > 0 {
+			return 100.0
+		}
+		return 0.0
+	}
+	return (current - previous) / previous * 100.0
+}
+
+// --- F398: Analytics Marketplace ---
+
+// MarketplaceAnalytics holds marketplace analytics data for a creator
+type MarketplaceAnalytics struct {
+	TotalViews         int64                `json:"total_views"`
+	TotalSales         int64                `json:"total_sales"`
+	TotalRevenue       float64              `json:"total_revenue"`
+	OverallConversion  float64              `json:"overall_conversion"` // percentage
+	PlatformCommission float64              `json:"platform_commission"`
+	NetRevenue         float64
`json:"net_revenue"` + Products []ProductAnalytics `json:"products"` + RevenueTimeline []MarketplaceRevenue `json:"revenue_timeline"` +} + +// ProductAnalytics holds analytics for a single product +type ProductAnalytics struct { + ProductID string `json:"product_id"` + Name string `json:"name"` + ProductType string `json:"product_type"` + Views int64 `json:"views"` + Sales int64 `json:"sales"` + Revenue float64 `json:"revenue"` + ConversionRate float64 `json:"conversion_rate"` // percentage +} + +// MarketplaceRevenue holds revenue data for a time period +type MarketplaceRevenue struct { + Date string `json:"date"` + Revenue float64 `json:"revenue"` + Sales int64 `json:"sales"` + Views int64 `json:"views"` +} + +// GetMarketplaceAnalytics returns marketplace analytics for the creator (F398) +func (s *AdvancedAnalyticsService) GetMarketplaceAnalytics(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) (*MarketplaceAnalytics, error) { + analytics := &MarketplaceAnalytics{ + Products: []ProductAnalytics{}, + RevenueTimeline: []MarketplaceRevenue{}, + } + + // Per-product views, sales, and conversion + var productRows []struct { + ProductID string `gorm:"column:product_id"` + Name string `gorm:"column:name"` + ProductType string `gorm:"column:product_type"` + Views int64 `gorm:"column:views"` + Sales int64 `gorm:"column:sales"` + Revenue float64 `gorm:"column:revenue"` + } + + if err := s.db.WithContext(ctx).Raw(` + SELECT CAST(p.id AS TEXT) AS product_id, + p.name, + COALESCE(p.product_type, 'track') AS product_type, + COALESCE(pv.view_count, 0) AS views, + COALESCE(oi.sale_count, 0) AS sales, + COALESCE(oi.revenue, 0) AS revenue + FROM products p + LEFT JOIN ( + SELECT product_id, COUNT(*) AS view_count + FROM product_views + WHERE created_at >= ? AND created_at <= ? 
+ GROUP BY product_id + ) pv ON pv.product_id = p.id + LEFT JOIN ( + SELECT oi.product_id, COUNT(oi.id) AS sale_count, SUM(oi.price) AS revenue + FROM order_items oi + JOIN orders o ON o.id = oi.order_id + WHERE o.status IN ('completed', 'paid') + AND o.created_at >= ? AND o.created_at <= ? + GROUP BY oi.product_id + ) oi ON oi.product_id = p.id + WHERE p.seller_id = ? + ORDER BY COALESCE(oi.revenue, 0) DESC + `, startDate, endDate, startDate, endDate, creatorID).Scan(&productRows).Error; err != nil { + return nil, fmt.Errorf("failed to get product analytics: %w", err) + } + + var totalViews, totalSales int64 + var totalRevenue float64 + + for _, r := range productRows { + convRate := float64(0) + if r.Views > 0 { + convRate = float64(r.Sales) / float64(r.Views) * 100.0 + } + analytics.Products = append(analytics.Products, ProductAnalytics{ + ProductID: r.ProductID, + Name: r.Name, + ProductType: r.ProductType, + Views: r.Views, + Sales: r.Sales, + Revenue: r.Revenue, + ConversionRate: convRate, + }) + totalViews += r.Views + totalSales += r.Sales + totalRevenue += r.Revenue + } + + analytics.TotalViews = totalViews + analytics.TotalSales = totalSales + analytics.TotalRevenue = totalRevenue + if totalViews > 0 { + analytics.OverallConversion = float64(totalSales) / float64(totalViews) * 100.0 + } + + // Platform commission (15% as per Veza business model) + analytics.PlatformCommission = totalRevenue * 0.15 + analytics.NetRevenue = totalRevenue - analytics.PlatformCommission + + // Revenue timeline + var timelineRows []struct { + Date string `gorm:"column:date"` + Revenue float64 `gorm:"column:revenue"` + Sales int64 `gorm:"column:sales"` + Views int64 `gorm:"column:views"` + } + + if err := s.db.WithContext(ctx).Raw(` + WITH daily_sales AS ( + SELECT DATE(o.created_at) AS date, + COALESCE(SUM(oi.price), 0) AS revenue, + COUNT(oi.id) AS sales + FROM order_items oi + JOIN orders o ON o.id = oi.order_id + JOIN products p ON p.id = oi.product_id + WHERE p.seller_id = 
? AND o.status IN ('completed', 'paid') + AND o.created_at >= ? AND o.created_at <= ? + GROUP BY DATE(o.created_at) + ), + daily_views AS ( + SELECT DATE(pv.created_at) AS date, COUNT(*) AS views + FROM product_views pv + JOIN products p ON p.id = pv.product_id + WHERE p.seller_id = ? AND pv.created_at >= ? AND pv.created_at <= ? + GROUP BY DATE(pv.created_at) + ) + SELECT COALESCE(ds.date, dv.date) AS date, + COALESCE(ds.revenue, 0) AS revenue, + COALESCE(ds.sales, 0) AS sales, + COALESCE(dv.views, 0) AS views + FROM daily_sales ds + FULL OUTER JOIN daily_views dv ON ds.date = dv.date + ORDER BY date ASC + `, creatorID, startDate, endDate, creatorID, startDate, endDate).Scan(&timelineRows).Error; err != nil { + s.logger.Warn("Failed to get revenue timeline", zap.Error(err)) + } + + for _, r := range timelineRows { + analytics.RevenueTimeline = append(analytics.RevenueTimeline, MarketplaceRevenue{ + Date: r.Date, + Revenue: r.Revenue, + Sales: r.Sales, + Views: r.Views, + }) + } + + return analytics, nil +} + +// --- F399: Alertes métriques --- + +// MetricAlert represents a metric alert configuration and status +type MetricAlert struct { + ID string `json:"id"` + MetricType string `json:"metric_type"` + Threshold int64 `json:"threshold"` + IsTriggered bool `json:"is_triggered"` + TriggeredAt *time.Time `json:"triggered_at,omitempty"` + CreatedAt time.Time `json:"created_at"` +} + +// MetricAlertPreference represents the user's preference for a metric type +type MetricAlertPreference struct { + MetricType string `json:"metric_type"` + Enabled bool `json:"enabled"` +} + +// MetricAlertSummary holds all alerts and preferences for a user +type MetricAlertSummary struct { + Preferences []MetricAlertPreference `json:"preferences"` + Alerts []MetricAlert `json:"alerts"` + Pending []MetricAlert `json:"pending"` // triggered but not yet seen +} + +// GetMetricAlerts returns all metric alerts for a user (F399) +func (s *AdvancedAnalyticsService) GetMetricAlerts(ctx 
context.Context, userID uuid.UUID) (*MetricAlertSummary, error) { + summary := &MetricAlertSummary{ + Preferences: []MetricAlertPreference{}, + Alerts: []MetricAlert{}, + Pending: []MetricAlert{}, + } + + // Get preferences + var prefRows []struct { + MetricType string `gorm:"column:metric_type"` + Enabled bool `gorm:"column:enabled"` + } + if err := s.db.WithContext(ctx).Raw(` + SELECT metric_type, enabled FROM metric_alert_preferences + WHERE user_id = ? + ORDER BY metric_type + `, userID).Scan(&prefRows).Error; err != nil { + s.logger.Warn("Failed to get alert preferences", zap.Error(err)) + } + + // Provide defaults if no preferences exist + if len(prefRows) == 0 { + for _, mt := range []string{"plays", "followers", "sales", "listeners"} { + summary.Preferences = append(summary.Preferences, MetricAlertPreference{ + MetricType: mt, + Enabled: true, // opt-in by default but user controls + }) + } + } else { + for _, r := range prefRows { + summary.Preferences = append(summary.Preferences, MetricAlertPreference{ + MetricType: r.MetricType, + Enabled: r.Enabled, + }) + } + } + + // Get alerts + var alertRows []struct { + ID uuid.UUID `gorm:"column:id"` + MetricType string `gorm:"column:metric_type"` + Threshold int64 `gorm:"column:threshold"` + IsTriggered bool `gorm:"column:is_triggered"` + TriggeredAt *time.Time `gorm:"column:triggered_at"` + CreatedAt time.Time `gorm:"column:created_at"` + } + if err := s.db.WithContext(ctx).Raw(` + SELECT id, metric_type, threshold, is_triggered, triggered_at, created_at + FROM metric_alerts + WHERE user_id = ? 
+ ORDER BY metric_type, threshold + `, userID).Scan(&alertRows).Error; err != nil { + return nil, fmt.Errorf("failed to get alerts: %w", err) + } + + for _, r := range alertRows { + alert := MetricAlert{ + ID: r.ID.String(), + MetricType: r.MetricType, + Threshold: r.Threshold, + IsTriggered: r.IsTriggered, + TriggeredAt: r.TriggeredAt, + CreatedAt: r.CreatedAt, + } + summary.Alerts = append(summary.Alerts, alert) + if r.IsTriggered { + summary.Pending = append(summary.Pending, alert) + } + } + + return summary, nil +} + +// UpdateAlertPreference updates the preference for a metric type (F399) +func (s *AdvancedAnalyticsService) UpdateAlertPreference(ctx context.Context, userID uuid.UUID, metricType string, enabled bool) error { + validTypes := map[string]bool{"plays": true, "followers": true, "sales": true, "listeners": true} + if !validTypes[metricType] { + return fmt.Errorf("invalid metric type: %s", metricType) + } + + return s.db.WithContext(ctx).Exec(` + INSERT INTO metric_alert_preferences (user_id, metric_type, enabled) + VALUES (?, ?, ?) + ON CONFLICT (user_id, metric_type) + DO UPDATE SET enabled = EXCLUDED.enabled, updated_at = NOW() + `, userID, metricType, enabled).Error +} + +// CreateAlertThreshold creates a new alert threshold for a metric (F399) +func (s *AdvancedAnalyticsService) CreateAlertThreshold(ctx context.Context, userID uuid.UUID, metricType string, threshold int64) error { + validTypes := map[string]bool{"plays": true, "followers": true, "sales": true, "listeners": true} + if !validTypes[metricType] { + return fmt.Errorf("invalid metric type: %s", metricType) + } + + if threshold <= 0 { + return fmt.Errorf("threshold must be positive") + } + + return s.db.WithContext(ctx).Exec(` + INSERT INTO metric_alerts (user_id, metric_type, threshold) + VALUES (?, ?, ?) 
+ ON CONFLICT (user_id, metric_type, threshold) DO NOTHING + `, userID, metricType, threshold).Error +} + +// DeleteAlertThreshold removes an alert threshold (F399) +func (s *AdvancedAnalyticsService) DeleteAlertThreshold(ctx context.Context, userID uuid.UUID, alertID uuid.UUID) error { + result := s.db.WithContext(ctx).Exec(` + DELETE FROM metric_alerts WHERE id = ? AND user_id = ? + `, alertID, userID) + if result.Error != nil { + return fmt.Errorf("failed to delete alert: %w", result.Error) + } + if result.RowsAffected == 0 { + return fmt.Errorf("alert not found") + } + return nil +} + +// CheckAndTriggerAlerts checks current metrics against thresholds and triggers alerts (F399) +// This should be called periodically (e.g., by a cron job) +func (s *AdvancedAnalyticsService) CheckAndTriggerAlerts(ctx context.Context, userID uuid.UUID) ([]MetricAlert, error) { + var triggered []MetricAlert + + // Get untriggered alerts for enabled metric types + var alerts []struct { + ID uuid.UUID `gorm:"column:id"` + MetricType string `gorm:"column:metric_type"` + Threshold int64 `gorm:"column:threshold"` + } + + if err := s.db.WithContext(ctx).Raw(` + SELECT ma.id, ma.metric_type, ma.threshold + FROM metric_alerts ma + JOIN metric_alert_preferences map ON map.user_id = ma.user_id AND map.metric_type = ma.metric_type + WHERE ma.user_id = ? AND ma.is_triggered = FALSE AND map.enabled = TRUE + `, userID).Scan(&alerts).Error; err != nil { + return nil, fmt.Errorf("failed to get pending alerts: %w", err) + } + + for _, alert := range alerts { + var currentValue int64 + switch alert.MetricType { + case "plays": + s.db.WithContext(ctx).Raw(` + SELECT COUNT(*) FROM playback_analytics pa + JOIN tracks t ON t.id = pa.track_id + WHERE t.creator_id = ? + `, userID).Scan(¤tValue) + case "followers": + s.db.WithContext(ctx).Raw(` + SELECT COUNT(*) FROM follows WHERE followed_id = ? 
+ `, userID).Scan(¤tValue) + case "sales": + s.db.WithContext(ctx).Raw(` + SELECT COUNT(oi.id) + FROM order_items oi + JOIN orders o ON o.id = oi.order_id + JOIN products p ON p.id = oi.product_id + WHERE p.seller_id = ? AND o.status IN ('completed', 'paid') + `, userID).Scan(¤tValue) + case "listeners": + s.db.WithContext(ctx).Raw(` + SELECT COUNT(DISTINCT pa.user_id) + FROM playback_analytics pa + JOIN tracks t ON t.id = pa.track_id + WHERE t.creator_id = ? + `, userID).Scan(¤tValue) + } + + if currentValue >= alert.Threshold { + now := time.Now() + s.db.WithContext(ctx).Exec(` + UPDATE metric_alerts SET is_triggered = TRUE, triggered_at = ?, updated_at = ? + WHERE id = ? + `, now, now, alert.ID) + triggered = append(triggered, MetricAlert{ + ID: alert.ID.String(), + MetricType: alert.MetricType, + Threshold: alert.Threshold, + IsTriggered: true, + TriggeredAt: &now, + }) + } + } + + return triggered, nil +} diff --git a/veza-backend-api/internal/services/advanced_analytics_service_test.go b/veza-backend-api/internal/services/advanced_analytics_service_test.go new file mode 100644 index 000000000..b7507234d --- /dev/null +++ b/veza-backend-api/internal/services/advanced_analytics_service_test.go @@ -0,0 +1,64 @@ +package services + +import ( + "testing" +) + +func TestCalcPercentChange(t *testing.T) { + tests := []struct { + name string + previous int64 + current int64 + expected float64 + }{ + {"zero to positive", 0, 100, 100.0}, + {"zero to zero", 0, 0, 0.0}, + {"positive to zero", 100, 0, -100.0}, + {"same value", 100, 100, 0.0}, + {"50% increase", 100, 150, 50.0}, + {"50% decrease", 200, 100, -50.0}, + {"double", 50, 100, 100.0}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := calcPercentChange(tt.previous, tt.current) + if result != tt.expected { + t.Errorf("calcPercentChange(%d, %d) = %f, want %f", tt.previous, tt.current, result, tt.expected) + } + }) + } +} + +func TestCalcFloatPercentChange(t *testing.T) { + tests := 
[]struct { + name string + previous float64 + current float64 + expected float64 + }{ + {"zero to positive", 0.0, 50.0, 100.0}, + {"zero to zero", 0.0, 0.0, 0.0}, + {"50% increase", 80.0, 120.0, 50.0}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := calcFloatPercentChange(tt.previous, tt.current) + if result != tt.expected { + t.Errorf("calcFloatPercentChange(%f, %f) = %f, want %f", tt.previous, tt.current, result, tt.expected) + } + }) + } +} + +func TestNewAdvancedAnalyticsService(t *testing.T) { + // Test nil logger fallback + svc := NewAdvancedAnalyticsService(nil, nil) + if svc == nil { + t.Fatal("expected non-nil service") + } + if svc.logger == nil { + t.Fatal("expected non-nil logger (nop logger)") + } +}