feat(v0.11.0): F381-F385 creator analytics service

Implement CreatorAnalyticsService with:
- GetCreatorDashboard: aggregated plays, listeners, revenue (F381)
- GetPlayEvolution: temporal data by day/week/month (F382)
- GetSalesSummary: revenue and sales history (F383)
- GetDiscoverySources: how listeners find tracks (F381)
- GetGeographicBreakdown: anonymized geographic data (F381)
- GetAudienceProfile: aggregated audience demographics, min 10 users (F384)
- GetLiveStreamMetrics: real-time viewer count (F385)
- GetPerTrackStats: per-track analytics with pagination

All data is private to the creator, never exposed publicly.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
senke 2026-03-10 16:25:30 +01:00
parent c50b7049b9
commit e8fc494184
2 changed files with 945 additions and 0 deletions

View file

@ -0,0 +1,601 @@
package services
import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/google/uuid"
	"go.uber.org/zap"
	"gorm.io/gorm"
)
// CreatorAnalyticsService provides analytics data for creators (F381-F395).
// All data is private to the creator — never exposed publicly.
type CreatorAnalyticsService struct {
	db     *gorm.DB
	logger *zap.Logger
}

// NewCreatorAnalyticsService creates a new creator analytics service.
// A nil logger is replaced with a no-op logger, so callers may pass nil safely.
func NewCreatorAnalyticsService(db *gorm.DB, logger *zap.Logger) *CreatorAnalyticsService {
	svc := &CreatorAnalyticsService{db: db, logger: logger}
	if svc.logger == nil {
		svc.logger = zap.NewNop()
	}
	return svc
}
// CreatorDashboardStats is the aggregated creator dashboard response (F381).
type CreatorDashboardStats struct {
	TotalPlays        int64   `json:"total_plays"`         // higher of the playback_analytics and track_plays counts
	CompleteListens   int64   `json:"complete_listens"`    // plays with completion_rate >= 90
	UniqueListeners   int64   `json:"unique_listeners"`    // distinct user_ids in playback_analytics
	TotalPlayTime     int64   `json:"total_play_time"`     // seconds
	AvgPlayDuration   float64 `json:"avg_play_duration"`   // seconds; TotalPlayTime / TotalPlays
	AvgCompletionRate float64 `json:"avg_completion_rate"` // percentage
	TotalTracks       int64   `json:"total_tracks"`        // non-deleted tracks owned by the creator
	TotalFollowers    int64   `json:"total_followers"`
	TotalRevenue      float64 `json:"total_revenue"`       // sum of order_items.price on completed/paid orders
}
// GetCreatorDashboard returns aggregated stats for a creator's dashboard (F381).
//
// The primary aggregates (plays, unique listeners, complete listens, play time,
// completion rate) come from playback_analytics and fail the call on error.
// track_plays is consulted as a secondary play count and the higher value wins.
// Track/follower counts and revenue are best-effort: failures are logged as
// warnings and the corresponding field keeps its zero value.
func (s *CreatorAnalyticsService) GetCreatorDashboard(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) (*CreatorDashboardStats, error) {
	stats := &CreatorDashboardStats{}
	// Plays, unique listeners, complete listens, play time, completion from playback_analytics.
	var playbackAgg struct {
		TotalPlays      int64   `gorm:"column:total_plays"`
		UniqueListeners int64   `gorm:"column:unique_listeners"`
		CompleteListens int64   `gorm:"column:complete_listens"`
		TotalPlayTime   int64   `gorm:"column:total_play_time"`
		AvgCompletion   float64 `gorm:"column:avg_completion"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT
			COUNT(pa.id) AS total_plays,
			COUNT(DISTINCT pa.user_id) AS unique_listeners,
			COUNT(CASE WHEN pa.completion_rate >= 90 THEN 1 END) AS complete_listens,
			COALESCE(SUM(pa.play_time), 0) AS total_play_time,
			COALESCE(AVG(pa.completion_rate), 0) AS avg_completion
		FROM playback_analytics pa
		JOIN tracks t ON t.id = pa.track_id
		WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ?
	`, creatorID, startDate, endDate).Scan(&playbackAgg).Error; err != nil {
		return nil, fmt.Errorf("failed to get playback stats: %w", err)
	}
	stats.TotalPlays = playbackAgg.TotalPlays
	stats.UniqueListeners = playbackAgg.UniqueListeners
	stats.CompleteListens = playbackAgg.CompleteListens
	stats.TotalPlayTime = playbackAgg.TotalPlayTime
	stats.AvgCompletionRate = playbackAgg.AvgCompletion
	if stats.TotalPlays > 0 {
		stats.AvgPlayDuration = float64(stats.TotalPlayTime) / float64(stats.TotalPlays)
	}
	// Also count track_plays; the two tables may record plays independently.
	var trackPlayAgg struct {
		TrackPlays int64 `gorm:"column:track_plays"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COUNT(tp.id) AS track_plays
		FROM track_plays tp
		JOIN tracks t ON t.id = tp.track_id
		WHERE t.creator_id = ? AND tp.played_at >= ? AND tp.played_at <= ?
	`, creatorID, startDate, endDate).Scan(&trackPlayAgg).Error; err != nil {
		s.logger.Warn("Failed to get track_plays count", zap.Error(err))
	}
	// Use the higher of the two counts.
	if trackPlayAgg.TrackPlays > stats.TotalPlays {
		stats.TotalPlays = trackPlayAgg.TrackPlays
	}
	// Total tracks. Previously the Count error was silently dropped; now logged
	// like the other best-effort queries.
	if err := s.db.WithContext(ctx).Table("tracks").Where("creator_id = ? AND deleted_at IS NULL", creatorID).Count(&stats.TotalTracks).Error; err != nil {
		s.logger.Warn("Failed to count tracks", zap.Error(err))
	}
	// Total followers (same best-effort treatment).
	if err := s.db.WithContext(ctx).Table("follows").Where("followed_id = ?", creatorID).Count(&stats.TotalFollowers).Error; err != nil {
		s.logger.Warn("Failed to count followers", zap.Error(err))
	}
	// Total revenue over completed/paid orders in the window.
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COALESCE(SUM(oi.price), 0)
		FROM order_items oi
		JOIN orders o ON o.id = oi.order_id
		JOIN products p ON p.id = oi.product_id
		WHERE p.seller_id = ? AND o.status IN ('completed', 'paid')
		AND o.created_at >= ? AND o.created_at <= ?
	`, creatorID, startDate, endDate).Scan(&stats.TotalRevenue).Error; err != nil {
		s.logger.Warn("Failed to get revenue", zap.Error(err))
	}
	return stats, nil
}
// PlayEvolutionPoint represents a single data point in the play evolution chart (F382).
type PlayEvolutionPoint struct {
	Date            string  `json:"date"`             // bucket start; granularity depends on the requested interval
	TotalPlays      int64   `json:"total_plays"`
	CompleteListens int64   `json:"complete_listens"` // plays with completion_rate >= 90
	UniqueListeners int64   `json:"unique_listeners"`
	TotalPlayTime   int64   `json:"total_play_time"`  // seconds
	AvgCompletion   float64 `json:"avg_completion"`   // percentage
}
// GetPlayEvolution returns play data over time for the creator (F382).
// interval selects the bucket width: "week", "month", or anything else for "day".
// NOTE: the week/month expressions use Postgres-specific DATE_TRUNC/::date syntax.
func (s *CreatorAnalyticsService) GetPlayEvolution(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time, interval string) ([]PlayEvolutionPoint, error) {
	// Pick the SQL bucket expression; unrecognized intervals fall back to daily.
	dateExpr := "DATE(pa.started_at)"
	switch interval {
	case "week":
		dateExpr = "DATE_TRUNC('week', pa.started_at)::date"
	case "month":
		dateExpr = "DATE_TRUNC('month', pa.started_at)::date"
	}
	var buckets []struct {
		Date            string  `gorm:"column:date"`
		TotalPlays      int64   `gorm:"column:total_plays"`
		CompleteListens int64   `gorm:"column:complete_listens"`
		UniqueListeners int64   `gorm:"column:unique_listeners"`
		TotalPlayTime   int64   `gorm:"column:total_play_time"`
		AvgCompletion   float64 `gorm:"column:avg_completion"`
	}
	// dateExpr comes only from the fixed switch above, so the Sprintf is injection-safe.
	query := fmt.Sprintf(`
		SELECT
			%s AS date,
			COUNT(pa.id) AS total_plays,
			COUNT(CASE WHEN pa.completion_rate >= 90 THEN 1 END) AS complete_listens,
			COUNT(DISTINCT pa.user_id) AS unique_listeners,
			COALESCE(SUM(pa.play_time), 0) AS total_play_time,
			COALESCE(AVG(pa.completion_rate), 0) AS avg_completion
		FROM playback_analytics pa
		JOIN tracks t ON t.id = pa.track_id
		WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ?
		GROUP BY date
		ORDER BY date ASC
	`, dateExpr)
	if err := s.db.WithContext(ctx).Raw(query, creatorID, startDate, endDate).Scan(&buckets).Error; err != nil {
		return nil, fmt.Errorf("failed to get play evolution: %w", err)
	}
	points := make([]PlayEvolutionPoint, 0, len(buckets))
	for _, b := range buckets {
		points = append(points, PlayEvolutionPoint{
			Date:            b.Date,
			TotalPlays:      b.TotalPlays,
			CompleteListens: b.CompleteListens,
			UniqueListeners: b.UniqueListeners,
			TotalPlayTime:   b.TotalPlayTime,
			AvgCompletion:   b.AvgCompletion,
		})
	}
	return points, nil
}
// SalesRecord represents a sales entry for downloads and purchases (F383).
// NOTE(review): this type is not referenced by any code visible in this file —
// presumably used by a handler elsewhere; confirm before removing.
type SalesRecord struct {
	Date        string  `json:"date"`
	TrackID     string  `json:"track_id"`
	TrackTitle  string  `json:"track_title"`
	ProductType string  `json:"product_type"`
	Amount      float64 `json:"amount"`
	Currency    string  `json:"currency"`
	BuyerCount  int64   `json:"buyer_count"`
}

// SalesSummary aggregates sales data (F383).
type SalesSummary struct {
	TotalRevenue     float64           `json:"total_revenue"`
	TotalSales       int64             `json:"total_sales"`
	RevenueByPeriod  []RevenuePeriod   `json:"revenue_by_period"`  // one entry per calendar day with sales
	TopSellingTracks []TopSellingTrack `json:"top_selling_tracks"` // at most 10, ordered by revenue desc
}

// RevenuePeriod represents revenue for a time period (one day).
type RevenuePeriod struct {
	Date    string  `json:"date"`
	Revenue float64 `json:"revenue"`
	Sales   int64   `json:"sales"`
}

// TopSellingTrack represents a top-selling track.
type TopSellingTrack struct {
	TrackID string  `json:"track_id"`
	Title   string  `json:"title"`
	Revenue float64 `json:"revenue"`
	Sales   int64   `json:"sales"`
}
// GetSalesSummary returns sales and download data for the creator (F383).
//
// Produces total revenue/sales, a per-day revenue series, and the top 10
// selling items. Each sub-query is best-effort: on failure a warning is logged
// and the corresponding section stays empty/zero; the call itself never errors.
func (s *CreatorAnalyticsService) GetSalesSummary(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) (*SalesSummary, error) {
	// Pre-initialize slices so an empty result serializes as [] rather than null.
	summary := &SalesSummary{
		RevenueByPeriod:  []RevenuePeriod{},
		TopSellingTracks: []TopSellingTrack{},
	}
	// Total revenue and sales count over completed/paid orders in the window.
	var totals struct {
		TotalRevenue float64 `gorm:"column:total_revenue"`
		TotalSales   int64   `gorm:"column:total_sales"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COALESCE(SUM(oi.price), 0) AS total_revenue, COUNT(oi.id) AS total_sales
		FROM order_items oi
		JOIN orders o ON o.id = oi.order_id
		JOIN products p ON p.id = oi.product_id
		WHERE p.seller_id = ? AND o.status IN ('completed', 'paid')
		AND o.created_at >= ? AND o.created_at <= ?
	`, creatorID, startDate, endDate).Scan(&totals).Error; err != nil {
		s.logger.Warn("Failed to get sales totals", zap.Error(err))
	} else {
		summary.TotalRevenue = totals.TotalRevenue
		summary.TotalSales = totals.TotalSales
	}
	// Revenue grouped by calendar day.
	var periodRows []struct {
		Date    string  `gorm:"column:date"`
		Revenue float64 `gorm:"column:revenue"`
		Sales   int64   `gorm:"column:sales"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT DATE(o.created_at) AS date, COALESCE(SUM(oi.price), 0) AS revenue, COUNT(oi.id) AS sales
		FROM order_items oi
		JOIN orders o ON o.id = oi.order_id
		JOIN products p ON p.id = oi.product_id
		WHERE p.seller_id = ? AND o.status IN ('completed', 'paid')
		AND o.created_at >= ? AND o.created_at <= ?
		GROUP BY DATE(o.created_at)
		ORDER BY date ASC
	`, creatorID, startDate, endDate).Scan(&periodRows).Error; err != nil {
		s.logger.Warn("Failed to get revenue by period", zap.Error(err))
	}
	for _, r := range periodRows {
		summary.RevenueByPeriod = append(summary.RevenueByPeriod, RevenuePeriod{
			Date: r.Date, Revenue: r.Revenue, Sales: r.Sales,
		})
	}
	// Top 10 products by revenue.
	// NOTE(review): the LEFT JOIN matches tracks by t.id = p.id, i.e. it assumes
	// a track and its product share the same ID — confirm against the products schema.
	var topRows []struct {
		TrackID string  `gorm:"column:track_id"`
		Title   string  `gorm:"column:title"`
		Revenue float64 `gorm:"column:revenue"`
		Sales   int64   `gorm:"column:sales"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT CAST(p.id AS TEXT) AS track_id, COALESCE(t.title, p.name) AS title,
		COALESCE(SUM(oi.price), 0) AS revenue, COUNT(oi.id) AS sales
		FROM order_items oi
		JOIN orders o ON o.id = oi.order_id
		JOIN products p ON p.id = oi.product_id
		LEFT JOIN tracks t ON t.id = p.id
		WHERE p.seller_id = ? AND o.status IN ('completed', 'paid')
		AND o.created_at >= ? AND o.created_at <= ?
		GROUP BY p.id, t.title, p.name
		ORDER BY revenue DESC
		LIMIT 10
	`, creatorID, startDate, endDate).Scan(&topRows).Error; err != nil {
		s.logger.Warn("Failed to get top selling tracks", zap.Error(err))
	}
	for _, r := range topRows {
		summary.TopSellingTracks = append(summary.TopSellingTracks, TopSellingTrack{
			TrackID: r.TrackID, Title: r.Title, Revenue: r.Revenue, Sales: r.Sales,
		})
	}
	return summary, nil
}
// DiscoverySourceBreakdown represents how users discover the creator's tracks (F381).
type DiscoverySourceBreakdown struct {
	Source     string  `json:"source"`     // track_plays.source; empty/NULL is reported as "direct"
	Count      int64   `json:"count"`
	Percentage float64 `json:"percentage"` // share of all plays in the window, 0-100
}
// GetDiscoverySources returns how listeners find the creator's tracks (F381).
// Each source's count is returned together with its share of the total plays.
func (s *CreatorAnalyticsService) GetDiscoverySources(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) ([]DiscoverySourceBreakdown, error) {
	var counted []struct {
		Source string `gorm:"column:source"`
		Count  int64  `gorm:"column:cnt"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COALESCE(NULLIF(tp.source, ''), 'direct') AS source, COUNT(*) AS cnt
		FROM track_plays tp
		JOIN tracks t ON t.id = tp.track_id
		WHERE t.creator_id = ? AND tp.played_at >= ? AND tp.played_at <= ?
		GROUP BY source
		ORDER BY cnt DESC
	`, creatorID, startDate, endDate).Scan(&counted).Error; err != nil {
		return nil, fmt.Errorf("failed to get discovery sources: %w", err)
	}
	// Sum plays across all sources so each row can report its share.
	var grandTotal int64
	for _, c := range counted {
		grandTotal += c.Count
	}
	breakdown := make([]DiscoverySourceBreakdown, 0, len(counted))
	for _, c := range counted {
		entry := DiscoverySourceBreakdown{Source: c.Source, Count: c.Count}
		if grandTotal > 0 {
			entry.Percentage = float64(c.Count) / float64(grandTotal) * 100
		}
		breakdown = append(breakdown, entry)
	}
	return breakdown, nil
}
// GeographicBreakdown represents geographic distribution of plays (F381).
type GeographicBreakdown struct {
	CountryCode     string  `json:"country_code"`     // code as stored in track_plays; "XX" when unknown
	Region          string  `json:"region,omitempty"` // not populated by GetGeographicBreakdown in this file
	PlayCount       int64   `json:"play_count"`
	UniqueListeners int64   `json:"unique_listeners"` // always >= 10 per row (privacy filter)
	Percentage      float64 `json:"percentage"`       // share of plays among the countries returned
}
// GetGeographicBreakdown returns geographic distribution of plays (F381).
// Data is always aggregated — never exposes individual user locations. The
// HAVING clause drops any country with fewer than 10 distinct listeners, so
// percentages are computed over the surviving countries only.
func (s *CreatorAnalyticsService) GetGeographicBreakdown(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) ([]GeographicBreakdown, error) {
	var counted []struct {
		CountryCode     string `gorm:"column:country_code"`
		PlayCount       int64  `gorm:"column:play_count"`
		UniqueListeners int64  `gorm:"column:unique_listeners"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COALESCE(NULLIF(tp.country_code, ''), 'XX') AS country_code,
		COUNT(*) AS play_count,
		COUNT(DISTINCT tp.user_id) AS unique_listeners
		FROM track_plays tp
		JOIN tracks t ON t.id = tp.track_id
		WHERE t.creator_id = ? AND tp.played_at >= ? AND tp.played_at <= ?
		GROUP BY country_code
		HAVING COUNT(DISTINCT tp.user_id) >= 10
		ORDER BY play_count DESC
	`, creatorID, startDate, endDate).Scan(&counted).Error; err != nil {
		return nil, fmt.Errorf("failed to get geographic breakdown: %w", err)
	}
	var grandTotal int64
	for _, c := range counted {
		grandTotal += c.PlayCount
	}
	breakdown := make([]GeographicBreakdown, 0, len(counted))
	for _, c := range counted {
		entry := GeographicBreakdown{
			CountryCode:     c.CountryCode,
			PlayCount:       c.PlayCount,
			UniqueListeners: c.UniqueListeners,
		}
		if grandTotal > 0 {
			entry.Percentage = float64(c.PlayCount) / float64(grandTotal) * 100
		}
		breakdown = append(breakdown, entry)
	}
	return breakdown, nil
}
// AudienceProfile represents aggregated audience demographics (F384).
// Only shown when >= 10 unique listeners for privacy.
type AudienceProfile struct {
	TotalListeners    int64               `json:"total_listeners"`     // always populated, even below the threshold
	ListenersByGenre  []GenreBreakdown    `json:"listeners_by_genre"`  // empty when TotalListeners < 10
	TopListeningTimes []ListeningTimeSlot `json:"top_listening_times"` // empty when TotalListeners < 10
}

// GenreBreakdown shows what genres the audience listens to.
type GenreBreakdown struct {
	Genre      string  `json:"genre"`
	Count      int64   `json:"count"`
	Percentage float64 `json:"percentage"` // share among the top-10 genres returned
}

// ListeningTimeSlot shows when the audience listens most.
type ListeningTimeSlot struct {
	Hour      int   `json:"hour"` // 0-23
	PlayCount int64 `json:"play_count"`
}
// GetAudienceProfile returns aggregated audience data (F384).
//
// Requires minimum 10 unique listeners for privacy protection: below that
// threshold only TotalListeners is populated and the breakdowns stay empty.
// The genre and listening-time queries are best-effort (warn on failure).
func (s *CreatorAnalyticsService) GetAudienceProfile(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time) (*AudienceProfile, error) {
	// Pre-initialize slices so an empty profile serializes as [] rather than null.
	profile := &AudienceProfile{
		ListenersByGenre:  []GenreBreakdown{},
		TopListeningTimes: []ListeningTimeSlot{},
	}
	// Count unique listeners of this creator's tracks in the window.
	if err := s.db.WithContext(ctx).Raw(`
		SELECT COUNT(DISTINCT pa.user_id)
		FROM playback_analytics pa
		JOIN tracks t ON t.id = pa.track_id
		WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ?
	`, creatorID, startDate, endDate).Scan(&profile.TotalListeners).Error; err != nil {
		return nil, fmt.Errorf("failed to count listeners: %w", err)
	}
	// Privacy check: minimum 10 unique listeners before exposing breakdowns.
	if profile.TotalListeners < 10 {
		return profile, nil
	}
	// What genres does the audience also listen to: self-join playback_analytics
	// on user_id to find the same listeners' plays of OTHER tracks, then group
	// those by genre.
	// NOTE(review): COUNT(DISTINCT pa2.id) counts plays, not listeners, so the
	// "ListenersByGenre" field actually holds play counts — confirm intent.
	var genreRows []struct {
		Genre string `gorm:"column:genre"`
		Count int64  `gorm:"column:cnt"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT t2.genre, COUNT(DISTINCT pa2.id) AS cnt
		FROM playback_analytics pa
		JOIN tracks t ON t.id = pa.track_id
		JOIN playback_analytics pa2 ON pa2.user_id = pa.user_id AND pa2.track_id != pa.track_id
		JOIN tracks t2 ON t2.id = pa2.track_id
		WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ?
		AND t2.genre != '' AND t2.genre IS NOT NULL
		GROUP BY t2.genre
		ORDER BY cnt DESC
		LIMIT 10
	`, creatorID, startDate, endDate).Scan(&genreRows).Error; err != nil {
		s.logger.Warn("Failed to get audience genres", zap.Error(err))
	}
	// Percentages are relative to the top-10 genres returned, not all genres.
	var totalGenre int64
	for _, r := range genreRows {
		totalGenre += r.Count
	}
	for _, r := range genreRows {
		pct := float64(0)
		if totalGenre > 0 {
			pct = float64(r.Count) / float64(totalGenre) * 100
		}
		profile.ListenersByGenre = append(profile.ListenersByGenre, GenreBreakdown{
			Genre: r.Genre, Count: r.Count, Percentage: pct,
		})
	}
	// Peak listening times (hour of day).
	// NOTE(review): EXTRACT(...)::int is Postgres-specific; on other engines this
	// query fails and is only logged, leaving TopListeningTimes empty.
	var timeRows []struct {
		Hour      int   `gorm:"column:hour"`
		PlayCount int64 `gorm:"column:play_count"`
	}
	if err := s.db.WithContext(ctx).Raw(`
		SELECT EXTRACT(HOUR FROM pa.started_at)::int AS hour, COUNT(*) AS play_count
		FROM playback_analytics pa
		JOIN tracks t ON t.id = pa.track_id
		WHERE t.creator_id = ? AND pa.started_at >= ? AND pa.started_at <= ?
		GROUP BY hour
		ORDER BY hour ASC
	`, creatorID, startDate, endDate).Scan(&timeRows).Error; err != nil {
		s.logger.Warn("Failed to get listening times", zap.Error(err))
	}
	for _, r := range timeRows {
		profile.TopListeningTimes = append(profile.TopListeningTimes, ListeningTimeSlot{
			Hour: r.Hour, PlayCount: r.PlayCount,
		})
	}
	return profile, nil
}
// LiveStreamMetrics represents real-time metrics for a live stream (F385).
type LiveStreamMetrics struct {
	StreamID        string `json:"stream_id"`
	CurrentViewers  int    `json:"current_viewers"`
	PeakViewers     int    `json:"peak_viewers"`      // currently mirrors CurrentViewers; peak is not tracked separately
	IsLive          bool   `json:"is_live"`
	DurationSeconds int64  `json:"duration_seconds"` // 0 unless the stream is live and has a start time
}
// GetLiveStreamMetrics returns current metrics for a live stream (F385).
//
// Ownership is enforced in the query: the stream must belong to creatorID,
// so another creator's stream behaves exactly like a missing one.
func (s *CreatorAnalyticsService) GetLiveStreamMetrics(ctx context.Context, creatorID uuid.UUID, streamID uuid.UUID) (*LiveStreamMetrics, error) {
	var stream struct {
		ID          uuid.UUID  `gorm:"column:id"`
		UserID      uuid.UUID  `gorm:"column:user_id"`
		IsLive      bool       `gorm:"column:is_live"`
		ViewerCount int        `gorm:"column:viewer_count"`
		StartedAt   *time.Time `gorm:"column:started_at"`
	}
	if err := s.db.WithContext(ctx).Table("live_streams").
		Where("id = ? AND user_id = ?", streamID, creatorID).
		First(&stream).Error; err != nil {
		// errors.Is also matches wrapped errors; a direct == comparison would miss them.
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return nil, fmt.Errorf("live stream not found")
		}
		return nil, fmt.Errorf("failed to get live stream: %w", err)
	}
	metrics := &LiveStreamMetrics{
		StreamID:       streamID.String(),
		CurrentViewers: stream.ViewerCount,
		PeakViewers:    stream.ViewerCount, // Could track separately in future
		IsLive:         stream.IsLive,
	}
	// Duration only makes sense for a running stream with a known start time.
	if stream.StartedAt != nil && stream.IsLive {
		metrics.DurationSeconds = int64(time.Since(*stream.StartedAt).Seconds())
	}
	return metrics, nil
}
// TrackDetailedStats holds per-track stats for a creator (used in the F381 dashboard).
type TrackDetailedStats struct {
	TrackID         string  `json:"track_id"`
	Title           string  `json:"title"`
	TotalPlays      int64   `json:"total_plays"`
	CompleteListens int64   `json:"complete_listens"` // plays with completion_rate >= 90
	UniqueListeners int64   `json:"unique_listeners"`
	AvgPlayDuration float64 `json:"avg_play_duration"` // seconds (average of play_time)
	AvgCompletion   float64 `json:"avg_completion"`    // percentage
}
// GetPerTrackStats returns detailed stats for each of the creator's tracks.
//
// Results are ordered by play count (descending) and paginated via limit/offset.
// limit defaults to 20 and is capped at 100; a negative offset is clamped to 0
// (previously it was passed straight to SQL, which rejects a negative OFFSET).
// The second return value is the total number of the creator's non-deleted
// tracks, for pagination.
func (s *CreatorAnalyticsService) GetPerTrackStats(ctx context.Context, creatorID uuid.UUID, startDate, endDate time.Time, limit, offset int) ([]TrackDetailedStats, int64, error) {
	if limit <= 0 {
		limit = 20
	}
	if limit > 100 {
		limit = 100
	}
	if offset < 0 {
		offset = 0
	}
	// Total track count is best-effort; a failure is logged and total stays 0.
	var total int64
	if err := s.db.WithContext(ctx).Table("tracks").Where("creator_id = ? AND deleted_at IS NULL", creatorID).Count(&total).Error; err != nil {
		s.logger.Warn("Failed to count tracks", zap.Error(err))
	}
	var rows []struct {
		TrackID         string  `gorm:"column:track_id"`
		Title           string  `gorm:"column:title"`
		TotalPlays      int64   `gorm:"column:total_plays"`
		CompleteListens int64   `gorm:"column:complete_listens"`
		UniqueListeners int64   `gorm:"column:unique_listeners"`
		AvgPlayDuration float64 `gorm:"column:avg_play_duration"`
		AvgCompletion   float64 `gorm:"column:avg_completion"`
	}
	// LEFT JOIN keeps tracks with zero plays in the window (all aggregates 0).
	if err := s.db.WithContext(ctx).Raw(`
		SELECT CAST(t.id AS TEXT) AS track_id, t.title,
		COUNT(pa.id) AS total_plays,
		COUNT(CASE WHEN pa.completion_rate >= 90 THEN 1 END) AS complete_listens,
		COUNT(DISTINCT pa.user_id) AS unique_listeners,
		COALESCE(AVG(pa.play_time), 0) AS avg_play_duration,
		COALESCE(AVG(pa.completion_rate), 0) AS avg_completion
		FROM tracks t
		LEFT JOIN playback_analytics pa ON pa.track_id = t.id
		AND pa.started_at >= ? AND pa.started_at <= ?
		WHERE t.creator_id = ? AND t.deleted_at IS NULL
		GROUP BY t.id, t.title
		ORDER BY total_plays DESC
		LIMIT ? OFFSET ?
	`, startDate, endDate, creatorID, limit, offset).Scan(&rows).Error; err != nil {
		return nil, 0, fmt.Errorf("failed to get per-track stats: %w", err)
	}
	result := make([]TrackDetailedStats, len(rows))
	for i, r := range rows {
		result[i] = TrackDetailedStats{
			TrackID:         r.TrackID,
			Title:           r.Title,
			TotalPlays:      r.TotalPlays,
			CompleteListens: r.CompleteListens,
			UniqueListeners: r.UniqueListeners,
			AvgPlayDuration: r.AvgPlayDuration,
			AvgCompletion:   r.AvgCompletion,
		}
	}
	return result, total, nil
}

View file

@ -0,0 +1,344 @@
package services
import (
"context"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"veza-backend-api/internal/models"
)
// setupTestCreatorAnalyticsService builds an in-memory SQLite database seeded
// with one creator, one track, and 15 listeners — each with a
// playback_analytics row, a track_plays row, and a follow — and returns the
// service under test plus the DB handle, creator ID, and track ID.
//
// NOTE(review): the service queries tracks by a creator_id column while the
// Track fixture sets UserID — assumes models.Track maps that field to
// creator_id (or the column is aliased); confirm against the model's gorm tags.
func setupTestCreatorAnalyticsService(t *testing.T) (*CreatorAnalyticsService, *gorm.DB, uuid.UUID, uuid.UUID) {
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	db.Exec("PRAGMA foreign_keys = ON")
	err = db.AutoMigrate(
		&models.User{},
		&models.Track{},
		&models.TrackPlay{},
		&models.PlaybackAnalytics{},
		&models.LiveStream{},
	)
	require.NoError(t, err)
	// Add source and country_code columns to track_plays (normally done by migration)
	db.Exec("ALTER TABLE track_plays ADD COLUMN source TEXT DEFAULT ''")
	db.Exec("ALTER TABLE track_plays ADD COLUMN country_code TEXT DEFAULT ''")
	// Create follows table (normally done by migration, no GORM model)
	db.Exec(`CREATE TABLE IF NOT EXISTS follows (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		follower_id TEXT NOT NULL,
		followed_id TEXT NOT NULL,
		created_at DATETIME DEFAULT CURRENT_TIMESTAMP
	)`)
	// Create test user (creator)
	creator := &models.User{
		Username: "creator1", Email: "creator@test.com",
		PasswordHash: "hash", Slug: "creator1", IsActive: true,
	}
	require.NoError(t, db.Create(creator).Error)
	// Create test track
	track := &models.Track{
		UserID: creator.ID, Title: "Test Track", FilePath: "/test.mp3",
		FileSize: 5 * 1024 * 1024, Format: "MP3", Duration: 180,
		IsPublic: true, Status: models.TrackStatusCompleted,
	}
	require.NoError(t, db.Create(track).Error)
	// Create 15 listener users with playback data: 15 clears the >= 10
	// privacy threshold used by GetAudienceProfile.
	for i := 0; i < 15; i++ {
		listener := &models.User{
			Username: "listener" + string(rune('a'+i)),
			Email: "listener" + string(rune('a'+i)) + "@test.com",
			PasswordHash: "hash",
			Slug: "listener" + string(rune('a'+i)),
			IsActive: true,
		}
		require.NoError(t, db.Create(listener).Error)
		// Create playback analytics for each listener. Completion rates span
		// 60..88, so no row counts as a "complete listen" (>= 90).
		pa := &models.PlaybackAnalytics{
			TrackID: track.ID,
			UserID: listener.ID,
			PlayTime: 120 + i*5,
			PauseCount: i % 3,
			SeekCount: i % 2,
			CompletionRate: float64(60 + i*2),
			StartedAt: time.Now().Add(-time.Duration(i) * time.Hour),
		}
		require.NoError(t, db.Create(pa).Error)
		// Create track_plays (source/country_code left at their '' defaults)
		tp := &models.TrackPlay{
			TrackID: track.ID,
			UserID: &listener.ID,
			Duration: 120 + i*5,
			PlayedAt: time.Now().Add(-time.Duration(i) * time.Hour),
			Device: "desktop",
		}
		require.NoError(t, db.Create(tp).Error)
		// Create a follow (raw insert — follows has no GORM model)
		db.Exec("INSERT INTO follows (follower_id, followed_id, created_at) VALUES (?, ?, ?)",
			listener.ID, creator.ID, time.Now())
	}
	logger := zap.NewNop()
	service := NewCreatorAnalyticsService(db, logger)
	return service, db, creator.ID, track.ID
}
// TestCreatorAnalyticsService_GetCreatorDashboard checks that the seeded
// creator (15 listeners, 1 track, follows) produces non-zero dashboard stats.
func TestCreatorAnalyticsService_GetCreatorDashboard(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	dashboard, err := service.GetCreatorDashboard(context.Background(), creatorID, from, to)
	require.NoError(t, err)
	require.NotNil(t, dashboard)
	assert.Greater(t, dashboard.TotalPlays, int64(0), "should have plays")
	assert.Greater(t, dashboard.UniqueListeners, int64(0), "should have unique listeners")
	assert.Greater(t, dashboard.TotalPlayTime, int64(0), "should have play time")
	assert.Greater(t, dashboard.TotalTracks, int64(0), "should have tracks")
	assert.Greater(t, dashboard.TotalFollowers, int64(0), "should have followers")
	assert.Greater(t, dashboard.AvgCompletionRate, float64(0), "should have completion rate")
	assert.Greater(t, dashboard.AvgPlayDuration, float64(0), "should have avg play duration")
}
// TestCreatorAnalyticsService_GetCreatorDashboard_NoData: an unknown creator ID
// yields a zero-valued dashboard rather than an error.
func TestCreatorAnalyticsService_GetCreatorDashboard_NoData(t *testing.T) {
	service, _, _, _ := setupTestCreatorAnalyticsService(t)
	unknown := uuid.New()
	from := time.Now().Add(-30 * 24 * time.Hour)
	dashboard, err := service.GetCreatorDashboard(context.Background(), unknown, from, time.Now())
	require.NoError(t, err)
	require.NotNil(t, dashboard)
	assert.Equal(t, int64(0), dashboard.TotalPlays)
}
// TestCreatorAnalyticsService_GetPlayEvolution checks the daily-interval series
// over the seeded 30-day window.
func TestCreatorAnalyticsService_GetPlayEvolution(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	// Daily buckets: the seeded plays fall inside the window, so at least one
	// point must come back.
	points, err := service.GetPlayEvolution(context.Background(), creatorID, from, to, "day")
	require.NoError(t, err)
	assert.NotEmpty(t, points, "should have data points")
	for _, point := range points {
		assert.NotEmpty(t, point.Date, "date should not be empty")
		assert.GreaterOrEqual(t, point.TotalPlays, int64(0))
	}
}
func TestCreatorAnalyticsService_GetDiscoverySources(t *testing.T) {
service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
ctx := context.Background()
startDate := time.Now().Add(-30 * 24 * time.Hour)
endDate := time.Now().Add(time.Hour)
sources, err := service.GetDiscoverySources(ctx, creatorID, startDate, endDate)
require.NoError(t, err)
// Should have at least one source ("direct" since all track_plays have empty source)
assert.NotEmpty(t, sources)
// Verify percentages sum to ~100
var totalPct float64
for _, s := range sources {
totalPct += s.Percentage
assert.NotEmpty(t, s.Source)
assert.Greater(t, s.Count, int64(0))
}
assert.InDelta(t, 100.0, totalPct, 1.0)
}
// TestCreatorAnalyticsService_GetAudienceProfile_MinimumListeners verifies the
// profile is returned when the privacy threshold (10 listeners) is cleared.
func TestCreatorAnalyticsService_GetAudienceProfile_MinimumListeners(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	profile, err := service.GetAudienceProfile(context.Background(), creatorID, from, to)
	require.NoError(t, err)
	require.NotNil(t, profile)
	// The fixture seeds 15 distinct listeners, which clears the threshold.
	assert.GreaterOrEqual(t, profile.TotalListeners, int64(10), "should have >= 10 listeners")
}
// TestCreatorAnalyticsService_GetAudienceProfile_InsufficientListeners verifies
// the privacy floor: with fewer than 10 unique listeners the profile returns
// only the listener count and no genre / listening-time breakdowns.
func TestCreatorAnalyticsService_GetAudienceProfile_InsufficientListeners(t *testing.T) {
	// Test with creator who has < 10 listeners — fresh DB so the shared fixture's
	// 15 listeners don't interfere.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	db.Exec("PRAGMA foreign_keys = ON")
	err = db.AutoMigrate(&models.User{}, &models.Track{}, &models.PlaybackAnalytics{})
	require.NoError(t, err)
	creator := &models.User{
		Username: "lonely", Email: "lonely@test.com",
		PasswordHash: "hash", Slug: "lonely", IsActive: true,
	}
	require.NoError(t, db.Create(creator).Error)
	track := &models.Track{
		UserID: creator.ID, Title: "Lonely Track", FilePath: "/lonely.mp3",
		FileSize: 1024, Format: "MP3", Duration: 60,
		IsPublic: true, Status: models.TrackStatusCompleted,
	}
	require.NoError(t, db.Create(track).Error)
	// Only create 3 listeners (< 10 minimum)
	for i := 0; i < 3; i++ {
		listener := &models.User{
			Username: "few" + string(rune('a'+i)),
			Email: "few" + string(rune('a'+i)) + "@test.com",
			PasswordHash: "hash",
			Slug: "few" + string(rune('a'+i)),
			IsActive: true,
		}
		require.NoError(t, db.Create(listener).Error)
		pa := &models.PlaybackAnalytics{
			TrackID: track.ID, UserID: listener.ID,
			PlayTime: 30, CompletionRate: 50,
			StartedAt: time.Now().Add(-time.Hour),
		}
		require.NoError(t, db.Create(pa).Error)
	}
	service := NewCreatorAnalyticsService(db, zap.NewNop())
	ctx := context.Background()
	profile, err := service.GetAudienceProfile(ctx, creator.ID,
		time.Now().Add(-24*time.Hour), time.Now().Add(time.Hour))
	require.NoError(t, err)
	require.NotNil(t, profile)
	// < 10 listeners: should NOT expose detailed data
	assert.Less(t, profile.TotalListeners, int64(10))
	assert.Empty(t, profile.ListenersByGenre, "should not expose genre data with < 10 listeners")
	assert.Empty(t, profile.TopListeningTimes, "should not expose listening times with < 10 listeners")
}
// TestCreatorAnalyticsService_GetPerTrackStats checks the per-track listing
// returns the seeded track with well-formed fields and a positive total.
func TestCreatorAnalyticsService_GetPerTrackStats(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	tracks, total, err := service.GetPerTrackStats(context.Background(), creatorID, from, to, 20, 0)
	require.NoError(t, err)
	assert.Greater(t, total, int64(0))
	assert.NotEmpty(t, tracks)
	for _, stat := range tracks {
		assert.NotEmpty(t, stat.TrackID)
		assert.NotEmpty(t, stat.Title)
		assert.GreaterOrEqual(t, stat.TotalPlays, int64(0))
	}
}
// TestCreatorAnalyticsService_GetPerTrackStats_PaginationLimit checks that an
// oversized limit is clamped to the 100 maximum.
func TestCreatorAnalyticsService_GetPerTrackStats_PaginationLimit(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	// Ask for 200 rows; the service must cap the page size.
	tracks, _, err := service.GetPerTrackStats(context.Background(), creatorID, from, to, 200, 0)
	require.NoError(t, err)
	assert.LessOrEqual(t, len(tracks), 100, "limit should be capped at 100")
}
// TestCreatorAnalyticsService_GetLiveStreamMetrics seeds a live stream that
// began 10 minutes ago and checks viewers, liveness and duration.
func TestCreatorAnalyticsService_GetLiveStreamMetrics(t *testing.T) {
	service, db, creatorID, _ := setupTestCreatorAnalyticsService(t)
	startedAt := time.Now().Add(-10 * time.Minute)
	stream := &models.LiveStream{
		UserID:      creatorID,
		Title:       "Test Stream",
		IsLive:      true,
		ViewerCount: 42,
		StartedAt:   &startedAt,
	}
	require.NoError(t, db.Create(stream).Error)
	metrics, err := service.GetLiveStreamMetrics(context.Background(), creatorID, stream.ID)
	require.NoError(t, err)
	require.NotNil(t, metrics)
	assert.Equal(t, 42, metrics.CurrentViewers)
	assert.True(t, metrics.IsLive)
	assert.Greater(t, metrics.DurationSeconds, int64(500), "stream started 10 min ago")
}
// TestCreatorAnalyticsService_GetLiveStreamMetrics_NotFound: a stream ID with
// no row must produce a "not found" error.
func TestCreatorAnalyticsService_GetLiveStreamMetrics_NotFound(t *testing.T) {
	service, _, creatorID, _ := setupTestCreatorAnalyticsService(t)
	_, err := service.GetLiveStreamMetrics(context.Background(), creatorID, uuid.New())
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "not found")
}
// TestCreatorAnalyticsService_GetLiveStreamMetrics_WrongCreator verifies the
// ownership check: another creator's ID cannot read an existing stream.
func TestCreatorAnalyticsService_GetLiveStreamMetrics_WrongCreator(t *testing.T) {
	service, db, creatorID, _ := setupTestCreatorAnalyticsService(t)
	startedAt := time.Now()
	stream := &models.LiveStream{
		UserID: creatorID, Title: "Stream", IsLive: true, StartedAt: &startedAt,
	}
	require.NoError(t, db.Create(stream).Error)
	// Try to access with a different creator ID — must fail.
	_, err := service.GetLiveStreamMetrics(context.Background(), uuid.New(), stream.ID)
	assert.Error(t, err)
}
// TestCreatorAnalyticsService_MetricsNeverPublic verifies the design principle:
// all creator analytics are scoped by creator ID and only return data for that
// creator's tracks — there is no public access path.
func TestCreatorAnalyticsService_MetricsNeverPublic(t *testing.T) {
	service, _, _, _ := setupTestCreatorAnalyticsService(t)
	from := time.Now().Add(-30 * 24 * time.Hour)
	to := time.Now().Add(time.Hour)
	// A random (non-creator) ID must come back empty rather than leaking data.
	stranger := uuid.New()
	dashboard, err := service.GetCreatorDashboard(context.Background(), stranger, from, to)
	require.NoError(t, err)
	assert.Equal(t, int64(0), dashboard.TotalPlays, "random user should see no data")
	points, err := service.GetPlayEvolution(context.Background(), stranger, from, to, "day")
	require.NoError(t, err)
	assert.Empty(t, points, "random user should see no evolution data")
}