Stabilisation commit while implementing v0.10.5

This commit is contained in:
senke 2026-03-09 19:36:33 +01:00
parent ac182d9f35
commit 22f0c04b3f
70 changed files with 2626 additions and 285 deletions

View file

@ -40,6 +40,9 @@ jobs:
- name: Build
run: npm run build
- name: Bundle size gate
run: node scripts/check-bundle-size.mjs
- name: Audit dependencies
run: npm audit --audit-level=critical

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,21 @@
# =============================================================================
# VEZA Frontend - Remote development on R720 (Cursor Remote-SSH + port forwarding)
# =============================================================================
# Copy to .env or .env.local when developing on the R720:
# cp env.remote-r720.example .env
#
# With Cursor Remote-SSH, the browser on your laptop accesses localhost:5173.
# Vite proxy forwards /api/v1 and /stream to the backend/stream on the R720.
# =============================================================================
# Domain: localhost (browser on laptop uses port forwarding)
VITE_DOMAIN=localhost
# Backend and Stream ports (match docker-compose / config.mk)
VITE_BACKEND_PORT=18080
VITE_STREAM_PORT=18082
# API: use /api/v1 so Vite proxy forwards (same-origin cookies)
VITE_API_URL=/api/v1
VITE_STREAM_URL=/stream
VITE_UPLOAD_URL=/upload

View file

@ -167,7 +167,7 @@ export default [js.configs.recommended, {
'@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-explicit-any': 'warn',
'@typescript-eslint/no-non-null-assertion': 'warn',
// React
@ -268,6 +268,11 @@ export default [js.configs.recommended, {
rules: {
'react-hooks/rules-of-hooks': 'off',
},
}, {
files: ['**/*.test.ts', '**/*.test.tsx', '**/__tests__/**'],
rules: {
'@typescript-eslint/no-explicit-any': 'off',
},
}, {
ignores: [
'node_modules/',

View file

@ -9,6 +9,7 @@
"dev:lab": "bash ./scripts/start_lab.sh",
"dev:mocks": "VITE_USE_MSW=1 vite",
"build": "vite build",
"build:ci": "vite build && node scripts/check-bundle-size.mjs",
"preview": "vite preview",
"test": "vitest",
"test:ui": "vitest --ui",
@ -60,6 +61,7 @@
"qa:backstop:test": "make backstop-test",
"qa:loki": "make loki",
"qa:a11y": "echo 'pa11y-ci removed (security audit A06). Use Makefile.old if needed.'",
"a11y:audit": "npx --yes @axe-core/cli ${A11Y_URL:-http://localhost:5173} --tags wcag2aa --load-delay=2000",
"qa:all": "make qa-all",
"prepare": "husky",
"storybook": "cross-env VITE_API_URL=/api/v1 VITE_USE_MSW=true VITE_STORYBOOK=true storybook dev -p 6006",
@ -131,6 +133,7 @@
"autoprefixer": "^10.4.17",
"babel-plugin-react-remove-properties": "^0.3.1",
"backstopjs": "^6.2.3",
"bundlesize": "^0.18.2",
"cross-env": "^7.0.3",
"eslint": "^9.0.0",
"eslint-plugin-jsx-a11y": "^6.10.2",

View file

@ -0,0 +1,66 @@
#!/usr/bin/env node
/**
 * Bundle size gate TASK-DEBT-015
 * Fails the build if initial JS bundle exceeds 200KB gzipped.
 * Measures: index-*.js + vendor-react-*.js (critical path for first paint).
 *
 * Exit codes: 0 = under budget, 1 = over budget / build output missing /
 * entry chunk not found.
 */
import { readdirSync, readFileSync } from 'fs';
import { gzipSync } from 'zlib';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
const DIST = join(__dirname, '../dist_verification/assets');
const MAX_COMBINED_KB = 200;
/**
 * Gzipped byte length of a buffer.
 * Uses zlib's default compression level — same default the streaming
 * createGzip() used, so measured sizes are unchanged.
 * @param {Buffer} bytes raw file contents
 * @returns {number} compressed size in bytes
 */
function gzipSize(bytes) {
  return gzipSync(bytes).length;
}
function main() {
  let dir;
  try {
    dir = readdirSync(DIST);
  } catch {
    console.error('Run "npm run build" first. dist_verification/assets not found.');
    process.exit(1);
  }
  const indexFile = dir.find((f) => f.startsWith('index-') && f.endsWith('.js'));
  const vendorReactFile = dir.find((f) => f.startsWith('vendor-react-') && f.endsWith('.js'));
  // Guard against a silent false PASS: if the entry chunk cannot be located
  // (e.g. the bundler's naming convention changed), the gate previously
  // measured 0 kB and passed. Fail loudly instead.
  if (!indexFile) {
    console.error('FAIL: no index-*.js found in dist_verification/assets — cannot measure bundle.');
    process.exit(1);
  }
  let totalGzip = 0;
  const results = [];
  for (const file of [indexFile, vendorReactFile]) {
    if (!file) continue; // vendor-react chunk may legitimately be absent
    const bytes = readFileSync(join(DIST, file));
    const sizeGzip = gzipSize(bytes);
    totalGzip += sizeGzip;
    results.push({ file, kb: (sizeGzip / 1024).toFixed(1) });
  }
  const totalKb = totalGzip / 1024;
  console.log('Bundle size (gzipped):');
  results.forEach((r) => console.log(`  ${r.file}: ${r.kb} kB`));
  console.log(`  Total (index + vendor-react): ${totalKb.toFixed(1)} kB`);
  if (totalKb > MAX_COMBINED_KB) {
    console.error(`\nFAIL: Initial bundle exceeds ${MAX_COMBINED_KB} kB (target: ORIGIN_PERFORMANCE_TARGETS §3.2)`);
    process.exit(1);
  }
  console.log(`\nPASS: Bundle under ${MAX_COMBINED_KB} kB limit.`);
}
try {
  main();
} catch (e) {
  console.error(e);
  process.exit(1);
}

View file

@ -1,4 +1,4 @@
import React, { useState, useEffect } from 'react';
import React, { useState, useEffect, useCallback } from 'react';
import { Card } from '../ui/card';
import { Button } from '../ui/button';
import { Input } from '../ui/input';
@ -7,13 +7,23 @@ import { adminService } from '../../services/adminService';
import { format } from 'date-fns';
import { logger } from '@/utils/logger';
interface AuditLogEntry {
id?: string;
action?: string;
resource?: string;
user_id?: string;
ip_address?: string;
details?: Record<string, unknown>;
timestamp?: string;
}
export const AdminAuditLogsView: React.FC = () => {
const [logs, setLogs] = useState<any[]>([]);
const [logs, setLogs] = useState<AuditLogEntry[]>([]);
const [loading, setLoading] = useState(true);
const [page, setPage] = useState(1);
const [total, setTotal] = useState(0);
const fetchLogs = async () => {
const fetchLogs = useCallback(async () => {
setLoading(true);
try {
const data = await adminService.getAuditLogs({ page, limit: 20 });
@ -24,11 +34,11 @@ export const AdminAuditLogsView: React.FC = () => {
} finally {
setLoading(false);
}
};
}, [page]);
useEffect(() => {
fetchLogs();
}, [page]);
}, [fetchLogs]);
return (
<div className="space-y-6 animate-fadeIn h-full flex flex-col">

View file

@ -1,6 +1,7 @@
import type { Meta, StoryObj } from '@storybook/react';
import { CartItem } from './CartItem';
import { fn } from '@storybook/test';
import type { CartItem as CartItemType } from '@/stores/cartStore';
const MOCK_ITEM = {
cartId: 'c1',
@ -56,7 +57,7 @@ type Story = StoryObj<typeof meta>;
export const Default: Story = {
args: {
item: MOCK_ITEM as any,
item: MOCK_ITEM as CartItemType,
},
};
@ -65,6 +66,6 @@ export const StandardLicense: Story = {
item: {
...MOCK_ITEM,
selectedLicense: undefined,
} as any,
} as CartItemType,
},
};

View file

@ -214,7 +214,9 @@ export const ChatInput: React.FC = () => {
)}
<span className="truncate flex-1">{att.file_name}</span>
<button
type="button"
onClick={() => removeAttachment(idx)}
aria-label={`Remove attachment ${att.file_name}`}
className="p-1 hover:bg-white/10 rounded-full text-destructive opacity-0 group-hover:opacity-100 transition-opacity"
>
<X size={12} />
@ -230,6 +232,7 @@ export const ChatInput: React.FC = () => {
type="button"
variant="ghost"
size="icon"
aria-label="Attach file"
className="text-muted-foreground hover:text-foreground hover:bg-white/5"
onClick={() => fileInputRef.current?.click()}
>
@ -241,6 +244,7 @@ export const ChatInput: React.FC = () => {
type="button"
variant="ghost"
size="icon"
aria-label={showEmojiPicker ? 'Close emoji picker' : 'Add emoji'}
className={cn(
'text-muted-foreground hover:text-foreground hover:bg-white/5',
showEmojiPicker && 'text-muted-foreground bg-white/5',
@ -308,6 +312,7 @@ export const ChatInput: React.FC = () => {
type="button"
variant="ghost"
size="icon"
aria-label="Voice message"
className="absolute right-1 top-1/2 -translate-y-1/2 h-8 w-8 text-muted-foreground/30 hover:text-foreground"
>
<Mic className="w-4 h-4" />
@ -320,6 +325,7 @@ export const ChatInput: React.FC = () => {
type="submit"
variant="primary"
size="icon"
aria-label="Send message"
className={cn(
'rounded-xl transition-all duration-[var(--sumi-duration-normal)]',
message.trim() || attachments.length > 0

View file

@ -1,4 +1,5 @@
import { useEffect, useRef, useState, useCallback } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { useUser } from '@/features/auth/hooks/useUser';
import { useChatStore } from '../store/chatStore';
import { apiClient } from '@/services/api/client';
@ -14,6 +15,7 @@ import toast from '@/utils/toast';
export const useChat = (): UseChatReturn => {
const { data: user } = useUser();
const userId = user?.id;
const queryClient = useQueryClient();
// const _username = user?.username;
const {
wsToken,
@ -226,6 +228,9 @@ export const useChat = (): UseChatReturn => {
setCallState('ended');
clearCall();
}
} else if (data.type === 'notification') {
// v0.10.5 F551: Real-time notifications via WebSocket - invalidate queries
queryClient.invalidateQueries({ queryKey: ['notifications'] });
}
// Handle other incoming message types (ActionConfirmed, Error, Pong)
};
@ -277,6 +282,13 @@ export const useChat = (): UseChatReturn => {
setUserTyping,
setMessageDelivered,
setMessageRead,
setIncomingCall,
setActiveCall,
setCallState,
clearCall,
setPendingCallAnswer,
addPendingICECandidate,
queryClient,
]);
const disconnect = useCallback(() => {

View file

@ -1,4 +1,5 @@
import type { Meta, StoryObj } from '@storybook/react';
import { http, HttpResponse } from 'msw';
import { ChatPage } from './ChatPage';
const meta: Meta<typeof ChatPage> = {
@ -19,5 +20,34 @@ export default meta;
type Story = StoryObj<typeof meta>;
export const Default: Story = { name: 'Par défaut' };
export const Loading: Story = { name: 'Chargement' };
export const Loading: Story = {
name: 'Chargement',
parameters: {
msw: {
handlers: [
http.post('*/api/v1/chat/token', async () => {
await new Promise(() => {}); // Never resolves — simulates loading
}),
],
},
},
};
export const Error: Story = {
name: 'Erreur',
parameters: {
msw: {
handlers: [
http.post('*/api/v1/chat/token', () =>
HttpResponse.json(
{ success: false, error: { message: 'Connection failed' } },
{ status: 500 },
),
),
],
},
},
};
export const Empty: Story = { name: 'Vide' };

View file

@ -1,4 +1,5 @@
import type { Meta, StoryObj } from '@storybook/react';
import { http, HttpResponse } from 'msw';
import { LibraryPage, LibraryPageSkeleton } from './library-page';
import { withRouter, withQueryClient, withToast, withAudio } from '../../../stories/decorators';
@ -34,3 +35,19 @@ export const Loading: Story = {
name: 'Chargement',
render: () => <LibraryPageSkeleton />,
};
export const Error: Story = {
name: 'Erreur',
parameters: {
msw: {
handlers: [
http.get('*/api/v1/tracks', () =>
HttpResponse.json(
{ success: false, error: { message: 'Failed to load library' } },
{ status: 500 },
),
),
],
},
},
};

View file

@ -1,6 +1,7 @@
import { useMemo } from 'react';
import { Link } from 'react-router-dom';
import { Card } from '@/components/ui/card';
import { Button } from '@/components/ui/button';
import { useNotificationsPage } from './useNotificationsPage';
import { NotificationsPageHeader } from './NotificationsPageHeader';
import { NotificationsPageFilters } from './NotificationsPageFilters';
@ -58,6 +59,10 @@ export function NotificationsPage() {
setFilterType,
typeFilter,
setTypeFilter,
page,
setPage,
totalPages,
total,
notifications,
hasUnread,
isLoading,
@ -149,6 +154,29 @@ export function NotificationsPage() {
</div>
</div>
))}
{totalPages > 1 && (
<div className="flex justify-center gap-2 pt-4">
<Button
variant="outline"
size="sm"
disabled={page <= 1}
onClick={() => setPage(page - 1)}
>
Previous
</Button>
<span className="flex items-center px-2 text-sm text-muted-foreground">
{page} / {totalPages}
</span>
<Button
variant="outline"
size="sm"
disabled={page >= totalPages}
onClick={() => setPage(page + 1)}
>
Next
</Button>
</div>
)}
</div>
)}
</div>

View file

@ -10,9 +10,12 @@ import {
} from '../../services/notificationService';
import type { FilterType, NotificationTypeFilter } from './types';
const DEFAULT_PAGE_SIZE = 20;
export function useNotificationsPage() {
const [filterType, setFilterType] = useState<FilterType>('all');
const [typeFilter, setTypeFilter] = useState<NotificationTypeFilter>('all');
const [page, setPage] = useState(1);
const { success, error: toastError } = useToast();
const queryClient = useQueryClient();
@ -22,14 +25,26 @@ export function useNotificationsPage() {
isError,
error,
} = useQuery({
queryKey: ['notifications', filterType, typeFilter],
queryKey: ['notifications', filterType, typeFilter, page],
queryFn: () =>
getNotifications({
read: filterType === 'all' ? undefined : filterType === 'read',
type: typeFilter === 'all' ? undefined : typeFilter,
page,
limit: DEFAULT_PAGE_SIZE,
}),
});
// Reset to page 1 when filters change
const setFilterTypeWithReset = (v: FilterType) => {
setFilterType(v);
setPage(1);
};
const setTypeFilterWithReset = (v: NotificationTypeFilter) => {
setTypeFilter(v);
setPage(1);
};
const notifications = notificationsData?.notifications ?? [];
const unreadNotifications = notifications.filter((n) => !n.read);
const readNotifications = notifications.filter((n) => n.read);
@ -42,10 +57,11 @@ export function useNotificationsPage() {
'notifications',
filterType,
typeFilter,
page,
]);
if (previous) {
queryClient.setQueryData<NotificationsResponse>(
['notifications', filterType, typeFilter],
['notifications', filterType, typeFilter, page],
{
...previous,
notifications: previous.notifications.map((n) =>
@ -60,7 +76,7 @@ export function useNotificationsPage() {
onError: (_err, _id, context) => {
if (context?.previous) {
queryClient.setQueryData(
['notifications', filterType, typeFilter],
['notifications', filterType, typeFilter, page],
context.previous,
);
}
@ -80,10 +96,11 @@ export function useNotificationsPage() {
'notifications',
filterType,
typeFilter,
page,
]);
if (previous) {
queryClient.setQueryData<NotificationsResponse>(
['notifications', filterType, typeFilter],
['notifications', filterType, typeFilter, page],
{
...previous,
notifications: previous.notifications.map((n) => ({ ...n, read: true })),
@ -96,7 +113,7 @@ export function useNotificationsPage() {
onError: (_err, _v, context) => {
if (context?.previous) {
queryClient.setQueryData(
['notifications', filterType, typeFilter],
['notifications', filterType, typeFilter, page],
context.previous,
);
}
@ -129,12 +146,18 @@ export function useNotificationsPage() {
: readNotifications;
const hasUnread = unreadNotifications.length > 0;
const totalPages = notificationsData?.totalPages ?? 1;
const total = notificationsData?.total ?? 0;
return {
filterType,
setFilterType,
setFilterType: setFilterTypeWithReset,
typeFilter,
setTypeFilter,
setTypeFilter: setTypeFilterWithReset,
page,
setPage,
totalPages,
total,
notifications: filteredNotifications,
hasUnread,
isLoading,

View file

@ -72,17 +72,18 @@ export async function getNotifications(
total?: number;
page?: number;
limit?: number;
totalPages?: number;
total_pages?: number;
unread_count?: number;
}>(`/notifications?${queryParams.toString()}`, { signal });
const data = response.data;
return {
notifications: response.data.notifications || [],
total: response.data.total,
page: response.data.page || params?.page || 1,
limit: response.data.limit || params?.limit || 20,
totalPages: response.data.totalPages,
unreadCount: response.data.unread_count,
notifications: data.notifications || [],
total: data.total,
page: data.page ?? params?.page ?? 1,
limit: data.limit ?? params?.limit ?? 20,
totalPages: data.total_pages,
unreadCount: data.unread_count,
};
} catch (error) {
if (error instanceof AxiosError) {
@ -214,6 +215,10 @@ export interface NotificationPreferences {
push_comment: boolean;
push_message: boolean;
push_mention: boolean;
/** v0.10.5 F553: Do not disturb during this time window */
quiet_hours_enabled?: boolean;
quiet_hours_start?: string; // "22:00"
quiet_hours_end?: string; // "08:00"
}
/**

View file

@ -47,10 +47,10 @@ export function FollowButton({
staleTime: 30000, // 30 seconds
});
// Update following state from profile if available
// Update following state from profile if available (v0.10.0 F187)
useEffect(() => {
if (profile && (profile as any).is_following !== undefined) {
setFollowing((profile as any).is_following);
if (profile && profile.is_following !== undefined) {
setFollowing(profile.is_following);
} else if (initialFollowing !== undefined) {
setFollowing(initialFollowing);
}

View file

@ -16,6 +16,7 @@ interface UserProfilePageHeaderProps {
tracksCount: number;
playlistsCount: number;
followersCount: number;
followingCount?: number; // v0.10.0 F187
roles?: Role[];
}
@ -27,12 +28,14 @@ export function UserProfilePageHeader({
tracksCount,
playlistsCount,
followersCount,
followingCount = 0,
roles = [],
}: UserProfilePageHeaderProps) {
const stats = [
{ icon: Music, value: tracksCount, label: 'Tracks' },
{ icon: Library, value: playlistsCount, label: 'Playlists' },
{ icon: Users, value: followersCount, label: 'Followers' },
{ icon: User, value: followingCount, label: 'Following' },
];
return (

View file

@ -7,6 +7,7 @@ import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { Checkbox } from '@/components/ui/checkbox';
import { Label } from '@/components/ui/label';
import { Button } from '@/components/ui/button';
import { Input } from '@/components/ui/input';
import {
getPreferences,
updatePreferences,
@ -36,7 +37,7 @@ export function PushPreferencesSection() {
},
});
const handleChange = (field: keyof NotificationPreferences, value: boolean) => {
const handleChange = (field: keyof NotificationPreferences, value: boolean | string) => {
if (!prefs) return;
updateMutation.mutate({ ...prefs, [field]: value });
};
@ -154,6 +155,47 @@ export function PushPreferencesSection() {
</div>
</div>
</div>
{/* v0.10.5 F553: Quiet hours — no push/real-time during this window */}
<div>
<h3 className="text-lg font-semibold mb-4">Ne pas déranger</h3>
<p className="text-sm text-muted-foreground mb-4">
Aucune notification push ni en temps réel pendant cette plage horaire
</p>
<div className="flex items-center justify-between mb-4">
<Label htmlFor="quiet_hours_enabled">Activer les heures calmes</Label>
<Checkbox
id="quiet_hours_enabled"
checked={prefs.quiet_hours_enabled ?? false}
onCheckedChange={(checked) =>
handleChange('quiet_hours_enabled', checked === true)
}
/>
</div>
{(prefs.quiet_hours_enabled ?? false) && (
<div className="flex items-center gap-4">
<div className="flex-1 space-y-1">
<Label htmlFor="quiet_hours_start">Début (ex: 22:00)</Label>
<Input
id="quiet_hours_start"
type="time"
value={prefs.quiet_hours_start ?? '22:00'}
onChange={(e) => handleChange('quiet_hours_start', e.target.value)}
className="max-w-32"
/>
</div>
<div className="flex-1 space-y-1">
<Label htmlFor="quiet_hours_end">Fin (ex: 08:00)</Label>
<Input
id="quiet_hours_end"
type="time"
value={prefs.quiet_hours_end ?? '08:00'}
onChange={(e) => handleChange('quiet_hours_end', e.target.value)}
className="max-w-32"
/>
</div>
</div>
)}
</div>
</div>
);
}

View file

@ -330,15 +330,23 @@ export const handlersMisc = [
});
}),
http.get('*/api/v1/notifications', () => {
http.get('*/api/v1/notifications', ({ request }) => {
const url = new URL(request.url);
const page = Number(url.searchParams.get('page')) || 1;
const limit = Number(url.searchParams.get('limit')) || 20;
const notifications = [
{ id: 'notif-1', user_id: 'user-1', type: 'new_message', title: 'New message', content: 'Someone sent you a message', read: false, created_at: '2024-01-04T00:00:00Z', link: '/chat/1' },
{ id: 'notif-2', user_id: 'user-1', type: 'track_uploaded', title: 'New track', content: 'A creator you follow uploaded a track', read: true, created_at: '2024-01-03T12:00:00Z' },
];
const total = notifications.length;
return HttpResponse.json({
success: true,
data: {
notifications: [
{ id: 'notif-1', user_id: 'user-1', type: 'new_message', title: 'New message', content: 'Someone sent you a message', read: false, created_at: '2024-01-04T00:00:00Z', link: '/chat/1' },
{ id: 'notif-2', user_id: 'user-1', type: 'track_uploaded', title: 'New track', content: 'A creator you follow uploaded a track', read: true, created_at: '2024-01-03T12:00:00Z' },
],
total: 2,
notifications,
total,
page,
limit,
total_pages: Math.max(1, Math.ceil(total / limit)),
unread_count: 1,
},
});
@ -361,6 +369,9 @@ export const handlersMisc = [
push_comment: true,
push_message: true,
push_mention: true,
quiet_hours_enabled: false,
quiet_hours_start: '22:00',
quiet_hours_end: '08:00',
},
});
}),

View file

@ -1,7 +1,11 @@
import type { AxiosRequestConfig } from 'axios';
import type { z } from 'zod';
import { apiClient } from '@/services/api/client';
import { User, PaginatedResponse } from '@/types/api';
import { userSchema } from '@/schemas/apiSchemas';
type RequestConfigWithSchema = AxiosRequestConfig & { validateSchema?: z.ZodSchema };
/**
* Service pour la gestion des profils utilisateurs
* Aligné avec /api/v1/users backend
@ -11,9 +15,8 @@ export const userService = {
* Récupère le profil d'un utilisateur par son ID
*/
getProfile: async (id: string) => {
const response = await apiClient.get<User>(`/users/${id}`, {
validateSchema: userSchema,
} as any);
const config: RequestConfigWithSchema = { validateSchema: userSchema };
const response = await apiClient.get<User>(`/users/${id}`, config);
return { profile: response.data };
},
@ -21,11 +24,10 @@ export const userService = {
* Récupère le profil d'un utilisateur par son nom d'utilisateur
*/
getProfileByUsername: async (username: string) => {
const config: RequestConfigWithSchema = { validateSchema: userSchema };
const response = await apiClient.get<User>(
`/users/by-username/${username}`,
{
validateSchema: userSchema,
} as any,
config,
);
return { profile: response.data };
},
@ -34,9 +36,8 @@ export const userService = {
* Met à jour le profil de l'utilisateur
*/
updateProfile: async (id: string, data: object) => {
const response = await apiClient.put<User>(`/users/${id}`, data, {
validateSchema: userSchema,
} as any);
const config: RequestConfigWithSchema = { validateSchema: userSchema };
const response = await apiClient.put<User>(`/users/${id}`, data, config);
return { profile: response.data };
},
@ -75,16 +76,24 @@ export const userService = {
// apiClient unwrap le format { success, data }
// Pour les listes, le backend retourne souvent { list: [], pagination: {} }
// Mais PaginatedResponse attend { items: [], ... }
const data = response.data as any;
const items = data.list || data.items || [];
const data = response.data as {
list?: User[];
items?: User[];
pagination?: { total: number; page: number; limit: number; total_pages: number };
total?: number;
page?: number;
limit?: number;
total_pages?: number;
};
const items = data.list ?? data.items ?? [];
return {
users: items,
pagination: data.pagination || {
total: data.total || items.length,
page: data.page || params?.page || 1,
limit: data.limit || params?.limit || 10,
total_pages: data.total_pages || 1,
pagination: data.pagination ?? {
total: data.total ?? items.length,
page: data.page ?? params?.page ?? 1,
limit: data.limit ?? params?.limit ?? 10,
total_pages: data.total_pages ?? 1,
},
};
},
@ -100,7 +109,7 @@ export const userService = {
* Récupère les paramètres de l'utilisateur connecté
*/
getSettings: async () => {
const response = await apiClient.get<any>('/users/settings');
const response = await apiClient.get<Record<string, unknown>>('/users/settings');
return response.data;
},
@ -108,7 +117,7 @@ export const userService = {
* Met à jour les paramètres de l'utilisateur connecté
*/
updateSettings: async (data: object) => {
const response = await apiClient.put<any>('/users/settings', data);
const response = await apiClient.put<Record<string, unknown>>('/users/settings', data);
return response.data;
},

View file

@ -18,7 +18,7 @@ export type StoreCreator<T> = StateCreator<T, [], [], T>;
*/
export type StoreState<T> = Omit<
T,
{ [K in keyof T]: T[K] extends (...args: any[]) => any ? K : never }[keyof T]
{ [K in keyof T]: T[K] extends (...args: unknown[]) => unknown ? K : never }[keyof T]
>;
/**
@ -26,5 +26,5 @@ export type StoreState<T> = Omit<
*/
export type StoreActions<T> = Pick<
T,
{ [K in keyof T]: T[K] extends (...args: any[]) => any ? K : never }[keyof T]
{ [K in keyof T]: T[K] extends (...args: unknown[]) => unknown ? K : never }[keyof T]
>;

View file

@ -386,11 +386,12 @@ export type OutgoingWebSocketMessage =
export function isIncomingWebSocketMessage(
message: unknown,
): message is IncomingWebSocketMessage {
const obj = message as Record<string, unknown>;
return (
typeof message === 'object' &&
message !== null &&
'type' in message &&
typeof (message as any).type === 'string'
typeof obj.type === 'string'
);
}
@ -400,11 +401,12 @@ export function isIncomingWebSocketMessage(
export function isOutgoingWebSocketMessage(
message: unknown,
): message is OutgoingWebSocketMessage {
const obj = message as Record<string, unknown>;
return (
typeof message === 'object' &&
message !== null &&
'type' in message &&
typeof (message as any).type === 'string'
typeof obj.type === 'string'
);
}

View file

@ -58,21 +58,23 @@ export function parseApiError(error: unknown): ApiError {
// Type guard helpers locaux
const isStandardError = (data: unknown): data is StandardErrorResponse => {
const obj = data as Record<string, unknown>;
return (
typeof data === 'object' &&
data !== null &&
'success' in data &&
(data as any).success === false &&
obj.success === false &&
'error' in data
);
};
const isNestedError = (data: unknown): data is NestedErrorResponse => {
const obj = data as Record<string, unknown>;
return (
typeof data === 'object' &&
data !== null &&
'error' in data &&
typeof (data as any).error === 'object'
typeof obj.error === 'object'
);
};
@ -309,13 +311,14 @@ function normalizeDetails(details: unknown): ErrorDetail[] | undefined {
// Filtrer pour ne garder que les objets valides qui ressemblent à des ErrorDetail
const validDetails = details.filter((item): item is ErrorDetail => {
const obj = item as Record<string, unknown>;
return (
typeof item === 'object' &&
item !== null &&
'field' in item &&
'message' in item &&
typeof (item as any).field === 'string' &&
typeof (item as any).message === 'string'
typeof obj.field === 'string' &&
typeof obj.message === 'string'
);
});
@ -325,13 +328,13 @@ function normalizeDetails(details: unknown): ErrorDetail[] | undefined {
/**
* Normalise le contexte d'une erreur
*/
function normalizeContext(context: unknown): Record<string, any> | undefined {
function normalizeContext(context: unknown): Record<string, unknown> | undefined {
if (
typeof context === 'object' &&
context !== null &&
!Array.isArray(context)
) {
return context as Record<string, any>;
return context as Record<string, unknown>;
}
return undefined;
}
@ -555,13 +558,14 @@ export function getValidationErrors(error: ApiError): Record<string, string> {
* Vérifie si une erreur est une ApiError
*/
function isApiError(error: unknown): error is ApiError {
const obj = error as Record<string, unknown>;
return (
typeof error === 'object' &&
error !== null &&
'code' in error &&
'message' in error &&
typeof (error as any).code === 'number' &&
typeof (error as any).message === 'string'
typeof obj.code === 'number' &&
typeof obj.message === 'string'
);
}
@ -569,10 +573,11 @@ function isApiError(error: unknown): error is ApiError {
* Vérifie si une erreur est une AxiosError
*/
function isAxiosError(error: unknown): error is AxiosError {
const obj = error as Record<string, unknown>;
return (
typeof error === 'object' &&
error !== null &&
'isAxiosError' in error &&
(error as any).isAxiosError === true
obj.isAxiosError === true
);
}

View file

@ -24,8 +24,22 @@ import { logger } from './logger';
// quand ce fichier est évalué
const toastModulePromise = import('react-hot-toast');
type ToastDefault = typeof import('react-hot-toast').default;
type ToastModule = { default: ToastDefault };
// Stub API shape used when module fails or is not yet loaded
interface ToastStub {
success: (...args: unknown[]) => void;
error: (...args: unknown[]) => void;
loading: (...args: unknown[]) => void;
custom: (...args: unknown[]) => unknown;
dismiss: (...args: unknown[]) => void;
remove: (...args: unknown[]) => void;
promise: (...args: unknown[]) => Promise<unknown>;
}
// Cache pour le module une fois chargé
let toastModule: typeof import('react-hot-toast') | null = null;
let toastModule: ToastModule | null = null;
let isResolved = false;
// Charger le module et le mettre en cache immédiatement
@ -37,81 +51,74 @@ toastModulePromise.then((mod) => {
// Ignorer les erreurs de chargement
});
const createNoopStub = (): ToastStub => ({
success: () => {},
error: () => {},
loading: () => {},
custom: () => undefined,
dismiss: () => {},
remove: () => {},
promise: () => Promise.resolve(),
});
const createDeferredStub = (): ToastStub => ({
success: (...args: unknown[]) => {
toastModulePromise.then((mod) => (mod.default.success as (...a: unknown[]) => void)(...args));
},
error: (...args: unknown[]) => {
toastModulePromise.then((mod) => (mod.default.error as (...a: unknown[]) => void)(...args));
},
loading: (...args: unknown[]) => {
toastModulePromise.then((mod) => (mod.default.loading as (...a: unknown[]) => void)(...args));
},
custom: (...args: unknown[]) =>
toastModulePromise.then((mod) => (mod.default.custom as (...a: unknown[]) => unknown)(...args)),
dismiss: (...args: unknown[]) => {
toastModulePromise.then((mod) => (mod.default.dismiss as (...a: unknown[]) => void)(...args));
},
remove: (...args: unknown[]) => {
toastModulePromise.then((mod) => (mod.default.remove as (...a: unknown[]) => void)(...args));
},
promise: (...args: unknown[]) =>
toastModulePromise.then((mod) => (mod.default.promise as (...a: unknown[]) => Promise<unknown>)(...args)),
});
/**
* Récupère le module toast de manière synchrone
* Le module devrait être déjà chargé grâce au préchargement dans main.tsx
*/
function getToastModuleSync() {
// Attendre que le module soit chargé (bloquant mais très rapide)
// En pratique, le module sera déjà chargé car il est préchargé dans main.tsx
function getToastModuleSync(): ToastDefault | ToastStub {
if (!toastModule && isResolved) {
// Le module a échoué à charger, retourner un stub
logger.error('Toast module failed to load');
return {
success: () => { },
error: () => { },
loading: () => { },
custom: () => { },
dismiss: () => { },
remove: () => { },
promise: () => Promise.resolve(),
} as any;
return createNoopStub();
}
if (toastModule) {
return toastModule.default;
}
// Si le module n'est pas encore chargé, retourner un stub temporaire
// qui sera remplacé une fois le module chargé
return {
success: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.success as any)(...args));
},
error: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.error as any)(...args));
},
loading: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.loading as any)(...args));
},
custom: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.custom as any)(...args));
},
dismiss: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.dismiss as any)(...args));
},
remove: (...args: any[]) => {
toastModulePromise.then((mod) => (mod.default.remove as any)(...args));
},
promise: (...args: any[]) => {
return toastModulePromise.then((mod) => (mod.default.promise as any)(...args));
},
} as any;
return createDeferredStub();
}
// Créer un proxy qui délègue toutes les méthodes à react-hot-toast
const toast = new Proxy({} as typeof import('react-hot-toast').default, {
const toast = new Proxy({} as ToastDefault, {
get(_target, prop) {
const toastFn = getToastModuleSync() as any;
if (prop in toastFn) {
const method = toastFn[prop];
const toastFn = getToastModuleSync();
const method = (toastFn as Record<string, unknown>)[prop as string];
if (typeof method === 'function') {
return method.bind(toastFn);
}
return method;
}
return undefined;
},
apply(_target, _thisArg, args) {
apply(_target, _thisArg, args: unknown[]) {
const toastFn = getToastModuleSync();
if (typeof toastFn === 'function') {
return toastFn(...args);
return (toastFn as (...a: unknown[]) => unknown)(...args);
}
// Si ce n'est pas une fonction, essayer d'appeler via le module
return toastModulePromise.then((mod) => (mod.default as any)(...args));
return toastModulePromise.then((mod) =>
(mod.default as (...a: unknown[]) => unknown)(...args)
);
},
}) as typeof import('react-hot-toast').default;
}) as ToastDefault;
export default toast;

View file

@ -23,7 +23,12 @@ import type { ApiError } from '@/schemas/apiSchemas';
/**
* Type guard for User
*/
function hasStringProps(value: Record<string, unknown>, props: string[]): boolean {
return props.every((p) => typeof value[p] === 'string');
}
export function isUser(value: unknown): value is User {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -31,10 +36,7 @@ export function isUser(value: unknown): value is User {
'username' in value &&
'email' in value &&
'role' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).username === 'string' &&
typeof (value as any).email === 'string' &&
typeof (value as any).role === 'string'
hasStringProps(obj, ['id', 'username', 'email', 'role'])
);
}
@ -42,6 +44,7 @@ export function isUser(value: unknown): value is User {
* Type guard for Track
*/
export function isTrack(value: unknown): value is Track {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -49,10 +52,8 @@ export function isTrack(value: unknown): value is Track {
'title' in value &&
'artist' in value &&
'duration' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).title === 'string' &&
typeof (value as any).artist === 'string' &&
typeof (value as any).duration === 'number'
hasStringProps(obj, ['id', 'title', 'artist']) &&
typeof obj.duration === 'number'
);
}
@ -60,6 +61,7 @@ export function isTrack(value: unknown): value is Track {
* Type guard for Playlist
*/
export function isPlaylist(value: unknown): value is Playlist {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -67,10 +69,8 @@ export function isPlaylist(value: unknown): value is Playlist {
'user_id' in value &&
'title' in value &&
'is_public' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).user_id === 'string' &&
typeof (value as any).title === 'string' &&
typeof (value as any).is_public === 'boolean'
hasStringProps(obj, ['id', 'user_id', 'title']) &&
typeof obj.is_public === 'boolean'
);
}
@ -78,6 +78,7 @@ export function isPlaylist(value: unknown): value is Playlist {
* Type guard for Conversation
*/
export function isConversation(value: unknown): value is Conversation {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -85,10 +86,7 @@ export function isConversation(value: unknown): value is Conversation {
'name' in value &&
'type' in value &&
'creator_id' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).name === 'string' &&
typeof (value as any).type === 'string' &&
typeof (value as any).creator_id === 'string'
hasStringProps(obj, ['id', 'name', 'type', 'creator_id'])
);
}
@ -96,6 +94,7 @@ export function isConversation(value: unknown): value is Conversation {
* Type guard for Message
*/
export function isMessage(value: unknown): value is Message {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -103,10 +102,7 @@ export function isMessage(value: unknown): value is Message {
'conversation_id' in value &&
'sender_id' in value &&
'content' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).conversation_id === 'string' &&
typeof (value as any).sender_id === 'string' &&
typeof (value as any).content === 'string'
hasStringProps(obj, ['id', 'conversation_id', 'sender_id', 'content'])
);
}
@ -114,6 +110,7 @@ export function isMessage(value: unknown): value is Message {
* Type guard for Session
*/
export function isSession(value: unknown): value is Session {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -122,11 +119,7 @@ export function isSession(value: unknown): value is Session {
'ip_address' in value &&
'user_agent' in value &&
'expires_at' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).user_id === 'string' &&
typeof (value as any).ip_address === 'string' &&
typeof (value as any).user_agent === 'string' &&
typeof (value as any).expires_at === 'string'
hasStringProps(obj, ['id', 'user_id', 'ip_address', 'user_agent', 'expires_at'])
);
}
@ -134,6 +127,7 @@ export function isSession(value: unknown): value is Session {
* Type guard for AuditLog
*/
export function isAuditLog(value: unknown): value is AuditLog {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -141,10 +135,7 @@ export function isAuditLog(value: unknown): value is AuditLog {
'action' in value &&
'resource' in value &&
'timestamp' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).action === 'string' &&
typeof (value as any).resource === 'string' &&
typeof (value as any).timestamp === 'string'
hasStringProps(obj, ['id', 'action', 'resource', 'timestamp'])
);
}
@ -152,6 +143,7 @@ export function isAuditLog(value: unknown): value is AuditLog {
* Type guard for Notification
*/
export function isNotification(value: unknown): value is Notification {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -160,11 +152,8 @@ export function isNotification(value: unknown): value is Notification {
'type' in value &&
'content' in value &&
'read' in value &&
typeof (value as any).id === 'string' &&
typeof (value as any).user_id === 'string' &&
typeof (value as any).type === 'string' &&
typeof (value as any).content === 'string' &&
typeof (value as any).read === 'boolean'
hasStringProps(obj, ['id', 'user_id', 'type', 'content']) &&
typeof obj.read === 'boolean'
);
}
@ -172,15 +161,16 @@ export function isNotification(value: unknown): value is Notification {
* Type guard for ApiError
*/
export function isApiError(value: unknown): value is ApiError {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
'code' in value &&
'message' in value &&
'timestamp' in value &&
typeof (value as any).code === 'number' &&
typeof (value as any).message === 'string' &&
typeof (value as any).timestamp === 'string'
typeof obj.code === 'number' &&
typeof obj.message === 'string' &&
typeof obj.timestamp === 'string'
);
}
@ -190,11 +180,12 @@ export function isApiError(value: unknown): value is ApiError {
export function isApiResponse<T = unknown>(
value: unknown,
): value is ApiResponse<T> {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
'success' in value &&
typeof (value as any).success === 'boolean'
typeof obj.success === 'boolean'
);
}
@ -202,6 +193,7 @@ export function isApiResponse<T = unknown>(
* Type guard for PaginationData
*/
export function isPaginationData(value: unknown): value is PaginationData {
const obj = value as Record<string, unknown>;
return (
typeof value === 'object' &&
value !== null &&
@ -211,12 +203,12 @@ export function isPaginationData(value: unknown): value is PaginationData {
'total_pages' in value &&
'has_next' in value &&
'has_prev' in value &&
typeof (value as any).page === 'number' &&
typeof (value as any).limit === 'number' &&
typeof (value as any).total === 'number' &&
typeof (value as any).total_pages === 'number' &&
typeof (value as any).has_next === 'boolean' &&
typeof (value as any).has_prev === 'boolean'
typeof obj.page === 'number' &&
typeof obj.limit === 'number' &&
typeof obj.total === 'number' &&
typeof obj.total_pages === 'number' &&
typeof obj.has_next === 'boolean' &&
typeof obj.has_prev === 'boolean'
);
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB

After

Width:  |  Height:  |  Size: 465 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 137 KiB

After

Width:  |  Height:  |  Size: 546 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

After

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 137 KiB

After

Width:  |  Height:  |  Size: 469 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 137 KiB

After

Width:  |  Height:  |  Size: 546 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.6 KiB

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

After

Width:  |  Height:  |  Size: 477 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

After

Width:  |  Height:  |  Size: 546 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 120 KiB

View file

@ -16,6 +16,7 @@ Index de la documentation principale du monorepo.
## Développement
- **[Remote Dev R720](REMOTE_DEV_R720.md)** — Développement sur serveur R720 via Cursor Remote-SSH
- **[Scope v0.201](V0_201_RELEASE_SCOPE.md)** — Périmètre de la version courante (référence prioritaire)
- **[État du projet](PROJECT_STATE.md)** — Où en sommes-nous, prochaines étapes
- **[Contrôle du scope](SCOPE_CONTROL.md)** — Processus anti-scope-creep

176
docs/REMOTE_DEV_R720.md Normal file
View file

@ -0,0 +1,176 @@
# Développement Veza sur R720 (Remote-SSH)
Guide pour développer Veza sur le serveur Dell PowerEdge R720 depuis un laptop via Cursor Remote-SSH. Le laptop devient un terminal léger ; tout le compute (Docker, tests, builds) tourne sur le R720.
## Prérequis
- R720 initialisé selon le guide (Debian 13, WireGuard, nftables, Suricata, Incus)
- Cursor installé sur le laptop
- Connexion SSH vers le R720 (`Host r720` dans `~/.ssh/config`)
## Phase 1 — Installation sur le R720
### 1.1 Docker
```bash
# Sur le R720 (en SSH)
sudo apt update && sudo apt install -y docker.io docker-compose-plugin
sudo usermod -aG docker senke
# Reconnecter pour activer le groupe : exit && ssh r720
```
### 1.2 Outils de développement
- **Node.js** (20+) : `nvm` ou `sudo apt install nodejs npm`
- **Go** (1.22+) : `sudo apt install golang-go` ou [go.dev/dl](https://go.dev/dl)
- **Rust** : `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh`
- **Optionnel** : `air` (hot reload Go), `cargo-watch` (hot reload Rust)
### 1.3 Cloner le projet
```bash
mkdir -p ~/git
cd ~/git
git clone <url-du-repo> veza
cd veza
```
## Phase 2 — Configuration des variables d'environnement
### 2.1 Racine du projet
```bash
cp env.remote-r720.example .env
```
Le fichier `env.remote-r720.example` contient `APP_DOMAIN=localhost` et les URLs localhost pour le port forwarding Cursor.
### 2.2 Frontend (apps/web)
```bash
cp apps/web/env.remote-r720.example apps/web/.env
# ou apps/web/.env.local
```
### 2.3 Backend (veza-backend-api)
Le backend charge son `.env` depuis `veza-backend-api/`. Créez-le avec les mêmes valeurs :
```bash
cat > veza-backend-api/.env << 'EOF'
APP_DOMAIN=localhost
DATABASE_URL=postgres://veza:devpassword@localhost:15432/veza?sslmode=disable
REDIS_URL=redis://localhost:16379
RABBITMQ_URL=amqp://veza:devpassword@localhost:15672/
CORS_ALLOWED_ORIGINS=http://localhost:5173,http://localhost:3000
JWT_SECRET=dev-secret-key-minimum-32-characters-long
EOF
```
### 2.4 Stream server (veza-stream-server)
Si vous lancez le stream server en local, créez `veza-stream-server/.env` avec :
```
DATABASE_URL=postgres://veza:devpassword@localhost:15432/veza?sslmode=disable
REDIS_URL=redis://localhost:16379
```
## Phase 3 — Connexion Cursor Remote-SSH
### 3.1 SSH config (laptop)
Vérifiez `~/.ssh/config` :
```
Host r720
HostName 192.168.0.102
User senke
IdentityFile ~/.ssh/r720
ServerAliveInterval 60
```
### 3.2 Accès distant (hors LAN)
- **Sur le LAN** : SSH vers `192.168.0.102`
- **Hors LAN** : activez WireGuard sur le laptop, puis SSH. Si le VPN ne route pas `192.168.0.0/24`, ajoutez cette plage dans `AllowedIPs` du client WireGuard.
### 3.3 Connexion Cursor
1. `Ctrl+Shift+P` → "Remote-SSH: Connect to Host"
2. Choisir `r720`
3. Ouvrir le dossier `/home/senke/git/veza` (ou le chemin réel du clone)
## Phase 4 — Workflow de développement
### 4.1 Démarrage
```bash
# 1. Infra Docker (postgres, redis, rabbitmq, clamav, minio)
make infra-up-dev
# 2. Migrations (si nécessaire)
make db-migrate
# 3. Lancer le dev complet (backend + stream + web avec hot reload)
make dev-full
```
Ou par service :
```bash
make infra-up-dev
make dev-web # frontend seul
make dev-backend-api # backend Go seul
make dev-stream-server # stream Rust seul
```
### 4.2 Port forwarding Cursor
Cursor détecte généralement les serveurs (Vite, etc.) et propose le forwarding. Sinon, vérifiez l'onglet **Ports** :
| Port | Service |
|-------|--------------|
| 5173 | Vite (web) |
| 18080 | Backend API |
| 18082 | Stream |
| 15432 | PostgreSQL |
| 16379 | Redis |
| 25672 | RabbitMQ UI |
| 6006 | Storybook |
### 4.3 Accès depuis le laptop
- Frontend : http://localhost:5173
- API : http://localhost:18080
- RabbitMQ UI : http://localhost:25672
## Phase 5 — Tests
```bash
# Tous les tests (infra doit être up pour les tests backend)
make test
# Par service
make test-backend-api
make test-stream-server
make test-web
```
## Points d'attention
1. **`/etc/hosts`** : Avec `APP_DOMAIN=localhost`, pas besoin de `veza.fr` dans `/etc/hosts` pour le dev remote.
2. **Storybook** : `npm run storybook` dans `apps/web` ; forwarder le port 6006.
3. **Performances** : Édition et indexation via SSH peuvent être plus lentes ; un lien réseau stable (LAN ou VPN) est important.
4. **Incus vs Docker** : Docker pour le dev (postgres, redis, etc.) ; Incus reste pour la prod (veza-api, veza-front, etc. dans net-veza).
## Ordre d'exécution recommandé
1. Installer Docker + outils (Node, Go, Rust) sur le R720
2. Cloner le repo dans `~/git/veza`
3. Créer les `.env` : `cp env.remote-r720.example .env` et `cp apps/web/env.remote-r720.example apps/web/.env`
4. Créer `veza-backend-api/.env` (voir Phase 2.3)
5. Tester : `make infra-up-dev` puis `make dev-web`
6. Se connecter en Remote-SSH avec Cursor et ouvrir le dossier
7. Vérifier le port forwarding et l'accès à http://localhost:5173
8. Lancer `make dev-full` et valider le flux complet

View file

@ -0,0 +1,31 @@
# ADR-001: Choix de Go pour le Backend API
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Besoin d'un langage performant, typé, avec bonne concurrence pour API REST haute charge.
## Décision
Utiliser Go 1.23+ avec framework Gin pour le Backend API.
## Conséquences
**Positives**:
- Compilation rapide, binaire unique
- Goroutines pour concurrence
- Typage fort, pas de runtime errors
- Excellent pour microservices
**Négatives**:
- Verbosité du code (error handling)
- Écosystème moins riche que Node.js
## Alternatives rejetées
- **Node.js**: Single-threaded, performance inférieure
- **Python**: GIL, performance médiocre pour API haute charge
- **Java**: Trop lourd, démarrage lent, complexité

View file

@ -0,0 +1,29 @@
# ADR-002: Choix de Go pour le Chat Server
**Date**: 2025-01-01 (révisé 2026-03-04)
**Status**: Accepted (révisé)
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Le chat nécessite des WebSocket performantes mais pas la performance critique du streaming audio. Rust était initialement prévu pour tous les services temps réel, mais la complexité de développement et de maintenance n'est pas justifiée pour le chat.
## Décision
Utiliser Go 1.23+ avec gorilla/websocket pour le Chat Server. Rust reste pour le Stream Server uniquement.
## Conséquences
**Positives**:
- Cohérence stack avec le Backend API (Go + Go)
- Goroutines suffisantes pour les WebSocket chat
- Recrutement et maintenance plus simples
- Temps de compilation rapide
**Négatives**:
- GC pauses possibles (acceptable pour le chat, non critique)
## Alternatives rejetées
- **Rust (Axum)**: surengineering pour le chat, complexité excessive
- **Node.js**: single-threaded, performance inférieure

View file

@ -0,0 +1,24 @@
# ADR-002b: Choix de Rust pour le Stream Server
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Le streaming audio nécessite une performance native pour le transcoding, le traitement audio et le HLS.
## Décision
Utiliser Rust 1.75+ avec Axum + Tokio pour le Stream Server uniquement.
## Conséquences
**Positives**:
- Zero-cost abstractions pour le traitement audio
- Sécurité mémoire garantie
- Performance native (niveau C/C++)
**Négatives**:
- Courbe d'apprentissage raide
- Temps de compilation long

View file

@ -0,0 +1,31 @@
# ADR-003: Choix de PostgreSQL comme Base Principale
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Besoin ACID, relations complexes, performance pour la plateforme Veza.
## Décision
PostgreSQL 15+ comme base principale.
## Conséquences
**Positives**:
- ACID complet
- Relations complexes (foreign keys, joins)
- Full-text search intégré
- JSON/JSONB pour flexibilité
- Extensions (pgcrypto, pg_trgm, etc.)
**Négatives**:
- Scaling horizontal complexe
## Alternatives rejetées
- **MySQL**: Moins de fonctionnalités avancées
- **MongoDB**: Pas ACID, relations difficiles
- **CockroachDB**: Trop jeune, écosystème limité

View file

@ -0,0 +1,31 @@
# ADR-004: Architecture Microservices Modulaire
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Nombreuses features réparties sur 18 domaines métier, équipe multiple, scalabilité requise.
## Décision
Architecture microservices avec 3 services principaux (API, Chat, Stream).
## Conséquences
**Positives**:
- Scalabilité indépendante
- Technologies différentes par service
- Isolation des pannes
- Déploiements indépendants
**Négatives**:
- Complexité opérationnelle
- Transactions distribuées complexes
## Alternatives rejetées
- **Monolithe**: Pas scalable, déploiements risqués
- **Serverless**: Vendor lock-in, cold starts
- **Microservices complets** (20+ services): Trop complexe au démarrage

30
docs/adr/ADR-005-rest.md Normal file
View file

@ -0,0 +1,30 @@
# ADR-005: REST pour Communication Inter-Services
**Date**: 2025-01-01 (révisé 2026-03-04)
**Status**: Accepted (révisé)
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Avec 3 services (API, Chat, Stream), la complexité de gRPC n'est pas justifiée. REST est suffisant et simplifie le développement.
## Décision
REST (JSON over HTTP/2) pour toutes les communications : inter-services et clients externes.
## Conséquences
**Positives**:
- Simplicité de développement et debugging
- Outillage universel (curl, Postman, navigateur)
- OpenAPI pour documentation et génération de clients
- Un seul protocole à maîtriser
**Négatives**:
- Overhead JSON vs protobuf (acceptable pour 3 services)
## Alternatives rejetées
- **gRPC**: surengineering pour 3 services, debugging difficile
- **GraphQL**: trop complexe pour inter-service
- **Message Queue pure**: latence, complexité

31
docs/adr/ADR-006-redis.md Normal file
View file

@ -0,0 +1,31 @@
# ADR-006: Redis pour Cache et Sessions
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Besoin cache in-memory ultra-rapide + pub/sub pour la plateforme.
## Décision
Redis 7+ Cluster.
## Conséquences
**Positives**:
- Performance exceptionnelle (<1ms)
- Pub/sub intégré
- Structures de données riches
- Cluster mode (scaling horizontal)
**Négatives**:
- Volatilité (RAM)
- Coût (RAM expensive)
## Alternatives rejetées
- **Memcached**: Moins de fonctionnalités
- **In-memory applicatif**: Pas partagé entre instances
- **Hazelcast**: Trop complexe, Java-centric

View file

@ -0,0 +1,31 @@
# ADR-007: RabbitMQ pour Message Queue
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Événements asynchrones, découplage services.
## Décision
RabbitMQ 3.12+ avec AMQP.
## Conséquences
**Positives**:
- Mature, stable
- Routing flexible (exchanges, queues)
- Garanties de livraison
- Management UI
**Négatives**:
- Throughput inférieur à Kafka
- Persistence moins optimale que Kafka
## Alternatives rejetées
- **Kafka**: Over-engineering pour début, complexité
- **AWS SQS**: Vendor lock-in
- **NATS**: Moins mature pour persistence

31
docs/adr/ADR-008-react.md Normal file
View file

@ -0,0 +1,31 @@
# ADR-008: React avec TypeScript pour Frontend
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
UI complexe, typage strict, écosystème riche requis pour le frontend Veza.
## Décision
React 18+ avec TypeScript 5.3+ strict.
## Conséquences
**Positives**:
- Écosystème immense
- Typage strict (moins d'erreurs runtime)
- Performance (Concurrent Mode)
- Communauté énorme
**Négatives**:
- Bundle size important
- Complexité state management
## Alternatives rejetées
- **Vue.js**: Écosystème plus petit
- **Svelte**: Moins mature, écosystème limité
- **Angular**: Trop lourd, opinionated

30
docs/adr/ADR-009-vite.md Normal file
View file

@ -0,0 +1,30 @@
# ADR-009: Vite comme Build Tool Frontend
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Build rapide, HMR performant pour le frontend.
## Décision
Vite 7+ au lieu de Webpack.
## Conséquences
**Positives**:
- Build ultra-rapide (ESBuild)
- HMR instantané
- Configuration simple
- Support natif TypeScript
**Négatives**:
- Écosystème moins mature que Webpack
## Alternatives rejetées
- **Webpack**: Lent, configuration complexe
- **Parcel**: Moins performant que Vite
- **Rollup**: Moins de fonctionnalités DX

View file

@ -0,0 +1,31 @@
# ADR-010: Docker pour Conteneurisation
**Date**: 2025-01-01
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Déploiement consistant multi-environnements.
## Décision
Docker 24+ avec multi-stage builds.
## Conséquences
**Positives**:
- Portabilité totale
- Isolation
- Écosystème mature
- CI/CD intégré
**Négatives**:
- Overhead léger (performance)
- Sécurité (root privileges)
## Alternatives rejetées
- **VMs**: Trop lourd, lent
- **Bare metal**: Pas portable
- **Podman**: Moins mature

View file

@ -0,0 +1,30 @@
# ADR-011: Hyperswitch pour les Paiements
**Date**: 2026-03-04
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
Le projet nécessite un système de paiement pour la marketplace. Un vendor lock-in sur un seul PSP (Stripe) limite la flexibilité et augmente les coûts.
## Décision
Utiliser Hyperswitch, agrégateur de paiement open source, comme couche d'abstraction au-dessus des PSP.
## Conséquences
**Positives**:
- Multi-PSP (Stripe, Adyen, PayPal) sans changement de code
- Open source, auditable
- Pas de vendor lock-in
- Smart routing entre PSP
**Négatives**:
- Hébergement et maintenance de l'instance Hyperswitch
- Moins de documentation que l'intégration Stripe directe
## Alternatives rejetées
- **Stripe direct**: vendor lock-in, commissions non négociables
- **Développement interne**: trop risqué pour conformité PCI-DSS

View file

@ -0,0 +1,30 @@
# ADR-012: Elasticsearch pour la Recherche (pas de ML)
**Date**: 2026-03-04
**Status**: Accepted
**Source**: ORIGIN_MASTER_ARCHITECTURE.md
## Contexte
La recherche et la découverte doivent être transparentes et auditables. Les systèmes de recommandation ML sont incompatibles avec les principes éthiques du projet.
## Décision
Utiliser Elasticsearch pour la recherche fulltext avec algorithmes déterministes (BM25). La découverte repose sur des règles, la curation humaine et le graphe social.
## Conséquences
**Positives**:
- Algorithme de scoring transparent et documenté
- Résultats reproductibles et auditables
- Pas de profilage utilisateur
- Infrastructure mature et éprouvée
**Négatives**:
- Pas de personnalisation automatique (choix éthique assumé)
## Alternatives rejetées
- **Algolia**: propriétaire, boîte noire
- **ML/embeddings**: incompatible avec principes éthiques
- **PostgreSQL GIN seul**: performance insuffisante à grande échelle

View file

@ -1,8 +1,28 @@
# Architecture Decision Records
## ORIGIN Architecture (v0.9.8)
| ADR | Title | Status |
|-----|-------|--------|
| [ADR-001](ADR-001-rust-services.md) | Go + Rust + React Architecture | Accepted |
| [ADR-002](ADR-002-chat-server.md) | Chat Server Rust → Go | Accepted |
| [ADR-003](ADR-003-redis-sentinel-postponed.md) | Redis Sentinel postponed to v1.1 | Accepted |
| [ADR-004](ADR-004-pkce-oauth.md) | PKCE for OAuth (v0.902) | Accepted |
| [ADR-001](ADR-001-go-backend-api.md) | Go pour Backend API | Accepted |
| [ADR-002](ADR-002-chat-server-go.md) | Chat Server Go | Accepted |
| [ADR-002b](ADR-002b-stream-server-rust.md) | Stream Server Rust | Accepted |
| [ADR-003](ADR-003-postgresql.md) | PostgreSQL | Accepted |
| [ADR-004](ADR-004-microservices.md) | Microservices | Accepted |
| [ADR-005](ADR-005-rest.md) | REST | Accepted |
| [ADR-006](ADR-006-redis.md) | Redis Cache et Sessions | Accepted |
| [ADR-007](ADR-007-rabbitmq.md) | RabbitMQ Message Queue | Accepted |
| [ADR-008](ADR-008-react.md) | React + TypeScript Frontend | Accepted |
| [ADR-009](ADR-009-vite.md) | Vite Build Tool | Accepted |
| [ADR-010](ADR-010-docker.md) | Docker Conteneurisation | Accepted |
| [ADR-011](ADR-011-hyperswitch.md) | Hyperswitch Paiements | Accepted |
| [ADR-012](ADR-012-elasticsearch.md) | Elasticsearch Recherche | Accepted |
## Project-specific ADRs (legacy)
| File | Title | Status |
|------|-------|--------|
| [ADR-001-rust-services](ADR-001-rust-services.md) | Go + Rust + React Architecture | Accepted |
| [ADR-002-chat-server](ADR-002-chat-server.md) | Chat Server Rust → Go | Accepted |
| [ADR-003-redis-sentinel-postponed](ADR-003-redis-sentinel-postponed.md) | Redis Sentinel postponed to v1.1 | Accepted |
| [ADR-004-pkce-oauth](ADR-004-pkce-oauth.md) | PKCE for OAuth (v0.902) | Accepted |

42
env.remote-r720.example Normal file
View file

@ -0,0 +1,42 @@
# =============================================================================
# VEZA - Remote development on R720 (Cursor Remote-SSH + port forwarding)
# =============================================================================
# Copy to .env when developing on the R720:
# cp env.remote-r720.example .env
#
# With Cursor Remote-SSH, the browser on your laptop accesses localhost:5173,
# localhost:18080, etc. — Cursor forwards these to the R720. Use localhost
# as APP_DOMAIN so CORS and URLs work correctly.
# =============================================================================
# Domain: localhost (browser on laptop uses port forwarding)
APP_DOMAIN=localhost
# Docker Compose - host port mappings (unchanged)
PORT_POSTGRES=15432
PORT_REDIS=16379
PORT_RABBITMQ_AMQP=15672
PORT_RABBITMQ_MGMT=25672
PORT_BACKEND=18080
PORT_STREAM=18082
PORT_WEB=5173
# Database (backend on R720 host connects to Docker containers)
DB_USER=veza
DB_PASS=devpassword
DB_NAME=veza
DB_HOST=localhost
# Connection strings (host → Docker containers)
DATABASE_URL=postgres://veza:devpassword@localhost:15432/veza?sslmode=disable
REDIS_URL=redis://localhost:16379
RABBITMQ_URL=amqp://veza:devpassword@localhost:15672/
# Frontend URL (OAuth redirect, etc.)
FRONTEND_URL=http://localhost:5173
# CORS: must include localhost (browser origin via port forwarding)
CORS_ALLOWED_ORIGINS=http://localhost:5173,http://localhost:3000
# JWT (dev only)
JWT_SECRET=dev-secret-key-minimum-32-characters-long

839
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -9,6 +9,7 @@ import (
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"go.uber.org/zap"
"veza-backend-api/internal/config"
@ -368,9 +369,23 @@ func (r *APIRouter) setupChatWebSocket(router *gin.RouterGroup) {
router.GET("/ws", wsHandler.HandleWebSocket)
// v0.10.5 F551: Inject chat hub into notification service for real-time delivery
if r.notificationService != nil {
r.notificationService.SetWSNotifier(&chatHubNotifierAdapter{hub: hub})
}
r.logger.Info("Chat WebSocket endpoint registered at /api/v1/ws")
}
// chatHubNotifierAdapter adapts chatws.Hub to services.NotificationWSNotifier (F551)
type chatHubNotifierAdapter struct {
hub *chatws.Hub
}
func (a *chatHubNotifierAdapter) NotifyUser(userID uuid.UUID, payload []byte) {
a.hub.SendToUser(userID, payload)
}
// setupChatRoutes configure les routes de chat
func (r *APIRouter) setupChatRoutes(router *gin.RouterGroup) {
// BE-API-006: Use NewChatServiceWithDB to enable stats functionality

View file

@ -2,6 +2,7 @@ package handlers
import (
"net/http"
"strconv"
apperrors "veza-backend-api/internal/errors"
"veza-backend-api/internal/services"
@ -15,14 +16,14 @@ var NotificationHandlersInstance *NotificationHandlers
// NotificationServiceInterface defines the interface for notification operations
// This allows for easier testing with mocks
type NotificationServiceInterface interface {
GetNotifications(userID uuid.UUID, unreadOnly bool) ([]services.Notification, error)
GetNotifications(userID uuid.UUID, params services.GetNotificationsParams) (*services.GetNotificationsResult, error)
MarkAsRead(userID uuid.UUID, notificationID uuid.UUID) error
MarkAllAsRead(userID uuid.UUID) error
GetUnreadCount(userID uuid.UUID) (int, error)
DeleteNotification(userID uuid.UUID, notificationID uuid.UUID) error
DeleteAllNotifications(userID uuid.UUID) error
GetPreferences(userID uuid.UUID) (*services.NotificationPrefs, error)
UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool) error
UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool, quietHoursEnabled *bool, quietHoursStart, quietHoursEnd *string) error
}
type NotificationHandlers struct {
@ -44,8 +45,8 @@ func NewNotificationHandlersWithInterface(notificationService NotificationServic
}
}
// GetNotifications retrieves all notifications for the authenticated user
// GET /api/v1/notifications
// GetNotifications retrieves all notifications for the authenticated user (v0.10.5 F555)
// GET /api/v1/notifications?type=follow|like|comment|...&page=1&limit=20&read=false
// BE-API-016: Implement notifications endpoints
func (nh *NotificationHandlers) GetNotifications(c *gin.Context) {
userID, ok := GetUserIDUUID(c)
@ -54,18 +55,40 @@ func (nh *NotificationHandlers) GetNotifications(c *gin.Context) {
}
read := c.DefaultQuery("read", "")
var unreadOnly bool
if read == "false" {
unreadOnly = true
unreadOnly := read == "false"
typeFilter := c.DefaultQuery("type", "")
page := 1
if p := c.Query("page"); p != "" {
if v, err := strconv.Atoi(p); err == nil && v > 0 {
page = v
}
}
limit := 20
if l := c.Query("limit"); l != "" {
if v, err := strconv.Atoi(l); err == nil && v > 0 && v <= 100 {
limit = v
}
}
notifications, err := nh.notificationService.GetNotifications(userID, unreadOnly)
result, err := nh.notificationService.GetNotifications(userID, services.GetNotificationsParams{
UnreadOnly: unreadOnly,
TypeFilter: typeFilter,
Page: page,
Limit: limit,
})
if err != nil {
RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get notifications", err))
return
}
RespondSuccess(c, http.StatusOK, notifications)
RespondSuccess(c, http.StatusOK, gin.H{
"notifications": result.Notifications,
"total": result.Total,
"page": result.Page,
"limit": result.Limit,
"total_pages": result.TotalPages,
"unread_count": result.UnreadCount,
})
}
// MarkAsRead marks a notification as read
@ -216,16 +239,22 @@ func (nh *NotificationHandlers) GetPreferences(c *gin.Context) {
"push_comment": prefs.PushComment,
"push_message": prefs.PushMessage,
"push_mention": prefs.PushMention,
"quiet_hours_enabled": prefs.QuietHoursEnabled,
"quiet_hours_start": prefs.QuietHoursStart,
"quiet_hours_end": prefs.QuietHoursEnd,
})
}
// UpdatePreferencesRequest is the DTO for updating preferences
// UpdatePreferencesRequest is the DTO for updating preferences (F553: quiet hours)
type UpdatePreferencesRequest struct {
PushFollow *bool `json:"push_follow"`
PushLike *bool `json:"push_like"`
PushComment *bool `json:"push_comment"`
PushMessage *bool `json:"push_message"`
PushMention *bool `json:"push_mention"`
QuietHoursEnabled *bool `json:"quiet_hours_enabled"`
QuietHoursStart *string `json:"quiet_hours_start"` // "22:00"
QuietHoursEnd *string `json:"quiet_hours_end"` // "08:00"
}
// UpdatePreferences updates notification preferences (N1.3)
@ -241,7 +270,8 @@ func (nh *NotificationHandlers) UpdatePreferences(c *gin.Context) {
return
}
if err := nh.notificationService.UpdatePreferences(userID, req.PushFollow, req.PushLike, req.PushComment, req.PushMessage, req.PushMention); err != nil {
if err := nh.notificationService.UpdatePreferences(userID, req.PushFollow, req.PushLike, req.PushComment, req.PushMessage, req.PushMention,
req.QuietHoursEnabled, req.QuietHoursStart, req.QuietHoursEnd); err != nil {
RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to update preferences", err))
return
}

View file

@ -19,12 +19,12 @@ type MockNotificationService struct {
mock.Mock
}
func (m *MockNotificationService) GetNotifications(userID uuid.UUID, unreadOnly bool) ([]services.Notification, error) {
args := m.Called(userID, unreadOnly)
func (m *MockNotificationService) GetNotifications(userID uuid.UUID, params services.GetNotificationsParams) (*services.GetNotificationsResult, error) {
args := m.Called(userID, params)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).([]services.Notification), args.Error(1)
return args.Get(0).(*services.GetNotificationsResult), args.Error(1)
}
func (m *MockNotificationService) MarkAsRead(userID uuid.UUID, notificationID uuid.UUID) error {
@ -60,8 +60,8 @@ func (m *MockNotificationService) GetPreferences(userID uuid.UUID) (*services.No
return args.Get(0).(*services.NotificationPrefs), args.Error(1)
}
func (m *MockNotificationService) UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool) error {
args := m.Called(userID, pushFollow, pushLike, pushComment, pushMessage, pushMention)
func (m *MockNotificationService) UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool, quietHoursEnabled *bool, quietHoursStart, quietHoursEnd *string) error {
args := m.Called(userID, pushFollow, pushLike, pushComment, pushMessage, pushMention, quietHoursEnabled, quietHoursStart, quietHoursEnd)
return args.Error(0)
}
@ -118,7 +118,18 @@ func TestNotificationHandlers_GetNotifications_Success(t *testing.T) {
},
}
mockService.On("GetNotifications", userID, false).Return(expectedNotifications, nil)
mockService.On("GetNotifications", userID, services.GetNotificationsParams{
UnreadOnly: false,
Page: 1,
Limit: 20,
}).Return(&services.GetNotificationsResult{
Notifications: expectedNotifications,
Total: len(expectedNotifications),
Page: 1,
Limit: 20,
TotalPages: 1,
UnreadCount: 1,
}, nil)
// Execute
req, _ := http.NewRequest("GET", "/api/v1/notifications", nil)
@ -148,7 +159,18 @@ func TestNotificationHandlers_GetNotifications_UnreadOnly(t *testing.T) {
},
}
mockService.On("GetNotifications", userID, true).Return(expectedNotifications, nil)
mockService.On("GetNotifications", userID, services.GetNotificationsParams{
UnreadOnly: true,
Page: 1,
Limit: 20,
}).Return(&services.GetNotificationsResult{
Notifications: expectedNotifications,
Total: 1,
Page: 1,
Limit: 20,
TotalPages: 1,
UnreadCount: 1,
}, nil)
// Execute
req, _ := http.NewRequest("GET", "/api/v1/notifications?read=false", nil)
@ -183,7 +205,7 @@ func TestNotificationHandlers_GetNotifications_ServiceError(t *testing.T) {
userID := uuid.New()
mockService.On("GetNotifications", userID, false).Return(nil, assert.AnError)
mockService.On("GetNotifications", userID, mock.Anything).Return(nil, assert.AnError)
// Execute
req, _ := http.NewRequest("GET", "/api/v1/notifications", nil)

View file

@ -5,8 +5,10 @@ import (
"context"
"encoding/csv"
"encoding/json"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/google/uuid"
@ -249,3 +251,76 @@ func (h *PlaylistExportHandler) ExportPlaylistCSV(c *gin.Context) {
c.Header("Content-Disposition", "attachment; filename="+filename)
c.Data(http.StatusOK, "text/csv", csvBuffer.Bytes())
}
// ExportPlaylistM3U exporte une playlist au format M3U (v0.10.4 F145).
// Each track gets an #EXTINF line and a URL (download or stream).
// ExportPlaylistM3U exporte une playlist au format M3U (v0.10.4 F145).
// Each track gets an #EXTINF line (duration, artist - title) and an absolute
// download URL built from the request host.
//
// Fix: the previous scheme detection ignored "X-Forwarded-Proto: https" when
// TLS terminates at a reverse proxy (c.Request.TLS == nil in that case), so
// HTTPS deployments emitted http:// track URLs; it also carried an
// unreachable fallback (baseURL always started with "http"). The forwarded
// header now takes priority, then the direct connection state.
func (h *PlaylistExportHandler) ExportPlaylistM3U(c *gin.Context) {
	playlistID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid playlist id"})
		return
	}

	// Optional authentication: user_id is set by middleware when logged in.
	var userID *uuid.UUID
	if uidInterface, exists := c.Get("user_id"); exists {
		if uid, ok := uidInterface.(uuid.UUID); ok {
			userID = &uid
		}
	}

	playlist, err := h.playlistService.GetPlaylist(c.Request.Context(), playlistID, userID)
	if err != nil {
		if err.Error() == "playlist not found" {
			c.JSON(http.StatusNotFound, gin.H{"error": "playlist not found"})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Access control: owner, public playlist, or explicit read permission.
	currentUserID := uuid.Nil
	if userID != nil {
		currentUserID = *userID
	}
	if playlist.UserID != currentUserID && !playlist.IsPublic {
		if userID == nil {
			c.JSON(http.StatusForbidden, gin.H{"error": "forbidden"})
			return
		}
		hasAccess, permErr := h.playlistService.CheckPermission(c.Request.Context(), playlistID, *userID, models.PlaylistPermissionRead)
		if permErr != nil || !hasAccess {
			c.JSON(http.StatusForbidden, gin.H{"error": "forbidden"})
			return
		}
	}

	// Build the base URL for track links. A reverse proxy terminating TLS
	// sets X-Forwarded-Proto; honor it first, then fall back to the direct
	// connection, forcing plain http for local hosts.
	scheme := "https"
	switch {
	case c.GetHeader("X-Forwarded-Proto") == "https":
		scheme = "https"
	case c.GetHeader("X-Forwarded-Proto") == "http":
		scheme = "http"
	case c.Request.TLS == nil:
		scheme = "http"
	}
	if strings.HasPrefix(c.Request.Host, "localhost") || strings.HasPrefix(c.Request.Host, "127.0.0.1") {
		scheme = "http"
	}
	baseURL := scheme + "://" + c.Request.Host

	var buf bytes.Buffer
	buf.WriteString("#EXTM3U\n")
	// Ranging over a nil Tracks slice is a no-op, so no explicit guard needed.
	for _, pt := range playlist.Tracks {
		dur := pt.Track.Duration
		if dur <= 0 {
			dur = -1 // M3U convention for unknown duration
		}
		buf.WriteString(fmt.Sprintf("#EXTINF:%d,%s - %s\n", dur, pt.Track.Artist, pt.Track.Title))
		buf.WriteString(fmt.Sprintf("%s/api/v1/tracks/%s/download\n", baseURL, pt.Track.ID.String()))
	}

	filename := "playlist_" + playlistID.String() + "_" + time.Now().Format("20060102") + ".m3u"
	c.Header("Content-Type", "audio/x-mpegurl")
	c.Header("Content-Disposition", "attachment; filename="+filename)
	c.Data(http.StatusOK, "audio/x-mpegurl", buf.Bytes())
}

View file

@ -364,6 +364,32 @@ func (h *ProfileHandler) FollowUser(c *gin.Context) {
RespondSuccess(c, http.StatusOK, gin.H{"message": "User followed successfully"})
}
// GetFollowSuggestions returns users to follow (v0.10.0 F211)
// GET /api/v1/users/suggestions?limit=10
func (h *ProfileHandler) GetFollowSuggestions(c *gin.Context) {
	userID, ok := GetUserIDUUID(c)
	if !ok {
		return
	}
	if h.socialService == nil {
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Social service not initialized", nil))
		return
	}

	// Optional ?limit= query parameter, accepted only in [1, 20]; default 10.
	limit := 10
	if raw := c.Query("limit"); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 20 {
			limit = parsed
		}
	}

	suggestions, err := h.socialService.GetFollowSuggestions(c.Request.Context(), userID, limit)
	if err != nil {
		h.logger.Error("failed to get follow suggestions", zap.Error(err), zap.String("user_id", userID.String()))
		RespondWithAppError(c, apperrors.Wrap(apperrors.ErrCodeInternal, "Failed to get suggestions", err))
		return
	}

	RespondSuccess(c, http.StatusOK, gin.H{"suggestions": suggestions})
}
// UnfollowUser gère l'arrêt du suivi d'un utilisateur
// DELETE /api/v1/users/:id/follow
// BE-API-017: Implement user follow/unfollow endpoints

View file

@ -18,6 +18,8 @@ type Playlist struct {
CoverURL string `gorm:"size:500" json:"cover_url,omitempty" db:"cover_url"`
TrackCount int `gorm:"default:0" json:"track_count" db:"track_count"`
FollowerCount int `gorm:"default:0" json:"follower_count" db:"follower_count"`
IsEditorial bool `gorm:"default:false" json:"is_editorial" db:"is_editorial"` // v0.10.4 F141
IsDefaultFavorites bool `gorm:"default:false" json:"is_default_favorites" db:"is_default_favorites"` // v0.10.4 F136
CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at" db:"created_at"`
UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at" db:"updated_at"`
DeletedAt gorm.DeletedAt `json:"-" db:"deleted_at"`

View file

@ -3,23 +3,39 @@ package services
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"regexp"
"strings"
"time"
"github.com/google/uuid"
"veza-backend-api/internal/database"
ws "veza-backend-api/internal/websocket"
"go.uber.org/zap"
)
// NotificationWSNotifier sends notification payloads to users via WebSocket (v0.10.5 F551)
type NotificationWSNotifier interface {
NotifyUser(userID uuid.UUID, payload []byte)
}
// NotificationService handles notification operations
type NotificationService struct {
db *database.Database
logger *zap.Logger
pushService *PushService // optional, for N1.2 Web Push
wsNotifier NotificationWSNotifier // optional, for F551 real-time
}
// Notification represents a notification
// SetWSNotifier injects the WebSocket notifier for real-time delivery (v0.10.5 F551).
// Optional dependency: while wsNotifier is nil, notification creation simply
// skips in-app WebSocket delivery.
func (ns *NotificationService) SetWSNotifier(n NotificationWSNotifier) {
	ns.wsNotifier = n
}
// Notification represents a notification (F554: grouping fields)
type Notification struct {
ID uuid.UUID `json:"id" db:"id"`
UserID uuid.UUID `json:"user_id" db:"user_id"`
@ -29,6 +45,9 @@ type Notification struct {
Link string `json:"link" db:"link"`
Read bool `json:"read" db:"read"`
CreatedAt string `json:"created_at" db:"created_at"`
GroupKey string `json:"group_key,omitempty" db:"group_key"`
ActorCount int `json:"actor_count" db:"actor_count"`
Metadata string `json:"metadata,omitempty" db:"metadata"` // JSON string
}
// NewNotificationService creates a new notification service
@ -44,26 +63,125 @@ func (ns *NotificationService) SetPushService(ps *PushService) {
ns.pushService = ps
}
// timeHHMM matches "HH:MM" or "HH:MM:SS"
var timeHHMM = regexp.MustCompile(`^(\d{1,2}):(\d{2})(?::(\d{2}))?$`)
// isWithinQuietHours reports whether the current time (UTC) falls inside the
// user's configured quiet-hours window (F553). Overnight windows are
// supported: start 22:00 / end 08:00 covers 10pm-8am.
func (ns *NotificationService) isWithinQuietHours(prefs *NotificationPrefs) bool {
	if !prefs.QuietHoursEnabled || prefs.QuietHoursStart == "" || prefs.QuietHoursEnd == "" {
		return false
	}

	start := parseTimeToMinutes(prefs.QuietHoursStart)
	end := parseTimeToMinutes(prefs.QuietHoursEnd)
	if start < 0 || end < 0 {
		// Unparseable bounds: behave as if quiet hours were disabled.
		return false
	}

	now := time.Now().UTC()
	current := now.Hour()*60 + now.Minute()

	if start <= end {
		// Same-day window, e.g. 09:00-17:00.
		return current >= start && current < end
	}
	// Window wraps past midnight, e.g. 22:00-08:00.
	return current >= start || current < end
}
// parseTimeToMinutes converts a clock string ("HH:MM" or "HH:MM:SS", seconds
// ignored) into minutes since midnight, or -1 when the string is malformed or
// out of range.
func parseTimeToMinutes(s string) int {
	groups := timeHHMM.FindStringSubmatch(s)
	if groups == nil {
		return -1
	}
	var hours, minutes int
	// The regex guarantees pure digit groups, so Sscanf cannot fail here.
	_, _ = fmt.Sscanf(groups[1], "%d", &hours)
	_, _ = fmt.Sscanf(groups[2], "%d", &minutes)
	if hours > 23 || minutes > 59 || hours < 0 || minutes < 0 {
		return -1
	}
	return hours*60 + minutes
}
// CreateNotification creates a new notification and optionally sends Web Push (N1.2).
// Convenience wrapper around CreateNotificationWithGroup with grouping disabled
// (empty group key, uuid.Nil actor), so a fresh notification row is always inserted.
func (ns *NotificationService) CreateNotification(userID uuid.UUID, notificationType, title, content, link string) error {
	return ns.CreateNotificationWithGroup(userID, notificationType, title, content, link, "", uuid.Nil)
}
// CreateNotificationWithGroup supports grouping (F554): when groupKey is set, checks for existing
// notification with same group_key created < 24h ago; if found, increments actor_count and updates metadata
func (ns *NotificationService) CreateNotificationWithGroup(userID uuid.UUID, notificationType, title, content, link, groupKey string, actorID uuid.UUID) error {
ctx := context.Background()
if groupKey != "" && actorID != uuid.Nil {
var existingID uuid.UUID
var actorCount int
var metadata sql.NullString
err := ns.db.QueryRowContext(ctx, `
SELECT id, COALESCE(actor_count, 1), COALESCE(metadata::text, '{}')
FROM notifications
WHERE user_id = $1 AND group_key = $2 AND created_at > NOW() - INTERVAL '24 hours'
ORDER BY created_at DESC LIMIT 1
`, userID, groupKey).Scan(&existingID, &actorCount, &metadata)
if err == nil {
newCount := actorCount + 1
metadataJSON := "[]"
if metadata.Valid && metadata.String != "" && metadata.String != "{}" {
metadataJSON = metadata.String
}
actorIDs := []string{}
_ = json.Unmarshal([]byte(metadataJSON), &actorIDs)
actorIDs = append(actorIDs, actorID.String())
actorIDsJSON, _ := json.Marshal(actorIDs)
_, err = ns.db.ExecContext(ctx, `
UPDATE notifications SET
actor_count = $1, metadata = $2::jsonb, updated_at = NOW()
WHERE id = $3
`, newCount, string(actorIDsJSON), existingID)
if err == nil {
return nil
}
}
}
actorCount := 1
metadata := "{}"
if actorID != uuid.Nil {
actorIDsJSON, _ := json.Marshal([]string{actorID.String()})
metadata = string(actorIDsJSON)
}
_, err := ns.db.ExecContext(ctx, `
INSERT INTO notifications (user_id, type, title, content, link)
VALUES ($1, $2, $3, $4, $5)
`, userID, notificationType, title, content, link)
INSERT INTO notifications (user_id, type, title, content, link, group_key, actor_count, metadata)
VALUES ($1, $2, $3, $4, $5, NULLIF($6, ''), $7, $8::jsonb)
`, userID, notificationType, title, content, link, groupKey, actorCount, metadata)
if err != nil {
return fmt.Errorf("failed to create notification: %w", err)
}
// N1.2: Send Web Push if enabled and user has subscriptions
if ns.pushService != nil {
prefs, err := ns.GetPreferences(userID)
if err != nil {
ns.logger.Warn("failed to get push preferences", zap.Error(err))
return nil
// F553: Check quiet hours before push/WS delivery
prefs, prefsErr := ns.GetPreferences(userID)
if prefsErr != nil {
ns.logger.Warn("failed to get preferences for quiet hours", zap.Error(prefsErr))
}
withinQuietHours := prefs != nil && ns.isWithinQuietHours(prefs)
// F551: Send real-time via WebSocket (in-app) — skip during quiet hours
if !withinQuietHours && ns.wsNotifier != nil {
msg := ws.NewWebSocketMessage(ws.MessageTypeNotification, map[string]interface{}{
"type": notificationType,
"title": title,
"content": content,
"link": link,
})
if payload, err := json.Marshal(msg); err == nil {
ns.wsNotifier.NotifyUser(userID, payload)
} else {
ns.logger.Warn("failed to marshal notification WS message", zap.Error(err))
}
}
// N1.2: Send Web Push if enabled and user has subscriptions — skip during quiet hours
if !withinQuietHours && ns.pushService != nil && prefs != nil {
var shouldPush bool
switch notificationType {
case "follow":
@ -89,24 +207,71 @@ func (ns *NotificationService) CreateNotification(userID uuid.UUID, notification
return nil
}
// GetNotifications retrieves notifications for a user
func (ns *NotificationService) GetNotifications(userID uuid.UUID, unreadOnly bool) ([]Notification, error) {
// GetNotificationsParams holds pagination and filter params (v0.10.5 F555).
type GetNotificationsParams struct {
	UnreadOnly bool   // when true, only notifications with read = FALSE
	TypeFilter string // empty = all notification types
	Page       int    // 1-based; values < 1 are normalized to 1
	Limit      int    // default 20, max 100
}

// GetNotificationsResult holds a paginated notification page (v0.10.5 F555).
type GetNotificationsResult struct {
	Notifications []Notification // current page, newest first
	Total         int            // total rows matching the filters
	Page          int            // echoed (normalized) page number
	Limit         int            // echoed (normalized) page size
	TotalPages    int            // ceil(Total / Limit), at least 1
	UnreadCount   int            // unread notifications across ALL types, ignoring filters
}
// GetNotifications retrieves notifications for a user with pagination and filters
func (ns *NotificationService) GetNotifications(userID uuid.UUID, params GetNotificationsParams) (*GetNotificationsResult, error) {
ctx := context.Background()
query := `
SELECT id, user_id, type, title, content, link, read, created_at
FROM notifications
WHERE user_id = $1
`
args := []interface{}{userID}
if params.Limit <= 0 {
params.Limit = 20
}
if params.Limit > 100 {
params.Limit = 100
}
if params.Page < 1 {
params.Page = 1
}
offset := (params.Page - 1) * params.Limit
if unreadOnly {
query += " AND read = FALSE"
where := "WHERE user_id = $1"
whereArgs := []interface{}{userID}
argIdx := 2
if params.UnreadOnly {
where += " AND read = FALSE"
}
if params.TypeFilter != "" {
where += fmt.Sprintf(" AND type = $%d", argIdx)
whereArgs = append(whereArgs, params.TypeFilter)
argIdx++
}
query += " ORDER BY created_at DESC LIMIT 50"
countQuery := "SELECT COUNT(*) FROM notifications " + where
var total int
if err := ns.db.QueryRowContext(ctx, countQuery, whereArgs...).Scan(&total); err != nil {
return nil, fmt.Errorf("failed to count notifications: %w", err)
}
rows, err := ns.db.QueryContext(ctx, query, args...)
unreadCount := 0
if err := ns.db.QueryRowContext(ctx, `
SELECT COUNT(*) FROM notifications WHERE user_id = $1 AND read = FALSE
`, userID).Scan(&unreadCount); err != nil {
unreadCount = 0
}
selectQuery := fmt.Sprintf(`
SELECT id, user_id, type, title, content, link, read, created_at
FROM notifications %s ORDER BY created_at DESC LIMIT $%d OFFSET $%d`, where, argIdx, argIdx+1)
selectArgs := append(whereArgs, params.Limit, offset)
rows, err := ns.db.QueryContext(ctx, selectQuery, selectArgs...)
if err != nil {
return nil, fmt.Errorf("failed to get notifications: %w", err)
}
@ -130,7 +295,19 @@ func (ns *NotificationService) GetNotifications(userID uuid.UUID, unreadOnly boo
notifications = append(notifications, notification)
}
return notifications, nil
totalPages := (total + params.Limit - 1) / params.Limit
if totalPages < 1 {
totalPages = 1
}
return &GetNotificationsResult{
Notifications: notifications,
Total: total,
Page: params.Page,
Limit: params.Limit,
TotalPages: totalPages,
UnreadCount: unreadCount,
}, nil
}
// MarkAsRead marks a notification as read
@ -217,13 +394,16 @@ func (ns *NotificationService) DeleteAllNotifications(userID uuid.UUID) error {
return nil
}
// NotificationPrefs represents notification preferences (N1.3)
// NotificationPrefs represents notification preferences (N1.3, F553)
type NotificationPrefs struct {
PushFollow bool `json:"push_follow"`
PushLike bool `json:"push_like"`
PushComment bool `json:"push_comment"`
PushMessage bool `json:"push_message"`
PushMention bool `json:"push_mention"`
QuietHoursEnabled bool `json:"quiet_hours_enabled"`
QuietHoursStart string `json:"quiet_hours_start"` // "22:00"
QuietHoursEnd string `json:"quiet_hours_end"` // "08:00"
}
// GetPreferences returns notification preferences for a user
@ -232,11 +412,23 @@ func (ns *NotificationService) GetPreferences(userID uuid.UUID) (*NotificationPr
prefs := &NotificationPrefs{PushFollow: true, PushLike: true, PushComment: true, PushMessage: true, PushMention: true}
var startNullable, endNullable sql.NullString
err := ns.db.QueryRowContext(ctx, `
SELECT push_follow, push_like, push_comment, push_message, push_mention
SELECT push_follow, push_like, push_comment, push_message, push_mention,
COALESCE(quiet_hours_enabled, false),
quiet_hours_start::text, quiet_hours_end::text
FROM notification_preferences
WHERE user_id = $1
`, userID).Scan(&prefs.PushFollow, &prefs.PushLike, &prefs.PushComment, &prefs.PushMessage, &prefs.PushMention)
`, userID).Scan(&prefs.PushFollow, &prefs.PushLike, &prefs.PushComment, &prefs.PushMessage, &prefs.PushMention,
&prefs.QuietHoursEnabled, &startNullable, &endNullable)
if err == nil {
if startNullable.Valid {
prefs.QuietHoursStart = startNullable.String
}
if endNullable.Valid {
prefs.QuietHoursEnd = endNullable.String
}
}
if err == nil {
return prefs, nil
@ -248,21 +440,40 @@ func (ns *NotificationService) GetPreferences(userID uuid.UUID) (*NotificationPr
return prefs, nil
}
// UpdatePreferences updates notification preferences
func (ns *NotificationService) UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool) error {
// UpdatePreferences updates notification preferences (F553: quiet hours)
func (ns *NotificationService) UpdatePreferences(userID uuid.UUID, pushFollow, pushLike, pushComment, pushMessage, pushMention *bool,
quietHoursEnabled *bool, quietHoursStart, quietHoursEnd *string) error {
ctx := context.Background()
qhStart := nullIfEmpty(quietHoursStart)
qhEnd := nullIfEmpty(quietHoursEnd)
_, err := ns.db.ExecContext(ctx, `
INSERT INTO notification_preferences (user_id, push_follow, push_like, push_comment, push_message, push_mention, updated_at)
VALUES ($1, COALESCE($2, true), COALESCE($3, true), COALESCE($4, true), COALESCE($5, true), COALESCE($6, true), NOW())
INSERT INTO notification_preferences (user_id, push_follow, push_like, push_comment, push_message, push_mention,
quiet_hours_enabled, quiet_hours_start, quiet_hours_end, updated_at)
VALUES ($1, COALESCE($2, true), COALESCE($3, true), COALESCE($4, true), COALESCE($5, true), COALESCE($6, true),
COALESCE($7, false), $8::time, $9::time, NOW())
ON CONFLICT (user_id) DO UPDATE SET
push_follow = CASE WHEN $2 IS NOT NULL THEN $2 ELSE notification_preferences.push_follow END,
push_like = CASE WHEN $3 IS NOT NULL THEN $3 ELSE notification_preferences.push_like END,
push_comment = CASE WHEN $4 IS NOT NULL THEN $4 ELSE notification_preferences.push_comment END,
push_message = CASE WHEN $5 IS NOT NULL THEN $5 ELSE notification_preferences.push_message END,
push_mention = CASE WHEN $6 IS NOT NULL THEN $6 ELSE notification_preferences.push_mention END,
quiet_hours_enabled = CASE WHEN $7 IS NOT NULL THEN $7 ELSE notification_preferences.quiet_hours_enabled END,
quiet_hours_start = CASE WHEN $8::text IS NOT NULL AND $8::text != '' THEN $8::time ELSE notification_preferences.quiet_hours_start END,
quiet_hours_end = CASE WHEN $9::text IS NOT NULL AND $9::text != '' THEN $9::time ELSE notification_preferences.quiet_hours_end END,
updated_at = NOW()
`, userID, pushFollow, pushLike, pushComment, pushMessage, pushMention)
`, userID, pushFollow, pushLike, pushComment, pushMessage, pushMention,
quietHoursEnabled, qhStart, qhEnd)
return err
}
// nullIfEmpty maps a nil or empty *string to a nil interface (SQL NULL);
// any other value is unwrapped so the driver binds the plain string.
func nullIfEmpty(s *string) interface{} {
	if s == nil || *s == "" {
		return nil
	}
	return *s
}

View file

@ -90,10 +90,12 @@ func TestNotificationService_GetNotifications_AllNotifications(t *testing.T) {
require.NoError(t, err)
}
notifications, err := service.GetNotifications(userID, false)
result, err := service.GetNotifications(userID, GetNotificationsParams{Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, notifications, 3)
assert.Equal(t, userID, notifications[0].UserID)
require.NotNil(t, result)
assert.Len(t, result.Notifications, 3)
assert.Equal(t, userID, result.Notifications[0].UserID)
assert.Equal(t, 3, result.Total)
}
func TestNotificationService_GetNotifications_UnreadOnly(t *testing.T) {
@ -119,15 +121,17 @@ func TestNotificationService_GetNotifications_UnreadOnly(t *testing.T) {
require.NoError(t, err)
// Get all notifications
allNotifications, err := service.GetNotifications(userID, false)
allResult, err := service.GetNotifications(userID, GetNotificationsParams{Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, allNotifications, 2)
require.NotNil(t, allResult)
assert.Len(t, allResult.Notifications, 2)
// Get unread only
unreadNotifications, err := service.GetNotifications(userID, true)
unreadResult, err := service.GetNotifications(userID, GetNotificationsParams{UnreadOnly: true, Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, unreadNotifications, 1)
assert.False(t, unreadNotifications[0].Read)
require.NotNil(t, unreadResult)
assert.Len(t, unreadResult.Notifications, 1)
assert.False(t, unreadResult.Notifications[0].Read)
}
func TestNotificationService_GetNotifications_NoNotifications(t *testing.T) {
@ -135,9 +139,11 @@ func TestNotificationService_GetNotifications_NoNotifications(t *testing.T) {
userID := uuid.New()
notifications, err := service.GetNotifications(userID, false)
result, err := service.GetNotifications(userID, GetNotificationsParams{Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, notifications, 0)
require.NotNil(t, result)
assert.Len(t, result.Notifications, 0)
assert.Equal(t, 0, result.Total)
}
func TestNotificationService_GetNotifications_DifferentUsers(t *testing.T) {
@ -164,16 +170,18 @@ func TestNotificationService_GetNotifications_DifferentUsers(t *testing.T) {
require.NoError(t, err)
// Get notifications for user1
notifications1, err := service.GetNotifications(userID1, false)
result1, err := service.GetNotifications(userID1, GetNotificationsParams{Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, notifications1, 1)
assert.Equal(t, userID1, notifications1[0].UserID)
require.NotNil(t, result1)
assert.Len(t, result1.Notifications, 1)
assert.Equal(t, userID1, result1.Notifications[0].UserID)
// Get notifications for user2
notifications2, err := service.GetNotifications(userID2, false)
result2, err := service.GetNotifications(userID2, GetNotificationsParams{Page: 1, Limit: 20})
assert.NoError(t, err)
assert.Len(t, notifications2, 1)
assert.Equal(t, userID2, notifications2[0].UserID)
require.NotNil(t, result2)
assert.Len(t, result2.Notifications, 1)
assert.Equal(t, userID2, result2.Notifications[0].UserID)
}
func TestNotificationService_MarkAsRead_Success(t *testing.T) {

View file

@ -312,3 +312,51 @@ func (ss *SocialService) IsBlocked(blockerID, blockedID uuid.UUID) (bool, error)
return exists, nil
}
// SuggestionUser is a minimal user for follow suggestions (v0.10.0 F211).
type SuggestionUser struct {
	ID             uuid.UUID `json:"id"`
	Username       string    `json:"username"`
	AvatarURL      string    `json:"avatar_url"`      // empty string when the user has no avatar
	FollowersCount int       `json:"followers_count"` // denormalized count read from user_profiles
}
// GetFollowSuggestions returns users to follow based on "friends of friends" (v0.10.0 F211).
// No ML - simple 2-hop: users followed by people the current user follows,
// excluding the user themself and anyone already followed. limit is clamped
// to [1, 20] (default 10 when <= 0).
//
// Fix: the query declares two placeholders ($1, $2) but three arguments were
// passed (userID bound twice), which database/sql rejects with an
// argument-count error before the query runs — Postgres reuses $1, so userID
// must be bound exactly once. Also orders by follower count and checks
// rows.Err() after iteration.
func (ss *SocialService) GetFollowSuggestions(ctx context.Context, userID uuid.UUID, limit int) ([]SuggestionUser, error) {
	if limit <= 0 {
		limit = 10
	}
	if limit > 20 {
		limit = 20
	}

	rows, err := ss.db.QueryContext(ctx, `
		SELECT DISTINCT u.id, u.username, COALESCE(u.avatar, '') as avatar,
			COALESCE(up.follower_count, 0) as follower_count
		FROM follows f1
		JOIN follows f2 ON f2.follower_id = f1.followed_id
		JOIN users u ON u.id = f2.followed_id AND u.deleted_at IS NULL
		LEFT JOIN user_profiles up ON up.user_id = u.id
		WHERE f1.follower_id = $1
		  AND f2.followed_id != $1
		  AND f2.followed_id NOT IN (SELECT followed_id FROM follows WHERE follower_id = $1)
		ORDER BY follower_count DESC
		LIMIT $2
	`, userID, limit)
	if err != nil {
		return nil, fmt.Errorf("failed to get follow suggestions: %w", err)
	}
	defer rows.Close()

	// Non-nil empty slice so the handler serializes "suggestions": [] instead of null.
	result := make([]SuggestionUser, 0, limit)
	for rows.Next() {
		var u SuggestionUser
		var avatar string
		if err := rows.Scan(&u.ID, &u.Username, &avatar, &u.FollowersCount); err != nil {
			// Best-effort: skip a malformed row rather than failing the whole list.
			continue
		}
		u.AvatarURL = avatar
		result = append(result, u)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate follow suggestions: %w", err)
	}
	return result, nil
}

View file

@ -34,6 +34,7 @@ type UserService struct {
userRepo UserRepository
db *gorm.DB // Optional DB access for settings
cacheService *CacheService // BE-SVC-001: Cache service for user profiles
socialService *SocialService // v0.10.0 F187: Optional, for is_following in profiles
uploadDir string
}
@ -54,6 +55,7 @@ type UpdateProfileRequest struct {
// Profile represents a user profile with necessary fields
// MIGRATION UUID: ID et UserID migrés vers uuid.UUID
// v0.10.0 F187: Added FollowersCount, FollowingCount, IsFollowing
type Profile struct {
ID uuid.UUID `json:"id"`
UserID uuid.UUID `json:"user_id"`
@ -69,6 +71,9 @@ type Profile struct {
SocialLinks map[string]interface{} `json:"social_links"`
IsPublic bool `json:"is_public"`
CreatedAt time.Time `json:"created_at"`
FollowersCount int `json:"followers_count"`
FollowingCount int `json:"following_count"`
IsFollowing bool `json:"is_following"`
}
// UserStats est maintenant défini dans internal/types/stats.go
@ -94,6 +99,11 @@ func (s *UserService) SetCacheService(cacheService *CacheService) {
s.cacheService = cacheService
}
// SetSocialService injects the social service used to compute is_following
// when enriching profiles (v0.10.0 F187). Optional: while unset,
// enrichProfileCounts leaves IsFollowing at its zero value (false).
func (s *UserService) SetSocialService(socialService *SocialService) {
	s.socialService = socialService
}
// NewUserServiceWithDB crée une nouvelle instance d'UserService avec accès DB
func NewUserServiceWithDB(userRepo UserRepository, db *gorm.DB) *UserService {
return &UserService{
@ -188,6 +198,7 @@ func (s *UserService) UpdateProfileWithRequest(userID uuid.UUID, req *UpdateProf
// If profile is private and requesterID is different from userID, returns limited fields
// MIGRATION UUID: requesterID migré vers *uuid.UUID
// BE-SVC-001: Add caching for user profiles
// v0.10.0 F187: Enriches with followers_count, following_count, is_following
func (s *UserService) GetProfile(userID uuid.UUID, requesterID *uuid.UUID) (*Profile, error) {
ctx := context.Background()
cacheConfig := DefaultCacheConfig()
@ -196,8 +207,7 @@ func (s *UserService) GetProfile(userID uuid.UUID, requesterID *uuid.UUID) (*Pro
if s.cacheService != nil {
var cachedProfile Profile
if err := s.cacheService.GetUser(ctx, userID, &cachedProfile); err == nil {
// Cache hit - but we still need to check privacy settings
// For now, return cached profile (privacy check would need to be cached too)
s.enrichProfileCounts(ctx, &cachedProfile, userID, requesterID)
return &cachedProfile, nil
}
}
@ -219,6 +229,8 @@ func (s *UserService) GetProfile(userID uuid.UUID, requesterID *uuid.UUID) (*Pro
profile.SocialLinks = nil
}
s.enrichProfileCounts(ctx, profile, userID, requesterID)
// Cache the profile
if s.cacheService != nil {
if err := s.cacheService.SetUser(ctx, userID, profile, cacheConfig); err != nil {
@ -230,6 +242,29 @@ func (s *UserService) GetProfile(userID uuid.UUID, requesterID *uuid.UUID) (*Pro
return profile, nil
}
// enrichProfileCounts populates the denormalized follower/following counters
// from user_profiles, and sets is_following via SocialService when a distinct
// authenticated requester is present (v0.10.0 F187). Lookup failures are
// ignored: the profile keeps zero counts / false rather than erroring.
func (s *UserService) enrichProfileCounts(ctx context.Context, profile *Profile, userID uuid.UUID, requesterID *uuid.UUID) {
	if s.db != nil {
		var row struct {
			FollowerCount  int `gorm:"column:follower_count"`
			FollowingCount int `gorm:"column:following_count"`
		}
		err := s.db.WithContext(ctx).
			Table("user_profiles").
			Select("follower_count, following_count").
			Where("user_id = ?", userID).
			First(&row).Error
		if err == nil {
			profile.FollowersCount = row.FollowerCount
			profile.FollowingCount = row.FollowingCount
		}
	}

	// is_following only makes sense for a requester looking at someone else.
	if requesterID == nil || *requesterID == userID || s.socialService == nil {
		return
	}
	if isFollowing, err := s.socialService.IsFollowing(*requesterID, userID); err == nil {
		profile.IsFollowing = isFollowing
	}
}
// GetProfileByUsername retrieves a user profile by username
// requesterID can be nil for unauthenticated requests
// If profile is private and requesterID is different from userID, returns limited fields

View file

@ -0,0 +1,48 @@
-- 125_follow_counts_triggers.sql
-- v0.10.0 F187: Denormalized follower/following counts in user_profiles
-- Triggers to update user_profiles.follower_count and following_count on follows insert/delete
-- NOTE(review): if a user_profiles row is missing for either side, the UPDATE
-- is a silent no-op — assumes every user always has a profile row; TODO confirm.
-- Increment follower_count for followed user, following_count for follower
CREATE OR REPLACE FUNCTION increment_follow_counts()
RETURNS TRIGGER AS $$
BEGIN
UPDATE public.user_profiles
SET follower_count = follower_count + 1
WHERE user_id = NEW.followed_id;
UPDATE public.user_profiles
SET following_count = following_count + 1
WHERE user_id = NEW.follower_id;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Decrement with floor at 0 to prevent negative counts
-- (GREATEST guards against counts that drifted out of sync, e.g. rows that
-- existed before this migration or manual deletes.)
CREATE OR REPLACE FUNCTION decrement_follow_counts()
RETURNS TRIGGER AS $$
BEGIN
UPDATE public.user_profiles
SET follower_count = GREATEST(0, follower_count - 1)
WHERE user_id = OLD.followed_id;
UPDATE public.user_profiles
SET following_count = GREATEST(0, following_count - 1)
WHERE user_id = OLD.follower_id;
RETURN OLD;
END;
$$ LANGUAGE plpgsql;
-- DROP ... IF EXISTS before CREATE makes the trigger setup safe to re-run.
DROP TRIGGER IF EXISTS trg_follows_insert_counts ON public.follows;
CREATE TRIGGER trg_follows_insert_counts
AFTER INSERT ON public.follows
FOR EACH ROW EXECUTE FUNCTION increment_follow_counts();
DROP TRIGGER IF EXISTS trg_follows_delete_counts ON public.follows;
CREATE TRIGGER trg_follows_delete_counts
AFTER DELETE ON public.follows
FOR EACH ROW EXECUTE FUNCTION decrement_follow_counts();
COMMENT ON FUNCTION increment_follow_counts() IS 'v0.10.0 F187: Update user_profiles counts on follow';
COMMENT ON FUNCTION decrement_follow_counts() IS 'v0.10.0 F187: Update user_profiles counts on unfollow';

View file

@ -0,0 +1,8 @@
-- 125_follow_counts_triggers_down.sql
-- Rollback v0.10.0 F187 follow counts triggers
-- Triggers are dropped before their functions so the functions can be removed
-- without CASCADE (a function still referenced by a trigger cannot be dropped).
DROP TRIGGER IF EXISTS trg_follows_insert_counts ON public.follows;
DROP TRIGGER IF EXISTS trg_follows_delete_counts ON public.follows;
DROP FUNCTION IF EXISTS increment_follow_counts();
DROP FUNCTION IF EXISTS decrement_follow_counts();

View file

@ -0,0 +1,11 @@
-- 129_playlist_editorial.sql
-- v0.10.4 F141/F136: Editorial playlists, Favoris flag
-- Both columns default to false so existing playlists are unaffected.
ALTER TABLE public.playlists ADD COLUMN IF NOT EXISTS is_editorial BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE public.playlists ADD COLUMN IF NOT EXISTS is_default_favorites BOOLEAN NOT NULL DEFAULT false;
-- Partial indexes: only the (presumably rare) rows with the flag set are
-- indexed, keeping the indexes small for flag-filtered lookups.
CREATE INDEX IF NOT EXISTS idx_playlists_is_editorial ON public.playlists(is_editorial) WHERE is_editorial = true;
CREATE INDEX IF NOT EXISTS idx_playlists_is_default_favorites ON public.playlists(user_id, is_default_favorites) WHERE is_default_favorites = true;
COMMENT ON COLUMN public.playlists.is_editorial IS 'v0.10.4 F141: Curatorial playlist visible in Discover, editable by admin/mod only';
COMMENT ON COLUMN public.playlists.is_default_favorites IS 'v0.10.4 F136: Auto-created Favoris playlist per user';

View file

@ -0,0 +1,5 @@
-- 129_playlist_editorial_down.sql
-- Rollback v0.10.4 F141/F136 (reverse order of the up migration).
-- Indexes are dropped explicitly for symmetry, although dropping the columns
-- would remove the dependent indexes as well.
DROP INDEX IF EXISTS public.idx_playlists_is_default_favorites;
DROP INDEX IF EXISTS public.idx_playlists_is_editorial;
ALTER TABLE public.playlists DROP COLUMN IF EXISTS is_default_favorites;
ALTER TABLE public.playlists DROP COLUMN IF EXISTS is_editorial;

View file

@ -0,0 +1,12 @@
-- Migration 132: Quiet hours for notifications (v0.10.5 F553)
-- When enabled, no push/WebSocket delivery during the configured time window
-- Schema-qualified (public.*) for consistency with migrations 125/129 and to
-- be robust against a non-default search_path.
ALTER TABLE public.notification_preferences
ADD COLUMN IF NOT EXISTS quiet_hours_enabled BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN IF NOT EXISTS quiet_hours_start TIME, -- e.g. 22:00
ADD COLUMN IF NOT EXISTS quiet_hours_end TIME; -- e.g. 08:00
-- Start/end are only meaningful when enabled
COMMENT ON COLUMN public.notification_preferences.quiet_hours_enabled IS 'If true, suppress push and real-time delivery during quiet hours';
COMMENT ON COLUMN public.notification_preferences.quiet_hours_start IS 'Quiet hours start (e.g. 22:00 for 10pm)';
COMMENT ON COLUMN public.notification_preferences.quiet_hours_end IS 'Quiet hours end (e.g. 08:00 for 8am); overnight range supported';

View file

@ -0,0 +1,5 @@
-- Rollback 132: Quiet hours
-- Schema-qualified (public.*) for consistency with migrations 125/129.
ALTER TABLE public.notification_preferences
DROP COLUMN IF EXISTS quiet_hours_enabled,
DROP COLUMN IF EXISTS quiet_hours_start,
DROP COLUMN IF EXISTS quiet_hours_end;

View file

@ -0,0 +1,13 @@
-- Migration 133: Notification grouping (v0.10.5 F554)
-- Aggregate similar notifications (e.g. "3 people liked your track")
-- Schema-qualified (public.*) for consistency with migrations 125/129 and to
-- be robust against a non-default search_path.
-- NOTE(review): metadata is nullable (unlike actor_count); confirm whether the
-- application relies on NULL vs '{}' before tightening to NOT NULL.
ALTER TABLE public.notifications
ADD COLUMN IF NOT EXISTS group_key VARCHAR(255),
ADD COLUMN IF NOT EXISTS actor_count INT NOT NULL DEFAULT 1,
ADD COLUMN IF NOT EXISTS metadata JSONB DEFAULT '{}';
-- Partial index: only grouped notifications carry a group_key.
CREATE INDEX IF NOT EXISTS idx_notifications_group_key ON public.notifications(group_key) WHERE group_key IS NOT NULL;
COMMENT ON COLUMN public.notifications.group_key IS 'Semantic key for grouping (e.g. like:track:uuid)';
COMMENT ON COLUMN public.notifications.actor_count IS 'Number of actors when grouped';
COMMENT ON COLUMN public.notifications.metadata IS 'Additional data (actor_ids, etc.)';

View file

@ -0,0 +1,6 @@
-- Rollback 133: Notification grouping
-- Schema-qualified (public.*) for consistency with migrations 125/129.
-- Index dropped before its column for symmetry with the up migration.
DROP INDEX IF EXISTS public.idx_notifications_group_key;
ALTER TABLE public.notifications
DROP COLUMN IF EXISTS group_key,
DROP COLUMN IF EXISTS actor_count,
DROP COLUMN IF EXISTS metadata;