feat(v0.13.4): polish audio & player — PiP canvas, visualizer, Cast/AirPlay stubs

TASK-APLSH-001: Enhanced PiP with canvas-based display showing cover art + track info
TASK-APLSH-002: Chromecast detection hook (useCastSupport) — full casting deferred
TASK-APLSH-003: AirPlay detection hook (useAirPlaySupport) — Safari target picker
TASK-APLSH-004: AudioVisualizer component with 3 modes (bars/wave/spectrogram)
  - useSpectrumAnalyser hook (64 bands sampled from a 512-point FFT, plus raw spectrum + waveform data)
  - Canvas-based rendering with SUMI color palette
  - Integrated into PlayerExpanded with toggle button

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Author: senke
Date: 2026-03-13 13:59:30 +01:00
Parent: 55cc159f67
Commit: c1db9f03b0

13 changed files with 996 additions and 25 deletions

@@ -0,0 +1,57 @@
import { describe, it, expect, vi, beforeAll } from 'vitest';
import { render, screen, fireEvent } from '@testing-library/react';
import { AudioVisualizer } from './AudioVisualizer';

// Mock canvas getContext since JSDOM doesn't implement Canvas 2D
beforeAll(() => {
  HTMLCanvasElement.prototype.getContext = vi.fn().mockReturnValue(null);
});

describe('AudioVisualizer', () => {
  const defaultProps = {
    bands: Array(64).fill(0.5),
    frequencyData: new Uint8Array(128),
    waveformData: new Uint8Array(512),
    isPlaying: true,
  };

  it('should render canvas element', () => {
    render(<AudioVisualizer {...defaultProps} />);
    const canvas = screen.getByRole('img', { name: /audio visualizer/i });
    expect(canvas).toBeDefined();
    expect(canvas.tagName).toBe('CANVAS');
  });

  it('should render mode selector buttons', () => {
    render(<AudioVisualizer {...defaultProps} />);
    expect(screen.getByLabelText('Equalizer')).toBeDefined();
    expect(screen.getByLabelText('Waveform')).toBeDefined();
    expect(screen.getByLabelText('Spectrogram')).toBeDefined();
  });

  it('should switch modes on button click', () => {
    render(<AudioVisualizer {...defaultProps} />);
    const waveBtn = screen.getByLabelText('Waveform');
    fireEvent.click(waveBtn);
    const canvas = screen.getByRole('img', { name: /wave mode/i });
    expect(canvas).toBeDefined();
  });

  it('should start in bars mode', () => {
    render(<AudioVisualizer {...defaultProps} />);
    const canvas = screen.getByRole('img', { name: /bars mode/i });
    expect(canvas).toBeDefined();
  });

  it('should render with empty data when not playing', () => {
    render(<AudioVisualizer {...defaultProps} isPlaying={false} />);
    const canvas = screen.getByRole('img');
    expect(canvas).toBeDefined();
  });

  it('should accept custom className', () => {
    const { container } = render(<AudioVisualizer {...defaultProps} className="custom-class" />);
    const wrapper = container.firstChild as HTMLElement;
    expect(wrapper.classList.contains('custom-class')).toBe(true);
  });
});

@@ -0,0 +1,296 @@
/**
 * AudioVisualizer: canvas-based audio visualization
 * v0.13.4 TASK-APLSH-004: spectrogram/equalizer visualizers
 *
 * Three modes:
 * - bars: frequency equalizer bars with SUMI gradient
 * - spectrogram: scrolling time-frequency waterfall
 * - wave: oscilloscope waveform
 */
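// Usage sketch (hypothetical wiring, not from this commit; assumes the
// useSpectrumAnalyser hook added in this release and prop names matching
// AudioVisualizerProps below):
//
//   const spectrum = useSpectrumAnalyser(audioEl, isPlaying, visible);
//   <AudioVisualizer
//     bands={spectrum.bands}
//     frequencyData={spectrum.frequencyData}
//     waveformData={spectrum.waveformData}
//     isPlaying={isPlaying}
//   />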
import { useRef, useEffect, useCallback, useState } from 'react';
import { cn } from '@/lib/utils';
import { Button } from '@/components/ui/button';
import { BarChart3, Activity, Radio } from 'lucide-react';
import type { VisualizerMode } from '../hooks/useSpectrumAnalyser';

interface AudioVisualizerProps {
  /** Normalized frequency bands [0-1] */
  bands: number[];
  /** Raw frequency data (Uint8Array) for spectrogram */
  frequencyData: Uint8Array;
  /** Time-domain waveform data for oscilloscope */
  waveformData: Uint8Array;
  isPlaying: boolean;
  className?: string;
}

const MODES: { mode: VisualizerMode; icon: typeof BarChart3; label: string }[] = [
  { mode: 'bars', icon: BarChart3, label: 'Equalizer' },
  { mode: 'wave', icon: Activity, label: 'Waveform' },
  { mode: 'spectrogram', icon: Radio, label: 'Spectrogram' },
];

// SUMI colors
const ACCENT_COLOR = '#7c9dd6'; // --sumi-accent
const SAGE = '#7a9e6c'; // --sumi-sage
const GOLD = '#c9a84c'; // --sumi-gold
const BG_VOID = '#0c0c0f'; // --sumi-bg-void

export function AudioVisualizer({
  bands,
  frequencyData,
  waveformData,
  isPlaying,
  className,
}: AudioVisualizerProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const spectrogramRef = useRef<ImageData | null>(null);
  const [mode, setMode] = useState<VisualizerMode>('bars');

  const drawBars = useCallback(
    (ctx: CanvasRenderingContext2D, W: number, H: number) => {
      ctx.fillStyle = BG_VOID;
      ctx.fillRect(0, 0, W, H);
      if (!isPlaying || bands.length === 0) {
        // Idle state: flat line
        ctx.strokeStyle = ACCENT_COLOR + '40';
        ctx.lineWidth = 1;
        ctx.beginPath();
        ctx.moveTo(0, H / 2);
        ctx.lineTo(W, H / 2);
        ctx.stroke();
        return;
      }
      const barCount = bands.length;
      const gap = 2;
      const barWidth = (W - gap * (barCount - 1)) / barCount;
      const cornerRadius = Math.min(barWidth / 2, 3);
      for (let i = 0; i < barCount; i++) {
        const level = bands[i] ?? 0;
        const barH = Math.max(2, level * (H - 8));
        const x = i * (barWidth + gap);
        const y = H - barH;
        // Gradient from accent to vermillion based on frequency
        const t = i / barCount;
        const r = lerp(0x7c, 0xd4, t);
        const g = lerp(0x9d, 0x63, t);
        const b = lerp(0xd6, 0x4a, t);
        ctx.fillStyle = `rgb(${r}, ${g}, ${b})`;
        // Rounded top rect (roundRect is not implemented everywhere)
        ctx.beginPath();
        if (typeof ctx.roundRect === 'function') {
          ctx.roundRect(x, y, barWidth, barH, [cornerRadius, cornerRadius, 0, 0]);
        } else {
          ctx.rect(x, y, barWidth, barH);
        }
        ctx.fill();
        // Glow at peaks
        if (level > 0.7) {
          ctx.shadowColor = `rgba(${r}, ${g}, ${b}, 0.6)`;
          ctx.shadowBlur = 8;
          ctx.fillRect(x, y, barWidth, 2);
          ctx.shadowBlur = 0;
        }
      }
    },
    [bands, isPlaying],
  );

  const drawWave = useCallback(
    (ctx: CanvasRenderingContext2D, W: number, H: number) => {
      ctx.fillStyle = BG_VOID;
      ctx.fillRect(0, 0, W, H);
      if (!isPlaying || waveformData.length === 0) {
        ctx.strokeStyle = ACCENT_COLOR + '40';
        ctx.lineWidth = 1;
        ctx.beginPath();
        ctx.moveTo(0, H / 2);
        ctx.lineTo(W, H / 2);
        ctx.stroke();
        return;
      }
      const sliceWidth = W / waveformData.length;
      // Fill gradient under the wave
      const gradient = ctx.createLinearGradient(0, 0, W, 0);
      gradient.addColorStop(0, SAGE + '20');
      gradient.addColorStop(0.5, ACCENT_COLOR + '20');
      gradient.addColorStop(1, GOLD + '20');
      ctx.beginPath();
      ctx.moveTo(0, H / 2);
      for (let i = 0; i < waveformData.length; i++) {
        const v = (waveformData[i] ?? 128) / 128.0;
        const y = (v * H) / 2;
        const x = i * sliceWidth;
        if (i === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.lineTo(W, H / 2);
      ctx.fillStyle = gradient;
      ctx.fill();
      // Stroke the wave line
      ctx.beginPath();
      for (let i = 0; i < waveformData.length; i++) {
        const v = (waveformData[i] ?? 128) / 128.0;
        const y = (v * H) / 2;
        const x = i * sliceWidth;
        if (i === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      const lineGradient = ctx.createLinearGradient(0, 0, W, 0);
      lineGradient.addColorStop(0, SAGE);
      lineGradient.addColorStop(0.5, ACCENT_COLOR);
      lineGradient.addColorStop(1, GOLD);
      ctx.strokeStyle = lineGradient;
      ctx.lineWidth = 2;
      ctx.stroke();
    },
    [waveformData, isPlaying],
  );

  const drawSpectrogram = useCallback(
    (ctx: CanvasRenderingContext2D, W: number, H: number) => {
      if (!isPlaying || frequencyData.length === 0) {
        ctx.fillStyle = BG_VOID;
        ctx.fillRect(0, 0, W, H);
        ctx.strokeStyle = ACCENT_COLOR + '40';
        ctx.lineWidth = 1;
        ctx.beginPath();
        ctx.moveTo(0, H / 2);
        ctx.lineTo(W, H / 2);
        ctx.stroke();
        spectrogramRef.current = null;
        return;
      }
      // Shift the existing image left by one pixel. Note that putImageData
      // works in device pixels and ignores the current transform.
      if (spectrogramRef.current) {
        ctx.putImageData(spectrogramRef.current, -1, 0);
      } else {
        ctx.fillStyle = BG_VOID;
        ctx.fillRect(0, 0, W, H);
      }
      // Draw the new column on the right edge
      const colX = W - 1;
      const binCount = frequencyData.length;
      const binHeight = H / binCount;
      for (let i = 0; i < binCount; i++) {
        const value = frequencyData[i] ?? 0;
        const intensity = value / 255;
        // Bottom = low freq, top = high freq
        const y = H - (i + 1) * binHeight;
        // Color mapping: dark → accent → vermillion → gold (heat)
        const [r, g, b] = spectrogramColor(intensity);
        ctx.fillStyle = `rgb(${r}, ${g}, ${b})`;
        ctx.fillRect(colX, y, 1, Math.ceil(binHeight));
      }
      // Save the full backing store for the next frame's shift; use the canvas's
      // device-pixel size, since W/H are CSS pixels after the dpr scale and
      // getImageData also ignores the transform
      spectrogramRef.current = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height);
    },
    [frequencyData, isPlaying],
  );

  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const ctx = canvas.getContext('2d');
    if (!ctx || typeof ctx.fillRect !== 'function') return;
    // Match the backing store to the display size. Reassigning width/height
    // clears the bitmap; the spectrogram survives because it restores its
    // history from the ImageData saved on the previous frame.
    const rect = canvas.getBoundingClientRect();
    const dpr = Math.min(window.devicePixelRatio || 1, 2);
    const W = rect.width || 300;
    const H = rect.height || 150;
    canvas.width = W * dpr;
    canvas.height = H * dpr;
    if (typeof ctx.scale === 'function') ctx.scale(dpr, dpr);
    switch (mode) {
      case 'bars':
        drawBars(ctx, W, H);
        break;
      case 'wave':
        drawWave(ctx, W, H);
        break;
      case 'spectrogram':
        drawSpectrogram(ctx, W, H);
        break;
    }
  }, [mode, drawBars, drawWave, drawSpectrogram]);

  // Reset spectrogram history when switching modes
  useEffect(() => {
    spectrogramRef.current = null;
  }, [mode]);

  return (
    <div className={cn('relative rounded-xl overflow-hidden', className)}>
      <canvas
        ref={canvasRef}
        className="w-full h-full"
        role="img"
        aria-label={`Audio visualizer — ${mode} mode`}
      />
      {/* Mode selector */}
      <div className="absolute bottom-3 right-3 flex items-center gap-1 bg-black/50 backdrop-blur-sm rounded-lg p-1">
        {MODES.map(({ mode: m, icon: Icon, label }) => (
          <Button
            key={m}
            variant="ghost"
            size="icon"
            className={cn(
              'h-7 w-7 rounded-md transition-colors',
              mode === m
                ? 'text-[var(--sumi-accent)] bg-[var(--sumi-accent-muted)]'
                : 'text-[var(--sumi-text-tertiary)] hover:text-[var(--sumi-text-primary)]',
            )}
            onClick={() => setMode(m)}
            aria-label={label}
          >
            <Icon className="w-3.5 h-3.5" />
          </Button>
        ))}
      </div>
    </div>
  );
}

function lerp(a: number, b: number, t: number): number {
  return Math.round(a + (b - a) * t);
}

function spectrogramColor(intensity: number): [number, number, number] {
  // 0..0.25: black → deep blue
  // 0.25..0.5: deep blue → accent
  // 0.5..0.75: accent → vermillion
  // 0.75..1: vermillion → gold
  if (intensity < 0.25) {
    const t = intensity / 0.25;
    return [lerp(12, 40, t), lerp(12, 50, t), lerp(15, 100, t)];
  } else if (intensity < 0.5) {
    const t = (intensity - 0.25) / 0.25;
    return [lerp(40, 0x7c, t), lerp(50, 0x9d, t), lerp(100, 0xd6, t)];
  } else if (intensity < 0.75) {
    const t = (intensity - 0.5) / 0.25;
    return [lerp(0x7c, 0xd4, t), lerp(0x9d, 0x63, t), lerp(0xd6, 0x4a, t)];
  } else {
    const t = (intensity - 0.75) / 0.25;
    return [lerp(0xd4, 0xc9, t), lerp(0x63, 0xa8, t), lerp(0x4a, 0x4c, t)];
  }
}

@@ -1,4 +1,4 @@
import { useState, useRef, useCallback, useEffect } from 'react';
import { createPortal } from 'react-dom';
import { usePlayer } from '@/features/player/hooks/usePlayer';
import { usePictureInPicture } from '@/features/player/hooks/usePictureInPicture';

@@ -7,6 +7,8 @@ import { useAudioAnalyser } from '@/features/player/hooks/useAudioAnalyser';
import { useAudioNormalization } from '@/features/player/hooks/useAudioNormalization';
import { useMediaSession } from '@/features/player/hooks/useMediaSession';
import { useWakeLock } from '@/features/player/hooks/useWakeLock';
import { useCastSupport } from '@/features/player/hooks/useCastSupport';
import { useAirPlaySupport } from '@/features/player/hooks/useAirPlaySupport';
import { useUIStore } from '@/stores/ui';
import { formatTime } from '@/features/player/services/playerService';
import { PlayerControls } from './PlayerControls';

@@ -53,10 +55,25 @@ export function GlobalPlayer() {
  const displayTrack = currentTrack || IDLE_TRACK;
  const isIdle = !currentTrack;
  const {
    setVideoRef: setPiPVideoRef,
    togglePiP,
    isPiPActive,
    isSupported: isPiPSupported,
    updateTrackInfo: updatePiPTrackInfo,
  } = usePictureInPicture(currentTrack?.cover ?? null);

  // v0.13.4 TASK-APLSH-002/003: Cast & AirPlay detection
  const cast = useCastSupport();
  const airplay = useAirPlaySupport(audioEl);

  // v0.13.4 TASK-APLSH-001: Update PiP canvas when track changes
  useEffect(() => {
    if (currentTrack && isPiPActive) {
      updatePiPTrackInfo({
        title: currentTrack.title,
        artist: currentTrack.artist || 'Unknown Artist',
        cover: currentTrack.cover,
      });
    }
  }, [currentTrack, isPiPActive, updatePiPTrackInfo]);

  useWakeLock(player.isPlaying);
  const SEEK_STEP_SEC = 10;

@@ -94,6 +111,7 @@ export function GlobalPlayer() {
        duration={player.duration}
        onSeek={player.seek}
        player={player}
        audioElement={audioEl}
      />
      <PlayerQueue

@@ -183,6 +201,10 @@ export function GlobalPlayer() {
        pipSupported={isPiPSupported}
        pipActive={isPiPActive}
        onTogglePiP={togglePiP}
        castAvailable={cast.isAvailable}
        onCast={cast.requestSession}
        airplayAvailable={airplay.isAvailable}
        onAirPlay={airplay.showPicker}
      />
    </div>

@@ -6,12 +6,14 @@ import { Slider } from '@/components/ui/slider';
import { Tooltip } from '@/components/ui/tooltip';
import {
  ChevronDown, Heart, MoreHorizontal, Share2,
  Mic2, AlignLeft, Settings2, BarChart3
} from 'lucide-react';
import { PlayPauseButton } from './PlayPauseButton';
import { NextPreviousButtons } from './NextPreviousButtons';
import { RepeatShuffleButtons } from './RepeatShuffleButtons';
import { AudioSettingsPanel } from './AudioSettingsPanel';
import { AudioVisualizer } from './AudioVisualizer';
import { useSpectrumAnalyser } from '../hooks/useSpectrumAnalyser';

interface PlayerExpandedProps {
  isOpen: boolean;

@@ -20,16 +22,21 @@ interface PlayerExpandedProps {
  duration: number;
  onSeek: (time: number) => void;
  player: any; // Using the player hook object
  audioElement?: HTMLAudioElement | null;
}

export function PlayerExpanded({ isOpen, onClose, currentTime, duration, onSeek, player, audioElement }: PlayerExpandedProps) {
  const { currentTrack } = usePlayerStore();
  const [showLyrics, setShowLyrics] = useState(false);
  const [showAudioSettings, setShowAudioSettings] = useState(false);
  const [showVisualizer, setShowVisualizer] = useState(false);
  const [autoScrollLyrics, setAutoScrollLyrics] = useState(true);
  const lyricsScrollRef = useRef<HTMLDivElement>(null);
  const lyrics = currentTrack?.lyrics;

  // v0.13.4 TASK-APLSH-004: Spectrum analyser for the visualizer; only active
  // while the expanded view is open and the visualizer is shown
  const spectrum = useSpectrumAnalyser(audioElement ?? null, player.isPlaying, showVisualizer && isOpen);

  // Auto-scroll lyrics to the active line (must run before the early return - hooks rules)
  useEffect(() => {
    if (!isOpen || !currentTrack || !autoScrollLyrics || !lyrics?.length || !lyricsScrollRef.current) return;

@@ -180,6 +187,17 @@ export function PlayerExpanded({ isOpen, onClose, currentTime, duration, onSeek,
              <Mic2 className="w-5 h-5" />
            </Button>
          </Tooltip>
          <Tooltip content={showVisualizer ? "Hide visualizer" : "Show visualizer"}>
            <Button
              size="icon"
              variant="ghost"
              className={cn("transition-colors", showVisualizer ? "text-primary" : "text-muted-foreground hover:text-foreground")}
              onClick={() => setShowVisualizer(!showVisualizer)}
              aria-label={showVisualizer ? "Hide visualizer" : "Show visualizer"}
            >
              <BarChart3 className="w-5 h-5" />
            </Button>
          </Tooltip>
          <Tooltip content={showAudioSettings ? "Hide audio settings" : "Audio settings"}>
            <Button
              size="icon"

@@ -195,6 +213,24 @@ export function PlayerExpanded({ isOpen, onClose, currentTime, duration, onSeek,
          </div>
        </div>

        {/* Audio Visualizer (v0.13.4) */}
        {showVisualizer && (
          <div
            className={cn(
              "w-full md:flex-1 h-40 md:h-48 rounded-xl border border-white/10 bg-black/30 backdrop-blur-md overflow-hidden",
              "animate-in slide-in-from-bottom-4 duration-300"
            )}
          >
            <AudioVisualizer
              bands={spectrum.bands}
              frequencyData={spectrum.frequencyData}
              waveformData={spectrum.waveformData}
              isPlaying={player.isPlaying}
              className="w-full h-full"
            />
          </div>
        )}

        {/* Audio Settings Panel (v0.13.1) */}
        {showAudioSettings && (
          <div

@@ -1,9 +1,10 @@
/**
 * PlayerBarRight: Volume, waveform, queue, like, PiP, Cast, AirPlay
 * v0.13.4: Added Cast/AirPlay buttons
 * Micro-interactions: hover scale on all buttons
 */
import { Heart, ListMusic, PictureInPicture2, Volume2, VolumeX, Cast, Airplay } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { Slider } from '@/components/ui/slider';
import { AudioWaveform } from './AudioWaveform';

@@ -21,6 +22,10 @@ interface PlayerBarRightProps {
  pipSupported?: boolean;
  pipActive?: boolean;
  onTogglePiP?: () => void;
  castAvailable?: boolean;
  onCast?: () => void;
  airplayAvailable?: boolean;
  onAirPlay?: () => void;
}

const btnClass = 'h-8 w-8 sm:h-9 sm:w-9 rounded-full transition-transform duration-150 active:scale-95';

@@ -37,6 +42,10 @@ export function PlayerBarRight({
  pipSupported,
  pipActive,
  onTogglePiP,
  castAvailable,
  onCast,
  airplayAvailable,
  onAirPlay,
}: PlayerBarRightProps) {
  return (
    <section

@@ -81,6 +90,28 @@ export function PlayerBarRight({
          <PictureInPicture2 className="w-4 h-4" />
        </Button>
      )}
      {castAvailable && onCast && (
        <Button
          variant="ghost"
          size="icon"
          className={cn('hidden md:flex', btnClass, 'text-muted-foreground hover:text-foreground')}
          onClick={onCast}
          aria-label="Cast to device"
        >
          <Cast className="w-4 h-4" />
        </Button>
      )}
      {airplayAvailable && onAirPlay && (
        <Button
          variant="ghost"
          size="icon"
          className={cn('hidden md:flex', btnClass, 'text-muted-foreground hover:text-foreground')}
          onClick={onAirPlay}
          aria-label="AirPlay"
        >
          <Airplay className="w-4 h-4" />
        </Button>
      )}
      <Button
        variant="ghost"
        size="icon"

@@ -0,0 +1,24 @@
import { describe, it, expect } from 'vitest';
import { renderHook } from '@testing-library/react';
import { useAirPlaySupport } from './useAirPlaySupport';

describe('useAirPlaySupport', () => {
  it('should report AirPlay as unavailable when no audio element', () => {
    const { result } = renderHook(() => useAirPlaySupport(null));
    expect(result.current.isAvailable).toBe(false);
  });

  it('should report AirPlay as unavailable on non-Safari browsers', () => {
    const audio = document.createElement('audio');
    const { result } = renderHook(() => useAirPlaySupport(audio));
    expect(result.current.isAvailable).toBe(false);
  });

  it('should provide showPicker callback', () => {
    const audio = document.createElement('audio');
    const { result } = renderHook(() => useAirPlaySupport(audio));
    expect(typeof result.current.showPicker).toBe('function');
    // Should not throw even without Safari support
    result.current.showPicker();
  });
});

@@ -0,0 +1,55 @@
/**
 * useAirPlaySupport: AirPlay availability detection
 * v0.13.4 TASK-APLSH-003: AirPlay support (optional for v1.0)
 *
 * Detects Safari/WebKit AirPlay support on the audio element.
 * Actual AirPlay streaming is deferred; this hook provides detection and the
 * target-picker trigger.
 */
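// WebKit API sketch, for reference (Safari-only; the inline typings below are
// assumptions, since these members are not in the standard DOM lib):
//
//   audio.addEventListener('webkitplaybacktargetavailabilitychanged', (e) => {
//     // e.availability is 'available' or 'not-available'
//   });
//   audio.webkitShowPlaybackTargetPicker(); // call from a user gesture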
import { useCallback, useState, useEffect } from 'react';

interface AirPlaySupport {
  isAvailable: boolean;
  showPicker: () => void;
}

export function useAirPlaySupport(audioElement: HTMLAudioElement | null): AirPlaySupport {
  const [isAvailable, setIsAvailable] = useState(false);

  useEffect(() => {
    if (!audioElement) return;
    // Safari exposes webkitShowPlaybackTargetPicker on media elements
    const hasAirPlay = 'webkitShowPlaybackTargetPicker' in audioElement;
    if (!hasAirPlay) return;
    const onAvailabilityChanged = (event: Event) => {
      const e = event as Event & { availability?: string };
      setIsAvailable(e.availability === 'available');
    };
    audioElement.addEventListener(
      'webkitplaybacktargetavailabilitychanged',
      onAvailabilityChanged,
    );
    return () => {
      audioElement.removeEventListener(
        'webkitplaybacktargetavailabilitychanged',
        onAvailabilityChanged,
      );
    };
  }, [audioElement]);

  const showPicker = useCallback(() => {
    if (!audioElement) return;
    const el = audioElement as HTMLAudioElement & {
      webkitShowPlaybackTargetPicker?: () => void;
    };
    if (el.webkitShowPlaybackTargetPicker) {
      el.webkitShowPlaybackTargetPicker();
    }
  }, [audioElement]);

  return { isAvailable, showPicker };
}

@@ -0,0 +1,32 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { renderHook } from '@testing-library/react';
import { useCastSupport } from './useCastSupport';

describe('useCastSupport', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('should report cast as unavailable by default', () => {
    const { result } = renderHook(() => useCastSupport());
    expect(result.current.isAvailable).toBe(false);
    expect(result.current.isConnected).toBe(false);
  });

  it('should provide requestSession callback', () => {
    const { result } = renderHook(() => useCastSupport());
    expect(typeof result.current.requestSession).toBe('function');
    // Should not throw
    result.current.requestSession();
  });

  it('should detect Chromecast when available', () => {
    (window as Window & { chrome?: { cast?: { isAvailable?: boolean } } }).chrome = {
      cast: { isAvailable: true },
    };
    const { result } = renderHook(() => useCastSupport());
    expect(result.current.isAvailable).toBe(true);
    // Cleanup
    delete (window as Window & { chrome?: unknown }).chrome;
  });
});

@@ -0,0 +1,48 @@
/**
 * useCastSupport: Chromecast availability detection
 * v0.13.4 TASK-APLSH-002: Chromecast support (optional for v1.0)
 *
 * Detects if the Google Cast SDK is available.
 * Actual casting is deferred to a future version; this hook provides the
 * detection and a placeholder action.
 */
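// Loader sketch (an assumption, not part of this commit: it presumes the app
// loads the Cast sender script, e.g. in index.html). The official SDK invokes
// a global callback named __onGCastApiAvailable rather than dispatching a DOM
// event, so a shim like this would bridge it to the CustomEvent this hook
// listens for:
//
//   <script src="https://www.gstatic.com/cv/js/sender/v1/cast_sender.js?loadCastFramework=1"></script>
//   window.__onGCastApiAvailable = (isAvailable: boolean) => {
//     window.dispatchEvent(new CustomEvent('__onGCastApiAvailable', { detail: isAvailable }));
//   };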
import { useEffect, useState } from 'react';

interface CastSupport {
  isAvailable: boolean;
  isConnected: false;
  requestSession: () => void;
}

export function useCastSupport(): CastSupport {
  const [isAvailable, setIsAvailable] = useState(false);

  useEffect(() => {
    // Check for the Google Cast API (it may already be loaded)
    const check = () => {
      const w = window as Window & { chrome?: { cast?: { isAvailable?: boolean } } };
      if (w.chrome?.cast?.isAvailable) {
        setIsAvailable(true);
      }
    };
    check();
    // The Cast SDK loads asynchronously; use a named handler so the cleanup
    // removes the same listener that was added
    const onApiAvailable = (e: Event) => {
      const detail = (e as CustomEvent<boolean>).detail;
      setIsAvailable(!!detail);
    };
    window.addEventListener('__onGCastApiAvailable', onApiAvailable);
    return () => {
      window.removeEventListener('__onGCastApiAvailable', onApiAvailable);
    };
  }, []);

  const requestSession = () => {
    // Stub — full casting implementation deferred to future version
    console.info('[Veza] Chromecast session requested — feature coming soon');
  };

  return { isAvailable, isConnected: false, requestSession };
}

@@ -0,0 +1,68 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { renderHook, act } from '@testing-library/react';
import { usePictureInPicture } from './usePictureInPicture';

describe('usePictureInPicture', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    // Mock PiP support
    Object.defineProperty(document, 'pictureInPictureEnabled', {
      value: true,
      writable: true,
      configurable: true,
    });
    Object.defineProperty(document, 'pictureInPictureElement', {
      value: null,
      writable: true,
      configurable: true,
    });
  });

  it('should report PiP as supported when browser supports it', () => {
    const { result } = renderHook(() => usePictureInPicture('https://example.com/cover.jpg'));
    expect(result.current.isSupported).toBe(true);
    expect(result.current.isPiPActive).toBe(false);
  });

  it('should report PiP as unsupported when not available', () => {
    Object.defineProperty(document, 'pictureInPictureEnabled', {
      value: false,
      configurable: true,
    });
    const { result } = renderHook(() => usePictureInPicture());
    expect(result.current.isSupported).toBe(false);
  });

  it('should provide setVideoRef callback', () => {
    const { result } = renderHook(() => usePictureInPicture('https://example.com/cover.jpg'));
    expect(typeof result.current.setVideoRef).toBe('function');
  });

  it('should provide togglePiP callback', () => {
    const { result } = renderHook(() => usePictureInPicture());
    expect(typeof result.current.togglePiP).toBe('function');
  });

  it('should provide updateTrackInfo callback', () => {
    const { result } = renderHook(() => usePictureInPicture());
    expect(typeof result.current.updateTrackInfo).toBe('function');
  });

  it('should set poster on video element when ref is set', () => {
    const { result } = renderHook(() => usePictureInPicture('https://example.com/cover.jpg'));
    const video = document.createElement('video');
    act(() => {
      result.current.setVideoRef(video);
    });
    expect(video.poster).toBe('https://example.com/cover.jpg');
  });

  it('should set silent video src as fallback', () => {
    const { result } = renderHook(() => usePictureInPicture());
    const video = document.createElement('video');
    act(() => {
      result.current.setVideoRef(video);
    });
    expect(video.src).toContain('data:video/webm');
  });
});

@@ -1,24 +1,158 @@
/**
 * Hook for Picture-in-Picture with canvas-based rich display.
 * v0.13.4 TASK-APLSH-001: Enhanced PiP with track info overlay & dynamic cover updates.
 *
 * Uses a canvas rendered to a captureStream() video for richer PiP content
 * (cover art + track title + artist), falling back to simple poster-based PiP.
 */
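// The core technique, in brief (a sketch; the real flow lives in togglePiP below):
//
//   const stream = canvas.captureStream(4);   // low-fps stream of canvas frames
//   video.srcObject = stream;                 // feed it into a hidden <video>
//   await video.play();
//   await video.requestPictureInPicture();    // the PiP window shows the canvas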
import { useCallback, useEffect, useRef, useState } from 'react';

interface PiPTrackInfo {
  title: string;
  artist: string;
  cover?: string | null;
}

export function usePictureInPicture(coverUrl?: string | null) {
  const videoRef = useRef<HTMLVideoElement | null>(null);
  const canvasRef = useRef<HTMLCanvasElement | null>(null);
  const animFrameRef = useRef<number | null>(null);
  const trackInfoRef = useRef<PiPTrackInfo>({ title: '', artist: '', cover: coverUrl });
  const coverImageRef = useRef<HTMLImageElement | null>(null);
  const [isPiPActive, setIsPiPActive] = useState(false);

  const isSupported =
    typeof document !== 'undefined' &&
    'pictureInPictureEnabled' in document &&
    document.pictureInPictureEnabled;

  // Create the offscreen canvas once
  useEffect(() => {
    if (!isSupported) return;
    const canvas = document.createElement('canvas');
    canvas.width = 320;
    canvas.height = 320;
    canvasRef.current = canvas;
    return () => {
      if (animFrameRef.current) cancelAnimationFrame(animFrameRef.current);
      canvasRef.current = null;
    };
  }, [isSupported]);

  // Render a canvas frame with cover + text
  const renderFrame = useCallback(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const ctx = canvas.getContext('2d');
    if (!ctx || typeof ctx.fillText !== 'function') return;
    const { title, artist } = trackInfoRef.current;
    const img = coverImageRef.current;
    const W = canvas.width;
    const H = canvas.height;
    // Background
    ctx.fillStyle = '#0c0c0f';
    ctx.fillRect(0, 0, W, H);
    // Cover art (centered, with padding)
    if (img && img.complete && img.naturalWidth > 0) {
      const size = 200;
      const x = (W - size) / 2;
      const y = 24;
      ctx.save();
      ctx.beginPath();
      if (typeof ctx.roundRect === 'function') {
        ctx.roundRect(x, y, size, size, 12);
      } else {
        ctx.rect(x, y, size, size);
      }
      ctx.clip();
      ctx.drawImage(img, x, y, size, size);
      ctx.restore();
    }
    // Title
    ctx.fillStyle = '#f0ede8';
    ctx.font = 'bold 16px Inter, system-ui, sans-serif';
    ctx.textAlign = 'center';
    ctx.textBaseline = 'top';
    const titleY = 240;
    const maxTextWidth = W - 32;
    const truncatedTitle = truncateText(ctx, title || 'No track', maxTextWidth);
    ctx.fillText(truncatedTitle, W / 2, titleY);
    // Artist
    ctx.fillStyle = '#a8a4a0';
    ctx.font = '13px Inter, system-ui, sans-serif';
    const truncatedArtist = truncateText(ctx, artist || 'Unknown Artist', maxTextWidth);
    ctx.fillText(truncatedArtist, W / 2, titleY + 24);
  }, []);

  // Repaint the canvas while PiP is active. Rendering at ~4 fps is enough for
  // mostly-static content, and captureStream picks up each repaint.
  useEffect(() => {
    if (!isPiPActive) return;
    const interval = setInterval(renderFrame, 250);
    return () => clearInterval(interval);
  }, [isPiPActive, renderFrame]);

  // Update the cover image when the URL changes
  useEffect(() => {
    trackInfoRef.current.cover = coverUrl;
    if (coverUrl) {
      const img = new Image();
      img.crossOrigin = 'anonymous';
      img.src = coverUrl;
      img.onload = () => {
        coverImageRef.current = img;
        renderFrame();
      };
    } else {
      coverImageRef.current = null;
      renderFrame();
    }
    // Also update the poster on the video element for the simple PiP fallback
    const video = videoRef.current;
    if (video && coverUrl) {
      video.poster = coverUrl;
    }
  }, [coverUrl, renderFrame]);

  const updateTrackInfo = useCallback(
    (info: PiPTrackInfo) => {
      trackInfoRef.current = info;
      if (info.cover && info.cover !== coverImageRef.current?.src) {
        const img = new Image();
        img.crossOrigin = 'anonymous';
        img.src = info.cover;
        img.onload = () => {
          coverImageRef.current = img;
          renderFrame();
        };
      }
      renderFrame();
    },
    [renderFrame],
  );

  const togglePiP = useCallback(async () => {
    const video = videoRef.current;
    if (!video || !isSupported) return;

@@ -28,24 +162,58 @@ export function usePictureInPicture(coverUrl?: string | null) {
        await document.exitPictureInPicture();
        setIsPiPActive(false);
      } else {
        // Try canvas-based PiP first for the richer display
        const canvas = canvasRef.current;
        if (canvas && 'captureStream' in canvas) {
          try {
            renderFrame();
            const stream = (canvas as HTMLCanvasElement & {
              captureStream(fps?: number): MediaStream;
            }).captureStream(4);
            video.srcObject = stream;
            await video.play();
          } catch {
            // Fallback: simple poster-based PiP
            video.srcObject = null;
          }
        }
        await video.requestPictureInPicture();
        setIsPiPActive(true);
      }
    } catch {
      setIsPiPActive(false);
    }
  }, [isSupported, renderFrame]);

  const setVideoRef = useCallback(
    (el: HTMLVideoElement | null) => {
      videoRef.current = el;
      if (el) {
        if (coverUrl) el.poster = coverUrl;
        // Provide a silent video as the fallback src
        if (!el.src && !el.srcObject) {
          const silentVideo =
            'data:video/webm;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwH/w0BZ/5ZQZ+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BA';
          el.src = silentVideo;
        }
        // The listener lives for the element's lifetime (callback ref), so no
        // explicit removal is needed here
        el.addEventListener('leavepictureinpicture', () => {
          setIsPiPActive(false);
          // Clean up the canvas stream
          el.srcObject = null;
        });
      }
    },
    [coverUrl],
  );

  return { setVideoRef, togglePiP, isPiPActive, isSupported, updateTrackInfo };
}

function truncateText(ctx: CanvasRenderingContext2D, text: string, maxWidth: number): string {
  if (ctx.measureText(text).width <= maxWidth) return text;
  let t = text;
  while (t.length > 0 && ctx.measureText(t + '…').width > maxWidth) {
    t = t.slice(0, -1);
  }
  return t + '…';
}

@@ -0,0 +1,130 @@
/**
 * useSpectrumAnalyser: high-resolution frequency data for the visualizer
 * v0.13.4 TASK-APLSH-004: spectrogram & equalizer visualizers
 *
 * Unlike useAudioAnalyser (24 bars for the player bar), this provides:
 * - 64 frequency bands for the equalizer view
 * - Full frequency data array for the spectrogram view
 * - Time-domain waveform data for oscilloscope view
 */
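// Band math, for reference: fftSize = 512 gives frequencyBinCount = 256 bins,
// so with BAND_COUNT = 64 the step below is 256 / 64 = 4 and band i samples
// bin 4i. This is a decimation, not an average; averaging each 4-bin group
// would be a straightforward alternative.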
import { useEffect, useRef, useState, useCallback } from 'react';
import { audioPlayerService } from '../services/playerService';

const BAND_COUNT = 64;
const SMOOTHING = 0.8;

export type VisualizerMode = 'bars' | 'spectrogram' | 'wave';

interface SpectrumData {
  /** Normalized frequency bands [0-1], length = BAND_COUNT */
  bands: number[];
  /** Raw frequency data for spectrogram, length = fftSize/2 */
  frequencyData: Uint8Array;
  /** Time-domain waveform data for oscilloscope */
  waveformData: Uint8Array;
}

const EMPTY_BANDS = Array<number>(BAND_COUNT).fill(0);
const EMPTY_U8 = new Uint8Array(0);
const EMPTY: SpectrumData = { bands: EMPTY_BANDS, frequencyData: EMPTY_U8, waveformData: EMPTY_U8 };

export function useSpectrumAnalyser(
  audioElement: HTMLAudioElement | null,
  isPlaying: boolean,
  active: boolean,
): SpectrumData {
  const [data, setData] = useState<SpectrumData>(EMPTY);
  const ctxRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaElementAudioSourceNode | null>(null);
  const rafRef = useRef<number | null>(null);

  useEffect(() => {
    if (!audioElement || !active) return;
    const setup = () => {
      try {
        const ctx = new AudioContext();
        const source = ctx.createMediaElementSource(audioElement);
        const analyser = ctx.createAnalyser();
        analyser.fftSize = 512;
        analyser.smoothingTimeConstant = SMOOTHING;
        analyser.minDecibels = -70;
        analyser.maxDecibels = -10;
        audioPlayerService.connectAudioGraph(ctx, source);
        // Tap the analyser after the shared gain node when one exists, so the
        // visualizer reflects the normalized signal
        const gainNode = audioPlayerService.getGainNode();
        if (gainNode) {
          source.connect(gainNode);
          gainNode.connect(analyser);
        } else {
          source.connect(analyser);
        }
        analyser.connect(ctx.destination);
        ctxRef.current = ctx;
        analyserRef.current = analyser;
        sourceRef.current = source;
      } catch {
        // Web Audio API not supported, or the element already has a source node
        // (createMediaElementSource can only be called once per element)
      }
    };
    setup();
    return () => {
      if (rafRef.current) cancelAnimationFrame(rafRef.current);
      sourceRef.current?.disconnect();
      analyserRef.current?.disconnect();
      ctxRef.current?.close();
      ctxRef.current = null;
      analyserRef.current = null;
      sourceRef.current = null;
    };
  }, [audioElement, active]);

  const updateData = useCallback(() => {
    const analyser = analyserRef.current;
    if (!analyser) return;
    const freqData = new Uint8Array(analyser.frequencyBinCount);
    const timeData = new Uint8Array(analyser.fftSize);
    analyser.getByteFrequencyData(freqData);
    analyser.getByteTimeDomainData(timeData);
    const step = Math.floor(freqData.length / BAND_COUNT);
    const bands = Array.from({ length: BAND_COUNT }, (_, i) => {
      const idx = Math.min(i * step, freqData.length - 1);
      return (freqData[idx] ?? 0) / 255;
    });
    setData({ bands, frequencyData: freqData, waveformData: timeData });
  }, []);

  useEffect(() => {
    const analyser = analyserRef.current;
    const ctx = ctxRef.current;
    if (!analyser || !ctx || !isPlaying || !active) {
      if (!isPlaying) setData(EMPTY);
      return;
    }
    // Autoplay policies can leave the context suspended until a user gesture
    if (ctx.state === 'suspended') {
      ctx.resume();
    }
    const loop = () => {
      updateData();
      rafRef.current = requestAnimationFrame(loop);
    };
    rafRef.current = requestAnimationFrame(loop);
    return () => {
      if (rafRef.current) cancelAnimationFrame(rafRef.current);
    };
  }, [isPlaying, active, updateData]);

  return data;
}

@@ -37,6 +37,10 @@ export type { MiniPlayerProps } from './components/MiniPlayer';
export { usePlayer } from './hooks/usePlayer';
export { useKeyboardShortcuts } from './hooks/useKeyboardShortcuts';
export type { UseKeyboardShortcutsOptions } from './hooks/useKeyboardShortcuts';
export { useSpectrumAnalyser } from './hooks/useSpectrumAnalyser';
export type { VisualizerMode } from './hooks/useSpectrumAnalyser';
export { useCastSupport } from './hooks/useCastSupport';
export { useAirPlaySupport } from './hooks/useAirPlaySupport';
// Store
export { usePlayerStore } from './store/playerStore';