// Changelog:
// TASK-APLSH-001: Enhanced PiP with canvas-based display showing cover art + track info
// TASK-APLSH-002: Chromecast detection hook (useCastSupport) — full casting deferred
// TASK-APLSH-003: AirPlay detection hook (useAirPlaySupport) — Safari target picker
// TASK-APLSH-004: AudioVisualizer component with 3 modes (bars/wave/spectrogram)
//   - useSpectrumAnalyser hook (64 bands, high-res FFT)
//   - Canvas-based rendering with SUMI color palette
//   - Integrated into PlayerExpanded with toggle button
/**
 * Hook for Picture-in-Picture with canvas-based rich display.
 * v0.13.4 TASK-APLSH-001: Enhanced PiP with track info overlay & dynamic cover updates.
 *
 * Uses a canvas rendered to a captureStream video for richer PiP content
 * (cover art + track title + artist), falling back to simple poster-based PiP.
 */
|
|
|
|
import { useCallback, useEffect, useRef, useState } from 'react';
|
|
|
|
/** Metadata rendered onto the PiP canvas for the currently playing track. */
interface PiPTrackInfo {
  /** Track title drawn on the canvas (falls back to 'No track' when empty). */
  title: string;
  /** Artist name drawn below the title (falls back to 'Unknown Artist' when empty). */
  artist: string;
  /** Cover-art URL; null/undefined clears the cover image. */
  cover?: string | null;
}
|
|
|
|
export function usePictureInPicture(coverUrl?: string | null) {
|
|
const videoRef = useRef<HTMLVideoElement | null>(null);
|
|
const canvasRef = useRef<HTMLCanvasElement | null>(null);
|
|
const animFrameRef = useRef<number | null>(null);
|
|
const trackInfoRef = useRef<PiPTrackInfo>({ title: '', artist: '', cover: coverUrl });
|
|
const coverImageRef = useRef<HTMLImageElement | null>(null);
|
|
const [isPiPActive, setIsPiPActive] = useState(false);
|
|
|
|
const isSupported =
|
|
typeof document !== 'undefined' &&
|
|
'pictureInPictureEnabled' in document &&
|
|
document.pictureInPictureEnabled;
|
|
|
|
// Create offscreen canvas once
|
|
useEffect(() => {
|
|
if (!isSupported) return;
|
|
const canvas = document.createElement('canvas');
|
|
canvas.width = 320;
|
|
canvas.height = 320;
|
|
canvasRef.current = canvas;
|
|
|
|
return () => {
|
|
if (animFrameRef.current) cancelAnimationFrame(animFrameRef.current);
|
|
canvasRef.current = null;
|
|
};
|
|
}, [isSupported]);
|
|
|
|
// Render canvas frame with cover + text
|
|
const renderFrame = useCallback(() => {
|
|
const canvas = canvasRef.current;
|
|
if (!canvas) return;
|
|
const ctx = canvas.getContext('2d');
|
|
if (!ctx || typeof ctx.fillText !== 'function') return;
|
|
|
|
const { title, artist } = trackInfoRef.current;
|
|
const img = coverImageRef.current;
|
|
const W = canvas.width;
|
|
const H = canvas.height;
|
|
|
|
// Background
|
|
ctx.fillStyle = '#0c0c0f';
|
|
ctx.fillRect(0, 0, W, H);
|
|
|
|
// Cover art (centered, with padding)
|
|
if (img && img.complete && img.naturalWidth > 0) {
|
|
const size = 200;
|
|
const x = (W - size) / 2;
|
|
const y = 24;
|
|
ctx.save();
|
|
ctx.beginPath();
|
|
if (typeof ctx.roundRect === 'function') {
|
|
ctx.roundRect(x, y, size, size, 12);
|
|
} else {
|
|
ctx.rect(x, y, size, size);
|
|
}
|
|
ctx.clip();
|
|
ctx.drawImage(img, x, y, size, size);
|
|
ctx.restore();
|
|
}
|
|
|
|
// Title
|
|
ctx.fillStyle = '#f0ede8';
|
|
ctx.font = 'bold 16px Inter, system-ui, sans-serif';
|
|
ctx.textAlign = 'center';
|
|
ctx.textBaseline = 'top';
|
|
const titleY = 240;
|
|
const maxTextWidth = W - 32;
|
|
const truncatedTitle = truncateText(ctx, title || 'No track', maxTextWidth);
|
|
ctx.fillText(truncatedTitle, W / 2, titleY);
|
|
|
|
// Artist
|
|
ctx.fillStyle = '#a8a4a0';
|
|
ctx.font = '13px Inter, system-ui, sans-serif';
|
|
const truncatedArtist = truncateText(ctx, artist || 'Unknown Artist', maxTextWidth);
|
|
ctx.fillText(truncatedArtist, W / 2, titleY + 24);
|
|
}, []);
|
|
|
|
// Animate canvas → video when PiP is active
|
|
useEffect(() => {
|
|
if (!isPiPActive) {
|
|
if (animFrameRef.current) cancelAnimationFrame(animFrameRef.current);
|
|
return;
|
|
}
|
|
|
|
const loop = () => {
|
|
renderFrame();
|
|
animFrameRef.current = requestAnimationFrame(loop);
|
|
};
|
|
// Render at ~4fps is enough for mostly-static content
|
|
const interval = setInterval(() => {
|
|
renderFrame();
|
|
}, 250);
|
|
|
|
return () => {
|
|
clearInterval(interval);
|
|
if (animFrameRef.current) cancelAnimationFrame(animFrameRef.current);
|
|
};
|
|
}, [isPiPActive, renderFrame]);
|
|
|
|
// Update cover image when URL changes
|
|
useEffect(() => {
|
|
trackInfoRef.current.cover = coverUrl;
|
|
if (coverUrl) {
|
|
const img = new Image();
|
|
img.crossOrigin = 'anonymous';
|
|
img.src = coverUrl;
|
|
img.onload = () => {
|
|
coverImageRef.current = img;
|
|
renderFrame();
|
|
};
|
|
} else {
|
|
coverImageRef.current = null;
|
|
renderFrame();
|
|
}
|
|
|
|
// Also update poster on the video element for simple PiP fallback
|
|
const video = videoRef.current;
|
|
if (video && coverUrl) {
|
|
video.poster = coverUrl;
|
|
}
|
|
}, [coverUrl, renderFrame]);
|
|
|
|
const updateTrackInfo = useCallback(
|
|
(info: PiPTrackInfo) => {
|
|
trackInfoRef.current = info;
|
|
if (info.cover && info.cover !== coverImageRef.current?.src) {
|
|
const img = new Image();
|
|
img.crossOrigin = 'anonymous';
|
|
img.src = info.cover;
|
|
img.onload = () => {
|
|
coverImageRef.current = img;
|
|
renderFrame();
|
|
};
|
|
}
|
|
renderFrame();
|
|
},
|
|
[renderFrame],
|
|
);
|
|
|
|
const togglePiP = useCallback(async () => {
|
|
const video = videoRef.current;
|
|
if (!video || !isSupported) return;
|
|
|
|
try {
|
|
if (document.pictureInPictureElement) {
|
|
await document.exitPictureInPicture();
|
|
setIsPiPActive(false);
|
|
} else {
|
|
// Try canvas-based PiP first for richer display
|
|
const canvas = canvasRef.current;
|
|
if (canvas && 'captureStream' in canvas) {
|
|
try {
|
|
renderFrame();
|
|
const stream = (canvas as HTMLCanvasElement & { captureStream(fps?: number): MediaStream }).captureStream(4);
|
|
video.srcObject = stream;
|
|
await video.play();
|
|
} catch {
|
|
// Fallback: simple poster-based PiP
|
|
video.srcObject = null;
|
|
}
|
|
}
|
|
|
|
await video.requestPictureInPicture();
|
|
setIsPiPActive(true);
|
|
}
|
|
} catch {
|
|
setIsPiPActive(false);
|
|
}
|
|
}, [isSupported, renderFrame]);
|
|
|
|
const setVideoRef = useCallback(
|
|
(el: HTMLVideoElement | null) => {
|
|
videoRef.current = el;
|
|
if (el) {
|
|
if (coverUrl) el.poster = coverUrl;
|
|
// Provide silent video as fallback src
|
|
if (!el.src && !el.srcObject) {
|
|
const silentVideo =
|
|
'data:video/webm;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwH/w0BZ/5ZQZ+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BAeBhkO+BA';
|
|
el.src = silentVideo;
|
|
}
|
|
|
|
el.addEventListener('leavepictureinpicture', () => {
|
|
setIsPiPActive(false);
|
|
// Clean up canvas stream
|
|
el.srcObject = null;
|
|
});
|
|
}
|
|
},
|
|
[coverUrl],
|
|
);
|
|
|
|
return { setVideoRef, togglePiP, isPiPActive, isSupported, updateTrackInfo };
|
|
}
|
|
|
|
function truncateText(ctx: CanvasRenderingContext2D, text: string, maxWidth: number): string {
|
|
if (ctx.measureText(text).width <= maxWidth) return text;
|
|
let t = text;
|
|
while (t.length > 0 && ctx.measureText(t + '…').width > maxWidth) {
|
|
t = t.slice(0, -1);
|
|
}
|
|
return t + '…';
|
|
}
|