feat(waveform): add playback cursor showing current position

commit 23c2697424
parent 86829af17d
Author: Hördle Bot
Date:   2025-11-23 01:06:19 +01:00
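
The core of the change: an AudioBufferSourceNode exposes no playback position, so the cursor is derived from the AudioContext clock. When playback starts, the commit stores audioContext.currentTime and the offset into the file in refs, and a requestAnimationFrame loop turns their difference into the current position in seconds. A minimal standalone sketch of that idea (trackPlayback and onPosition are illustrative names, not part of the commit):

// Standalone sketch of the position-tracking approach; in the component the same
// bookkeeping lives in playbackStartTimeRef, playbackOffsetRef and animationFrameRef.
function trackPlayback(
  ctx: AudioContext,
  offsetInFile: number,                  // where playback starts inside the buffer (seconds)
  onPosition: (seconds: number) => void  // e.g. setPlaybackPosition
): () => void {
  const startedAt = ctx.currentTime;     // clock anchor taken when source.start() is called

  let frame: number;
  const tick = () => {
    // elapsed context time maps 1:1 onto progress through the buffer
    onPosition(offsetInFile + (ctx.currentTime - startedAt));
    frame = requestAnimationFrame(tick);
  };
  frame = requestAnimationFrame(tick);

  // cleanup mirrors the cancelAnimationFrame calls added in handleStop and onended
  return () => cancelAnimationFrame(frame);
}

The component feeds the reported position into setPlaybackPosition, and playbackPosition in the draw effect's dependency array triggers the canvas redraw that moves the cursor.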

@@ -18,8 +18,12 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
   const [playingSegment, setPlayingSegment] = useState<number | null>(null);
   const [zoom, setZoom] = useState(1); // 1 = full view, higher = zoomed in
   const [viewOffset, setViewOffset] = useState(0); // Offset in seconds for panning
+  const [playbackPosition, setPlaybackPosition] = useState<number | null>(null); // Current playback position in seconds
   const audioContextRef = useRef<AudioContext | null>(null);
   const sourceRef = useRef<AudioBufferSourceNode | null>(null);
+  const playbackStartTimeRef = useRef<number>(0); // When playback started
+  const playbackOffsetRef = useRef<number>(0); // Offset in the audio file
+  const animationFrameRef = useRef<number | null>(null);

   useEffect(() => {
     const loadAudio = async () => {
@@ -44,6 +48,9 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
       if (sourceRef.current) {
         sourceRef.current.stop();
       }
+      if (animationFrameRef.current) {
+        cancelAnimationFrame(animationFrameRef.current);
+      }
     };
   }, [audioUrl]);
@@ -127,7 +134,29 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
     });
     ctx.setLineDash([]);
-  }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps]);
+
+    // Draw playback cursor
+    if (playbackPosition !== null) {
+      const cursorPx = ((playbackPosition - visibleStart) / visibleDuration) * width;
+      if (cursorPx >= 0 && cursorPx <= width) {
+        ctx.strokeStyle = '#10b981'; // Green
+        ctx.lineWidth = 2;
+        ctx.beginPath();
+        ctx.moveTo(cursorPx, 0);
+        ctx.lineTo(cursorPx, height);
+        ctx.stroke();
+
+        // Draw playhead triangle
+        ctx.fillStyle = '#10b981';
+        ctx.beginPath();
+        ctx.moveTo(cursorPx, 0);
+        ctx.lineTo(cursorPx - 5, 10);
+        ctx.lineTo(cursorPx + 5, 10);
+        ctx.closePath();
+        ctx.fill();
+      }
+    }
+  }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps, playbackPosition]);

   const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
     if (!canvasRef.current || !audioDuration) return;
@@ -151,6 +180,21 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
     sourceRef.current?.stop();
     setIsPlaying(false);
     setPlayingSegment(null);
+    setPlaybackPosition(null);
+    if (animationFrameRef.current) {
+      cancelAnimationFrame(animationFrameRef.current);
+      animationFrameRef.current = null;
+    }
+  };
+
+  const updatePlaybackPosition = () => {
+    if (!audioContextRef.current || !isPlaying) return;
+    const elapsed = audioContextRef.current.currentTime - playbackStartTimeRef.current;
+    const currentPos = playbackOffsetRef.current + elapsed;
+    setPlaybackPosition(currentPos);
+    animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);
   };

   const handlePlaySegment = (segmentIndex: number) => {
@@ -166,14 +210,25 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
     const segmentStart = startTime + (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
     const segmentDuration = unlockSteps[segmentIndex] - (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);

+    playbackStartTimeRef.current = audioContextRef.current.currentTime;
+    playbackOffsetRef.current = segmentStart;
     source.start(0, segmentStart, segmentDuration);
     sourceRef.current = source;
     setIsPlaying(true);
     setPlayingSegment(segmentIndex);
+    setPlaybackPosition(segmentStart);
+    animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);

     source.onended = () => {
       setIsPlaying(false);
       setPlayingSegment(null);
+      setPlaybackPosition(null);
+      if (animationFrameRef.current) {
+        cancelAnimationFrame(animationFrameRef.current);
+        animationFrameRef.current = null;
+      }
     };
   };
@@ -186,12 +241,24 @@ export default function WaveformEditor({ audioUrl, startTime, duration, unlockSt
       const source = audioContextRef.current.createBufferSource();
       source.buffer = audioBuffer;
       source.connect(audioContextRef.current.destination);
+
+      playbackStartTimeRef.current = audioContextRef.current.currentTime;
+      playbackOffsetRef.current = startTime;
       source.start(0, startTime, duration);
       sourceRef.current = source;
       setIsPlaying(true);
+      setPlaybackPosition(startTime);
+      animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);

       source.onended = () => {
         setIsPlaying(false);
+        setPlaybackPosition(null);
+        if (animationFrameRef.current) {
+          cancelAnimationFrame(animationFrameRef.current);
+          animationFrameRef.current = null;
+        }
       };
     }
   };
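
For reference, the cursor drawing converts the position in seconds into a canvas x-coordinate relative to the visible window; visibleStart and visibleDuration are computed from viewOffset and zoom outside the hunks shown here, so their exact derivation is assumed. A sketch of that mapping with a worked example:

// Same mapping as cursorPx in the draw effect; returns null when the cursor is
// off-screen, matching the `cursorPx >= 0 && cursorPx <= width` guard.
function cursorX(
  playbackPosition: number, // seconds into the audio file
  visibleStart: number,     // first visible second at the current zoom/pan
  visibleDuration: number,  // seconds spanned by the canvas at the current zoom
  width: number             // canvas width in pixels
): number | null {
  const x = ((playbackPosition - visibleStart) / visibleDuration) * width;
  return x >= 0 && x <= width ? x : null;
}

// Example: an 800 px canvas showing 30–40 s puts a position of 34 s at
// ((34 - 30) / 10) * 800 = 320 px from the left edge.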