// hoerdle/components/WaveformEditor.tsx
'use client';

import { useEffect, useRef, useState } from 'react';

interface WaveformEditorProps {
  audioUrl: string;
  startTime: number;
  duration: number; // Total puzzle duration (e.g., 60s)
  unlockSteps: number[]; // e.g., [2, 4, 7, 11, 16, 30, 60]
  onStartTimeChange: (newStartTime: number) => void;
}
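
// `unlockSteps` holds cumulative offsets (in seconds) from the selection start,
// so [2, 4, 7] yields three segments: 0–2s, 2–4s and 4–7s into the selection.
// Segment N spans unlockSteps[N-1]..unlockSteps[N], with an implicit 0 before
// the first entry.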

export default function WaveformEditor({ audioUrl, startTime, duration, unlockSteps, onStartTimeChange }: WaveformEditorProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [audioBuffer, setAudioBuffer] = useState<AudioBuffer | null>(null);
  const [audioDuration, setAudioDuration] = useState(0);
  const [isPlaying, setIsPlaying] = useState(false);
  const [playingSegment, setPlayingSegment] = useState<number | null>(null);
  const [zoom, setZoom] = useState(1); // 1 = full view, higher = zoomed in
  const [viewOffset, setViewOffset] = useState(0); // Offset in seconds for panning
  const [playbackPosition, setPlaybackPosition] = useState<number | null>(null); // Current playback position in seconds
  const [hoverPreviewTime, setHoverPreviewTime] = useState<number | null>(null); // Preview position on hover
  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceRef = useRef<AudioBufferSourceNode | null>(null);
  const playbackStartTimeRef = useRef<number>(0); // When playback started
  const playbackOffsetRef = useRef<number>(0); // Offset in the audio file
  const animationFrameRef = useRef<number | null>(null);

  // Fetch and decode the audio file whenever the URL changes
  useEffect(() => {
    const loadAudio = async () => {
      try {
        const response = await fetch(audioUrl);
        const arrayBuffer = await response.arrayBuffer();
        const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
        audioContextRef.current = audioContext;
        const buffer = await audioContext.decodeAudioData(arrayBuffer);
        setAudioBuffer(buffer);
        setAudioDuration(buffer.duration);
      } catch (error) {
        console.error('Error loading audio:', error);
      }
    };
    loadAudio();
    return () => {
      if (sourceRef.current) {
        sourceRef.current.stop();
      }
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      // Release the previous AudioContext when the URL changes or the editor unmounts
      audioContextRef.current?.close();
    };
  }, [audioUrl]);

  // Redraw the waveform, selection overlay, segment markers, hover preview and playback cursor
  useEffect(() => {
    if (!audioBuffer || !canvasRef.current) return;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;
    const width = canvas.width;
    const height = canvas.height;

    // Calculate visible range based on zoom and offset
    const visibleDuration = audioDuration / zoom;
    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));

    // Clear canvas
    ctx.fillStyle = '#f3f4f6';
    ctx.fillRect(0, 0, width, height);

    // Draw waveform for the visible range: one min/max peak column per pixel
    const data = audioBuffer.getChannelData(0);
    const samplesPerPixel = Math.ceil((data.length * visibleDuration / audioDuration) / width);
    const startSample = Math.floor(data.length * visibleStart / audioDuration);
    const amp = height / 2;
    ctx.fillStyle = '#4f46e5';
    for (let i = 0; i < width; i++) {
      let min = 1.0;
      let max = -1.0;
      const sampleIndex = startSample + (i * samplesPerPixel);
      for (let j = 0; j < samplesPerPixel && sampleIndex + j < data.length; j++) {
        const datum = data[sampleIndex + j];
        if (datum < min) min = datum;
        if (datum > max) max = datum;
      }
      ctx.fillRect(i, (1 + min) * amp, 1, Math.max(1, (max - min) * amp));
    }

    // Draw selection overlay
    const selectionStartPx = ((startTime - visibleStart) / visibleDuration) * width;
    const selectionWidthPx = (duration / visibleDuration) * width;
    if (selectionStartPx + selectionWidthPx > 0 && selectionStartPx < width) {
      ctx.fillStyle = 'rgba(79, 70, 229, 0.3)';
      ctx.fillRect(Math.max(0, selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height);
      // Draw selection borders
      ctx.strokeStyle = '#4f46e5';
      ctx.lineWidth = 2;
      ctx.strokeRect(Math.max(0, selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height);
    }

    // Draw segment markers (dashed vertical lines at each segment start)
    ctx.strokeStyle = '#ef4444';
    ctx.lineWidth = 1;
    ctx.setLineDash([5, 5]);
    let cumulativeTime = 0;
    unlockSteps.forEach((step, index) => {
      const segmentTime = startTime + cumulativeTime;
      const segmentPx = ((segmentTime - visibleStart) / visibleDuration) * width;
      if (segmentPx >= 0 && segmentPx <= width) {
        ctx.beginPath();
        ctx.moveTo(segmentPx, 0);
        ctx.lineTo(segmentPx, height);
        ctx.stroke();
        // Draw segment number
        ctx.setLineDash([]);
        ctx.fillStyle = '#ef4444';
        ctx.font = 'bold 12px sans-serif';
        ctx.fillText(`${index + 1}`, segmentPx + 3, 15);
        ctx.setLineDash([5, 5]);
      }
      cumulativeTime = step;
    });
    ctx.setLineDash([]);

    // Draw hover preview (semi-transparent)
    if (hoverPreviewTime !== null) {
      const previewStartPx = ((hoverPreviewTime - visibleStart) / visibleDuration) * width;
      const previewWidthPx = (duration / visibleDuration) * width;
      if (previewStartPx + previewWidthPx > 0 && previewStartPx < width) {
        ctx.fillStyle = 'rgba(16, 185, 129, 0.2)'; // Light green
        ctx.fillRect(Math.max(0, previewStartPx), 0, Math.min(previewWidthPx, width - previewStartPx), height);
        // Draw preview borders
        ctx.strokeStyle = '#10b981';
        ctx.lineWidth = 2;
        ctx.setLineDash([5, 5]);
        ctx.strokeRect(Math.max(0, previewStartPx), 0, Math.min(previewWidthPx, width - previewStartPx), height);
        ctx.setLineDash([]);
      }
    }

    // Draw playback cursor
    if (playbackPosition !== null) {
      const cursorPx = ((playbackPosition - visibleStart) / visibleDuration) * width;
      if (cursorPx >= 0 && cursorPx <= width) {
        ctx.strokeStyle = '#10b981'; // Green
        ctx.lineWidth = 2;
        ctx.beginPath();
        ctx.moveTo(cursorPx, 0);
        ctx.lineTo(cursorPx, height);
        ctx.stroke();
        // Draw playhead triangle
        ctx.fillStyle = '#10b981';
        ctx.beginPath();
        ctx.moveTo(cursorPx, 0);
        ctx.lineTo(cursorPx - 5, 10);
        ctx.lineTo(cursorPx + 5, 10);
        ctx.closePath();
        ctx.fill();
      }
    }
  }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps, playbackPosition, hoverPreviewTime]);

  const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!canvasRef.current || !audioDuration) return;
    const rect = canvasRef.current.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const visibleDuration = audioDuration / zoom;
    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
    const clickedTime = visibleStart + (x / rect.width) * visibleDuration;
    // Center the selection on the clicked point
    let newStartTime = clickedTime - (duration / 2);
    // Clamp to valid range
    newStartTime = Math.max(0, Math.min(newStartTime, audioDuration - duration));
    onStartTimeChange(Math.floor(newStartTime));
  };
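
  // Worked example for handleCanvasClick: with a 180s file at zoom 2 and
  // viewOffset 30, the window shows 90s starting at 30s; a click at the canvas
  // midpoint maps to 30 + 0.5 * 90 = 75s, so a 60s selection centered there is
  // clamped to start at 45s.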

  const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!canvasRef.current || !audioDuration) return;
    const rect = canvasRef.current.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const visibleDuration = audioDuration / zoom;
    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
    const hoveredTime = visibleStart + (x / rect.width) * visibleDuration;
    // Calculate where the selection would be centered on this point
    let previewStartTime = hoveredTime - (duration / 2);
    previewStartTime = Math.max(0, Math.min(previewStartTime, audioDuration - duration));
    setHoverPreviewTime(previewStartTime);
  };

  const handleCanvasMouseLeave = () => {
    setHoverPreviewTime(null);
  };

  const stopPlayback = () => {
    if (sourceRef.current) {
      // Detach onended first so the old source can't clear state for a newly started one
      sourceRef.current.onended = null;
      sourceRef.current.stop();
      sourceRef.current = null;
    }
    setIsPlaying(false);
    setPlayingSegment(null);
    setPlaybackPosition(null);
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = null;
    }
  };

  // Animation loop for the playback cursor, driven by the AudioContext clock
  useEffect(() => {
    if (!isPlaying || !audioContextRef.current) {
      return;
    }
    const animate = () => {
      if (!audioContextRef.current || !isPlaying) return;
      const elapsed = audioContextRef.current.currentTime - playbackStartTimeRef.current;
      const currentPos = playbackOffsetRef.current + elapsed;
      setPlaybackPosition(currentPos);
      animationFrameRef.current = requestAnimationFrame(animate);
    };
    animationFrameRef.current = requestAnimationFrame(animate);
    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, [isPlaying]);

  const handlePlaySegment = (segmentIndex: number) => {
    if (!audioBuffer || !audioContextRef.current) return;
    stopPlayback();
    // Autoplay policies may leave the context suspended until a user gesture
    if (audioContextRef.current.state === 'suspended') {
      void audioContextRef.current.resume();
    }
    const source = audioContextRef.current.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioContextRef.current.destination);
    // Calculate segment start and duration from the cumulative unlock steps
    const segmentStart = startTime + (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
    const segmentDuration = unlockSteps[segmentIndex] - (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
    playbackStartTimeRef.current = audioContextRef.current.currentTime;
    playbackOffsetRef.current = segmentStart;
    source.start(0, segmentStart, segmentDuration);
    sourceRef.current = source;
    setIsPlaying(true);
    setPlayingSegment(segmentIndex);
    setPlaybackPosition(segmentStart);
    source.onended = () => {
      setIsPlaying(false);
      setPlayingSegment(null);
      setPlaybackPosition(null);
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
    };
  };
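
  // Worked example for handlePlaySegment: with startTime 45 and unlockSteps
  // [2, 4, 7, 11, 16, 30, 60], segment index 2 starts at 45 + 4 = 49s into the
  // file and plays for 7 - 4 = 3 seconds.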

  const handlePlayFull = () => {
    if (!audioBuffer || !audioContextRef.current) return;
    if (isPlaying) {
      stopPlayback();
    } else {
      // Autoplay policies may leave the context suspended until a user gesture
      if (audioContextRef.current.state === 'suspended') {
        void audioContextRef.current.resume();
      }
      const source = audioContextRef.current.createBufferSource();
      source.buffer = audioBuffer;
      source.connect(audioContextRef.current.destination);
      playbackStartTimeRef.current = audioContextRef.current.currentTime;
      playbackOffsetRef.current = startTime;
      source.start(0, startTime, duration);
      sourceRef.current = source;
      setIsPlaying(true);
      setPlaybackPosition(startTime);
      source.onended = () => {
        setIsPlaying(false);
        setPlaybackPosition(null);
        if (animationFrameRef.current) {
          cancelAnimationFrame(animationFrameRef.current);
          animationFrameRef.current = null;
        }
      };
    }
  };

  const handleZoomIn = () => setZoom(prev => Math.min(prev * 1.5, 10));
  const handleZoomOut = () => setZoom(prev => Math.max(prev / 1.5, 1));

  const handlePanLeft = () => {
    const visibleDuration = audioDuration / zoom;
    setViewOffset(prev => Math.max(0, prev - visibleDuration * 0.2));
  };

  const handlePanRight = () => {
    const visibleDuration = audioDuration / zoom;
    setViewOffset(prev => Math.min(audioDuration - visibleDuration, prev + visibleDuration * 0.2));
  };
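
  // Worked example for panning: a 180s file at zoom 1.5 shows 180 / 1.5 = 120s,
  // each pan step moves the view by 0.2 * 120 = 24s, and viewOffset stays
  // clamped to [0, 180 - 120] = [0, 60].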

  return (
    <div style={{ marginTop: '1rem' }}>
      {/* Zoom and Pan Controls */}
      <div style={{ marginBottom: '0.5rem', display: 'flex', gap: '0.5rem', alignItems: 'center' }}>
        <button
          onClick={handleZoomOut}
          disabled={zoom <= 1}
          style={{
            padding: '0.25rem 0.5rem',
            background: zoom <= 1 ? '#e5e7eb' : '#4f46e5',
            color: zoom <= 1 ? '#9ca3af' : 'white',
            border: 'none',
            borderRadius: '0.25rem',
            cursor: zoom <= 1 ? 'not-allowed' : 'pointer',
            fontSize: '0.875rem'
          }}
        >
          🔍−
        </button>
        <button
          onClick={handleZoomIn}
          disabled={zoom >= 10}
          style={{
            padding: '0.25rem 0.5rem',
            background: zoom >= 10 ? '#e5e7eb' : '#4f46e5',
            color: zoom >= 10 ? '#9ca3af' : 'white',
            border: 'none',
            borderRadius: '0.25rem',
            cursor: zoom >= 10 ? 'not-allowed' : 'pointer',
            fontSize: '0.875rem'
          }}
        >
          🔍+
        </button>
        <span style={{ fontSize: '0.75rem', color: '#666' }}>Zoom: {zoom.toFixed(1)}x</span>
        {zoom > 1 && (
          <>
            <button onClick={handlePanLeft} style={{ padding: '0.25rem 0.5rem', background: '#4f46e5', color: 'white', border: 'none', borderRadius: '0.25rem', cursor: 'pointer', fontSize: '0.875rem' }}>
              ←
            </button>
            <button onClick={handlePanRight} style={{ padding: '0.25rem 0.5rem', background: '#4f46e5', color: 'white', border: 'none', borderRadius: '0.25rem', cursor: 'pointer', fontSize: '0.875rem' }}>
              →
            </button>
          </>
        )}
      </div>

      <canvas
        ref={canvasRef}
        width={800}
        height={150}
        onClick={handleCanvasClick}
        onMouseMove={handleCanvasMouseMove}
        onMouseLeave={handleCanvasMouseLeave}
        style={{
          width: '100%',
          height: 'auto',
          cursor: 'pointer',
          border: '1px solid #e5e7eb',
          borderRadius: '0.5rem'
        }}
      />

      {/* Playback Controls */}
      <div style={{ marginTop: '1rem', display: 'flex', gap: '1rem', alignItems: 'center', flexWrap: 'wrap' }}>
        <button
          onClick={handlePlayFull}
          style={{
            padding: '0.5rem 1rem',
            background: '#4f46e5',
            color: 'white',
            border: 'none',
            borderRadius: '0.5rem',
            cursor: 'pointer',
            fontWeight: 'bold'
          }}
        >
          {isPlaying && playingSegment === null ? '⏹ Stop' : '▶ Play Full Selection'}
        </button>
        <div style={{ fontSize: '0.875rem', color: '#666' }}>
          Start: {startTime}s | Duration: {duration}s | Total: {Math.floor(audioDuration)}s
        </div>
      </div>

      {/* Segment Playback Buttons */}
      <div style={{ marginTop: '1rem', display: 'flex', gap: '0.5rem', flexWrap: 'wrap' }}>
        <span style={{ fontSize: '0.875rem', color: '#666', marginRight: '0.5rem' }}>Play Segments:</span>
        {unlockSteps.map((step, index) => {
          const segmentStart = index > 0 ? unlockSteps[index - 1] : 0;
          const segmentDuration = step - segmentStart;
          return (
            <button
              key={index}
              onClick={() => handlePlaySegment(index)}
              style={{
                padding: '0.25rem 0.75rem',
                background: playingSegment === index ? '#ef4444' : '#f3f4f6',
                color: playingSegment === index ? 'white' : '#374151',
                border: '1px solid #e5e7eb',
                borderRadius: '0.25rem',
                cursor: 'pointer',
                fontSize: '0.75rem',
                fontWeight: playingSegment === index ? 'bold' : 'normal'
              }}
            >
              {index + 1} ({segmentDuration}s)
            </button>
          );
        })}
      </div>
    </div>
  );
}
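
// Hypothetical usage sketch (the parent component is not part of this file;
// the state name and audio URL below are illustrative only):
//
//   const [start, setStart] = useState(0);
//   <WaveformEditor
//     audioUrl="/audio/today.mp3"
//     startTime={start}
//     duration={60}
//     unlockSteps={[2, 4, 7, 11, 16, 30, 60]}
//     onStartTimeChange={setStart}
//   />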