'use client';

import { useEffect, useRef, useState } from 'react';

interface WaveformEditorProps {
  audioUrl: string;
  startTime: number;
  duration: number; // Total puzzle duration (e.g., 60s)
  unlockSteps: number[]; // e.g., [2, 4, 7, 11, 16, 30, 60]
  onStartTimeChange: (newStartTime: number) => void;
}

export default function WaveformEditor({
  audioUrl,
  startTime,
  duration,
  unlockSteps,
  onStartTimeChange,
}: WaveformEditorProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [audioBuffer, setAudioBuffer] = useState<AudioBuffer | null>(null);
  const [audioDuration, setAudioDuration] = useState(0);
  const [isPlaying, setIsPlaying] = useState(false);
  const [playingSegment, setPlayingSegment] = useState<number | null>(null);
  const [zoom, setZoom] = useState(1); // 1 = full view, higher = zoomed in
  const [viewOffset, setViewOffset] = useState(0); // Offset in seconds for panning
  const [playbackPosition, setPlaybackPosition] = useState<number | null>(null); // Current playback position in seconds

  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceRef = useRef<AudioBufferSourceNode | null>(null);
  const playbackStartTimeRef = useRef(0); // AudioContext time when playback started
  const playbackOffsetRef = useRef(0); // Offset into the audio file when playback started
  const animationFrameRef = useRef<number | null>(null);

  // Fetch and decode the audio file whenever the URL changes
  useEffect(() => {
    const loadAudio = async () => {
      try {
        const response = await fetch(audioUrl);
        const arrayBuffer = await response.arrayBuffer();
        const audioContext = new (window.AudioContext ||
          (window as any).webkitAudioContext)();
        audioContextRef.current = audioContext;
        const buffer = await audioContext.decodeAudioData(arrayBuffer);
        setAudioBuffer(buffer);
        setAudioDuration(buffer.duration);
      } catch (error) {
        console.error('Error loading audio:', error);
      }
    };
    loadAudio();

    return () => {
      if (sourceRef.current) {
        sourceRef.current.stop();
      }
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      // Release the old AudioContext so repeated URL changes don't leak contexts
      audioContextRef.current?.close();
    };
  }, [audioUrl]);

  // Redraw the waveform, selection overlay, segment markers, and playback cursor
  useEffect(() => {
    if (!audioBuffer || !canvasRef.current) return;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const width = canvas.width;
    const height = canvas.height;

    // Calculate visible range based on zoom and offset
    const visibleDuration = audioDuration / zoom;
    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));

    // Clear canvas
    ctx.fillStyle = '#f3f4f6';
    ctx.fillRect(0, 0, width, height);

    // Draw waveform for visible range: each pixel column shows the min/max
    // sample range of the audio it covers
    const data = audioBuffer.getChannelData(0);
    const samplesPerPixel = Math.ceil((data.length * visibleDuration / audioDuration) / width);
    const startSample = Math.floor(data.length * visibleStart / audioDuration);
    const amp = height / 2;
    ctx.fillStyle = '#4f46e5';
    for (let i = 0; i < width; i++) {
      let min = 1.0;
      let max = -1.0;
      const sampleIndex = startSample + i * samplesPerPixel;
      for (let j = 0; j < samplesPerPixel && sampleIndex + j < data.length; j++) {
        const datum = data[sampleIndex + j];
        if (datum < min) min = datum;
        if (datum > max) max = datum;
      }
      ctx.fillRect(i, (1 + min) * amp, 1, Math.max(1, (max - min) * amp));
    }

    // Draw selection overlay
    const selectionStartPx = ((startTime - visibleStart) / visibleDuration) * width;
    const selectionWidthPx = (duration / visibleDuration) * width;
    if (selectionStartPx + selectionWidthPx > 0 && selectionStartPx < width) {
      ctx.fillStyle = 'rgba(79, 70, 229, 0.3)';
      ctx.fillRect(
        Math.max(0, selectionStartPx),
        0,
        Math.min(selectionWidthPx, width - selectionStartPx),
        height
      );
      // Draw selection borders
      ctx.strokeStyle = '#4f46e5';
      ctx.lineWidth = 2;
      ctx.strokeRect(
        Math.max(0, selectionStartPx),
        0,
        Math.min(selectionWidthPx, width - selectionStartPx),
        height
      );
    }

    // Draw segment markers (vertical dashed lines at each segment's start)
    ctx.strokeStyle = '#ef4444';
    ctx.lineWidth = 1;
    ctx.setLineDash([5, 5]);
    let cumulativeTime = 0;
    unlockSteps.forEach((step, index) => {
      const segmentTime = startTime + cumulativeTime;
      const segmentPx = ((segmentTime - visibleStart) / visibleDuration) * width;
      if (segmentPx >= 0 && segmentPx <= width) {
        ctx.beginPath();
        ctx.moveTo(segmentPx, 0);
        ctx.lineTo(segmentPx, height);
        ctx.stroke();
        // Draw segment number
        ctx.setLineDash([]);
        ctx.fillStyle = '#ef4444';
        ctx.font = 'bold 12px sans-serif';
        ctx.fillText(`${index + 1}`, segmentPx + 3, 15);
        ctx.setLineDash([5, 5]);
      }
      cumulativeTime = step;
    });
    ctx.setLineDash([]);

    // Draw playback cursor
    if (playbackPosition !== null) {
      const cursorPx = ((playbackPosition - visibleStart) / visibleDuration) * width;
      if (cursorPx >= 0 && cursorPx <= width) {
        ctx.strokeStyle = '#10b981'; // Green
        ctx.lineWidth = 2;
        ctx.beginPath();
        ctx.moveTo(cursorPx, 0);
        ctx.lineTo(cursorPx, height);
        ctx.stroke();
        // Draw playhead triangle
        ctx.fillStyle = '#10b981';
        ctx.beginPath();
        ctx.moveTo(cursorPx, 0);
        ctx.lineTo(cursorPx - 5, 10);
        ctx.lineTo(cursorPx + 5, 10);
        ctx.closePath();
        ctx.fill();
      }
    }
  }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps, playbackPosition]);

  // Clicking the canvas centers the selection window on the clicked time
  const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!canvasRef.current || !audioDuration) return;
    const rect = canvasRef.current.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const visibleDuration = audioDuration / zoom;
    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
    const clickedTime = visibleStart + (x / rect.width) * visibleDuration;
    // Center the selection on the clicked point
    let newStartTime = clickedTime - duration / 2;
    // Clamp to valid range
    newStartTime = Math.max(0, Math.min(newStartTime, audioDuration - duration));
    onStartTimeChange(Math.floor(newStartTime));
  };

  const stopPlayback = () => {
    if (sourceRef.current) {
      // Detach onended before stopping so a stale handler can't clobber the
      // state of a new source started immediately after this call
      sourceRef.current.onended = null;
      sourceRef.current.stop();
      sourceRef.current = null;
    }
    setIsPlaying(false);
    setPlayingSegment(null);
    setPlaybackPosition(null);
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = null;
    }
  };

  // Animation loop for playback cursor
  useEffect(() => {
    if (!isPlaying || !audioContextRef.current) {
      return;
    }
    const animate = () => {
      if (!audioContextRef.current || !isPlaying) return;
      const elapsed = audioContextRef.current.currentTime - playbackStartTimeRef.current;
      setPlaybackPosition(playbackOffsetRef.current + elapsed);
      animationFrameRef.current = requestAnimationFrame(animate);
    };
    animationFrameRef.current = requestAnimationFrame(animate);
    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, [isPlaying]);

  const handlePlaySegment = (segmentIndex: number) => {
    if (!audioBuffer || !audioContextRef.current) return;
    stopPlayback();

    const source = audioContextRef.current.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioContextRef.current.destination);

    // Segment N spans from the previous unlock step to this one, offset by the
    // puzzle's start time (unlockSteps holds cumulative seconds)
    const segmentStart = startTime + (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
    const segmentDuration =
      unlockSteps[segmentIndex] - (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);

    playbackStartTimeRef.current = audioContextRef.current.currentTime;
    playbackOffsetRef.current = segmentStart;
    source.start(0, segmentStart, segmentDuration);
    sourceRef.current = source;
    setIsPlaying(true);
    setPlayingSegment(segmentIndex);
    setPlaybackPosition(segmentStart);

    source.onended = () => {
      setIsPlaying(false);
      setPlayingSegment(null);
      setPlaybackPosition(null);
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
    };
  };

  const handlePlayFull = () => {
    if (!audioBuffer || !audioContextRef.current) return;
    if (isPlaying) {
      stopPlayback();
      return;
    }

    const source = audioContextRef.current.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioContextRef.current.destination);
    playbackStartTimeRef.current = audioContextRef.current.currentTime;
    playbackOffsetRef.current = startTime;
    source.start(0, startTime, duration);
    sourceRef.current = source;
    setIsPlaying(true);
    setPlaybackPosition(startTime);

    source.onended = () => {
      setIsPlaying(false);
      setPlaybackPosition(null);
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
        animationFrameRef.current = null;
      }
    };
  };

  const handleZoomIn = () => setZoom(prev => Math.min(prev * 1.5, 10));
  const handleZoomOut = () => setZoom(prev => Math.max(prev / 1.5, 1));
  const handlePanLeft = () => {
    const visibleDuration = audioDuration / zoom;
    setViewOffset(prev => Math.max(0, prev - visibleDuration * 0.2));
  };
  const handlePanRight = () => {
    const visibleDuration = audioDuration / zoom;
    setViewOffset(prev => Math.min(audioDuration - visibleDuration, prev + visibleDuration * 0.2));
  };

  return (
    <div>
      <canvas
        ref={canvasRef}
        width={800}
        height={160}
        onClick={handleCanvasClick}
        style={{ width: '100%', cursor: 'pointer' }}
      />

      {/* Zoom and Pan Controls */}
      <div>
        <button type="button" onClick={handleZoomIn}>Zoom In</button>
        <button type="button" onClick={handleZoomOut}>Zoom Out</button>
        <span>Zoom: {zoom.toFixed(1)}x</span>
        {zoom > 1 && (
          <>
            <button type="button" onClick={handlePanLeft}>←</button>
            <button type="button" onClick={handlePanRight}>→</button>
          </>
        )}
      </div>

      {/* Playback Controls */}
      <div>
        <button type="button" onClick={handlePlayFull}>
          {isPlaying ? 'Stop' : 'Play Selection'}
        </button>
        <span>
          Start: {startTime}s | Duration: {duration}s | Total: {Math.floor(audioDuration)}s
        </span>
      </div>

      {/* Segment Playback Buttons */}
      <div>
        <span>Play Segments:</span>
        {unlockSteps.map((step, index) => {
          const segmentStart = index > 0 ? unlockSteps[index - 1] : 0;
          const segmentDuration = step - segmentStart;
          return (
            <button
              key={step}
              type="button"
              onClick={() => handlePlaySegment(index)}
              disabled={playingSegment === index}
            >
              {index + 1}: {segmentStart}-{step}s ({segmentDuration}s)
            </button>
          );
        })}
      </div>
    </div>
  );
}
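
// Example usage: a minimal sketch of how a parent page might wire up the
// component. The page component name, audio URL, and prop values below are
// assumptions for illustration, not part of the component above.
//
// 'use client';
// import { useState } from 'react';
// import WaveformEditor from './WaveformEditor';
//
// export default function PuzzleEditorPage() {
//   const [startTime, setStartTime] = useState(30);
//   return (
//     <WaveformEditor
//       audioUrl="/audio/example.mp3" // hypothetical URL
//       startTime={startTime}
//       duration={60}
//       unlockSteps={[2, 4, 7, 11, 16, 30, 60]}
//       onStartTimeChange={setStartTime}
//     />
//   );
// }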