diff --git a/app/admin/specials/[id]/page.tsx b/app/admin/specials/[id]/page.tsx index ae32b15..8219d9e 100644 --- a/app/admin/specials/[id]/page.tsx +++ b/app/admin/specials/[id]/page.tsx @@ -183,6 +183,7 @@ export default function SpecialEditorPage() { audioUrl={`/uploads/${selectedSpecialSong.song.filename}`} startTime={selectedSpecialSong.startTime} duration={totalDuration} + unlockSteps={unlockSteps} onStartTimeChange={(newStartTime) => handleStartTimeChange(selectedSpecialSong.songId, newStartTime)} /> {saving && ( diff --git a/components/WaveformEditor.tsx b/components/WaveformEditor.tsx index 5aed8f2..c949f16 100644 --- a/components/WaveformEditor.tsx +++ b/components/WaveformEditor.tsx @@ -6,15 +6,18 @@ interface WaveformEditorProps { audioUrl: string; startTime: number; duration: number; // Total puzzle duration (e.g., 60s) + unlockSteps: number[]; // e.g., [2, 4, 7, 11, 16, 30, 60] onStartTimeChange: (newStartTime: number) => void; } -export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) { +export default function WaveformEditor({ audioUrl, startTime, duration, unlockSteps, onStartTimeChange }: WaveformEditorProps) { const canvasRef = useRef(null); const [audioBuffer, setAudioBuffer] = useState(null); const [audioDuration, setAudioDuration] = useState(0); - const [isDragging, setIsDragging] = useState(false); const [isPlaying, setIsPlaying] = useState(false); + const [playingSegment, setPlayingSegment] = useState(null); + const [zoom, setZoom] = useState(1); // 1 = full view, higher = zoomed in + const [viewOffset, setViewOffset] = useState(0); // Offset in seconds for panning const audioContextRef = useRef(null); const sourceRef = useRef(null); @@ -54,21 +57,28 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartT const width = canvas.width; const height = canvas.height; + // Calculate visible range based on zoom and offset + const visibleDuration = audioDuration / 
zoom; + const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration)); + const visibleEnd = Math.min(audioDuration, visibleStart + visibleDuration); + // Clear canvas ctx.fillStyle = '#f3f4f6'; ctx.fillRect(0, 0, width, height); - // Draw waveform + // Draw waveform for visible range const data = audioBuffer.getChannelData(0); - const step = Math.ceil(data.length / width); + const samplesPerPixel = Math.ceil((data.length * visibleDuration / audioDuration) / width); + const startSample = Math.floor(data.length * visibleStart / audioDuration); const amp = height / 2; ctx.fillStyle = '#4f46e5'; for (let i = 0; i < width; i++) { let min = 1.0; let max = -1.0; - for (let j = 0; j < step; j++) { - const datum = data[(i * step) + j]; + const sampleIndex = startSample + (i * samplesPerPixel); + for (let j = 0; j < samplesPerPixel && sampleIndex + j < data.length; j++) { + const datum = data[sampleIndex + j]; if (datum < min) min = datum; if (datum > max) max = datum; } @@ -76,25 +86,57 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartT } // Draw selection overlay - const selectionStart = (startTime / audioDuration) * width; - const selectionWidth = (duration / audioDuration) * width; + const selectionStartPx = ((startTime - visibleStart) / visibleDuration) * width; + const selectionWidthPx = (duration / visibleDuration) * width; - ctx.fillStyle = 'rgba(79, 70, 229, 0.3)'; - ctx.fillRect(selectionStart, 0, selectionWidth, height); + if (selectionStartPx + selectionWidthPx > 0 && selectionStartPx < width) { + ctx.fillStyle = 'rgba(79, 70, 229, 0.3)'; + ctx.fillRect(Math.max(0, selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height); - // Draw selection borders - ctx.strokeStyle = '#4f46e5'; - ctx.lineWidth = 2; - ctx.strokeRect(selectionStart, 0, selectionWidth, height); + // Draw selection borders + ctx.strokeStyle = '#4f46e5'; + ctx.lineWidth = 2; + ctx.strokeRect(Math.max(0, 
selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height); + } - }, [audioBuffer, startTime, duration, audioDuration]); + // Draw segment markers (vertical lines) + ctx.strokeStyle = '#ef4444'; + ctx.lineWidth = 1; + ctx.setLineDash([5, 5]); + + let cumulativeTime = 0; + unlockSteps.forEach((step, index) => { + const segmentTime = startTime + cumulativeTime; + const segmentPx = ((segmentTime - visibleStart) / visibleDuration) * width; + + if (segmentPx >= 0 && segmentPx <= width) { + ctx.beginPath(); + ctx.moveTo(segmentPx, 0); + ctx.lineTo(segmentPx, height); + ctx.stroke(); + + // Draw segment number + ctx.setLineDash([]); + ctx.fillStyle = '#ef4444'; + ctx.font = 'bold 12px sans-serif'; + ctx.fillText(`${index + 1}`, segmentPx + 3, 15); + ctx.setLineDash([5, 5]); + } + + cumulativeTime = step; + }); + ctx.setLineDash([]); + + }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps]); const handleCanvasClick = (e: React.MouseEvent) => { if (!canvasRef.current || !audioDuration) return; const rect = canvasRef.current.getBoundingClientRect(); const x = e.clientX - rect.left; - const clickedTime = (x / rect.width) * audioDuration; + const visibleDuration = audioDuration / zoom; + const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration)); + const clickedTime = visibleStart + (x / rect.width) * visibleDuration; // Center the selection on the clicked point let newStartTime = clickedTime - (duration / 2); @@ -105,12 +147,41 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartT onStartTimeChange(Math.floor(newStartTime)); }; - const handlePlayPause = () => { + const stopPlayback = () => { + if (sourceRef.current) { + // Detach onended before stopping: otherwise the old source's callback fires + // asynchronously AFTER a new segment starts and wipes its isPlaying/playingSegment state. + sourceRef.current.onended = null; + sourceRef.current.stop(); + sourceRef.current = null; + } + setIsPlaying(false); + setPlayingSegment(null); + }; + + const handlePlaySegment = (segmentIndex: number) => { + if (!audioBuffer || !audioContextRef.current) return; + + stopPlayback(); + + const source =
audioContextRef.current.createBufferSource(); + source.buffer = audioBuffer; + source.connect(audioContextRef.current.destination); + + // Calculate segment start and duration + const segmentStart = startTime + (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0); + const segmentDuration = unlockSteps[segmentIndex] - (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0); + + // Register onended before start(), and guard it so a superseded source + // cannot clear the state of whichever segment is currently playing. + source.onended = () => { + if (sourceRef.current === source) { + sourceRef.current = null; + setIsPlaying(false); + setPlayingSegment(null); + } + }; + source.start(0, segmentStart, segmentDuration); + sourceRef.current = source; + setIsPlaying(true); + setPlayingSegment(segmentIndex); + }; + + const handlePlayFull = () => { if (!audioBuffer || !audioContextRef.current) return; if (isPlaying) { - sourceRef.current?.stop(); - setIsPlaying(false); + stopPlayback(); } else { const source = audioContextRef.current.createBufferSource(); source.buffer = audioBuffer; @@ -125,8 +196,64 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartT } }; + const handleZoomIn = () => setZoom(prev => Math.min(prev * 1.5, 10)); + const handleZoomOut = () => setZoom(prev => Math.max(prev / 1.5, 1)); + const handlePanLeft = () => { + const visibleDuration = audioDuration / zoom; + setViewOffset(prev => Math.max(0, prev - visibleDuration * 0.2)); + }; + const handlePanRight = () => { + const visibleDuration = audioDuration / zoom; + setViewOffset(prev => Math.min(audioDuration - visibleDuration, prev + visibleDuration * 0.2)); + }; + return (
+ {/* Zoom and Pan Controls */} +
+ + + Zoom: {zoom.toFixed(1)}x + {zoom > 1 && ( + <> + + + + )} +
+ -
+ + {/* Playback Controls */} +
+
Start: {startTime}s | Duration: {duration}s | Total: {Math.floor(audioDuration)}s
+ + {/* Segment Playback Buttons */} +
+ Play Segments: + {unlockSteps.map((step, index) => { + const segmentStart = index > 0 ? unlockSteps[index - 1] : 0; + const segmentDuration = step - segmentStart; + return ( + + ); + })} +
); }