feat(waveform): add segment markers, zoom, and individual segment playback
@@ -183,6 +183,7 @@ export default function SpecialEditorPage() {
           audioUrl={`/uploads/${selectedSpecialSong.song.filename}`}
           startTime={selectedSpecialSong.startTime}
           duration={totalDuration}
+          unlockSteps={unlockSteps}
           onStartTimeChange={(newStartTime) => handleStartTimeChange(selectedSpecialSong.songId, newStartTime)}
         />
         {saving && (
@@ -6,15 +6,18 @@ interface WaveformEditorProps {
   audioUrl: string;
   startTime: number;
   duration: number; // Total puzzle duration (e.g., 60s)
+  unlockSteps: number[]; // e.g., [2, 4, 7, 11, 16, 30, 60]
   onStartTimeChange: (newStartTime: number) => void;
 }
 
-export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
+export default function WaveformEditor({ audioUrl, startTime, duration, unlockSteps, onStartTimeChange }: WaveformEditorProps) {
   const canvasRef = useRef<HTMLCanvasElement>(null);
   const [audioBuffer, setAudioBuffer] = useState<AudioBuffer | null>(null);
   const [audioDuration, setAudioDuration] = useState(0);
   const [isDragging, setIsDragging] = useState(false);
   const [isPlaying, setIsPlaying] = useState(false);
+  const [playingSegment, setPlayingSegment] = useState<number | null>(null);
+  const [zoom, setZoom] = useState(1); // 1 = full view, higher = zoomed in
+  const [viewOffset, setViewOffset] = useState(0); // Offset in seconds for panning
   const audioContextRef = useRef<AudioContext | null>(null);
   const sourceRef = useRef<AudioBufferSourceNode | null>(null);
 
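
For readers skimming the patch: unlockSteps is a list of cumulative offsets (in seconds) from startTime, as the example in the prop comment suggests. A minimal sketch of that convention, using the example values from the comment (illustration only, not part of the patch):

// Illustration only: cumulative unlock steps -> per-segment lengths.
const exampleUnlockSteps = [2, 4, 7, 11, 16, 30, 60]; // cumulative seconds from startTime
const segmentLengths = exampleUnlockSteps.map((step, i) => step - (i > 0 ? exampleUnlockSteps[i - 1] : 0));
// segmentLengths: [2, 2, 3, 4, 5, 14, 30] -- these sum to 60, the total puzzle duration
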
@@ -54,21 +57,28 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
     const width = canvas.width;
     const height = canvas.height;
 
+    // Calculate visible range based on zoom and offset
+    const visibleDuration = audioDuration / zoom;
+    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
+    const visibleEnd = Math.min(audioDuration, visibleStart + visibleDuration);
+
     // Clear canvas
     ctx.fillStyle = '#f3f4f6';
     ctx.fillRect(0, 0, width, height);
 
-    // Draw waveform
+    // Draw waveform for visible range
     const data = audioBuffer.getChannelData(0);
-    const step = Math.ceil(data.length / width);
+    const samplesPerPixel = Math.ceil((data.length * visibleDuration / audioDuration) / width);
+    const startSample = Math.floor(data.length * visibleStart / audioDuration);
     const amp = height / 2;
 
     ctx.fillStyle = '#4f46e5';
     for (let i = 0; i < width; i++) {
       let min = 1.0;
       let max = -1.0;
-      for (let j = 0; j < step; j++) {
-        const datum = data[(i * step) + j];
+      const sampleIndex = startSample + (i * samplesPerPixel);
+      for (let j = 0; j < samplesPerPixel && sampleIndex + j < data.length; j++) {
+        const datum = data[sampleIndex + j];
         if (datum < min) min = datum;
         if (datum > max) max = datum;
       }
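
The zoomed waveform drawing above boils down to mapping each pixel column to a slice of samples inside the visible window. A standalone sketch of that mapping, reusing the hunk's own variable names (illustration only, not part of the patch):

// Illustration only: which samples a given pixel column covers once zoom/pan is applied.
function sampleRangeForPixel(
  pixel: number,
  width: number,          // canvas width in pixels
  totalSamples: number,   // audioBuffer.getChannelData(0).length
  audioDuration: number,  // full clip length in seconds
  zoom: number,           // 1 = full view
  viewOffset: number      // pan offset in seconds
): [number, number] {
  const visibleDuration = audioDuration / zoom;
  const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
  const samplesPerPixel = Math.ceil((totalSamples * visibleDuration / audioDuration) / width);
  const startSample = Math.floor(totalSamples * visibleStart / audioDuration);
  const first = startSample + pixel * samplesPerPixel;
  return [first, Math.min(first + samplesPerPixel, totalSamples)];
}
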
@@ -76,25 +86,57 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
     }
 
     // Draw selection overlay
-    const selectionStart = (startTime / audioDuration) * width;
-    const selectionWidth = (duration / audioDuration) * width;
+    const selectionStartPx = ((startTime - visibleStart) / visibleDuration) * width;
+    const selectionWidthPx = (duration / visibleDuration) * width;
 
-    ctx.fillStyle = 'rgba(79, 70, 229, 0.3)';
-    ctx.fillRect(selectionStart, 0, selectionWidth, height);
+    if (selectionStartPx + selectionWidthPx > 0 && selectionStartPx < width) {
+      ctx.fillStyle = 'rgba(79, 70, 229, 0.3)';
+      ctx.fillRect(Math.max(0, selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height);
 
-    // Draw selection borders
-    ctx.strokeStyle = '#4f46e5';
-    ctx.lineWidth = 2;
-    ctx.strokeRect(selectionStart, 0, selectionWidth, height);
+      // Draw selection borders
+      ctx.strokeStyle = '#4f46e5';
+      ctx.lineWidth = 2;
+      ctx.strokeRect(Math.max(0, selectionStartPx), 0, Math.min(selectionWidthPx, width - selectionStartPx), height);
+    }
 
-  }, [audioBuffer, startTime, duration, audioDuration]);
+    // Draw segment markers (vertical lines)
+    ctx.strokeStyle = '#ef4444';
+    ctx.lineWidth = 1;
+    ctx.setLineDash([5, 5]);
+
+    let cumulativeTime = 0;
+    unlockSteps.forEach((step, index) => {
+      const segmentTime = startTime + cumulativeTime;
+      const segmentPx = ((segmentTime - visibleStart) / visibleDuration) * width;
+
+      if (segmentPx >= 0 && segmentPx <= width) {
+        ctx.beginPath();
+        ctx.moveTo(segmentPx, 0);
+        ctx.lineTo(segmentPx, height);
+        ctx.stroke();
+
+        // Draw segment number
+        ctx.setLineDash([]);
+        ctx.fillStyle = '#ef4444';
+        ctx.font = 'bold 12px sans-serif';
+        ctx.fillText(`${index + 1}`, segmentPx + 3, 15);
+        ctx.setLineDash([5, 5]);
+      }
+
+      cumulativeTime = step;
+    });
+    ctx.setLineDash([]);
+
+  }, [audioBuffer, startTime, duration, audioDuration, zoom, viewOffset, unlockSteps]);
 
   const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
     if (!canvasRef.current || !audioDuration) return;
 
     const rect = canvasRef.current.getBoundingClientRect();
     const x = e.clientX - rect.left;
-    const clickedTime = (x / rect.width) * audioDuration;
+    const visibleDuration = audioDuration / zoom;
+    const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
+    const clickedTime = visibleStart + (x / rect.width) * visibleDuration;
 
     // Center the selection on the clicked point
     let newStartTime = clickedTime - (duration / 2);
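
Both the marker drawing and the updated click handler rely on the same pair of coordinate mappings between time and canvas pixels. A compact sketch of those two mappings as pure helpers (illustration only; the helper names are invented for clarity):

// Illustration only: the two coordinate mappings used above.
// timeToPixel positions the selection overlay and segment markers;
// pixelToTime is what handleCanvasClick uses to turn a click into a start time.
const visibleWindow = (audioDuration: number, zoom: number, viewOffset: number) => {
  const visibleDuration = audioDuration / zoom;
  const visibleStart = Math.max(0, Math.min(viewOffset, audioDuration - visibleDuration));
  return { visibleStart, visibleDuration };
};

const timeToPixel = (t: number, width: number, visibleStart: number, visibleDuration: number) =>
  ((t - visibleStart) / visibleDuration) * width;

const pixelToTime = (x: number, width: number, visibleStart: number, visibleDuration: number) =>
  visibleStart + (x / width) * visibleDuration;
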
@@ -105,12 +147,41 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
     onStartTimeChange(Math.floor(newStartTime));
   };
 
-  const handlePlayPause = () => {
+  const stopPlayback = () => {
+    sourceRef.current?.stop();
+    setIsPlaying(false);
+    setPlayingSegment(null);
+  };
+
+  const handlePlaySegment = (segmentIndex: number) => {
+    if (!audioBuffer || !audioContextRef.current) return;
+
+    stopPlayback();
+
+    const source = audioContextRef.current.createBufferSource();
+    source.buffer = audioBuffer;
+    source.connect(audioContextRef.current.destination);
+
+    // Calculate segment start and duration
+    const segmentStart = startTime + (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
+    const segmentDuration = unlockSteps[segmentIndex] - (segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0);
+
+    source.start(0, segmentStart, segmentDuration);
+    sourceRef.current = source;
+    setIsPlaying(true);
+    setPlayingSegment(segmentIndex);
+
+    source.onended = () => {
+      setIsPlaying(false);
+      setPlayingSegment(null);
+    };
+  };
+
+  const handlePlayFull = () => {
     if (!audioBuffer || !audioContextRef.current) return;
 
     if (isPlaying) {
-      sourceRef.current?.stop();
-      setIsPlaying(false);
+      stopPlayback();
     } else {
       const source = audioContextRef.current.createBufferSource();
       source.buffer = audioBuffer;
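
handlePlaySegment derives each segment's offset and length from consecutive unlockSteps entries. A small sketch of that boundary math, with a hypothetical startTime of 30s used only for the worked values in the comment (illustration only, not part of the patch):

// Illustration only: segment boundaries as computed in handlePlaySegment above.
// With startTime = 30 (hypothetical) and unlockSteps = [2, 4, 7, 11, 16, 30, 60],
// segment 0 plays 30s-32s, segment 1 plays 32s-34s, segment 6 plays 60s-90s.
function segmentBounds(startTime: number, unlockSteps: number[], segmentIndex: number) {
  const prev = segmentIndex > 0 ? unlockSteps[segmentIndex - 1] : 0;
  return {
    offset: startTime + prev,                  // where source.start() begins, in seconds
    length: unlockSteps[segmentIndex] - prev,  // how long the segment plays, in seconds
  };
}
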
@@ -125,8 +196,64 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
     }
   };
 
+  const handleZoomIn = () => setZoom(prev => Math.min(prev * 1.5, 10));
+  const handleZoomOut = () => setZoom(prev => Math.max(prev / 1.5, 1));
+  const handlePanLeft = () => {
+    const visibleDuration = audioDuration / zoom;
+    setViewOffset(prev => Math.max(0, prev - visibleDuration * 0.2));
+  };
+  const handlePanRight = () => {
+    const visibleDuration = audioDuration / zoom;
+    setViewOffset(prev => Math.min(audioDuration - visibleDuration, prev + visibleDuration * 0.2));
+  };
+
   return (
     <div style={{ marginTop: '1rem' }}>
+      {/* Zoom and Pan Controls */}
+      <div style={{ marginBottom: '0.5rem', display: 'flex', gap: '0.5rem', alignItems: 'center' }}>
+        <button
+          onClick={handleZoomOut}
+          disabled={zoom <= 1}
+          style={{
+            padding: '0.25rem 0.5rem',
+            background: zoom <= 1 ? '#e5e7eb' : '#4f46e5',
+            color: zoom <= 1 ? '#9ca3af' : 'white',
+            border: 'none',
+            borderRadius: '0.25rem',
+            cursor: zoom <= 1 ? 'not-allowed' : 'pointer',
+            fontSize: '0.875rem'
+          }}
+        >
+          🔍−
+        </button>
+        <button
+          onClick={handleZoomIn}
+          disabled={zoom >= 10}
+          style={{
+            padding: '0.25rem 0.5rem',
+            background: zoom >= 10 ? '#e5e7eb' : '#4f46e5',
+            color: zoom >= 10 ? '#9ca3af' : 'white',
+            border: 'none',
+            borderRadius: '0.25rem',
+            cursor: zoom >= 10 ? 'not-allowed' : 'pointer',
+            fontSize: '0.875rem'
+          }}
+        >
+          🔍+
+        </button>
+        <span style={{ fontSize: '0.75rem', color: '#666' }}>Zoom: {zoom.toFixed(1)}x</span>
+        {zoom > 1 && (
+          <>
+            <button onClick={handlePanLeft} style={{ padding: '0.25rem 0.5rem', background: '#4f46e5', color: 'white', border: 'none', borderRadius: '0.25rem', cursor: 'pointer', fontSize: '0.875rem' }}>
+              ←
+            </button>
+            <button onClick={handlePanRight} style={{ padding: '0.25rem 0.5rem', background: '#4f46e5', color: 'white', border: 'none', borderRadius: '0.25rem', cursor: 'pointer', fontSize: '0.875rem' }}>
+              →
+            </button>
+          </>
+        )}
+      </div>
+
       <canvas
         ref={canvasRef}
         width={800}
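
The zoom handlers clamp to the 1x–10x range in 1.5x steps, and panning moves the view by 20% of the currently visible window. The same update rules as pure functions (illustration only; panBy is an invented helper that folds both clamps together):

// Illustration only: the zoom/pan update rules used by the handlers above.
const nextZoomIn = (zoom: number) => Math.min(zoom * 1.5, 10);  // cap at 10x
const nextZoomOut = (zoom: number) => Math.max(zoom / 1.5, 1);  // never below full view

const panBy = (viewOffset: number, direction: -1 | 1, audioDuration: number, zoom: number) => {
  const visibleDuration = audioDuration / zoom;
  const step = visibleDuration * 0.2; // pan by 20% of the visible window
  return Math.min(Math.max(0, viewOffset + direction * step), audioDuration - visibleDuration);
};
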
@@ -140,9 +267,11 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
           borderRadius: '0.5rem'
         }}
       />
-      <div style={{ marginTop: '1rem', display: 'flex', gap: '1rem', alignItems: 'center' }}>
+
+      {/* Playback Controls */}
+      <div style={{ marginTop: '1rem', display: 'flex', gap: '1rem', alignItems: 'center', flexWrap: 'wrap' }}>
         <button
-          onClick={handlePlayPause}
+          onClick={handlePlayFull}
           style={{
             padding: '0.5rem 1rem',
             background: '#4f46e5',
@@ -153,12 +282,40 @@ export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
             fontWeight: 'bold'
           }}
         >
-          {isPlaying ? '⏸ Pause' : '▶ Play Selection'}
+          {isPlaying && playingSegment === null ? '⏸ Pause' : '▶ Play Full Selection'}
         </button>
 
         <div style={{ fontSize: '0.875rem', color: '#666' }}>
           Start: {startTime}s | Duration: {duration}s | Total: {Math.floor(audioDuration)}s
         </div>
       </div>
+
+      {/* Segment Playback Buttons */}
+      <div style={{ marginTop: '1rem', display: 'flex', gap: '0.5rem', flexWrap: 'wrap' }}>
+        <span style={{ fontSize: '0.875rem', color: '#666', marginRight: '0.5rem' }}>Play Segments:</span>
+        {unlockSteps.map((step, index) => {
+          const segmentStart = index > 0 ? unlockSteps[index - 1] : 0;
+          const segmentDuration = step - segmentStart;
+          return (
+            <button
+              key={index}
+              onClick={() => handlePlaySegment(index)}
+              style={{
+                padding: '0.25rem 0.75rem',
+                background: playingSegment === index ? '#ef4444' : '#f3f4f6',
+                color: playingSegment === index ? 'white' : '#374151',
+                border: '1px solid #e5e7eb',
+                borderRadius: '0.25rem',
+                cursor: 'pointer',
+                fontSize: '0.75rem',
+                fontWeight: playingSegment === index ? 'bold' : 'normal'
+              }}
+            >
+              {index + 1} ({segmentDuration}s)
+            </button>
+          );
+        })}
+      </div>
     </div>
   );
 }