- Database: SpecialSong model with startTime
- Backend: API endpoints for curation
- Admin: Waveform editor and curation page
- Game: startTime support in AudioPlayer
- UI: Curate button in admin dashboard
165 lines
5.5 KiB
TypeScript
'use client';
|
|
|
|
import { useEffect, useRef, useState } from 'react';
|
|
|
|
/** Props for the WaveformEditor audio-curation component. */
interface WaveformEditorProps {
  /** URL of the audio file to fetch, decode, and render as a waveform. */
  audioUrl: string;
  /** Current selection start offset into the track, in whole seconds. */
  startTime: number;
  duration: number; // Total puzzle duration (e.g., 60s)
  /** Invoked with the new start offset (floored to whole seconds) when the user clicks the waveform. */
  onStartTimeChange: (newStartTime: number) => void;
}
|
|
|
|
export default function WaveformEditor({ audioUrl, startTime, duration, onStartTimeChange }: WaveformEditorProps) {
|
|
const canvasRef = useRef<HTMLCanvasElement>(null);
|
|
const [audioBuffer, setAudioBuffer] = useState<AudioBuffer | null>(null);
|
|
const [audioDuration, setAudioDuration] = useState(0);
|
|
const [isDragging, setIsDragging] = useState(false);
|
|
const [isPlaying, setIsPlaying] = useState(false);
|
|
const audioContextRef = useRef<AudioContext | null>(null);
|
|
const sourceRef = useRef<AudioBufferSourceNode | null>(null);
|
|
|
|
useEffect(() => {
|
|
const loadAudio = async () => {
|
|
try {
|
|
const response = await fetch(audioUrl);
|
|
const arrayBuffer = await response.arrayBuffer();
|
|
|
|
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
|
|
audioContextRef.current = audioContext;
|
|
|
|
const buffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
setAudioBuffer(buffer);
|
|
setAudioDuration(buffer.duration);
|
|
} catch (error) {
|
|
console.error('Error loading audio:', error);
|
|
}
|
|
};
|
|
|
|
loadAudio();
|
|
|
|
return () => {
|
|
if (sourceRef.current) {
|
|
sourceRef.current.stop();
|
|
}
|
|
};
|
|
}, [audioUrl]);
|
|
|
|
useEffect(() => {
|
|
if (!audioBuffer || !canvasRef.current) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
const ctx = canvas.getContext('2d');
|
|
if (!ctx) return;
|
|
|
|
const width = canvas.width;
|
|
const height = canvas.height;
|
|
|
|
// Clear canvas
|
|
ctx.fillStyle = '#f3f4f6';
|
|
ctx.fillRect(0, 0, width, height);
|
|
|
|
// Draw waveform
|
|
const data = audioBuffer.getChannelData(0);
|
|
const step = Math.ceil(data.length / width);
|
|
const amp = height / 2;
|
|
|
|
ctx.fillStyle = '#4f46e5';
|
|
for (let i = 0; i < width; i++) {
|
|
let min = 1.0;
|
|
let max = -1.0;
|
|
for (let j = 0; j < step; j++) {
|
|
const datum = data[(i * step) + j];
|
|
if (datum < min) min = datum;
|
|
if (datum > max) max = datum;
|
|
}
|
|
ctx.fillRect(i, (1 + min) * amp, 1, Math.max(1, (max - min) * amp));
|
|
}
|
|
|
|
// Draw selection overlay
|
|
const selectionStart = (startTime / audioDuration) * width;
|
|
const selectionWidth = (duration / audioDuration) * width;
|
|
|
|
ctx.fillStyle = 'rgba(79, 70, 229, 0.3)';
|
|
ctx.fillRect(selectionStart, 0, selectionWidth, height);
|
|
|
|
// Draw selection borders
|
|
ctx.strokeStyle = '#4f46e5';
|
|
ctx.lineWidth = 2;
|
|
ctx.strokeRect(selectionStart, 0, selectionWidth, height);
|
|
|
|
}, [audioBuffer, startTime, duration, audioDuration]);
|
|
|
|
const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!canvasRef.current || !audioDuration) return;
|
|
|
|
const rect = canvasRef.current.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const clickedTime = (x / rect.width) * audioDuration;
|
|
|
|
// Center the selection on the clicked point
|
|
let newStartTime = clickedTime - (duration / 2);
|
|
|
|
// Clamp to valid range
|
|
newStartTime = Math.max(0, Math.min(newStartTime, audioDuration - duration));
|
|
|
|
onStartTimeChange(Math.floor(newStartTime));
|
|
};
|
|
|
|
const handlePlayPause = () => {
|
|
if (!audioBuffer || !audioContextRef.current) return;
|
|
|
|
if (isPlaying) {
|
|
sourceRef.current?.stop();
|
|
setIsPlaying(false);
|
|
} else {
|
|
const source = audioContextRef.current.createBufferSource();
|
|
source.buffer = audioBuffer;
|
|
source.connect(audioContextRef.current.destination);
|
|
source.start(0, startTime, duration);
|
|
sourceRef.current = source;
|
|
setIsPlaying(true);
|
|
|
|
source.onended = () => {
|
|
setIsPlaying(false);
|
|
};
|
|
}
|
|
};
|
|
|
|
return (
|
|
<div style={{ marginTop: '1rem' }}>
|
|
<canvas
|
|
ref={canvasRef}
|
|
width={800}
|
|
height={150}
|
|
onClick={handleCanvasClick}
|
|
style={{
|
|
width: '100%',
|
|
height: 'auto',
|
|
cursor: 'pointer',
|
|
border: '1px solid #e5e7eb',
|
|
borderRadius: '0.5rem'
|
|
}}
|
|
/>
|
|
<div style={{ marginTop: '1rem', display: 'flex', gap: '1rem', alignItems: 'center' }}>
|
|
<button
|
|
onClick={handlePlayPause}
|
|
style={{
|
|
padding: '0.5rem 1rem',
|
|
background: '#4f46e5',
|
|
color: 'white',
|
|
border: 'none',
|
|
borderRadius: '0.5rem',
|
|
cursor: 'pointer',
|
|
fontWeight: 'bold'
|
|
}}
|
|
>
|
|
{isPlaying ? '⏸ Pause' : '▶ Play Selection'}
|
|
</button>
|
|
<div style={{ fontSize: '0.875rem', color: '#666' }}>
|
|
Start: {startTime}s | Duration: {duration}s | Total: {Math.floor(audioDuration)}s
|
|
</div>
|
|
</div>
|
|
</div>
|
|
);
|
|
}
|