Created
January 26, 2024 17:22
-
-
Save corysimmons/00e0e458b253d415a7537e44c002fd9b to your computer and use it in GitHub Desktop.
A Next.js App Router page combining Wavesurfer.js and Tone.js to apply audio effects and scrub the timeline, both in real time.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
'use client' | |
import React, { useRef, useState, useEffect } from 'react'; | |
import { useWavesurfer } from '@wavesurfer/react'; | |
import * as Tone from 'tone'; | |
/**
 * Client-side page that renders a Wavesurfer.js waveform for /piano-loop.mp3
 * and routes the media element through Tone.js effects (lowpass filter always
 * on; pitch shift and reverb connected/disconnected based on the playhead
 * position). No props; default export for the Next.js app router.
 */
export default function Page() {
  // DOM node Wavesurfer renders the waveform into.
  const containerRef = useRef<HTMLDivElement>(null);

  // The <audio> element must be created client-side only. It is held in
  // STATE (not a ref) so that creating it triggers a re-render and
  // useWavesurfer actually receives it. The original passed
  // `audioRef.current!` — null on first render, and a later ref assignment
  // never re-renders, so the hook never saw the media element.
  const [audioEl, setAudioEl] = useState<HTMLAudioElement | null>(null);

  const [playing, setPlaying] = useState(false);

  // Tone.js effect nodes, plus flags tracking whether each one is currently
  // wired into the media source's signal chain.
  const pitchShift = useRef<Tone.PitchShift | null>(null);
  const isPitchShiftConnected = useRef(false);
  const reverb = useRef<Tone.Reverb | null>(null);
  const isReverbConnected = useRef(false);

  const { wavesurfer } = useWavesurfer({
    container: containerRef,
    height: 100,
    dragToSeek: true,
    // undefined until the element exists; the hook re-initializes once it does.
    media: audioEl ?? undefined,
  });

  // Create the audio element exactly once, after mount (client only).
  useEffect(() => {
    setAudioEl(new Audio('/piano-loop.mp3'));
  }, []);

  useEffect(() => {
    let audioContext: Tone.Context | null = null;

    if (wavesurfer) {
      wavesurfer.on('ready', () => {
        // 'ready' can fire more than once (e.g. after a re-load). Build the
        // audio graph only once per wavesurfer instance; the original created
        // a fresh Tone.Context + media source node on every 'ready'.
        if (audioContext) return;

        audioContext = new Tone.Context();
        Tone.setContext(audioContext);
        const mediaNode = audioContext.createMediaElementSource(
          wavesurfer.getMediaElement(),
        );

        // Always-on lowpass filter between the media source and the speakers.
        const filter = new Tone.Filter(300, 'lowpass').toDestination();
        Tone.connect(mediaNode, filter);

        pitchShift.current = new Tone.PitchShift({ pitch: 12 }).toDestination();
        reverb.current = new Tone.Reverb({ decay: 3, wet: 0.5 }).toDestination();

        // Connect/disconnect effects based on the playhead: reverb from 1s,
        // pitch shift from 2s. The flags prevent duplicate (dis)connects.
        const updateEffects = () => {
          const currentTime = wavesurfer.getCurrentTime();

          if (currentTime >= 2 && !isPitchShiftConnected.current && pitchShift.current) {
            Tone.connect(mediaNode, pitchShift.current);
            isPitchShiftConnected.current = true;
          } else if (currentTime < 2 && isPitchShiftConnected.current && pitchShift.current) {
            Tone.disconnect(mediaNode, pitchShift.current);
            isPitchShiftConnected.current = false;
          }

          if (currentTime >= 1 && !isReverbConnected.current && reverb.current) {
            Tone.connect(mediaNode, reverb.current);
            isReverbConnected.current = true;
          } else if (currentTime < 1 && isReverbConnected.current && reverb.current) {
            Tone.disconnect(mediaNode, reverb.current);
            isReverbConnected.current = false;
          }
        };

        wavesurfer.on('play', () => {
          setPlaying(true);
          updateEffects();
        });
        // Re-evaluate the chain continuously during playback and on seeks
        // (scrubbing must toggle effects in realtime too).
        wavesurfer.on('audioprocess', updateEffects);
        wavesurfer.on('seeking', updateEffects);
        wavesurfer.on('pause', () => {
          setPlaying(false);
        });
        wavesurfer.on('finish', () => {
          setPlaying(false);
          // Detach any connected effects so the next play starts clean.
          if (isPitchShiftConnected.current && pitchShift.current) {
            Tone.disconnect(mediaNode, pitchShift.current);
            isPitchShiftConnected.current = false;
          }
          if (isReverbConnected.current && reverb.current) {
            Tone.disconnect(mediaNode, reverb.current);
            isReverbConnected.current = false;
          }
        });
      });
    }

    return () => {
      // Dispose effect nodes explicitly (the original leaked them) before
      // tearing down the wavesurfer instance and the Tone context. The
      // filter lives in the context, so disposing the context releases it.
      pitchShift.current?.dispose();
      pitchShift.current = null;
      reverb.current?.dispose();
      reverb.current = null;
      isPitchShiftConnected.current = false;
      isReverbConnected.current = false;
      if (wavesurfer) {
        wavesurfer.destroy();
      }
      if (audioContext) {
        audioContext.dispose();
      }
    };
  }, [wavesurfer]);

  const handlePlayPause = () => {
    if (wavesurfer) {
      // Tone.start() returns a Promise that resolves once the AudioContext
      // is resumed (requires this user gesture). Toggle playback only after
      // the resume so audio isn't started against a suspended context.
      void Tone.start().then(() => wavesurfer.playPause());
    }
  };

  return (
    <div>
      <div ref={containerRef}></div>
      <button onClick={handlePlayPause}>
        {playing ? 'Pause' : 'Play'}
      </button>
    </div>
  );
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment