import React, { useEffect, useRef } from 'react';
import * as THREE from 'three';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass';
import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass';
import ShaderCode from './ShaderCode';

const AudioVisualizer3D = ({ audioStream, type }) => {
  const { vertexShader, fragmentShader } = ShaderCode();
  const containerRef = useRef(null);
  const sceneRef = useRef(null);
  const cameraRef = useRef(null);
  const rendererRef = useRef(null);
  const meshRef = useRef(null);
  const composerRef = useRef(null);
  const analyserRef = useRef(null);
  const uniformsRef = useRef({
    u_time: { value: 0.0 },
    u_frequency: { value: 0.0 },
    u_red: { value: 0.0 },
    u_green: { value: 0.0 },
    u_blue: { value: 0.0 },
  });

  useEffect(() => {
    if (!containerRef.current) return;

    // Scene and camera
    const scene = new THREE.Scene();
    /* scene.background = new THREE.Color("rgb(226,237,231)"); // optional pale background */
    sceneRef.current = scene;

    const camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.1, 1000);
    camera.position.set(0, 0, 15);
    camera.lookAt(0, 0, 0);
    cameraRef.current = camera;

    // Renderer
    const renderer = new THREE.WebGLRenderer({
      antialias: true,
      alpha: true
    });
    renderer.setClearColor(0xffffff, 1);
    renderer.setSize(window.innerWidth / 3, window.innerHeight / 3);
    renderer.outputColorSpace = THREE.SRGBColorSpace;
    containerRef.current.appendChild(renderer.domElement);
    rendererRef.current = renderer;

    // Semi-transparent black circle behind the mesh
    const circleGeometry = new THREE.CircleGeometry(3, 30);
    const circleMaterial = new THREE.MeshBasicMaterial({
      color: 0x000000,
      transparent: true,
      opacity: 0.5
    });
    const circleMesh = new THREE.Mesh(circleGeometry, circleMaterial);
    scene.add(circleMesh);

    // Shader setup: a wireframe icosahedron driven by the audio uniforms
    const material = new THREE.ShaderMaterial({
      uniforms: uniformsRef.current,
      vertexShader,
      fragmentShader
    });
    const geometry = new THREE.IcosahedronGeometry(4, 30);
    const mesh = new THREE.Mesh(geometry, material);
    mesh.material.wireframe = true;
    scene.add(mesh);
    meshRef.current = mesh;

    // Post-processing setup
    const composer = new EffectComposer(renderer);
    const renderPass = new RenderPass(scene, camera);
    composer.addPass(renderPass);
    const bloomPass = new UnrealBloomPass(new THREE.Vector2(window.innerWidth, window.innerHeight), 0.5, 0.8, 0.5);
    composer.addPass(bloomPass);
    const outputPass = new OutputPass();
    composer.addPass(outputPass);
    composerRef.current = composer;

    // Audio setup
    let audioContext, analyser;
    if (type === 'recording' && audioStream) {
      // Live input: build an analyser from the MediaStream
      audioContext = new (window.AudioContext || window.webkitAudioContext)();
      analyser = audioContext.createAnalyser();
      const source = audioContext.createMediaStreamSource(audioStream);
      source.connect(analyser);
      analyser.fftSize = 256;
    } else if (type === 'playback' && audioStream) {
      // Playback: the caller passes an existing AnalyserNode directly
      analyser = audioStream;
    }
    analyserRef.current = analyser;

    // Animation loop: feed the average frequency and a time-based colour cycle into the shader
    let animationFrameId;
    const animate = () => {
      animationFrameId = requestAnimationFrame(animate);
      if (analyserRef.current) {
        const dataArray = new Uint8Array(analyserRef.current.frequencyBinCount);
        analyserRef.current.getByteFrequencyData(dataArray);
        const averageFrequency = dataArray.reduce((a, b) => a + b, 0) / dataArray.length;
        uniformsRef.current.u_frequency.value = averageFrequency / 256;
        uniformsRef.current.u_red.value = Math.sin(uniformsRef.current.u_time.value * 0.5) * 0.5 + 0.5;
        uniformsRef.current.u_green.value = Math.sin(uniformsRef.current.u_time.value * 0.6) * 0.5 + 0.5;
        uniformsRef.current.u_blue.value = Math.sin(uniformsRef.current.u_time.value * 0.7) * 0.5 + 0.5;
      }
      mesh.rotation.x += 0.001;
      mesh.rotation.y += 0.002;
      composerRef.current.render();
      uniformsRef.current.u_time.value += 0.05;
    };
    animate();

    return () => {
      // Stop the render loop and release GPU/audio resources on unmount
      cancelAnimationFrame(animationFrameId);
      if (containerRef.current) {
        containerRef.current.removeChild(renderer.domElement);
      }
      geometry.dispose();
      material.dispose();
      renderer.dispose();
      if (audioContext && audioContext.state !== 'closed') {
        audioContext.close();
      }
    };
  }, [audioStream, type, vertexShader, fragmentShader]);

  return <div ref={containerRef} style={{ width: '100%', height: '100%' }} />;
};

export default AudioVisualizer3D;
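
// ---------------------------------------------------------------------------
// The ShaderCode module imported above is not included in this gist. The
// sketch below is only an assumption about its shape: a function returning
// { vertexShader, fragmentShader } GLSL strings that read the u_frequency
// and u_red/u_green/u_blue uniforms declared in uniformsRef. The author's
// actual shaders are unknown; this stand-in just makes the component
// runnable end-to-end.
export const ShaderCodeSketch = () => ({
  vertexShader: /* glsl */ `
    uniform float u_frequency;
    void main() {
      // Push each vertex outward along its normal as the audio gets louder
      vec3 displaced = position + normal * u_frequency * 2.0;
      gl_Position = projectionMatrix * modelViewMatrix * vec4(displaced, 1.0);
    }
  `,
  fragmentShader: /* glsl */ `
    uniform float u_red;
    uniform float u_green;
    uniform float u_blue;
    void main() {
      gl_FragColor = vec4(u_red, u_green, u_blue, 1.0);
    }
  `,
});

// Usage sketch (also an assumption, not from the gist): pass a microphone
// MediaStream with type="recording", or an existing AnalyserNode with
// type="playback", e.g.
//
//   const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
//   <AudioVisualizer3D audioStream={stream} type="recording" />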