Created
February 18, 2020 17:26
-
-
Save valentin7/03041c717fb2f55cda39be3f08ea4500 to your computer and use it in GitHub Desktop.
One example of how we use OpenTok React in Monthly's web application.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import React, { Component } from 'react'; | |
import HomepageLogo from '../../../svg/homepage-logo.js'; | |
import {BrowserRouter as Router, Route, Link, withRouter, Prompt} from 'react-router-dom'; | |
import Backend from '../../../backend.js'; | |
import LaptopIcon from '../../../svg/laptop-icon.js'; | |
import PersonIcon from '../../../svg/person-icon.js'; | |
import ReactionModeIcon from '../../../svg/reaction-mode-icon.js'; | |
import ExpandToFullscreenIcon from '../../../svg/expand-to-fullscreen-icon.js'; | |
import AnimatingSpinnerButton from '../../../svg/animating-spinner-button.js' | |
import {OTSession, OTPublisher, OTStreams, OTSubscriber, preloadScript } from 'opentok-react'; | |
import ProfilePic from '../../basics/profile-pic.js'; | |
import { isAppleMobile, videoTime, isSafari } from '../../../util/utils.js'; | |
import MonthlyAudioRecorder from './monthly-audio-recorder.js'; | |
import Draggable from 'react-draggable'; | |
import {OT_API_KEY, OT_CLIENT_URL} from '../../../constants.js'; | |
/**
 * Modal that lets a user record audio and attach the result to a comment.
 *
 * The actual capture + waveform visualization is delegated to
 * <MonthlyAudioRecorder>; this component only renders the modal chrome
 * (record / stop / cancel buttons, user header, overlay) and forwards the
 * resulting recording URL back up via this.props.onRecordingURL.
 *
 * NOTE(review): many refs/state fields here are video-named
 * (videoCameraPlayer, videoURL, videoModeSelected, ...). In this modal only
 * the audio path is exercised; the video fields look carried over from a
 * sibling video-recording modal — confirm before pruning.
 *
 * Props read by this component (all supplied by the parent):
 *   videoURL, isReactionMode, isOnReactionModeDisplay, isBigScreen, user,
 *   finalActionMessage, onRecordingURL, toggleAudioRecordingModal,
 *   onRecordingPreviewMode, onAudioRecordingReady.
 */
class AudioRecordingModal extends Component {
  constructor(props) {
    super(props);
    // DOM refs. Only recordingModal and controlsContainer are attached in
    // render(); videoCameraPlayer / videoPlayer are never mounted here, so
    // stopStreamedVideo() below is effectively a no-op in this modal.
    this.recordingModal = React.createRef();
    this.videoCameraPlayer = React.createRef();
    this.controlsContainer = React.createRef();
    this.videoPlayer = React.createRef();
    let isScreenRecording = false;
    let defaultVideoMode = "CAM";
    this.state = {
      OTSessionId: "",   // OpenTok session id (owned by the child recorder; unused here)
      OTToken: "",
      archiveId: "",     // OpenTok archive id once a recording has started
      // Seed from props so the modal can reopen in "preview" mode.
      videoURL: this.props.videoURL ? this.props.videoURL : "",
      cameraVideoObject: "",
      isRecording: false,        // true while an archive is in progress
      isWaiting: false,          // shows the spinner in the primary button
      stoppedArchive: false,
      isBigScreen: false,
      isOnFullScreen: false,
      isOnReactionMode: false,
      allowedAccessToScreen: false,
      deleteRecording: false,    // flipped on cancel; child deletes the archive
      isStreamReady: false,      // publisher stream live; gates the record button
      isScreenRecording: isScreenRecording,
      videoModeSelected: defaultVideoMode,  // always "CAM" in the audio modal
    };
    this.onCancelClick = this.onCancelClick.bind(this);
  }

  componentWillUnmount() {
    // if (this.state.isRecording) {
    // //this.props.history.push("/ten-hundred-painting/classroom/path");
    // //alert("Moving to another page will stop your recording..");
    // }
    this.stopStreamedVideo();
    // NOTE(review): localStorage only stores strings, so this writes the
    // string "false" — any reader must compare against "false"/"true", not
    // a boolean. Confirm how this key is read elsewhere.
    localStorage.setItem("isRecording", false);
  }

  // Feature-detect getUserMedia support.
  // NOTE(review): duplicates isVideoRecordingSupported() below.
  hasGetUserMedia() {
    return !!(navigator.mediaDevices &&
      navigator.mediaDevices.getUserMedia);
  }

  // Stop every track on the camera <video> element's MediaStream and detach
  // it, releasing the camera. Safe to call when the ref was never mounted.
  stopStreamedVideo() {
    let videoElement = this.videoCameraPlayer.current;
    if (!videoElement) {
      return;
    }
    let stream = videoElement.srcObject;
    if (!stream) {
      return;
    }
    let tracks = stream.getTracks();
    tracks.forEach(function(track) {
      track.stop();
    });
    videoElement.srcObject = null;
  }

  // True once a recording URL exists and we are no longer recording,
  // i.e. the modal is showing a finished take for the user to confirm.
  isDoneAndPreviewing = () => {
    return this.state.videoURL.length > 0 && !this.state.isRecording; // && this.state.stoppedArchive;
  }

  // Whether the container should render in "reaction mode" layout.
  // While previewing, either the prop or local state can force it; while
  // recording, the prop wins unless the reaction-mode display is active.
  containerShouldShowAsInReactionMode = () => {
    if (this.isDoneAndPreviewing()) {
      console.log("done and in reaction mode ? ", this.state.isOnReactionMode);
      return (this.isDoneAndPreviewing() && this.props.isReactionMode) || this.state.isOnReactionMode;
    } else {
      console.log("HEYYIII is on reaction mode?? ", this.props.isReactionMode, this.state.isOnReactionMode, this.props.isOnReactionModeDisplay)
      return this.props.isReactionMode && !this.props.isOnReactionModeDisplay;
    }
    //return this.props.isOnReactionMode;
  }

  // Primary-button handler. Three states:
  //  - previewing: hand the finished URL to the parent and close the modal;
  //  - idle:       flip isRecording on (the child recorder reacts via props);
  //  - recording:  flip isRecording off (the child stops and uploads).
  toggleRecording = async () => {
    if (!this.state.isStreamReady) {
      // if it's not ready yet, do nothing.
      return;
    }
    // add to comment
    if (this.isDoneAndPreviewing()) {
      console.log("here about to call the onRecordingURL ", this.props.onRecordingURL);
      this.props.onRecordingURL(this.state.videoURL, this.state.isBigScreen, this.state.isOnReactionMode, this.state.archiveId);
      this.props.toggleAudioRecordingModal();
      if (this.props.onRecordingPreviewMode) {
        this.props.onRecordingPreviewMode(false);
      }
    } else {
      // start archiving
      if (!this.state.isRecording) {
        this.setState({isRecording: true});
      // stop archiving
      } else {
        this.setState({isRecording: false});
        //this.stopArchiving();
      }
    }
  }

  // Cancel: mark the recording for deletion first, then close the modal in
  // the setState callback so the child sees deleteRecording=true before
  // unmounting.
  async onCancelClick() {
    console.log("on cancel clickk ", this.props);
    this.setState({deleteRecording: true}, () => {
      this.props.toggleAudioRecordingModal();
    });
  }

  // NOTE(review): same check as hasGetUserMedia(); consider consolidating.
  isVideoRecordingSupported = () => {
    if (navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      return true;
    }
    return false;
  }

  // Label for the primary button, shortened on narrow (<800px) viewports.
  mainActionButtonText() {
    if (this.isDoneAndPreviewing()) {
      return this.props.finalActionMessage;
    }
    let startText = window.innerWidth < 800 ? "Record" : "Start Recording";
    if (!this.state.isStreamReady) {
      startText = "Loading Recorder";
    }
    let stopText = window.innerWidth < 800 ? "Stop" : "Stop Recording";
    return this.state.isRecording ? stopText : startText;
  }

  // Height of the inner container: full modal height in reaction mode,
  // modal height minus the 90px controls bar otherwise, with a fixed
  // 344px fallback before the ref mounts / in small-screen mode.
  recordingModalInnerContainerHeight() {
    console.log("container height w shouldBeBigscreen ", this.shouldBeBigscreen());
    if (this.recordingModal && this.recordingModal.current && this.shouldBeBigscreen()) {
      console.log("hey height is ", this.recordingModal.current.clientHeight - 90);
      if (this.containerShouldShowAsInReactionMode()) {
        return this.recordingModal.current.clientHeight;
      }
      return this.recordingModal.current.clientHeight - 90;
    }
    return '344px';
  }

  shouldBeBigscreen() {
    return this.props.isBigScreen || (this.state.videoModeSelected != "CAM" && this.state.videoURL.length > 0);
  }

  // Called by the child recorder with the finished sound file / URL;
  // close the modal and forward to the parent.
  onAudioRecordingReady = (soundFile) => {
    this.props.toggleAudioRecordingModal();
    this.props.onAudioRecordingReady(soundFile);
  }

  // Child recorder tells us when its OpenTok publisher stream is live.
  onIsStreamReady = (isStreamReady) => {
    this.setState({isStreamReady: isStreamReady});
  }

  render() {
    // Red "recording" dot — only shown while actively recording a new take.
    let recordingIcon = <div className='video-recording-modal-recording-circle' />
    if (this.state.videoURL.length > 0 || this.state.isWaiting || !this.state.isRecording) {
      recordingIcon = null;
    }
    return (
      <div>
        {/* Modal body: user header + the audio recorder itself. */}
        <div className={this.state.videoModeSelected != "SCREEN" || this.state.videoURL.length > 0 ? 'video-recording-modal-container' : 'video-recording-screen-hidden'} ref={this.recordingModal} >
          <div className={'video-recording-modal-inner-container'} >
            <div className='video-recording-modal-top-bar'>
              {this.props.user && (
                <div className='simple-row align-items-center'>
                  <ProfilePic user={this.props.user} ></ProfilePic>
                  <div className='activity-card-summary-info-person-name-no-underline'>{this.props.user.firstName + " " + this.props.user.lastName}</div>
                </div>
              )}
            </div>
            {/* The child drives recording off these props (isRecording /
                deleteRecording) rather than an imperative API. */}
            <MonthlyAudioRecorder isRecording={this.state.isRecording} opentokClientUrl={OT_CLIENT_URL} deleteRecording={this.state.deleteRecording} onAudioRecordingReady={this.onAudioRecordingReady} onIsStreamReady={this.onIsStreamReady} />
          </div>
        </div>
        {/* Controls bar: primary record/stop/confirm button + cancel. */}
        <div className={'video-recording-modal-controls-container'} ref={this.controlsContainer} >
          <div className='video-recording-modal-controls-container-row'>
            <div className='video-recording-modal-mode-container'>
              <div className={this.state.isWaiting || !this.state.isStreamReady ? 'primary-button-disabled' : 'primary-button primary-button-non-shrink'} onClick={this.toggleRecording}>
                {this.state.isWaiting ?
                  (<AnimatingSpinnerButton/>)
                  :
                  this.mainActionButtonText()
                }
              </div>
            </div>
            {recordingIcon}
            <div className='video-recording-modal-mode-container'>
              <div className='secondary-button primary-button-non-shrink' onClick={this.onCancelClick}>Cancel</div>
            </div>
          </div>
        </div>
        {/* Click-outside overlay cancels, but only when not mid-recording. */}
        {(this.state.videoModeSelected == "CAM" || this.state.videoURL.length > 0) &&
          <div className='general-overlay overlay-video-recording' style={{zIndex: 5900}} onClick={this.state.stoppedArchive || this.state.isRecording ? undefined : this.onCancelClick}></div>
        }
        {/* Warn on in-app navigation while a recording is in progress. */}
        <Prompt
          when={this.state.isRecording || this.state.videoModeSelected != "CAM"}
          message={location =>
            `Going to another page will delete your recording. Do you still want to go?`
          }
        />
      </div>
    );
  }
}
export default withRouter(AudioRecordingModal);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import React, { Component } from 'react'; | |
import HomepageLogo from '../../../svg/homepage-logo.js'; | |
import {BrowserRouter as Router, Route, Link, withRouter, Prompt} from 'react-router-dom'; | |
import Backend from '../../../backend.js'; | |
import LaptopIcon from '../../../svg/laptop-icon.js'; | |
import PersonIcon from '../../../svg/person-icon.js'; | |
import HomepageCurriculumMic from "../../../svg/homepage-curriculum-mic.js"; | |
import ReactionModeIcon from '../../../svg/reaction-mode-icon.js'; | |
import ExpandToFullscreenIcon from '../../../svg/expand-to-fullscreen-icon.js'; | |
import AnimatingSpinnerButton from '../../../svg/animating-spinner-button.js' | |
import ProfilePic from '../../basics/profile-pic.js'; | |
import {OTSession, OTPublisher, OTStreams, OTSubscriber, preloadScript } from 'opentok-react'; | |
import { isAppleMobile, videoTime, isSafari } from '../../../util/utils.js'; | |
import Draggable from 'react-draggable'; | |
import P5Wrapper from 'react-p5-wrapper'; | |
import "p5/lib/addons/p5.sound"; | |
import * as p55 from 'p5'; | |
import {OT_API_KEY, OT_CLIENT_URL} from '../../../constants.js'; | |
import './monthly-audio-recorder.css'; | |
// Module-level p5.sound objects, deliberately shared between the sketch
// closure (which creates them in p.setup) and the component's
// start/stopRecordingArchiving handlers (which use `recorder`/`soundFile`).
// source2 / lowPass / bandPassFilter / fontUsed are unused leftovers from
// the commented-out experiments below.
var source, source2, fft, lowPass, bandPassFilter, amplitude, recorder, soundFile, fontUsed;
// NOTE(review): never read — appears superseded by the component's
// state.shouldShowRepeatText field of the same name; confirm and remove.
var shouldShowRepeatText = true;
// Default recorder background color ("Monthly chill blue").
const monthlyChillBlueColor = {"r": 233, "g": 248, "b": 255};
/**
 * Audio recorder with a live pitch/waveform visualization.
 *
 * Two recording paths run in parallel:
 *  - OpenTok: an audio-only publisher streams into an OpenTok session and
 *    the backend starts/stops a server-side archive; the finished take is
 *    the archive's mp4 on S3.
 *  - p5.sound: a local SoundRecorder captures into `soundFile`
 *    (module-level global) — skipped on Safari.
 * The p5 sketch below also drives the visual feedback: it autocorrelates the
 * mic signal to estimate the fundamental frequency and, when
 * props.targetFrequency is set, colors the background and shows
 * "Perfect"/"Too High"/"Too Low" text.
 *
 * Controlled by the parent via props: isRecording (start/stop),
 * deleteRecording (discard the archive), onAudioRecordingReady,
 * onIsStreamReady, targetFrequency, isWaveformHidden.
 */
class MonthlyAudioRecorder extends Component {
  constructor(props) {
    super(props);
    // NOTE(review): these refs are copied from the modal component and are
    // never attached in this component's render — confirm and remove.
    this.recordingModal = React.createRef();
    this.videoCameraPlayer = React.createRef();
    this.controlsContainer = React.createRef();
    this.videoPlayer = React.createRef();
    this.state = {
      OTSessionId: "",   // OpenTok session id fetched from the backend
      OTToken: "",       // matching session token; gates <OTSession> render
      // Background color starts as the chill blue and is animated by draw().
      redValue: monthlyChillBlueColor.r,
      greenValue: monthlyChillBlueColor.g,
      blueValue: monthlyChillBlueColor.b,
      // Center overlay text; only used in pitch-matching mode.
      centerDisplayText: this.props.targetFrequency ? "Repeat what you hear" : "",
      shouldShowRepeatText: true,  // intro hint, cleared after 1.4s or first sound
      hasTriedToSetP5Recorder: false,  // lets render() fall back if p5 setup failed
      isStreamReady: false,
    }
    this.fft = undefined;
    // Audio-only OpenTok publisher: no video track, no UI controls.
    this.audioPublisherProperties = {
      showControls: false,
      videoSource: null,
      audioSource: true,
      publishVideo: false,
      publishAudio: true,
    }
    this.publisherEventHandlers = {
      streamCreated: event => {
        console.log('Publisher stream created! ', event);
        this.setState({isStreamReady: true});
        // Unlocks the record button in the parent modal.
        this.props.onIsStreamReady(true);
      },
      streamDestroyed: event => {
        console.log('Publisher stream destroyed! ', event);
      },
      accessDenied: event => {
        console.log("access denied! ");
        alert("You must allow access for audio recording to work.. Please try again.");
        //this.onCancelClick();
      },
      accessAllowed: event => {
        console.log("video ACCESS ALLOWED!! ", event);
      }
    };
  }

  componentWillUnmount() {
    // if (this.state.isRecording) {
    // //this.props.history.push("/ten-hundred-painting/classroom/path");
    // //alert("Moving to another page will stop your recording..");
    // }
    //this.stopStreamedVideo();
    // NOTE(review): writes the string "false" (localStorage is string-only).
    localStorage.setItem("isRecording", false);
  }

  // Fetch the OpenTok session/token up front so recording can start quickly,
  // then retire the "Repeat what you hear" hint after 1.4s.
  async componentDidMount() {
    let result = await Backend.getVideoSession();
    console.log("the result is ", result);
    if (!result) {
      return;
    }
    this.setState({OTSessionId: result.session_id, OTToken: result.token});
    setTimeout(() => {
      this.setState({shouldShowRepeatText: false})
    }, 1400);
  }

  // React to prop edges from the parent modal:
  //  - isRecording false->true: start archiving (lazily fetching a session
  //    if componentDidMount's fetch failed or hasn't landed yet);
  //  - isRecording true->false: stop archiving and report the S3 URL;
  //  - deleteRecording ->true: discard the archive.
  async componentDidUpdate(prevProps) {
    if (prevProps.isRecording != this.props.isRecording) {
      if (this.props.isRecording) {
        // recording archiving requires having the OTSessionId already.
        if (this.state.OTSessionId && this.state.OTSessionId.length > 1) {
          this.startRecordingArchiving();
        } else {
          let result = await Backend.getVideoSession();
          console.log("GETTING OT SESSION w the result is ", result);
          if (!result) {
            return;
          }
          this.setState({OTSessionId: result.session_id, OTToken: result.token}, async () => {
            this.startRecordingArchiving();
          });
        }
      } else {
        console.log("HEYYY doing this ")
        this.stopRecordingArchiving();
        // console.log("HEY THE SOUNDFILE IS ", soundFile);
        // let soundBlob = soundFile.getBlob();
        // console.log("the soundblob is ", soundBlob)
        // //this.props.onRecor
        // soundFile.play();
        //p55.save(soundFile, 'soundFile.wav');
      }
    }
    if (prevProps.deleteRecording != this.props.deleteRecording) {
      console.log("DELETE RECORDING SHOULD ? ", this.props.deleteRecording);
      if (this.props.deleteRecording) {
        this.deleteRecordingArchive();
      }
    }
    // setTimeout(() => {
    //   recorder.stop();
    //   soundFile.play();
    //   p.save(soundFile, 'soundFile.wav');
    // }, 3000)
  }

  // Begin both capture paths: the local p5 recorder (non-Safari only) and
  // the server-side OpenTok archive.
  startRecordingArchiving = async () => {
    console.log("DOING THISS HEY THE RECORDER HERE IS ", recorder);
    if (!isSafari() && recorder) {
      // if it's not safari and actually has a recorder from p5 initialized, then record.
      recorder.record(soundFile);
    }
    if (this.state.OTSessionId && this.state.OTSessionId.length > 1) {
      let result = await Backend.startVideoArchive(this.state.OTSessionId);
      console.log("started archiving w result ", result);
      if (!result) {
        return;
      }
      let archiveId = result.archiveId;
      // NOTE(review): string "true" — see componentWillUnmount.
      localStorage.setItem("isRecording", true);
      this.setState({archiveId: archiveId, isRecording: true});
    } else {
    }
  }

  // Stop both capture paths, then hand the parent the predicted S3 location
  // of the finished OpenTok archive.
  // NOTE(review): the URL is constructed optimistically — the archive upload
  // may not have completed when onAudioRecordingReady fires; confirm the
  // consumer tolerates a not-yet-available object.
  stopRecordingArchiving = async () => {
    if (!isSafari() && recorder) {
      recorder.stop();
    }
    let result = await Backend.stopVideoArchive(this.state.archiveId);
    console.log("the archive stop result: ", result);
    this.setState({isRecording: false});
    localStorage.setItem("isRecording", false);
    let s3URL = "https://s3-us-west-1.amazonaws.com/monthly.progressvideos/" + OT_API_KEY + "/" + this.state.archiveId + "/archive.mp4";
    console.log("THE VIDEO URL IS ", s3URL);
    //setTimeout(() => {
    this.props.onAudioRecordingReady(s3URL);
    //}, 3000);
  }

  // Discard the archive. If it is still running, stop it first and give the
  // backend 2s to finalize before issuing the delete.
  deleteRecordingArchive = async () => {
    console.log("trynna delete recording audio archive ", this.state.archiveId);
    if (this.state.archiveId && this.state.archiveId.length > 0) {
      if (!this.state.stoppedArchive) {
        let resultFromStop = await Backend.stopVideoArchive(this.state.archiveId);
        console.log("stopped ", resultFromStop);
        setTimeout(async function() {
          console.log("now deleting");
          let result = await Backend.deleteVideoArchive(this.state.archiveId);
          console.log("deleted video archive: ", result);
        }.bind(this), 2000);
        return;
      }
      console.log("now deleting");
      let result = await Backend.deleteVideoArchive(this.state.archiveId);
      console.log("deleted video archive: ", result);
    }
  }

  // p5 sketch: draws the live waveform and does pitch detection via
  // autocorrelation of the FFT time-domain buffer. Runs once per animation
  // frame via <P5Wrapper>.
  sketch = p => {
    let rotation = 0;
    // center clip nullifies samples below a clip amount
    var doCenterClip = false;
    var centerClipThreshold = 0.65;
    // normalize pre / post autocorrelation
    var preNormalize = true;
    var postNormalize = true;
    p.setup = () => {
      //p.createCanvas(344, 344, p.WEBGL);
      p.createCanvas(344,344)
      p.noFill();
      //this.amp = new p55.Amplitude();
      // These assign the module-level globals shared with the component's
      // start/stop handlers above.
      amplitude = new p55.Amplitude();
      source = new p55.AudioIn();
      source.start();
      //fontUsed = p.loadFont('src/fonts/Larsseit-Medium.ttf');
      // source2 = new p55.AudioIn();
      // source2.start();
      // create a sound recorder
      recorder = new p55.SoundRecorder();
      recorder.setInput(source);
      // this sound file will be used to
      // playback & save the recording
      soundFile = new p55.SoundFile();
      // lowPass = new p55.LowPass();
      // lowPass.disconnect();
      // source.connect(lowPass);
      // bandPassFilter = new p55.BandPass();
      // bandPassFilter.disconnect();
      // bandPassFilter.set(30, 22300);
      // source.connect(bandPassFilter);
      fft = new p55.FFT();
      fft.setInput(source);
      // Mark that setup ran so render() can fall back to the static mic
      // icon if `recorder` still failed to initialize.
      this.setState({hasTriedToSetP5Recorder: true}, () => {
        console.log(" THE RECORDER HAS BEEN SET ", recorder);
        //alert("DID IT RECORDER ");
      });
      // setTimeout(()=> {
      //   p55.save(soundFile, 'soundFile.wav');
      // }, 4000);
      //fft.setInput(source);
    };
    p.myCustomRedrawAccordingToNewPropsHandler = function (props) {
      // if (props.rotation !== null){
      //   rotation = props.rotation * Math.PI / 180;
      // }
    };
    // Per-frame: estimate pitch, draw the waveform, and update the React
    // state that drives the background color and center text.
    // NOTE(review): calling setState on every frame forces a React re-render
    // per animation frame — works, but is a known hot spot.
    p.draw = () => {
      let monthlyChillBlueColor = {"r": 233, "g": 248, "b": 255};
      if (!this.props.isWaveformHidden) {
        p.background(monthlyChillBlueColor.r, monthlyChillBlueColor.g, monthlyChillBlueColor.b);
      }
      //
      this.setState({redValue: monthlyChillBlueColor.r, greenValue: monthlyChillBlueColor.g, blueValue: monthlyChillBlueColor.b});
      //p.style('transition', '0.5 ease');
      let width = p.width;
      let height = p.height;
      if (this.state.shouldShowRepeatText) {
        this.setState({centerDisplayColor: "#03AEF8"});
        return; // don't show
      }
      var timeDomain;
      if (isSafari()) {
        timeDomain = fft.waveform(1024);
        //timeDomain = JSON.parse(JSON.stringify(fft.waveform(1024)));
      } else {
        //timeDomain = JSON.parse(JSON.stringify(fft.waveform(1024)));
        timeDomain = fft.waveform(1024);
        // timeDomain = fft.waveform(1024, 'float32');
      }
      //var timeDomain = fft.waveform(1024);
      //console.log("the time domain is ", timeDomain)
      // let timeDomainBuffer;
      // if (timeDomainBufferOG.constructor === Float32Array) {
      //   timeDomainBuffer = new Float32Array(timeDomainBufferOG);
      // } else {
      //   timeDomainBuffer = timeDomainBufferOG.concat();
      // }
      // Deep-copy the buffer so autocorrelation can't mutate fft internals.
      let timeDomainBuffer = JSON.parse(JSON.stringify(fft.waveform(1024)));
      let amplitudePeak = findAmplitudePeak(p, timeDomainBuffer);
      //console.log("the amplitude peak is ", amplitudePeak);
      var corrBuff = autoCorrelate(p, timeDomainBuffer); // concat to create a shallow copy (fixes weird sounding bug)
      //console.log("THE CORRELATION BUFFER ", corrBuff)
      // amplitudePeak = findAmplitudePeak(p, corrBuff);
      // console.log("the amplitude peak AFTER CORRELATION is ", amplitudePeak);
      //var corrBuff = timeDomainBuffer
      var freq = findFrequency(p, corrBuff);
      let waveform = fft.waveform();
      p.noFill();
      p.beginShape();
      p.stroke(3,174,248);
      p.strokeWeight(2);
      let waveFormToDraw = waveform;
      //let amplitudeThreshold = 0.08;
      // Quieter threshold when not pitch-matching, so plain speech registers.
      let amplitudeThreshold = this.props.targetFrequency ? 0.08 : 0.06;
      // console.log("THE AMPLITUDE PEAK IS ", amplitudePeak)
      // if (isSafari()) {
      //   amplitudeThreshold = -0.003
      // }
      //console.log("THE FREQ IS ", freq)
      if (freq != 0 && amplitudePeak >= amplitudeThreshold) {
        // Audible, pitched input: draw the waveform across the canvas.
        if (!this.props.isWaveformHidden) {
          for (var i = 0; i< waveFormToDraw.length; i++){
            let x = p.map(i, 0, waveFormToDraw.length, 0, width);
            let y = p.map( waveFormToDraw[i], -1, 1, 0, height);
            p.vertex(x,y);
          }
        }
        if (this.props.targetFrequency) {
          // Pitch-matching mode: color + label by distance from the target.
          let targetGreenColor = {"r": 46, "g": 204, "b": 113};
          let frequencyDistance = freq - this.props.targetFrequency;
          let isTooHigh = false;
          if (frequencyDistance > 0) {
            isTooHigh = true;
          }
          let displayObject = getColorAndTextBasedOnDistanceFrom(frequencyDistance, targetGreenColor, this.props.targetFrequency);
          let displayColor = displayObject.color;
          let displayText = displayObject.displayText;
          this.setState({centerDisplayText: displayText, redValue: displayColor.r, greenValue: displayColor.g, blueValue: displayColor.b, centerDisplayColor: "#FFFFFF"});
          let loudEnoughToCount = amplitudePeak >= amplitudeThreshold;
          // this is only for the beginning 2 seconds, because there could be background noise that doesn't let the person see the text.
          if (loudEnoughToCount) {
            this.setState({shouldShowRepeatText: false});
          }
          //p.background(displayColor.r, displayColor.g, displayColor.b);
        }
      } else {
        // Silence / no detectable pitch: flat center line and reset text.
        if (!this.props.isWaveformHidden) {
          p.line (0, p.height/2, p.width, p.height/2);
        }
        this.setState({centerDisplayColor: "#03AEF8"});
        if (this.state.shouldShowRepeatText && this.props.targetFrequency) {
          this.setState({centerDisplayText: "Repeat what you hear"});
        } else {
          //this.setState({centerDisplayText: "Repeat what you hear"});
          this.setState({centerDisplayText: ""});
        }
        freq = 0;
      }
      p.endShape();
      //p.fill(0);
      // p.strokeWeight(1);
      // p.textSize(40);
      // p.textFont(fontUsed);
      if (this.props.targetFrequency) {
        //p.style('background-color', );
        //p.text ('Too High', 344/4, 344/2);
      }
      //
      //p.text ('Center Clip: ' + centerClipThreshold, 5, 25);
      //p.line (0, p.height/2, p.width, p.height/2);
    };
    // Map the detected pitch's distance from the target to a feedback color
    // and label. Within a small band of the target (asymmetric: 3.25% above,
    // 5.25% below) -> green "Perfect"; otherwise orange shades for
    // "Too High" / "Too Low", blended further from the base the larger the
    // distance is.
    function getColorAndTextBasedOnDistanceFrom(distance, targetColor, targetFrequency) {
      let displayText = "";
      // if it's close to the target frequency, return the good green color.
      // if it's too high (above)
      if (distance > 0 ) {
        if (p.abs(distance) < targetFrequency * 0.0325) {
          displayText = "Perfect"
          return {"color": targetColor, "displayText": displayText};
        }
      } else {
        // if it's too low
        if (p.abs(distance) < targetFrequency * 0.0525) {
          displayText = "Perfect"
          return {"color": targetColor, "displayText": displayText};
        }
      }
      let monthlyTanOrange = {"r": 255, "g": 160, "b": 75};
      let monthlyGoldenOrange = {"r": 254, "g": 183, "b": 0};
      targetColor = monthlyTanOrange;
      // if it's too high, use the monthly golden orange. otherwise use monthlyTanOrange
      if (distance > 0) {
        displayText = "Too High";
        targetColor = monthlyGoldenOrange
      } else {
        displayText = "Too Low";
      }
      let toSubtract = p.abs(distance) / 3;
      // console.log("distance is ", distance, toSubtract);
      let color = {"r": targetColor.r + toSubtract, "g": targetColor.g - toSubtract, "b": targetColor.b - toSubtract};
      return {"color": color, "displayText": displayText}
    }
    // Index of the largest-magnitude sample in the buffer.
    // NOTE(review): unused — findAmplitudePeak below is the one called.
    function findPeak(p, buffer) {
      var biggestVal = 0;
      var biggestIndex = 0;
      var nSamples = buffer.length;
      for (var index = 0; index < nSamples; index++){
        if (p.abs(buffer[index]) > biggestVal){
          biggestVal = p.abs(buffer[index]);
          biggestIndex = index;
        }
      }
      return biggestIndex;
    }
    // Largest absolute sample value in the buffer (the signal's peak amplitude).
    function findAmplitudePeak(p, buffer) {
      var biggestVal = 0;
      var biggestIndex = 0;
      var nSamples = buffer.length;
      for (var index = 0; index < nSamples; index++){
        //console.log("BUFFER ", buffer[index])
        if (p.abs(buffer[index]) > biggestVal){
          biggestVal = p.abs(buffer[index]);
          biggestIndex = index;
        }
      }
      return biggestVal;
    }
    // accepts a timeDomainBuffer and multiplies every value
    // O(n^2) brute-force autocorrelation over the (optionally normalized /
    // center-clipped) time-domain buffer; returns one correlation value per
    // lag, averaged into [-1, 1].
    function autoCorrelate(p, timeDomainBuffer) {
      //timeDomainBuffer = Object.assign({}, ...timeDomainBuffer);
      var nSamples = timeDomainBuffer.length;
      // pre-normalize the input buffer
      if (preNormalize){
        timeDomainBuffer = normalize(p, timeDomainBuffer);
      }
      let timeDomainBufferCopy = JSON.parse(JSON.stringify(timeDomainBuffer));
      //let timeDomainBufferCopy = timeDomainBuffer;
      // zero out any values below the centerClipThreshold
      if (doCenterClip) {
        timeDomainBufferCopy = centerClip(p, timeDomainBufferCopy);
      }
      var autoCorrBuffer = [];
      for (var lag = 0; lag < nSamples; lag++){
        var sum = 0;
        for (var index = 0; index < nSamples; index++){
          var indexLagged = index+lag;
          if (indexLagged < nSamples){
            var sound1 = timeDomainBufferCopy[index];
            var sound2 = timeDomainBufferCopy[indexLagged];
            var product = sound1 * sound2;
            sum += product;
          }
        }
        // average to a value between -1 and 1
        autoCorrBuffer[lag] = sum/nSamples;
      }
      // normalize the output buffer
      if (postNormalize){
        autoCorrBuffer = normalize(p, autoCorrBuffer);
      }
      return autoCorrBuffer;
    }
    // Find the biggest value in a buffer, set that value to 1.0,
    // and scale every other value by the same amount.
    // NOTE(review): mutates and returns the same buffer; divides by zero if
    // the buffer is all zeros (yields NaN) — confirm silent frames can't
    // reach here.
    function normalize(p, buffer) {
      var biggestVal = 0;
      var nSamples = buffer.length;
      for (var index = 0; index < nSamples; index++){
        if (p.abs(buffer[index]) > biggestVal){
          biggestVal = p.abs(buffer[index]);
        }
      }
      for (var index = 0; index < nSamples; index++){
        // divide each sample of the buffer by the biggest val
        buffer[index] /= biggestVal;
      }
      return buffer;
    }
    // Accepts a buffer of samples, and sets any samples whose
    // amplitude is below the centerClipThreshold to zero.
    // This factors them out of the autocorrelation.
    // (Only reachable when doCenterClip is flipped on above.)
    function centerClip(p, buffer) {
      let newBuffer = [];//JSON.parse(JSON.stringify(buffer));
      var nSamples = buffer.length;
      // center clip removes any samples whose abs is less than centerClipThreshold
      //centerClipThreshold = p.map(p.mouseY, 0, p.height, 0,1);
      if (centerClipThreshold > 0.0) {
        for (var i = 0; i < nSamples; i++) {
          var val = buffer[i];
          console.log("THE BUFFER VAL abs is ", Math.abs(val))
          //var newVal = (Math.abs(val) > centerClipThreshold) ? val : 0;
          buffer[i] = (Math.abs(val) > centerClipThreshold) ? val : 0;
          //newBuffer.push(newVal)
        }
      }
      return buffer;
    }
    // Calculate the fundamental frequency of a buffer
    // by finding the peaks, and counting the distance
    // between peaks in samples, and converting that
    // number of samples to a frequency value.
    // Returns 0 for implausible results (>900 Hz). If no peak is found the
    // peak index stays -1, which also yields an out-of-range (negative)
    // frequency and thus falls through the >900 guard only when |f| <= 900 —
    // NOTE(review): confirm the no-peak case is handled as intended.
    // (autocorr[index+1] reads one past the end on the final iteration; in
    // JS that is undefined and the comparison is simply false.)
    function findFrequency(p, autocorr) {
      var nSamples = autocorr.length;
      var valOfLargestPeakSoFar = 0;
      var indexOfLargestPeakSoFar = -1;
      for (var index = 1; index < nSamples; index++){
        var valL = autocorr[index-1];
        var valC = autocorr[index];
        var valR = autocorr[index+1];
        var bIsPeak = ((valL < valC) && (valR < valC));
        if (bIsPeak){
          if (valC > valOfLargestPeakSoFar){
            valOfLargestPeakSoFar = valC;
            indexOfLargestPeakSoFar = index;
          }
        }
      }
      var distanceToNextLargestPeak = indexOfLargestPeakSoFar - 0;
      // convert sample count to frequency
      var fundamentalFrequency = p.sampleRate() / distanceToNextLargestPeak; //p.sampleRate() / distanceToNextLargestPeak;
      //console.log("THE FUNDAMENTAL FREQUENCY is ", fundamentalFrequency);
      if (Math.abs(fundamentalFrequency) > 900) {
        return 0;
      }
      return fundamentalFrequency;
    }
  }

  render() {
    return (
      <div>
        {/*<Sketch setup={this.setup} draw={this.draw} /> */}
        {/* Background color + feedback text driven from state by p.draw. */}
        <div className='monthly-audio-player' style={{backgroundColor: 'rgb('+ this.state.redValue +','+ this.state.greenValue +',' + this.state.blueValue+ ')'}}>
          <div className='monthly-audio-recorder-text' style={{color: this.state.centerDisplayColor, top: this.state.centerDisplayText.length > 12 ? "35%" : "40%"}}>{this.state.centerDisplayText}</div>
          {/* Safari (or failed p5 init): static mic icon instead of the sketch. */}
          {isSafari() || (!recorder && this.state.hasTriedToSetP5Recorder) ?
            <div className='monthly-audio-recorder-screen'>
              <div className='monthly-audio-recorder-mic'>
                <HomepageCurriculumMic />
              </div>
            </div>
            :
            <P5Wrapper sketch={this.sketch}/>
          }
        </div>
        {/* Hidden OpenTok publisher — audio-only; rendered once the token arrives. */}
        {
          this.state.OTToken.length > 0 &&
          <OTSession className='video-recording-screen0' apiKey={OT_API_KEY} sessionId={this.state.OTSessionId} token={this.state.OTToken}>
            <div className='video-recording-screen-hidden'><OTPublisher className='audio-recording-publisher' properties={this.audioPublisherProperties} eventHandlers={this.publisherEventHandlers}/></div>
          </OTSession>
        }
      </div>
    );
  }
}
export default preloadScript(MonthlyAudioRecorder, OT_CLIENT_URL);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment