Skip to content

Instantly share code, notes, and snippets.

@diyfr
Last active November 22, 2019 07:06
Show Gist options
  • Save diyfr/dda1a89fd051927cd236831c22763076 to your computer and use it in GitHub Desktop.
Record hotword with html5
<!DOCTYPE html>
<html lang="fr">
<head>
<meta charset="UTF-8">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/meyer-reset/2.0/reset.min.css">
<link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css'>
<style>
/* Spinner ring rotation used by the active record button. */
@keyframes spin {
  from { transform: rotate(0deg); }
  to { transform: rotate(360deg); }
}
* {
  box-sizing: border-box;
}
html {
  width: 100%;
  height: 100%;
  background: linear-gradient(180deg, #2E748F, #153A49);
  font-size: 10px; /* 1rem = 10px for the rem sizes below */
}
.waveform {
  position: relative;
  padding: 4rem 0;
}
.waveform__canvas {
  width: 320px;
  margin-right: auto;
  margin-left: auto;
  display: block;
  height: 240px;
}
.toolbar {
  text-align: center;
}
.button {
  transition: background .4s ease-in-out, color .4s ease-in-out;
  position: relative;
  display: inline-block;
  width: 8rem;
  height: 8rem;
  margin: 0 1rem;
  padding: 0;
  background: #ffffff;
  border-radius: 50%;
  border: none;
  outline: none;
  color: rgba(0, 0, 0, 0.5);
  font-size: 4rem;
  cursor: pointer;
}
/* Animated ring shown while recording. The negative margin offsets the
   0.4rem border so the ring hugs the button edge.
   FIX: removed `padding: -.4rem` — negative padding is invalid CSS. */
.button--record::before {
  animation: spin 4s linear infinite;
  transition: opacity .4s ease-in-out;
  content: "";
  position: absolute;
  top: 0;
  left: 0;
  width: 8rem;
  height: 8rem;
  margin: -.4rem;
  background: transparent;
  opacity: 0;
  border-radius: 50%;
  border: 0.4rem solid rgba(46, 116, 143, 0.8);
  border-top-color: rgba(46, 116, 143, 0);
}
.button--record.button--active {
  background: #ED6A5F;
  color: #ffffff;
}
.button--record.button--active::before {
  opacity: 1;
}
.button--play, .button--trash, .button--save {
  width: 5rem;
  height: 5rem;
  font-size: 2rem;
  vertical-align: inherit;
}
.button--play.button--active {
  background: #49F1D5;
  color: #ffffff;
}
.button--disabled {
  opacity: .2;
  pointer-events: none;
  cursor: not-allowed;
}
/* Keep the native <audio> element present in the DOM but invisible.
   FIX: `visibility: 0` is invalid — the keyword is `hidden`. */
.audio {
  width: 0;
  height: 0;
  opacity: 0;
  visibility: hidden;
}
/* FIX: `transtion` typo — the fade-in transition never applied. */
.message {
  transition: opacity .4s ease-in-out;
  padding: 1rem 2rem;
  background: #ED6A5F;
  opacity: 0;
  font-size: 1.6rem;
  font-family: Helvetica, Arial, sans-serif;
  color: #ffffff;
  line-height: 1.5;
}
.message--visible {
  opacity: 1;
}
</style>
</head>
<body translate="no">
<!-- Message banner; populated and shown/hidden by the script. -->
<div class="js-message message">
</div>
<div class="recorder">
<div class="waveform">
<canvas class="js-canvas waveform__canvas"></canvas>
</div>
<div class="toolbar">
<button class="js-record button button--record"><i class="fa fa-microphone" aria-hidden="true"></i></button>
<button class="js-play button button--play button--disabled"><i class="fa fa-play" aria-hidden="true"></i></button>
<button class="js-trash button button--trash button--disabled"><i class="fa fa-trash" aria-hidden="true"></i></button>
<button class="js-save button button--save button--disabled"><i class="fa fa-save" aria-hidden="true"></i></button>
<!-- FIX: <audio> is not a void element, so "/>" did not close it and the
     element swallowed subsequent siblings; close it explicitly. -->
<audio class="js-audio audio audio--hidden" controls></audio>
</div>
</div>
<script>
(function () {
if (!window.AudioContext) {
setMessage('Your browser does not support window.Audiocontext. This is needed for this demo to work. Please try again in a differen browser.');
}
// UI Elements
const messageContainer = document.querySelector('.js-message');
const canvas = document.querySelector('.js-canvas');
const recordButton = document.querySelector('.js-record');
const playButton = document.querySelector('.js-play');
const trashButton = document.querySelector('.js-trash');
const saveButton = document.querySelector('.js-save');
const audioPlayer = document.querySelector('.js-audio');
const playButtonIcon = document.querySelector('.js-play .fa');
// Vendor-prefix shim: older Safari exposes only webkitAudioContext.
const AudioContext = window.AudioContext || window.webkitAudioContext;
// Audio-graph state, populated once microphone access is granted
// (originally labelled "Constants", but these are mutable bindings).
let audioContext = null;
let analyser = null;
let scriptProcessor = null;
let chunks = [];
// Analyser FFT size (frequencyBinCount is half of this per the Web Audio spec).
const fftSize=512;
// Recording / playback state
let stream = null;
let input = null;
let recorder = null;
let recording = null; // object URL of the last finished recording
let isRecording = false;
let isPlaying = false;
// Canvas variables
const barWidth = 2;
const barGutter = 2;
const barColor = "#49F1D5";
let canvasContext = canvas.getContext('2d');
let bars = []; // one average-volume sample per processed audio frame
let width = 0;
let height = 0;
let halfHeight = 0;
let drawing = false; // true while an animation frame is queued in renderBars
// Display a notification in the message banner.
// Messages may contain markup (e.g. <br/>), hence innerHTML.
const setMessage = message => {
messageContainer.classList.add('message--visible');
messageContainer.innerHTML = message;
};
// Hide the message banner again.
const hideMessage = () => {
messageContainer.classList.remove('message--visible');
};
// Request access to the user's microphone and, on success, wire up the
// audio graph via setAudioStream. FIX: corrected typos in the user-facing
// messages ("mediadevices" -> "mediaDevices", "differen" -> "different").
const requestMicrophoneAccess = () => {
if (navigator.mediaDevices) {
navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
setAudioStream(stream);
}, error => {
// getUserMedia is only available in secure contexts.
setMessage('Something went wrong requesting the userMedia. <br/>Please make sure you\'re viewing this demo over https.');
});
} else {
setMessage('Your browser does not support navigator.mediaDevices. <br/>This is needed for this demo to work. Please try again in a different browser.');
}
};
let voiceDetected = false;
// Wire up everything that depends on the microphone stream: analyser,
// script processor, MediaRecorder, and the waveform canvas.
const setAudioStream = mediaStream => {
// FIX: the original wrote `stream = stream`, a self-assignment — the
// parameter shadowed the outer variable, which therefore was never set.
stream = mediaStream;
audioContext = new AudioContext();
analyser = audioContext.createAnalyser();
scriptProcessor = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = fftSize;
input = audioContext.createMediaStreamSource(mediaStream);
recorder = new window.MediaRecorder(mediaStream);
setRecorderActions();
setupWaveform();
};
// Hook the MediaRecorder lifecycle callbacks.
const setRecorderActions = () => {
recorder.ondataavailable = saveChunkToRecording;
recorder.onstop = saveRecording;
};
// Append each incoming audio chunk to the buffer.
const saveChunkToRecording = ({ data }) => {
chunks.push(data);
};
// Finalize the recording: build an object URL for playback, reset the chunk
// buffer, and enable the play/trash/save buttons.
const saveRecording = () => {
recording = URL.createObjectURL(new Blob(chunks, { 'type': 'audio/ogg; codecs=opus' }));
// FIX: the original wrote `chunks: [];` — a labeled statement (no-op), so
// old chunks leaked into every subsequent recording.
chunks = [];
audioPlayer.setAttribute('src', recording);
playButton.classList.remove('button--disabled');
trashButton.classList.remove('button--disabled');
saveButton.classList.remove('button--disabled');
};
let timeout = null;      // auto-stop timer handle
let countMaxPeak = 0;    // number of frames whose volume exceeded `peak`
// Begin capturing audio; recording auto-stops after 4 seconds at most.
const startRecording = () => {
isRecording = true;
chunks = [];
countMaxPeak = 0;
recordButton.classList.add('button--active');
recorder.start();
timeout = setTimeout(stopRecording, 4000);
};
// Halt capture and cancel the pending auto-stop timer.
const stopRecording = () => {
clearTimeout(timeout);
recordButton.classList.remove('button--active');
isRecording = false;
recorder.stop();
};
// Flip between recording and idle on each record-button press.
const toggleRecording = () => {
(isRecording ? stopRecording : startRecording)();
};
// Size the canvas and connect the analyser chain:
// mic input -> analyser -> scriptProcessor -> destination.
const setupWaveform = () => {
canvasContext = canvas.getContext('2d');
canvas.width = 320;
// FIX: was `canvas.heigth` (typo), which set a bogus property and left the
// canvas bitmap at its default height.
canvas.height = 240;
width = canvas.offsetWidth;
height = canvas.offsetHeight;
halfHeight = height / 2;
canvasContext.canvas.width = width;
canvasContext.canvas.height = height;
input.connect(analyser);
analyser.connect(scriptProcessor);
// The processor must reach the destination for onaudioprocess to fire.
scriptProcessor.connect(audioContext.destination);
scriptProcessor.onaudioprocess = processInput;
};
const peak = 30;     // average volume above which a frame counts as "speech"
const maxCount = 3;  // speech frames required before silence can end the recording
// Analyse each audio frame while recording: push the frame's average volume
// for the waveform, and auto-stop once speech was heard and the input has
// gone quiet again.
const processInput = audioProcessingEvent => {
if (!isRecording) {
bars = [];
return;
}
const frequencies = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(frequencies);
const avg = getAverageVolume(frequencies);
// FIX: the original recomputed getAverageVolume a second time here.
bars.push(avg);
if (avg > peak) countMaxPeak++;
// Only the most recent bars that fit the canvas width are drawn.
const visibleBars = Math.floor(width / (barWidth + barGutter));
if (bars.length <= visibleBars) {
renderBars(bars);
} else {
renderBars(bars.slice(bars.length - visibleBars), bars.length);
}
if (countMaxPeak > maxCount && avg < 10) stopRecording();
};
// Calculate the average volume of a sample array (array-like of numbers).
// FIX: returns 0 for an empty array instead of NaN (0 / 0).
const getAverageVolume = array => {
const length = array.length;
if (length === 0) return 0;
let values = 0;
for (let i = 0; i < length; i++) {
values += array[i];
}
return values / length;
};
// Draw the volume bars, mirrored around the vertical centre of the canvas.
// `drawing` guards against queueing more than one animation frame at a time.
const renderBars = bars => {
if (drawing) return;
drawing = true;
window.requestAnimationFrame(() => {
canvasContext.clearRect(0, 0, width, height);
bars.forEach((bar, index) => {
const x = index * (barWidth + barGutter);
const barHeight = halfHeight * (bar / 100);
canvasContext.fillStyle = barColor;
canvasContext.fillRect(x, halfHeight, barWidth, barHeight);
canvasContext.fillRect(x, halfHeight - barHeight, barWidth, barHeight);
});
drawing = false;
});
};
// Start playback of the saved recording and switch the button icon to "pause".
const play = () => {
isPlaying = true;
audioPlayer.play();
playButton.classList.add('button--active');
playButtonIcon.classList.remove('fa-play');
playButtonIcon.classList.add('fa-pause');
};
// Discard the current recording: stop playback, wipe the waveform, clear the
// chunk buffer and disable the playback controls again.
const trashData = () => {
stop();
canvasContext.clearRect(0, 0, width, height);
chunks = [];
for (const button of [playButton, trashButton, saveButton]) {
button.classList.add('button--disabled');
}
};
// Placeholder "upload" action: flashes progress messages then discards the
// local recording. The actual send (presumably an upload of the recorded
// blob) is not implemented here -- TODO: wire up the real upload promise.
const saveData = () => {
setMessage("sending");
// NOTE(review): once a real upload exists, show the message below from its
// .then() handler rather than synchronously, and only then call trashData.
setMessage("sended");
trashData();
};
// Stop playback, rewind to the start, and restore the "play" icon.
const stop = () => {
isPlaying = false;
audioPlayer.pause();
audioPlayer.currentTime = 0;
playButton.classList.remove('button--active');
playButtonIcon.classList.remove('fa-pause');
playButtonIcon.classList.add('fa-play');
};
// Flip between playing and stopped on each play-button press.
const togglePlay = () => {
(isPlaying ? stop : play)();
};
// Reset the UI once playback reaches the end of the recording.
const setupPlayer = () => {
audioPlayer.addEventListener('ended', stop);
};
// Start the application: request the mic and hook up the <audio> element.
requestMicrophoneAccess();
setupPlayer();
// Add event listeners to the buttons.
// NOTE(review): 'mouseup' ignores keyboard and touch-only activation;
// 'click' would be more accessible -- confirm before changing.
recordButton.addEventListener('mouseup', toggleRecording);
playButton.addEventListener('mouseup', togglePlay);
trashButton.addEventListener('mouseup', trashData);
saveButton.addEventListener('mouseup', saveData);
})();
</script>
</body>
</html>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment