
@acidsound
Created February 27, 2019 18:54
Audio Input // source https://jsbin.com/tidoqi
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Audio Input</title>
<link rel="stylesheet" href="https://storage.googleapis.com/code.getmdl.io/1.0.4/material.indigo-blue.min.css" />
<style id="jsbin-css">
.container {
display: -webkit-flex;
display: flex;
-webkit-flex-wrap: wrap;
flex-wrap: wrap;
}
.container .mdl-button {
width: 30vw;
height: 30vh;
}
canvas {
width: 100vw;
height: 100vh;
}
</style>
</head>
<body>
<div class="container">
</div>
<canvas></canvas>
<script id="jsbin-javascript">
var init, paint, unlock;
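// Shim the prefixed WebKit APIs. Both are legacy: modern code would use
// the unprefixed AudioContext and navigator.mediaDevices.getUserMedia().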
this.AudioContext = this.AudioContext || this.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
document.addEventListener('DOMContentLoaded', function(e) {
var iOS;
iOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !window.MSStream;
// On iOS, defer startup until a touch unlocks audio; otherwise start now.
if (iOS) {
window.addEventListener('touchstart', unlock);
} else {
init();
}
});
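// iOS will not start audio without a user gesture: on the first touch,
// play a one-sample silent buffer to unlock the AudioContext.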
unlock = function() {
var ctx, source, _ref;
ctx = new AudioContext();
source = ctx.createBufferSource();
source.buffer = ctx.createBuffer(1, 1, ctx.sampleRate);
source.connect(ctx.destination);
source.start(0);
// playbackState is a legacy WebKit property; once the silent buffer is
// playing, the context is unlocked, so stop listening and start.
if ((_ref = source.playbackState) === source.PLAYING_STATE || _ref === source.FINISHED_STATE) {
window.removeEventListener('touchstart', unlock);
return init();
}
};
this.fft = this.buffer1 = this.buffer2 = null;
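// Wire up the audio graph: microphone stream -> 2048-point AnalyserNode,
// then kick off the canvas render loop.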
init = function() {
console.log("init");
this.ctx = new this.AudioContext();
this.cv = document.getElementsByTagName('canvas')[0].getContext('2d');
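// Match the canvas bitmap to its CSS size (100vw x 100vh); the default
// 300x150 backing store would clip the clientWidth/clientHeight coordinates
// that paint() draws with.
this.cv.canvas.width = this.cv.canvas.clientWidth;
this.cv.canvas.height = this.cv.canvas.clientHeight;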
this.fft = this.ctx.createAnalyser();
this.fft.fftSize = 2048;
this.buffer1 = new Uint8Array(this.fft.fftSize);
this.buffer2 = new Uint8Array(this.fft.fftSize);
this.fft.getByteTimeDomainData(this.buffer1);
paint();
return navigator.getUserMedia({
audio: true
}, function(stream) {
var src;
src = ctx.createMediaStreamSource(stream);
return src.connect(fft);
}, function(err) {
return console.log(err);
});
};
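// Render loop: plot the time-domain waveform as black dots in the top
// quarter of the canvas, and the frequency spectrum as translucent bars
// rising from the bottom edge.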
paint = function() {
var h, i, l, w, ww;
requestAnimationFrame(paint);
this.fft.getByteTimeDomainData(this.buffer1);
this.fft.getByteFrequencyData(this.buffer2);
w = cv.canvas.clientWidth;
h = cv.canvas.clientHeight;
l = this.fft.fftSize;
ww = w / l;
this.cv.clearRect(0, 0, w, h);
this.cv.fillStyle = "#000";
for (i = 0; i < this.fft.frequencyBinCount; i++) {
this.cv.fillRect(ww * i, h * this.buffer1[i] / 256 / 4, 1, 1);
}
this.cv.fillStyle = "rgba(128,53,72,0.2)";
for (i = 0; i < this.fft.frequencyBinCount; i++) {
this.cv.fillRect(ww * i, h - (h * this.buffer2[i] / 256), 3, h * this.buffer2[i] / 256);
}
};
</script>
<script id="jsbin-source-css" type="text/css">.container {
display: -webkit-flex;
display: flex;
-webkit-flex-wrap: wrap;
flex-wrap: wrap;
.mdl-button {
width: 30vw;
height: 30vh;
}
}
canvas {
width: 100vw;
height: 100vh;
}</script>
<script id="jsbin-source-javascript" type="text/javascript">@AudioContext = @AudioContext or @webkitAudioContext
navigator.getUserMedia = navigator.getUserMedia or navigator.webkitGetUserMedia
document.addEventListener 'DOMContentLoaded', (e)->
iOS = /iPad|iPhone|iPod/.test(navigator.userAgent) and not window.MSStream
iOS and window.addEventListener('touchstart', unlock) or init()
unlock= ->
ctx= new AudioContext()
source = ctx.createBufferSource()
source.buffer = ctx.createBuffer 1,1,ctx.sampleRate
source.connect ctx.destination
source.start 0
unlock() if source.playbackState in [
source.PLAYING_STATE
source.FINISHED_STATE
]
@fft= @buffer=null
init= ->
console.log "init"
@ctx = new @AudioContext()
@cv = document.getElementsByTagName('canvas')[0].getContext '2d'
@fft = @ctx.createAnalyser()
@fft.fftSize=2048
@buffer1 = new Uint8Array @fft.fftSize
@buffer2 = new Uint8Array @fft.fftSize
@fft.getByteTimeDomainData @buffer
paint()
navigator.getUserMedia audio: yes, (stream)->
src = ctx.createMediaStreamSource stream
src.connect fft
, (err)->
console.log err
paint= ->
requestAnimationFrame paint
@fft.getByteTimeDomainData @buffer1
@fft.getByteFrequencyData @buffer2
w = cv.canvas.clientWidth
h = cv.canvas.clientHeight
l = @fft.fftSize
ww= w/l
@cv.clearRect 0,0,w,h
@cv.fillStyle="#000"
@cv.fillRect ww*i,h*@buffer1[i]/256/4,1, 1 for i in [[email protected]]
@cv.fillStyle="rgba(128,53,72,0.2)"
@cv.fillRect ww*i,h-(h*@buffer2[i]/256),3, h*@buffer2[i]/256 for i in [[email protected]]</script></body>
</html>