Skip to content

Instantly share code, notes, and snippets.

@cat-in-136
Created May 24, 2020 09:40
Show Gist options
  • Save cat-in-136/efe21ea1567c4fdf2b26187cbb0d4a86 to your computer and use it in GitHub Desktop.
Study for webrtc audio
// Electron main process: once the app is ready, open an 800x600 window
// with Node integration enabled and load the demo page (hoge.html).
const { app, BrowserWindow } = require("electron");

(async () => {
  await app.whenReady();

  const mainWindow = new BrowserWindow({
    width: 800,
    height: 600,
    webPreferences: {
      nodeIntegration: true,
    },
  });

  mainWindow.loadFile("hoge.html");
})();
<textarea id="config" style="width: 100%" rows="10">
{
"autoGainControl": false,
"echoCancellation": true,
"noiseSuppression": true
}
</textarea>
<input type="button" id="start" value="Start!">
<input type="button" id="apply" value="Apply!" disabled>
<div>
<canvas id="canvas" height="100" width="500"></canvas>
</div>
<script>
// "Start!" button: open the microphone using the JSON constraints from the
// <textarea>, allow live re-application via "Apply!", and continuously draw
// a frequency-bar + waveform visualization while monitoring through a 1 s delay.
document.getElementById("start").addEventListener("click", async (event) => {
  const audioConstraints = JSON.parse(document.getElementById("config").value);
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: audioConstraints,
    video: false
  });
  console.info(stream.getAudioTracks()[0].getSettings());

  // "Apply!" re-parses the textarea and applies the constraints to the live track.
  document.getElementById("apply").addEventListener("click", async () => {
    const newAudioConstraints = JSON.parse(document.getElementById("config").value);
    await stream.getAudioTracks()[0].applyConstraints(newAudioConstraints);
    console.info(stream.getAudioTracks()[0].getSettings());
  }, false);

  // Audio graph: mic source -> 1 s delay -> speakers, with the analyser
  // tapped off the delay node.
  const audioCtx = new AudioContext();
  const micSource = audioCtx.createMediaStreamSource(stream);
  const delayNode = audioCtx.createDelay();
  delayNode.delayTime.setValueAtTime(1, audioCtx.currentTime);
  const analyser = audioCtx.createAnalyser();
  micSource.connect(delayNode);
  delayNode.connect(audioCtx.destination);
  delayNode.connect(analyser);

  const binCount = analyser.frequencyBinCount;
  const timeData = new Uint8Array(binCount);
  const freqData = new Uint8Array(binCount);
  const canvas = document.getElementById("canvas");
  const canvasCtx = canvas.getContext("2d");

  // Animation loop: repaint both domains every frame.
  const draw = () => {
    requestAnimationFrame(draw);
    canvasCtx.fillStyle = "#000";
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

    // Frequency domain: one hue-coded bar per bin, drawn down from its level.
    analyser.getByteFrequencyData(freqData);
    freqData.forEach((v, bin) => {
      const barWidth = canvas.width * 1.0 / binCount;
      const barX = bin * barWidth;
      const barTop = canvas.height - v * canvas.height / 255.0;
      canvasCtx.fillStyle = `hsl(${v+100}, 100%, 50%)`;
      canvasCtx.fillRect(barX, barTop, barWidth, canvas.height);
    });

    // Time domain: translucent white waveform traced over the bars.
    analyser.getByteTimeDomainData(timeData);
    canvasCtx.lineWidth = 2;
    canvasCtx.strokeStyle = "rgba(255,255,255,0.5)";
    canvasCtx.beginPath();
    canvasCtx.moveTo(0, canvas.height / 2);
    timeData.forEach((v, i) => {
      canvasCtx.lineTo(
        i * canvas.width * 1.0 / binCount,
        canvas.height - v * canvas.height / 255.0
      );
    });
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  };
  draw();

  // Lock "Start!" (the stream is live) and enable "Apply!".
  document.getElementById("start").disabled = true;
  document.getElementById("apply").disabled = false;
}, false);
// Initial UI state once the DOM is parsed: only "Start!" is clickable.
window.addEventListener("DOMContentLoaded", () => {
  const startButton = document.getElementById("start");
  const applyButton = document.getElementById("apply");
  startButton.disabled = false;
  applyButton.disabled = true;
}, false);
</script>
{
"name": "hoge",
"version": "0.1.0",
"description": "",
"main": "app.js",
"scripts": {
"start": "electron ."
},
"author": "@cat_in_136",
"license": "MIT",
"devDependencies": {
"electron": "^9.0.0"
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment