
@flpvsk
Last active October 4, 2024 04:11
An example of a recorder based on the AudioWorklet API.

recorderWorkletProcessor.js
/*
  A worklet for recording in sync with AudioContext.currentTime.

  More info about the API:
  https://developers.google.com/web/updates/2017/12/audio-worklet

  How to use:

  1. Serve this file from your server (e.g. put it in the "public" folder) as is.

  2. Register the worklet:

       const audioContext = new AudioContext();
       audioContext.audioWorklet.addModule('path/to/recorderWorkletProcessor.js')
         .then(() => {
           // your code here
         });

  3. Whenever you need to record anything, create a WorkletNode, route the
     audio into it, and schedule the values for the 'isRecording' parameter:

       const recorderNode = new window.AudioWorkletNode(
         audioContext,
         'recorder-worklet'
       );

       yourSourceNode.connect(recorderNode);
       recorderNode.connect(audioContext.destination);

       recorderNode.port.onmessage = (e) => {
         if (e.data.eventType === 'data') {
           const audioData = e.data.audioBuffer;
           // process pcm data
         }
         if (e.data.eventType === 'stop') {
           // recording has stopped
         }
       };

       recorderNode.parameters.get('isRecording').setValueAtTime(1, time);
       recorderNode.parameters.get('isRecording').setValueAtTime(
         0,
         time + duration
       );

       yourSourceNode.start(time);
*/
class RecorderWorkletProcessor extends AudioWorkletProcessor {
  static get parameterDescriptors() {
    return [{
      name: 'isRecording',
      defaultValue: 0
    }];
  }

  constructor() {
    super();
    this._bufferSize = 2048;
    this._buffer = new Float32Array(this._bufferSize);
    this._initBuffer();
  }

  _initBuffer() {
    this._bytesWritten = 0;
  }

  _isBufferEmpty() {
    return this._bytesWritten === 0;
  }

  _isBufferFull() {
    return this._bytesWritten === this._bufferSize;
  }

  _appendToBuffer(value) {
    if (this._isBufferFull()) {
      this._flush();
    }
    this._buffer[this._bytesWritten] = value;
    this._bytesWritten += 1;
  }

  _flush() {
    let buffer = this._buffer;
    if (this._bytesWritten < this._bufferSize) {
      buffer = buffer.slice(0, this._bytesWritten);
    }
    this.port.postMessage({
      eventType: 'data',
      audioBuffer: buffer
    });
    this._initBuffer();
  }

  _recordingStopped() {
    this.port.postMessage({
      eventType: 'stop'
    });
  }
  process(inputs, outputs, parameters) {
    const isRecordingValues = parameters.isRecording;
    const input = inputs[0];

    // Nothing connected to this node yet; keep the processor alive.
    if (!input || input.length === 0) {
      return true;
    }

    const inputChannel = input[0];

    for (let dataIndex = 0; dataIndex < inputChannel.length; dataIndex++) {
      // The isRecording array holds either one value per sample or a
      // single value for the whole render quantum.
      const paramIndex = isRecordingValues.length > 1 ? dataIndex : 0;
      const shouldRecord = isRecordingValues[paramIndex] === 1;

      if (!shouldRecord && !this._isBufferEmpty()) {
        this._flush();
        this._recordingStopped();
      }

      if (shouldRecord) {
        this._appendToBuffer(inputChannel[dataIndex]);
      }
    }

    return true;
  }
}

registerProcessor('recorder-worklet', RecorderWorkletProcessor);
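
For reference, here is a minimal main-thread sketch of consuming the messages the worklet posts. It is not part of the gist: `recordedChunks` and `mergeChunks` are illustrative names, and `recorderNode` is the node created in the usage comment above.

```js
// Collect the Float32Array chunks posted with eventType 'data' and merge
// them into a single buffer once recording stops.
const recordedChunks = [];

recorderNode.port.onmessage = (e) => {
  if (e.data.eventType === 'data') {
    recordedChunks.push(e.data.audioBuffer);
  }
  if (e.data.eventType === 'stop') {
    const samples = mergeChunks(recordedChunks);
    // `samples` now holds the full recording as raw PCM, ready to encode
    // (e.g. into a WAV file) or to copy into an AudioBuffer for playback.
  }
};

// Concatenate a list of Float32Arrays into one Float32Array.
function mergeChunks(chunks) {
  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const result = new Float32Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  return result;
}
```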
@badpaybad

mic-audio-recorder.js

```js
class MicAudioProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this._lastUpdate = (new Date()).getTime();
    this.waveformData = [];
  }

  process(inputs, outputs) {
    // Forward the raw input frame (an array of Float32Arrays, one per
    // channel) to the main thread.
    this.port.postMessage(inputs[0]);

    //// To pass the audio through, copy inputs to outputs:
    // outputs[0][0].set(inputs[0][0]);

    return true;
  }
}

registerProcessor("micAudioworklet", MicAudioProcessor);
```
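
One possible way to wire a microphone into this processor (not shown in the original comment; the module path and the getUserMedia setup here are assumptions):

```js
// Sketch: route microphone input into the MicAudioProcessor above.
const audioContext = new AudioContext();

navigator.mediaDevices.getUserMedia({ audio: true })
  .then(async (stream) => {
    // Path is an assumption; serve mic-audio-recorder.js from your app.
    await audioContext.audioWorklet.addModule('./assets/mic-audio-recorder.js');

    const source = audioContext.createMediaStreamSource(stream);
    const micNode = new AudioWorkletNode(audioContext, 'micAudioworklet');

    // Each message is one render quantum: an array of Float32Arrays,
    // one per input channel, 128 samples each.
    micNode.port.onmessage = ({ data }) => {
      // buffer, draw or forward data[0], data[1], ...
    };

    source.connect(micNode);
  })
  .catch((err) => console.error('Microphone access failed:', err));
```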

From https://github.com/addpipe/simple-recorderjs-demo/blob/master/js/recorder.js, change lines 57-74 to the following:

```js
this.context = source.context; // AudioContext
var micAudioWorklet;
//window.AudioWorkletNode = null;

if (window.AudioWorkletNode) {
    this.context.audioWorklet.addModule('./assets/mic-audio-recorder.js')
        .then(async () => {
            micAudioWorklet = new AudioWorkletNode(this.context, "micAudioworklet");
            micAudioWorklet.connect(this.context.destination);

            // Accumulate 128-sample frames per channel until bufferLen
            // samples are available, then post one chunk to the worker.
            var bufferWithChannel = [];
            for (var channel = 0; channel < _this.config.numChannels; channel++) {
                bufferWithChannel.push([]);
            }

            micAudioWorklet.port.onmessage = ({ data }) => {
                if (!_this.recording) return;
                var buffer = [];
                var canPost = true;
                for (var channel = 0; channel < _this.config.numChannels; channel++) {
                    bufferWithChannel[channel].push(...data[channel]);
                    if (bufferWithChannel[channel].length >= _this.config.bufferLen) {
                        var temp = bufferWithChannel[channel].slice(0, _this.config.bufferLen);
                        //console.log(temp.length)
                        buffer[channel] = temp;
                        bufferWithChannel[channel] = bufferWithChannel[channel].slice(_this.config.bufferLen);
                    } else {
                        canPost = false;
                    }
                }
                if (canPost) {
                    _this.worker.postMessage({
                        command: 'record',
                        buffer: buffer
                    });
                    buffer = [];
                }
                //// draw wave form: https://codesandbox.io/p/devbox/audioworket-port-1-y7ctsn?file=%2Findex.js%3A24%2C69
            };

            source.connect(micAudioWorklet).connect(this.context.destination);
        })
        .catch(error => {
            console.error('Error registering audio worklet:', error);
        });
} else {
    // Fallback for browsers without AudioWorklet: the original
    // ScriptProcessorNode path.
    this.node = (this.context.createScriptProcessor || this.context.createJavaScriptNode).call(this.context, this.config.bufferLen, this.config.numChannels, this.config.numChannels);
    this.node.onaudioprocess = function (e) {
        if (!_this.recording) return;
        var buffer = [];
        for (var channel = 0; channel < _this.config.numChannels; channel++) {
            //console.log(e.inputBuffer.getChannelData(channel).length)
            buffer.push(e.inputBuffer.getChannelData(channel));
        }
        _this.worker.postMessage({
            command: 'record',
            buffer: buffer
        });
    };
    source.connect(this.node);
    this.node.connect(this.context.destination); // this should not be necessary
}
```
