Chromium does not support capture of monitor devices (system audio output to headphones and speakers; "What-U-Hear")
when navigator.mediaDevices.getUserMedia()
is called and does not list monitor devices when
navigator.mediaDevices.enumerateDevices()
is called on Linux; see
- Issue 931749: DOMException: could not start audio source when trying to access audioinput.
- Clarify "audiooutput" does not mean capture of audio output to headphones or speakers
Firefox does support capture of monitor devices on Linux.
Using https://github.com/fippo/paste as a template, capture the monitor device in Firefox Nightly and stream the captured monitor device
to Chromium using RTCPeerConnection.
Using clipboard for signaling is not ideal.
TODO: Improve the means of signaling to establish WebRTC peer connection between different applications.
In Firefox, set the following preferences to true:
dom.events.testing.asyncClipboard
media.navigator.permission.disabled
index.html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
</head>
<body>
<script>
// Offerer (Firefox): capture the system-audio "monitor" device and stream
// it to the answering page over an RTCPeerConnection, using the clipboard
// as the signaling channel.
(async _ => {
  const webrtc = new RTCPeerConnection({ sdpSemantics: 'unified-plan' });
  [
    'onsignalingstatechange',
    'oniceconnectionstatechange',
    'onicegatheringstatechange',
  ].forEach(event => webrtc.addEventListener(event, console.log));
  let sdp;
  webrtc.onicecandidate = async event => {
    console.log('candidate', event.candidate);
    // A null candidate signals end of ICE gathering: the local offer SDP
    // is complete and can be shared with the answerer.
    if (!event.candidate) {
      sdp = webrtc.localDescription.sdp;
      if (sdp.indexOf('a=end-of-candidates') === -1) {
        sdp += 'a=end-of-candidates\r\n';
      }
      try {
        await navigator.clipboard.writeText(sdp);
        // Poll the clipboard once per second until its contents differ
        // (ignoring whitespace) from our own offer — at that point the
        // clipboard holds the remote answer SDP.
        async function* readClipboard() {
          while (true) {
            try {
              await new Promise(resolve => setTimeout(resolve, 1000));
              // Requires dom.events.testing.asyncClipboard
              // (optionally dom.events.asyncClipboard.dataTransfer).
              const text = await navigator.clipboard.readText();
              if (
                text.replace(/[\n\s]+/g, '') !==
                sdp.replace(/[\n\s]+/g, '')
              ) {
                sdp = text;
                console.log({ sdp, text });
                break;
              }
              yield text;
            } catch (e) {
              console.error(e);
              throw e;
            }
          }
        }
        for await (const text of readClipboard()) {
          console.log(text);
        }
        await webrtc.setRemoteDescription({ type: 'answer', sdp });
      } catch (e) {
        console.error(e);
        throw e;
      }
    }
  };
  try {
    // Requires media.navigator.permission.disabled so getUserMedia()
    // resolves without a permission prompt.
    let stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
    });
    const label = 'Monitor of Built-in Audio Analog Stereo';
    let [track] = stream.getAudioTracks();
    // If the default capture device is not the monitor device, look it
    // up by label and re-capture with an exact deviceId constraint.
    if (track.label !== label) {
      const device = (
        await navigator.mediaDevices.enumerateDevices()
      ).find(({ label: deviceLabel }) => deviceLabel === label);
      // Fail with a descriptive error instead of an opaque TypeError
      // when the monitor device is not exposed.
      if (!device) {
        throw new Error(`Monitor device not found: ${label}`);
      }
      console.log(device);
      track.stop();
      stream = await navigator.mediaDevices.getUserMedia({
        audio: { deviceId: { exact: device.deviceId } },
      });
      [track] = stream.getAudioTracks();
    }
    // The transceiver return value was unused; send the already-selected
    // track instead of re-reading stream.getAudioTracks()[0].
    webrtc.addTransceiver(track, {
      streams: [stream],
      direction: 'sendonly',
    });
    const offer = await webrtc.createOffer();
    // Await so ICE gathering (and onicecandidate above) follows a fully
    // applied local description instead of racing a floating promise.
    await webrtc.setLocalDescription(offer);
  } catch (e) {
    throw e;
  }
})().catch(console.error);
</script>
</body>
</html>
answer.html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<style>
body *:not(script) {
display: block;
}
</style>
</head>
<body>
<button id="capture">Capture system audio</button>
<audio id="audio" autoplay controls muted></audio>
<script>
// Answerer (Chromium): receive the system-audio stream from the offering
// page and play it, using the clipboard as the signaling channel.
const audio = document.getElementById('audio');
const capture = document.getElementById('capture');
['loadedmetadata', 'play', 'playing'].forEach(event =>
  audio.addEventListener(event, console.log)
);
const webrtc = new RTCPeerConnection({ sdpSemantics: 'unified-plan' });
[
  'onsignalingstatechange',
  'oniceconnectionstatechange',
  'onicegatheringstatechange',
].forEach(event => webrtc.addEventListener(event, console.log));
webrtc.onicecandidate = async event => {
  // A null candidate signals end of ICE gathering: the local answer SDP
  // is complete — write it to the clipboard for the offerer to poll.
  if (!event.candidate) {
    let sdp = webrtc.localDescription.sdp;
    if (sdp.indexOf('a=end-of-candidates') === -1) {
      sdp += 'a=end-of-candidates\r\n';
    }
    try {
      await navigator.clipboard.writeText(sdp);
    } catch (e) {
      console.error(e);
    }
  }
};
webrtc.ontrack = ({ transceiver, streams: [stream] }) => {
  console.log(transceiver);
  const {
    receiver: { track },
  } = transceiver;
  track.onmute = track.onunmute = e => console.log(e);
  audio.srcObject = stream;
};
// Run once on first focus: the page needs focus for async clipboard
// access in Chromium.
onfocus = async _ => {
  onfocus = null;
  try {
    const sdp = await navigator.clipboard.readText();
    console.log(sdp);
    await webrtc.setRemoteDescription({ type: 'offer', sdp });
    const answer = await webrtc.createAnswer();
    // Await the floating promise so ICE gathering reliably follows a
    // fully applied local description. The complete answer SDP is
    // written to the clipboard by onicecandidate above; the previous
    // extra navigator.clipboard.writeText() call had NO argument — it
    // wrote the string "undefined" and could clobber the answer the
    // offerer's polling loop was waiting for — so it is removed.
    await webrtc.setLocalDescription(answer);
  } catch (e) {
    console.error(e);
  }
};
</script>
</body>
</html>
Launch with
$ $HOME/firefox/firefox-bin -new-instance -devtools -P "webrtc" & $HOME/chrome-linux/chrome-wrapper