<html>
<head>
<title>custom camera track</title>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://unpkg.com/@daily-co/daily-js"></script>
</head>
<body>
<div id="buttons" style="width: 50%; float: left;">
room url:
<input size="40" type="text" id="roomUrl"
value ="put a room url here"
/><br />
<hr />
<button id="join" onclick="startCamera()">
start camera and mic</button>
<br />
<button id="join" onclick="joinMeeting()">
join</button>
<br />
<hr />
<button id="join" onclick="muteCameraAndMic()">
mute camera and mic</button>
<br />
<button id="join" onclick="unmuteCameraAndMic()">
unmute camera and mic</button>
<br />
<button id="join" onclick="unmuteMic()">
unmute mic only</button>
<br />
<hr />
</div>
<div id="videos" style="width: 50%; float: right;"></div>
<script>
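//
// This demo passes application-owned MediaStreamTracks to Daily instead of
// letting daily-js call getUserMedia() itself. The app creates the tracks with
// getUserMedia(), hands them to createCallObject() as audioSource/videoSource,
// and later stops and re-creates them so that muting fully releases the camera
// and microphone.
//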
async function startCamera(constraints = { video: true, audio: true }) {
  console.log(
    'Calling getUserMedia() from application code. You will see the camera light ' +
      'come on, and the "recording" Chrome tab icon will display. But we are not ' +
      'doing anything with the tracks yet, so you will not see the video track in the page.'
  );
  try {
    window.camStream = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (e) {
    console.error('Error getting cam stream', e);
    return;
  }
  console.log('got cam stream from getUserMedia()', window.camStream);
}
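// Create the call object and join the room typed into the input field. The
// audioSource/videoSource options are set to the raw tracks from
// window.camStream, so Daily publishes our custom tracks rather than opening
// its own devices. experimentalChromeVideoMuteLightOff is an experimental
// daily-js config flag; at the time this was written it was intended to let
// Chrome turn the camera indicator light off while video is muted.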
async function joinMeeting() {
  if (window.callObject) {
    console.log('reload to run again');
    return;
  }
  if (!window.camStream) {
    await startCamera();
  }
  window.callObject = DailyIframe.createCallObject({
    url: document.getElementById("roomUrl").value,
    dailyConfig: {
      experimentalChromeVideoMuteLightOff: true,
    },
    audioSource: window.camStream.getAudioTracks()[0],
    videoSource: window.camStream.getVideoTracks()[0],
  });
  window.callObject.on("track-started", trackStarted);
  window.callObject.on("track-stopped", trackStopped);
  window.callObject.on("left-meeting", leftMeeting);
  await window.callObject.join();
}
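// Mute by telling Daily to stop sending audio and video (setInputDevices with
// false sources plus setLocalAudio/setLocalVideo), then stopping the underlying
// tracks so the browser releases the camera and mic hardware and the camera
// light should turn off.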
async function muteCameraAndMic() {
  window.callObject.setInputDevices({
    audioSource: false,
    videoSource: false,
  });
  window.callObject.setLocalAudio(false);
  window.callObject.setLocalVideo(false);
  window.camStream.getTracks().forEach((t) => t.stop());
  window.camStream = null;
}
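// Unmute by acquiring fresh tracks with getUserMedia(), swapping them in with
// setInputDevices(), and re-enabling sending with setLocalAudio(true) and
// setLocalVideo(true).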
async function unmuteCameraAndMic() {
  if (window.camStream) {
    console.log('already unmuted - noop');
    return;
  }
  await startCamera();
  window.callObject.setInputDevices({
    audioSource: window.camStream.getAudioTracks()[0],
    videoSource: window.camStream.getVideoTracks()[0],
  });
  window.callObject.setLocalAudio(true);
  window.callObject.setLocalVideo(true);
}
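// Same idea, but acquire and swap in only a new audio track, leaving the
// camera off.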
async function unmuteMic() {
  if (window.camStream) {
    console.log('already unmuted - noop');
    return;
  }
  await startCamera({ video: false, audio: true });
  window.callObject.setInputDevices({
    audioSource: window.camStream.getAudioTracks()[0],
  });
  window.callObject.setLocalAudio(true);
}
//
//
// ---- general utility functions to display video and play back audio ----
//
//
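// "track-started" fires for local and remote tracks. Create (or reuse) one
// <video> or <audio> element per participant and point its srcObject at a new
// MediaStream wrapping the track. The local audio element is muted so we don't
// hear ourselves.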
function trackStarted(e) {
  let vidsContainer = document.getElementById("videos");
  if (e.track && e.track.kind === "video") {
    let isScreenTrack = e.track === e.participant.screenVideoTrack;
    let vid = findVideoForParticipant(e.participant.session_id, isScreenTrack);
    if (!vid) {
      vid = document.createElement("video");
      vid.session_id = e.participant.session_id;
      vid.is_screen_track = isScreenTrack;
      vid.style.width = "100%";
      vid.autoplay = true;
      vid.muted = true;
      vid.playsInline = true;
      vidsContainer.appendChild(vid);
    }
    vid.srcObject = new MediaStream([e.track]);
  } else if (e.track && e.track.kind === "audio") {
    let aud = findAudioForParticipant(e.participant.session_id);
    if (!aud) {
      aud = document.createElement("audio");
      aud.session_id = e.participant.session_id;
      if (e.participant && e.participant.local) {
        console.log("local audio track ... not playing locally");
        aud.muted = true;
      } else {
        aud.autoplay = true;
      }
      vidsContainer.appendChild(aud);
    }
    aud.srcObject = new MediaStream([e.track]);
  }
}
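// "track-stopped": remove whichever media element is currently playing the
// stopped track.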
function trackStopped(e) {
  let el =
    findVideoForTrack(e.track && e.track.id) ||
    findAudioForTrack(e.track && e.track.id);
  if (el) {
    el.remove();
  }
}
function findVideoForParticipant(session_id, isScreenTrack) {
  for (const vid of document.getElementsByTagName("video")) {
    if (
      vid.session_id === session_id &&
      vid.is_screen_track === isScreenTrack
    ) {
      return vid;
    }
  }
}
function findVideoForTrack(trackId) {
  for (const vid of document.getElementsByTagName("video")) {
    if (
      vid.srcObject &&
      vid.srcObject.getTracks().find((t) => t.id === trackId)
    ) {
      return vid;
    }
  }
}
function findAudioForParticipant(session_id) {
  for (const aud of document.getElementsByTagName("audio")) {
    if (aud.session_id === session_id) {
      return aud;
    }
  }
}
function findAudioForTrack(trackId) {
  for (const aud of document.getElementsByTagName("audio")) {
    if (
      aud.srcObject &&
      aud.srcObject.getTracks().find((t) => t.id === trackId)
    ) {
      return aud;
    }
  }
}
function leftMeeting(e) {
  document.getElementById("videos").innerHTML = "";
}
async function aTimeout(ms) {
  return new Promise((resolve) => setTimeout(() => resolve(), ms));
}
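//
// Optional: a minimal sketch (not wired to any button above, and assuming
// startCamera() has already populated window.camStream) for previewing the
// local camera track in the page before joining.
//
function previewCamera() {
  // Wrap the existing video track in a new MediaStream and attach it to a
  // muted, autoplaying <video> element in the #videos container.
  const vid = document.createElement("video");
  vid.style.width = "100%";
  vid.autoplay = true;
  vid.muted = true;
  vid.playsInline = true;
  vid.srcObject = new MediaStream(window.camStream.getVideoTracks());
  document.getElementById("videos").appendChild(vid);
}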
</script>
</body>
</html>