

@mekya
Last active February 14, 2022 09:54
Custom canvas streaming in Ant Media Server
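This sample publishes the contents of an HTML canvas through Ant Media Server's WebRTC adaptor: the local camera is drawn onto the canvas together with an overlay image, the canvas is captured with captureStream(), the microphone track is added to that stream, and the resulting MediaStream is handed to WebRTCAdaptor as localStream for publishing.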
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta charset="UTF-8">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous">
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<style>
video {
width: 100%;
max-width: 640px;
}
/* Everything but the jumbotron gets side spacing for mobile first views */
.header, .marketing, .footer {
padding: 15px;
}
/* Custom page header */
.header {
padding-bottom: 20px;
}
/* Customize container */
@media (min-width: 768px) {
.container {
max-width: 730px;
}
}
.container-narrow>hr {
margin: 30px 0;
}
/* Main marketing message and sign up button */
.jumbotron {
text-align: center;
}
/* Responsive: Portrait tablets and up */
@media screen and (min-width: 768px) {
/* Remove the padding we set earlier */
.header, .marketing, .footer {
padding-right: 0;
padding-left: 0;
}
}
.options {
display:none;
}
</style>
</head>
<body>
<div class="container">
<div class="header clearfix">
<div class="row">
<h3 class="col text-muted">WebRTC Publish</h3>
<nav class="col align-self-center">
<ul class="nav float-right">
<li><a href="http://antmedia.io">Contact</a></li>
</ul>
</nav>
</div>
</div>
<div class="jumbotron">
<canvas id="canvas" width="640" height="360"></canvas>
<div class="col-sm-12 form-group">
<video id="localVideo" autoplay muted controls playsinline></video>
</div>
<div class="form-group col-sm-12 text-left">
<input type="text" class="form-control" value="stream1"
id="streamName" name="streamIdTextBox" placeholder="Type stream name">
</div>
<div class="col-sm-12 text-right">
<button type="button" class="btn btn-outline-primary btn-sm" onclick="toggleOptions()">Options</button>
</div>
<div class="form-group col-sm-12 text-left options">
<legend class="col-form-label video-source-legend">Video Source</legend>
<div class="form-check form-check-inline">
<input class="form-check-input video-source" disabled name="videoSource" onchange="switchMode(event.target)" type="radio" value="screen"
id="screen_share_checkbox">
<label class="form-check-label" for="screen_share_checkbox" style="font-weight:normal">
Screen
</label>
</div>
<div class="form-check form-check-inline">
<input class="form-check-input video-source" disabled name="videoSource" onchange="switchMode(event.target)" type="radio" value="screen+camera"
id="screen_share_with_camera_checkbox">
<label class="form-check-label" for="screen_share_with_camera_checkbox" style="font-weight:normal">
Screen with Camera
</label>
<a id="browser_screen_share_doesnt_support" href="https://caniuse.com/#search=getDisplayMedia">Your browser doesn't support screen share. You can see supported browsers in this link </a>
</div>
</div>
<div class="form-group col-sm-12 text-left options">
<label>Data Channel Messages</label>
<textarea class="form-control" id="dataMessagesTextarea" style="font-size:10px" rows="8"></textarea>
<div class="form-row">
<div class="form-group col-sm-10">
<input type="text" class="form-control" id="dataTextbox" placeholder="Write your message to send players">
</div>
<div class="form-group col-sm-2">
<button type="button" class="btn btn-outline-primary btn-block" onclick="sendData()">Send</button>
</div>
</div>
</div>
<div class="form-group">
<button onclick="startPublishing()" class="btn btn-primary" disabled
id="start_publish_button">Start Publishing</button>
<button onclick="stopPublishing()" class="btn btn-primary" disabled
id="stop_publish_button">Stop Publishing</button>
</div>
<span class="badge badge-success" id="broadcastingInfo" style="font-size:14px;display:none"
style="display: none">Publishing</span>
</div>
<footer class="footer text-center">
<p><a href="http://antmedia.io">Ant Media Server</a></p>
</footer>
</div>
<script src="https://code.jquery.com/jquery-3.4.1.min.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/umd/popper.min.js" crossorigin="anonymous"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/js/bootstrap.min.js" crossorigin="anonymous"></script>
</body>
<script type="module" lang="javascript">
import {WebRTCAdaptor} from "./js/webrtc_adaptor.js"
import {getUrlParameter} from "./js/fetch.stream.js"
import {SoundMeter} from "./js/soundmeter.js"
var token = "<%= request.getParameter("token") %>";
var start_publish_button = document.getElementById("start_publish_button");
var stop_publish_button = document.getElementById("stop_publish_button");
var streamNameBox = document.getElementById("streamName");
/**
* If publishing stops for any reason, the page tries to republish automatically.
*/
var autoRepublishEnabled = true;
/**
* Timer job that checks the WebRTC connection
*/
var autoRepublishIntervalJob = null;
var streamId;
var name = getUrlParameter("name");
if(name !== "undefined")
{
streamNameBox.value = name;
}
// rtmpForward is read from the page URL and appended to the websocket URL; set it to true to also forward the stream over RTMP
var rtmpForward = getUrlParameter("rtmpForward");
function startPublishing() {
streamId = streamNameBox.value;
webRTCAdaptor.publish(streamId, token);
}
window.startPublishing = startPublishing;
function stopPublishing() {
if (autoRepublishIntervalJob != null) {
clearInterval(autoRepublishIntervalJob);
autoRepublishIntervalJob = null;
}
webRTCAdaptor.stop(streamId);
}
window.stopPublishing = stopPublishing;
function switchMode(chbx) {
if(chbx.value == "screen") {
webRTCAdaptor.switchDesktopCapture(streamId);
}
else if(chbx.value == "screen+camera"){
webRTCAdaptor.switchDesktopCaptureWithCamera(streamId);
}
else {
webRTCAdaptor.switchVideoCameraCapture(streamId, chbx.value);
}
}
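// Builds the radio-button markup for each camera reported in the "available_devices" callback.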
function getCameraRadioButton(deviceName, deviceId) {
return "<div class=\"form-check form-check-inline\">" +
"<input class=\"form-check-input video-source\" name=\"videoSource\" onchange=\"switchMode(event.target)\" type=\"radio\" value=\"" + deviceId + "\" id=\"" + deviceId + "\">" +
"<label class=\"form-check-label\" for=\"" + deviceId + "\" style=\"font-weight:normal\">" +
deviceName +
"</label>" +
"</div>";
}
function toggleOptions() {
$(".options").toggle();
}
function sendData() {
try {
var iceState = webRTCAdaptor.iceConnectionState(streamId);
if (iceState != null && iceState != "failed" && iceState != "disconnected") {
webRTCAdaptor.sendData($("#streamName").val(), $("#dataTextbox").val());
$("#dataMessagesTextarea").append("Sent: " + $("#dataTextbox").val() + "\r\n");
$("#dataTextbox").val("");
}
else {
alert("WebRTC publishing is not active. Please click Start Publishing first")
}
}
catch (exception) {
console.error(exception);
alert("Message cannot be sent. Make sure you've enabled data channel on server web panel");
}
}
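// Runs every 3 seconds while auto-republish is enabled (and on "refreshConnection"):
// if the connection has dropped, it tears everything down and re-initializes the adaptor.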
function checkAndRepublishIfRequired() {
var state = webRTCAdaptor.signallingState(streamId);
if (state == null || state == "failed" || state == "disconnected"){
console.log("Publish has stopped and will try to re-publish");
webRTCAdaptor.stop(streamId);
webRTCAdaptor.closePeerConnection(streamId);
webRTCAdaptor.closeWebSocket();
initWebRTCAdaptor(true, autoRepublishEnabled);
}
}
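// Blink the "Publishing" badge for as long as the signalling and ICE states stay healthy.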
function startAnimation() {
$("#broadcastingInfo").fadeIn(800, function () {
$("#broadcastingInfo").fadeOut(800, function () {
var state = webRTCAdaptor.signallingState(streamId);
if (state != null && state != "closed") {
var iceState = webRTCAdaptor.iceConnectionState(streamId);
if (iceState != null && iceState != "failed" && iceState != "disconnected") {
startAnimation();
}
}
});
});
}
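// STUN-only ICE configuration; if the publisher sits behind a restrictive NAT,
// add a TURN server as in the commented sample below.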
var pc_config = {
'iceServers' : [ {
'urls' : 'stun:stun.l.google.com:19302'
} ]
};
/*
//sample turn configuration
{
iceServers: [
{ urls: "",
username: "",
credential: "",
}
]
};
*/
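// The publisher only sends media; it does not request audio or video from the server.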
var sdpConstraints = {
OfferToReceiveAudio : false,
OfferToReceiveVideo : false
};
var mediaConstraints = {
video : { aspectRatio: {
exact: 16 / 9
}
},
audio : true
};
var appName = location.pathname.substring(0, location.pathname.lastIndexOf("/")+1);
var path = location.hostname + ":" + location.port + appName + "websocket?rtmpForward=" + rtmpForward;
var websocketURL = "ws://" + path;
if (location.protocol.startsWith("https")) {
websocketURL = "wss://" + path;
}
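// The endpoint follows the page location, e.g. ws(s)://<host>:<port>/<app-name>/websocket
// (host, port and application name depend on your Ant Media Server deployment).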
var webRTCAdaptor = null;
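// Key difference from the stock publish sample: instead of passing localVideoId and letting the
// adaptor open the camera itself, this page hands the adaptor the canvas-captured localStream
// (created near the bottom of this script), so the composited canvas is what gets published.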
function initWebRTCAdaptor(publishImmediately, autoRepublishEnabled)
{
webRTCAdaptor = new WebRTCAdaptor({
websocket_url : websocketURL,
mediaConstraints : mediaConstraints,
peerconnection_config : pc_config,
sdp_constraints : sdpConstraints,
localStream: localStream,
//localVideoId : "localVideo",
debug:true,
bandwidth:900,
callback : function(info, obj) {
if (info == "initialized") {
console.log("initialized");
start_publish_button.disabled = false;
stop_publish_button.disabled = true;
if (publishImmediately) {
webRTCAdaptor.publish(streamId, token)
}
} else if (info == "publish_started") {
//stream is being published
console.log("publish started");
start_publish_button.disabled = true;
stop_publish_button.disabled = false;
startAnimation();
if (autoRepublishEnabled && autoRepublishIntervalJob == null)
{
autoRepublishIntervalJob = setInterval(() => {
checkAndRepublishIfRequired();
}, 3000);
}
} else if (info == "publish_finished") {
//stream publishing has finished
console.log("publish finished");
start_publish_button.disabled = false;
stop_publish_button.disabled = true;
}
else if (info == "browser_screen_share_supported") {
$(".video-source").prop("disabled", false);
console.log("browser screen share supported");
browser_screen_share_doesnt_support.style.display = "none";
}
else if (info == "screen_share_stopped") {
//choose the first video source. It may not be correct for all cases.
$(".video-source").first().prop("checked", true);
console.log("screen share stopped");
}
else if (info == "closed") {
//console.log("Connection closed");
if (typeof obj != "undefined") {
console.log("Connecton closed: " + JSON.stringify(obj));
}
}
else if (info == "pong") {
//ping/pong messages are sent to and received from the server to keep the connection alive
//especially useful when a load balancer or firewall closes idle websocket connections
}
else if (info == "refreshConnection") {
checkAndRepublishIfRequired();
}
else if (info == "ice_connection_state_changed") {
console.log("iceConnectionState Changed: ",JSON.stringify(obj));
}
else if (info == "updated_stats") {
//obj is the PeerStats which has fields
//averageOutgoingBitrate - kbits/sec
//currentOutgoingBitrate - kbits/sec
console.log("Average outgoing bitrate " + obj.averageOutgoingBitrate + " kbits/sec"
+ " Current outgoing bitrate: " + obj.currentOutgoingBitrate + " kbits/sec");
}
else if (info == "data_received") {
console.log("Data received: " + obj.event.data + " type: " + obj.event.type + " for stream: " + obj.streamId);
$("#dataMessagesTextarea").append("Received: " + obj.event.data + "\r\n");
}
else if (info == "available_devices") {
var htmlContent = "";
obj.forEach(function(device) {
if (device.kind == "videoinput") {
htmlContent += getCameraRadioButton(device.label, device.deviceId);
}
});
$(htmlContent).insertAfter(".video-source-legend");
$(".video-source").first().prop("checked", true);
}
else {
console.log( info + " notification received");
}
},
callbackError : function(error, message) {
//some of the possible errors: NotFoundError, SecurityError, PermissionDeniedError
console.log("error callback: " + JSON.stringify(error));
var errorMessage = JSON.stringify(error);
if (typeof message != "undefined") {
errorMessage = message;
}
if (error.indexOf("NotFoundError") != -1) {
errorMessage = "Camera or Mic are not found or not allowed in your device";
}
else if (error.indexOf("NotReadableError") != -1 || error.indexOf("TrackStartError") != -1) {
errorMessage = "Camera or Mic is being used by some other process that does not let read the devices";
}
else if(error.indexOf("OverconstrainedError") != -1 || error.indexOf("ConstraintNotSatisfiedError") != -1) {
errorMessage = "There is no device found that fits your video and audio constraints. You may change video and audio constraints"
}
else if (error.indexOf("NotAllowedError") != -1 || error.indexOf("PermissionDeniedError") != -1) {
errorMessage = "You are not allowed to access camera and mic.";
}
else if (error.indexOf("TypeError") != -1) {
errorMessage = "Video/Audio is required";
}
else if (error.indexOf("ScreenSharePermissionDenied") != -1) {
errorMessage = "You are not allowed to access screen share";
$(".video-source").first().prop("checked", true);
}
else if (error.indexOf("WebSocketNotConnected") != -1) {
errorMessage = "WebSocket Connection is disconnected.";
}
alert(errorMessage);
}
});
}
//WebRTCAdaptor is initialized in the getUserMedia callback below, after localStream is ready
//initWebRTCAdaptor(false, autoRepublishEnabled);
var canvas = document.getElementById('canvas');
var vid = document.getElementById('localVideo');
var image=new Image();
image.src="images/play.png";
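// Composite the camera frame and the overlay image onto the canvas.
// Whatever is painted here is exactly what gets published as the video track.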
function draw() {
if (canvas.getContext) {
var ctx = canvas.getContext('2d');
ctx.drawImage(vid, 0, 0, 640, 360);
ctx.drawImage(image,50, 10, 100, 30);
}
}
setInterval(function() { draw(); }, 50);
//capture stream from canvas
var localStream = canvas.captureStream(25);
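// localStream currently contains only the canvas video track (up to 25 fps);
// the microphone audio track from getUserMedia is appended to it below.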
navigator.mediaDevices.getUserMedia({video: { aspectRatio: { exact: 16/9 }}, audio:true}).then(function (stream) {
var video = document.querySelector('video#localVideo');
video.srcObject = stream;
video.onloadedmetadata = function(e) {
video.play();
};
//initialize the webRTCAdaptor with the localStream created above
//initWebRTCAdaptor is defined earlier in this script
localStream.addTrack(stream.getAudioTracks()[0]);
initWebRTCAdaptor(false, autoRepublishEnabled);
}).catch(function(error) {
// Surface getUserMedia failures (e.g. permission denied) instead of failing silently.
console.error("getUserMedia error: " + error);
});
</script>
</html>
@nocpbxchap

I tried to implement, getting PublishTimeoutError.

@mekya (Author) commented Aug 7, 2021
