Skip to content

Instantly share code, notes, and snippets.

@InputBlackBoxOutput
Created February 5, 2022 07:23
Show Gist options
  • Save InputBlackBoxOutput/b02cf5984243c969911e73fc3335e638 to your computer and use it in GitHub Desktop.
Mediapipe: Face Mesh
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Face mesh</title>
<script
src="https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js"
crossorigin="anonymous"
></script>
<script
src="https://cdn.jsdelivr.net/npm/@mediapipe/control_utils/control_utils.js"
crossorigin="anonymous"
></script>
<script
src="https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js"
crossorigin="anonymous"
></script>
<script
src="https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/face_mesh.js"
crossorigin="anonymous"
></script>
</head>
<body>
<style>
.container {
position: absolute;
top: 50%;
left: 50%;
-moz-transform: translateX(-50%) translateY(-50%);
-webkit-transform: translateX(-50%) translateY(-50%);
transform: translateX(-50%) translateY(-50%);
text-align: center;
}
</style>
<div class="container">
<h2>Mediapipe: Face mesh</h2>
<canvas id="output" width="640" height="480"></canvas>
</div>
<script>
// Output canvas (in the page) and its 2D context — every processed frame is
// painted here by onResultsFaceMesh.
const output = document.querySelector("#output");
const canvasContext = output.getContext("2d");

// Off-screen <video> element that receives the webcam stream; it is never
// attached to the DOM, only read from as a frame source.
const video = document.createElement("video");
/**
 * Render callback for FaceMesh results.
 * Draws the current camera frame onto the output canvas, then overlays the
 * landmark connectors for each detected face.
 * @param {Object} results - FaceMesh results; `results.image` is the frame,
 *   `results.multiFaceLandmarks` the per-face landmark arrays (may be absent).
 */
function onResultsFaceMesh(results) {
  canvasContext.save();
  canvasContext.clearRect(0, 0, output.width, output.height);
  canvasContext.drawImage(results.image, 0, 0, output.width, output.height);

  // Connector groups and their draw styles, painted back-to-front:
  // the full tessellation first, then the individual features on top.
  const overlays = [
    [FACEMESH_TESSELATION, { color: "#C0C0C070", lineWidth: 1 }],
    [FACEMESH_RIGHT_EYE, { color: "#FF3030" }],
    [FACEMESH_RIGHT_EYEBROW, { color: "#FF3030" }],
    [FACEMESH_LEFT_EYE, { color: "#30FF30" }],
    [FACEMESH_LEFT_EYEBROW, { color: "#30FF30" }],
    [FACEMESH_FACE_OVAL, { color: "#E0E0E0" }],
    [FACEMESH_LIPS, { color: "#E0E0E0" }],
  ];

  if (results.multiFaceLandmarks) {
    for (const landmarks of results.multiFaceLandmarks) {
      for (const [connections, style] of overlays) {
        drawConnectors(canvasContext, landmarks, connections, style);
      }
    }
  }

  canvasContext.restore();
}
// FaceMesh solution instance. `locateFile` tells the library where to fetch
// its WASM binaries and model assets at runtime.
// FIX: the original URL contained "[email protected]" — an email-obfuscation
// artifact from scraping that replaced the "face_mesh@<version>" package
// spec — so every asset request would 404. Restored a valid jsDelivr
// package path (unversioned resolves to the latest published release).
const faceMesh = new FaceMesh({
  // `file` is the bare asset filename requested by face_mesh.js.
  locateFile: (file) =>
    `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}`,
});
faceMesh.onResults(onResultsFaceMesh);
// Webcam capture loop at 640x480: every captured frame is handed to FaceMesh,
// whose results arrive asynchronously via onResultsFaceMesh.
const camera = new Camera(video, {
  width: 640,
  height: 480,
  // Await send() so frames are processed one at a time, in order.
  onFrame: async () => {
    await faceMesh.send({ image: video });
  },
});
camera.start();
// Control panel wiring. The panel is mounted on a detached <div> (so no UI is
// actually shown); its defaults are pushed into FaceMesh through the `on`
// callback, which also mirrors the selfie-mode flag onto the video element
// as a CSS class.
const applyOptions = (options) => {
  video.classList.toggle("selfie", options.selfieMode);
  faceMesh.setOptions(options);
};

new ControlPanel(document.createElement("div"), {
  selfieMode: true,
  maxNumFaces: 1,
  minDetectionConfidence: 0.5,
  minTrackingConfidence: 0.5,
}).on(applyOptions);
</script>
</body>
</html>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment