Skip to content

Instantly share code, notes, and snippets.

@Steven24K
Created June 24, 2021 07:46
Show Gist options
  • Save Steven24K/425efe000a797ef65f8838d00c909bbe to your computer and use it in GitHub Desktop.
Save Steven24K/425efe000a797ef65f8838d00c909bbe to your computer and use it in GitHub Desktop.
A barcode scanner component for React using the user's webcam and the image processing library @zxing/library.
import * as React from 'react'
import { BrowserMultiFormatReader, DecodeHintType, Result, BarcodeFormat } from '@zxing/library'
import Webcam from './Webcam'
// https://github.com/dashboardphilippines/react-webcam-barcode-scanner
// Props for BarcodeScanner.
interface BarcodeScannerProps {
  // Webcam viewport width in pixels (forwarded to the <Webcam> element).
  width: number
  // Webcam viewport height in pixels (forwarded to the <Webcam> element).
  height: number
  // Invoked with the @zxing/library Result each time a barcode is decoded.
  onUpdate: (res: Result) => void
  // Invoked when decoding a captured frame fails (typically when no
  // barcode is visible in the frame). NOTE(review): `any` here — consider
  // `unknown` and narrowing in the handler.
  onError: (err: any) => void
}
// Barcode scanner: polls the user's webcam every 500 ms, taking a PNG
// screenshot and running it through the @zxing/library multi-format
// reader. Decoded results are delivered via props.onUpdate; decode
// failures (e.g. no barcode in the frame) via props.onError.
export default class BarcodeScanner extends React.Component<BarcodeScannerProps, {
  ref_cam?: Webcam,
  img: string
}> {
  // One reader configured once for every known barcode format — the
  // previous implementation allocated a fresh reader on every 500 ms tick.
  private codeReader: BrowserMultiFormatReader
  // Handle of the pending poll timeout so unmount can cancel it.
  private timer?: ReturnType<typeof setTimeout>
  // Set on unmount to terminate the poll loop.
  private unmounted = false

  constructor(props: BarcodeScannerProps) {
    super(props)
    this.state = { ref_cam: undefined, img: "" }
    this.capture = this.capture.bind(this)
    const hints = new Map()
    hints.set(DecodeHintType.POSSIBLE_FORMATS, Object.values(BarcodeFormat))
    this.codeReader = new BrowserMultiFormatReader(hints)
  }

  // Grab one frame from the webcam and attempt to decode a barcode.
  // No-op while the webcam ref has not been attached yet.
  capture() {
    const cam = this.state.ref_cam
    if (cam == undefined) return
    const imageSrc = cam.getScreenshot()
    if (imageSrc) {
      this.codeReader.decodeFromImage(undefined, imageSrc).then(result => {
        this.props.onUpdate(result)
      }).catch((err) => {
        this.props.onError(err)
      })
    }
  }

  componentWillUnmount() {
    // Cancel the poll loop directly. The previous version called setState
    // here, which React warns about (and ignores) during unmount, and it
    // never cleared the pending timeout.
    this.unmounted = true
    if (this.timer !== undefined) clearTimeout(this.timer)
  }

  componentDidMount() {
    // Poll every 500 ms. Unlike the previous version, the loop keeps
    // running while the webcam ref is still unset (the ref arrives via an
    // asynchronous setState, so it is usually undefined on the first tick)
    // and terminates only on unmount.
    const capture_loop = () => {
      this.timer = setTimeout(() => {
        if (this.unmounted) return
        if (this.state.ref_cam != undefined) this.capture()
        capture_loop()
      }, 500)
    }
    capture_loop()
  }

  render() {
    return <>
      <Webcam
        width={this.props.width}
        height={this.props.height}
        ref={r => {
          // Store the webcam instance once; `== null` covers undefined too.
          if (r == null || this.state.ref_cam != undefined) return
          this.setState(s => ({ ...s, ref_cam: r }))
        }}
        screenshotFormat="image/png"
        videoConstraints={{
          facingMode: 'environment'
        }}
      />
    </>
  }
}
import * as React from "react";
// https://github.com/mozmorris/react-webcam/blob/master/src/react-webcam.tsx
// True when the environment exposes the modern mediaDevices.getUserMedia API.
function hasGetUserMedia() {
  const devices = navigator.mediaDevices;
  return Boolean(devices && devices.getUserMedia);
}
// Explicit output size (in pixels) for the screenshot canvas; overrides
// the computed size in getCanvas() when supplied.
interface ScreenshotDimensions {
  width: number;
  height: number;
}
// Props for Webcam. Extends the native <video> element props (minus `ref`,
// which the component manages itself).
export type WebcamProps = Omit<React.HTMLProps<HTMLVideoElement>, "ref"> & {
  // Request an audio track too; note render() sets muted={audio}.
  audio: boolean;
  // Passed through to getUserMedia as the `audio` constraint.
  audioConstraints?: MediaStreamConstraints["audio"];
  // When true, screenshots use the raw video resolution instead of the
  // client-size / min-screenshot computation in getCanvas().
  forceScreenshotSourceSize: boolean;
  // Forwarded to CanvasRenderingContext2D.imageSmoothingEnabled.
  imageSmoothing: boolean;
  // Mirror both the rendered video (CSS scaleX(-1)) and the screenshots.
  mirrored: boolean;
  // Lower bounds for the computed screenshot canvas size (see getCanvas()).
  minScreenshotHeight?: number;
  minScreenshotWidth?: number;
  // Called once getUserMedia resolves with a stream.
  onUserMedia: (stream: MediaStream) => void;
  // Called when getUserMedia is unsupported or rejects.
  onUserMediaError: (error: string | DOMException) => void;
  // MIME type passed to canvas.toDataURL() by getScreenshot().
  screenshotFormat: "image/webp" | "image/png" | "image/jpeg";
  // Quality (0..1) passed to canvas.toDataURL() by getScreenshot().
  screenshotQuality: number;
  // Passed through to getUserMedia as the `video` constraint.
  videoConstraints?: MediaStreamConstraints["video"];
}
// Internal state for Webcam.
interface WebcamState {
  // True once a MediaStream has been obtained via getUserMedia.
  hasUserMedia: boolean;
  // Object-URL fallback set only when assigning video.srcObject throws
  // (older browsers); revoked in stopAndCleanup().
  src?: string;
}
// Webcam <video> component (adapted from mozmorris/react-webcam).
// Acquires a MediaStream via getUserMedia, renders it into a <video>
// element, and can capture screenshots onto a lazily-created, reusable
// offscreen canvas (getScreenshot / getCanvas).
export default class Webcam extends React.Component<WebcamProps, WebcamState> {
  static defaultProps = {
    audio: true,
    forceScreenshotSourceSize: false,
    imageSmoothing: true,
    mirrored: false,
    onUserMedia: () => undefined,
    onUserMediaError: () => undefined,
    screenshotFormat: "image/webp",
    screenshotQuality: 0.92,
  };
  // Offscreen canvas + 2D context, created on first screenshot and reused;
  // reset to null in componentDidUpdate when constraints/sizes change.
  private canvas: HTMLCanvasElement | null = null;
  private ctx: CanvasRenderingContext2D | null = null;
  // Set on unmount so a getUserMedia promise that resolves late can stop
  // the stream it received instead of leaking it.
  private unmounted = false;
  stream: MediaStream | null;
  video: HTMLVideoElement | null;
  constructor(props: WebcamProps) {
    super(props);
    this.state = {
      hasUserMedia: false
    };
  }
  componentDidMount() {
    const { state, props } = this;
    // Bail out early (with the error callback) when the API is missing.
    if (!hasGetUserMedia()) {
      props.onUserMediaError("getUserMedia not supported");
      return;
    }
    if (!state.hasUserMedia) {
      this.requestUserMedia();
    }
  }
  // NOTE(review): React passes the *previous* props to componentDidUpdate,
  // so `nextProps` is misleadingly named; every comparison below is
  // symmetric, so behavior is unaffected.
  componentDidUpdate(nextProps: WebcamProps) {
    const { props } = this;
    if (!hasGetUserMedia()) {
      props.onUserMediaError("getUserMedia not supported");
      return;
    }
    // Constraints are compared structurally via JSON serialization.
    const audioConstraintsChanged =
      JSON.stringify(nextProps.audioConstraints) !==
      JSON.stringify(props.audioConstraints);
    const videoConstraintsChanged =
      JSON.stringify(nextProps.videoConstraints) !==
      JSON.stringify(props.videoConstraints);
    const minScreenshotWidthChanged =
      nextProps.minScreenshotWidth !== props.minScreenshotWidth;
    const minScreenshotHeightChanged =
      nextProps.minScreenshotHeight !== props.minScreenshotHeight;
    // Anything that affects screenshot geometry invalidates the cached
    // canvas so getCanvas() recomputes its size on next use.
    if (
      videoConstraintsChanged ||
      minScreenshotWidthChanged ||
      minScreenshotHeightChanged
    ) {
      this.canvas = null;
      this.ctx = null;
    }
    // Changed media constraints require tearing down and re-requesting
    // the stream.
    if (audioConstraintsChanged || videoConstraintsChanged) {
      this.stopAndCleanup();
      this.requestUserMedia();
    }
  }
  componentWillUnmount() {
    this.unmounted = true;
    this.stopAndCleanup();
  }
  // Stops and detaches every track of the given stream. The `else` branch
  // is a legacy fallback where the argument is effectively a single track
  // object rather than a MediaStream.
  private static stopMediaStream(stream: MediaStream | null) {
    if (stream) {
      if (stream.getVideoTracks && stream.getAudioTracks) {
        stream.getVideoTracks().map(track => {
          stream.removeTrack(track);
          track.stop();
        });
        stream.getAudioTracks().map(track => {
          stream.removeTrack(track);
          track.stop()
        });
      } else {
        ((stream as unknown) as MediaStreamTrack).stop();
      }
    }
  }
  // Releases the active stream and revokes the object-URL fallback, if any.
  // NOTE(review): this.stream is left pointing at the stopped stream.
  private stopAndCleanup() {
    const { state } = this;
    if (state.hasUserMedia) {
      Webcam.stopMediaStream(this.stream);
      if (state.src) {
        window.URL.revokeObjectURL(state.src);
      }
    }
  }
  // Returns the current frame as a data URL in props.screenshotFormat,
  // or null when no stream/frame is available yet.
  getScreenshot(screenshotDimensions?: ScreenshotDimensions) {
    const { state, props } = this;
    if (!state.hasUserMedia) return null;
    const canvas = this.getCanvas(screenshotDimensions);
    return (
      canvas &&
      canvas.toDataURL(props.screenshotFormat, props.screenshotQuality)
    );
  }
  // Draws the current video frame onto the (lazily created) offscreen
  // canvas and returns it, or null when the video is not ready.
  getCanvas(screenshotDimensions?: ScreenshotDimensions) {
    const { state, props } = this;
    if (!this.video) {
      return null;
    }
    // videoHeight === 0 means the video metadata has not loaded yet.
    if (!state.hasUserMedia || !this.video.videoHeight) return null;
    if (!this.ctx) {
      // First use (or cache invalidated): compute the canvas size.
      let canvasWidth = this.video.videoWidth;
      let canvasHeight = this.video.videoHeight;
      if (!this.props.forceScreenshotSourceSize) {
        // Scale to the rendered client width (or the configured minimum
        // width), preserving the source aspect ratio; then enforce the
        // minimum height if one is set.
        const aspectRatio = canvasWidth / canvasHeight;
        canvasWidth = props.minScreenshotWidth || this.video.clientWidth;
        canvasHeight = canvasWidth / aspectRatio;
        if (
          props.minScreenshotHeight &&
          canvasHeight < props.minScreenshotHeight
        ) {
          canvasHeight = props.minScreenshotHeight;
          canvasWidth = canvasHeight * aspectRatio;
        }
      }
      this.canvas = document.createElement("canvas");
      // Explicit caller-supplied dimensions win over the computed ones.
      this.canvas.width = screenshotDimensions?.width || canvasWidth;
      this.canvas.height = screenshotDimensions?.height || canvasHeight;
      this.ctx = this.canvas.getContext("2d");
    }
    const { ctx, canvas } = this;
    if (ctx && canvas) {
      // mirror the screenshot
      if (props.mirrored) {
        ctx.translate(canvas.width, 0);
        ctx.scale(-1, 1);
      }
      ctx.imageSmoothingEnabled = props.imageSmoothing;
      ctx.drawImage(this.video, 0, 0, screenshotDimensions?.width || canvas.width, screenshotDimensions?.height || canvas.height);
      // invert mirroring
      if (props.mirrored) {
        ctx.scale(-1, 1);
        ctx.translate(-canvas.width, 0);
      }
    }
    return canvas;
  }
  // Requests the media stream. Modern path uses navigator.mediaDevices;
  // the else branch is a fallback for the long-deprecated
  // MediaStreamTrack.getSources API.
  private requestUserMedia() {
    const { props } = this;
    const sourceSelected = (
      audioConstraints: boolean | MediaTrackConstraints | undefined,
      videoConstraints: boolean | MediaTrackConstraints | undefined,
    ) => {
      const constraints: MediaStreamConstraints = {
        video: typeof videoConstraints !== "undefined" ? videoConstraints : true
      };
      // Audio is only requested when props.audio is enabled.
      if (props.audio) {
        constraints.audio =
          typeof audioConstraints !== "undefined" ? audioConstraints : true;
      }
      navigator.mediaDevices
        .getUserMedia(constraints)
        .then(stream => {
          // If we unmounted while the permission prompt was open, release
          // the stream immediately instead of attaching it.
          if (this.unmounted) {
            Webcam.stopMediaStream(stream);
          } else {
            this.handleUserMedia(null, stream);
          }
        })
        .catch(e => {
          this.handleUserMedia(e);
        });
    };
    if ("mediaDevices" in navigator) {
      sourceSelected(props.audioConstraints, props.videoConstraints);
    } else {
      // Legacy path: translate a deviceId constraint into the old
      // `optional: [{ sourceId }]` form.
      const optionalSource = (id: string | null) => ({ optional: [{ sourceId: id }] }) as MediaTrackConstraints;
      // Extracts a device id from a constraint object: plain string,
      // first element of an array, or the `ideal` value.
      const constraintToSourceId = (constraint) => {
        const { deviceId } = constraint;
        if (typeof deviceId === "string") {
          return deviceId;
        }
        if (Array.isArray(deviceId) && deviceId.length > 0) {
          return deviceId[0];
        }
        if (typeof deviceId === "object" && deviceId.ideal) {
          return deviceId.ideal;
        }
        return null;
      };
      // @ts-ignore: deprecated api
      MediaStreamTrack.getSources(sources => {
        // Default to the first audio/video source found, then let any
        // explicit deviceId constraint override it.
        let audioSource: string | null = null;
        let videoSource: string | null = null;
        sources.forEach((source: MediaStreamTrack) => {
          if (source.kind === "audio") {
            audioSource = source.id;
          } else if (source.kind === "video") {
            videoSource = source.id;
          }
        });
        const audioSourceId = constraintToSourceId(props.audioConstraints);
        if (audioSourceId) {
          audioSource = audioSourceId;
        }
        const videoSourceId = constraintToSourceId(props.videoConstraints);
        if (videoSourceId) {
          videoSource = videoSourceId;
        }
        sourceSelected(
          optionalSource(audioSource),
          optionalSource(videoSource)
        );
      });
    }
  }
  // Success/failure sink for requestUserMedia. On success attaches the
  // stream to the <video> via srcObject, falling back to an object URL in
  // state.src when that assignment throws (older browsers).
  private handleUserMedia(err, stream?: MediaStream) {
    const { props } = this;
    if (err || !stream) {
      this.setState({ hasUserMedia: false });
      props.onUserMediaError(err);
      return;
    }
    this.stream = stream;
    try {
      if (this.video) {
        this.video.srcObject = stream;
      }
      this.setState({ hasUserMedia: true });
    } catch (error) {
      this.setState({
        hasUserMedia: true,
        src: window.URL.createObjectURL(stream)
      });
    }
    props.onUserMedia(stream);
  }
  render() {
    const { state, props } = this;
    // Destructure away every Webcam-specific prop so only native <video>
    // attributes are spread onto the element.
    const {
      audio,
      forceScreenshotSourceSize,
      onUserMedia,
      onUserMediaError,
      screenshotFormat,
      screenshotQuality,
      minScreenshotWidth,
      minScreenshotHeight,
      audioConstraints,
      videoConstraints,
      imageSmoothing,
      mirrored,
      style = {},
      ...rest
    } = props;
    // Mirroring the preview is done in CSS; screenshots are mirrored
    // separately in getCanvas().
    const videoStyle = mirrored ? { ...style, transform: `${style.transform || ""} scaleX(-1)` } : style;
    return (
      <video
        autoPlay
        src={state.src}
        // NOTE(review): playback is muted exactly when audio capture is
        // requested — presumably to avoid feedback; confirm intent.
        muted={audio}
        playsInline
        ref={ref => {
          this.video = ref;
        }}
        style={videoStyle}
        {...rest}
      />
    );
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment