Skip to content

Instantly share code, notes, and snippets.

@CoderNamedHendrick
Created June 15, 2022 20:19
Show Gist options
  • Save CoderNamedHendrick/53b472e97d497b5014d3ca36e56c4f05 to your computer and use it in GitHub Desktop.
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
/// Full-screen selfie-capture sheet backed by the supplied [camera].
///
/// Pops with the captured image [File] when the user submits a selfie
/// that passed face detection.
class SelfieCamera extends StatefulWidget {
  /// The camera (typically front-facing) used for preview and capture.
  final CameraDescription camera;

  const SelfieCamera({Key? key, required this.camera}) : super(key: key);

  @override
  State<SelfieCamera> createState() => _SelfieCameraState();
}
/// State for [SelfieCamera].
///
/// Detection strategy differs per platform: iOS runs face detection on the
/// live camera image stream ([_processCameraImage]); Android captures first
/// and then detects on the saved file ([_recognizeImage]).
class _SelfieCameraState extends State<SelfieCamera> with UIToolMixin {
  late CameraController _controller;

  // NOTE(review): never read or written anywhere in this file — candidate
  // for removal.
  FlashMode? _currentFlashMode;

  late Future<void> _initializeControllerFuture;

  /// Whether the most recent detection pass found a usable face.
  bool faceDetected = false;

  /// The captured selfie; null while the live preview is showing.
  final ValueNotifier<File?> _file = ValueNotifier(null);

  late final FaceDetector faceDetector;
  late double? deviceRatio;

  @override
  void initState() {
    super.initState();
    // Accurate mode with all features enabled: slower, but this flow only
    // needs to validate a single selfie, not track a video feed.
    faceDetector = FaceDetector(
      options: FaceDetectorOptions(
        enableTracking: true,
        enableLandmarks: true,
        enableClassification: true,
        enableContours: true,
        performanceMode: FaceDetectorMode.accurate,
      ),
    );
    _controller = CameraController(
      widget.camera,
      ResolutionPreset.ultraHigh,
    );
    _initializeControllerFuture = _controller.initialize();
  }

  @override
  void dispose() {
    // FIX: the original disposed the controller *before* calling
    // stopImageStream(), which throws on an already-disposed controller.
    // Stop the stream first — and only when one is actually running
    // (the stream is only ever started on iOS).
    if (_controller.value.isStreamingImages) {
      _controller.stopImageStream();
    }
    _controller.dispose();
    faceDetector.close();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    deviceRatio = width(context) / height(context);
    return ClipRRect(
      borderRadius: const BorderRadius.vertical(top: Radius.circular(20)),
      child: Stack(
        children: [
          ValueListenableBuilder<File?>(
            valueListenable: _file,
            builder: (context, value, child) {
              if (value == null) {
                // Live-preview state: camera preview on top, white panel
                // below (the capture button overlays it via Positioned).
                return Stack(
                  fit: StackFit.expand,
                  children: [
                    Column(
                      children: [
                        SizedBox(
                          width: width(context),
                          height: height(context) * 0.85,
                          child: FutureBuilder(
                            future: _initializeControllerFuture,
                            builder: (context, snapshot) {
                              if (snapshot.connectionState ==
                                  ConnectionState.done) {
                                // iOS detects faces on the live stream.
                                // FIX: guard with isStreamingImages — build()
                                // runs on every rebuild, and the original
                                // restarted the stream each time.
                                if (Platform.isIOS &&
                                    !_controller.value.isStreamingImages) {
                                  _controller
                                      .startImageStream(_processCameraImage);
                                }
                                return AspectRatio(
                                  aspectRatio: deviceRatio ?? 0,
                                  child: CameraPreview(
                                    _controller,
                                    child: CustomPaint(
                                      painter: CameraShape(),
                                    ),
                                  ),
                                );
                              } else {
                                return const Center(
                                  child: CircularProgressIndicator.adaptive(),
                                );
                              }
                            },
                          ),
                        ),
                        Expanded(
                          child: Container(
                            color: Colors.white,
                          ),
                        ),
                      ],
                    ),
                  ],
                );
              }
              // Review state: show the captured file with retake/submit.
              return Container(
                color: Colors.white,
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.stretch,
                  children: [
                    SizedBox(
                      height: 100.h,
                    ),
                    Padding(
                      padding: EdgeInsets.symmetric(horizontal: 50.w),
                      child: Container(
                        width: 320.w,
                        height: 320.h,
                        decoration: BoxDecoration(
                          color: Colors.blue,
                          borderRadius:
                              BorderRadius.all(Radius.circular(700.r)),
                          image: DecorationImage(
                            image: FileImage(value),
                            fit: BoxFit.cover,
                          ),
                        ),
                      ),
                    ),
                    Expanded(
                      flex: 1,
                      child: Padding(
                        padding: const EdgeInsets.symmetric(
                            horizontal: 22.0, vertical: 40),
                        child: Column(
                          children: [
                            Padding(
                              padding:
                                  const EdgeInsets.symmetric(horizontal: 42.0),
                              child: Text(
                                'Take a selfie of yourself, We\'ll match this photo with the one on your BVN and ID card',
                                style: TextStyle(
                                  fontSize: 14.sp,
                                ),
                                textAlign: TextAlign.center,
                              ),
                            ),
                            const Spacer(),
                            AppOutlineButton(
                              title: 'Take Another Image',
                              useMerchantTheme: false,
                              onTap: () async {
                                // Discard the rejected shot and return to
                                // the live preview.
                                _file.value!.delete();
                                _file.value = null;
                                _controller.resumePreview();
                              },
                            ),
                            const SizedBox(height: 10),
                            AkuFlatButton(
                              title: 'Submit',
                              useMerchantTheme: false,
                              onTap: () async {
                                if (faceDetected) {
                                  Navigator.of(context).pop(value);
                                  return;
                                }
                                showMessage(
                                  context: context,
                                  useMerchantTheme: false,
                                  title: 'Image Error',
                                  subtitle:
                                      'Please capture an image with a face',
                                  isDismissible: true,
                                  isRestriction: false,
                                );
                              },
                            ),
                          ],
                        ),
                      ),
                    ),
                  ],
                ),
              );
            },
          ),
          // Header bar: title + close button, overlaid on the preview.
          Container(
            color: Colors.white,
            child: Padding(
              padding: const EdgeInsets.only(
                  left: 18.0, right: 18.0, top: 19, bottom: 35),
              child: Row(
                mainAxisAlignment: MainAxisAlignment.spaceBetween,
                children: [
                  Text(
                    'Take Picture',
                    style: TextStyle(
                      fontSize: 15.sp,
                      fontWeight: FontWeight.w600,
                      color: Colors.black,
                    ),
                  ),
                  InkWell(
                    onTap: () {
                      Navigator.pop(context);
                    },
                    child: Image.asset(
                      'assets/cancel.png',
                      height: 27.h,
                    ),
                  ),
                ],
              ),
            ),
          ),
          // Bottom capture panel — only shown while no file is captured.
          ValueListenableBuilder<File?>(
              valueListenable: _file,
              builder: (context, value, child) {
                if (value == null) {
                  return Positioned(
                    top: 620.h,
                    left: 0,
                    right: 0,
                    bottom: 46.h,
                    child: Container(
                      color: Colors.white,
                      child: Padding(
                        padding: const EdgeInsets.symmetric(horizontal: 22.0),
                        child: Column(
                          children: [
                            SizedBox(height: 40.h),
                            Padding(
                              padding:
                                  const EdgeInsets.symmetric(horizontal: 42.0),
                              child: Text(
                                'Take a selfie of yourself, We\'ll match this photo with the one on your BVN and ID card',
                                style: TextStyle(
                                  fontSize: 14.sp,
                                  color: Colors.black,
                                ),
                                textAlign: TextAlign.center,
                              ),
                            ),
                            const Spacer(),
                            AkuFlatButton(
                              title: 'Capture Picture',
                              useMerchantTheme: false,
                              onTap: () async {
                                if (Platform.isIOS) {
                                  // iOS: detection already ran on the stream,
                                  // so only capture when a face was seen.
                                  if (faceDetected) {
                                    await _controller.setFlashMode(
                                      FlashMode.off,
                                    );
                                    _controller.pausePreview();
                                    XFile image =
                                        await _controller.takePicture();
                                    _file.value = File(image.path);
                                    _controller.stopImageStream();
                                    return;
                                  }
                                  showMessage(
                                    context: context,
                                    useMerchantTheme: false,
                                    title: 'Selfie Error',
                                    subtitle: 'Picture Target is not a face',
                                    isDismissible: true,
                                    isRestriction: false,
                                  );
                                } else {
                                  // Android: capture first, then detect on
                                  // the saved file (cheaper than streaming).
                                  await _controller.setFlashMode(
                                    FlashMode.off,
                                  );
                                  _controller.pausePreview();
                                  XFile image = await _controller.takePicture();
                                  _file.value = File(image.path);
                                  _recognizeImage(File(image.path));
                                }
                              },
                            ),
                          ],
                        ),
                      ),
                    ),
                  );
                } else {
                  return Container();
                }
              }),
        ],
      ),
    );
  }

  /// Returns true when [inputImage] contains a face exposing at least one
  /// ear landmark, at least one eye landmark, and a smiling probability.
  ///
  /// Only the first detected face is examined — the loop returns on its
  /// first iteration either way.
  Future<bool> _detect(InputImage inputImage) async {
    final List<Face> faces = await faceDetector.processImage(inputImage);
    for (Face face in faces) {
      final FaceLandmark? leftEar = face.landmarks[FaceLandmarkType.leftEar];
      // FIX: the original looked up FaceLandmarkType.rightEye here, so the
      // "ear" check silently tested the right eye twice.
      final FaceLandmark? rightEar = face.landmarks[FaceLandmarkType.rightEar];
      final FaceLandmark? rightEye = face.landmarks[FaceLandmarkType.rightEye];
      final FaceLandmark? leftEye = face.landmarks[FaceLandmarkType.leftEye];
      if (leftEar == null && rightEar == null) {
        return false;
      }
      if (leftEye == null && rightEye == null) {
        return false;
      }
      if (face.smilingProbability == null) {
        return false;
      }
      return true;
    }
    return false;
  }

  // this function is for detecting faces in an already taken image
  // this is for android because doing it from the image stream consumes a lot of resources
  void _recognizeImage(File image) async {
    final inputImage = InputImage.fromFilePath(image.path);
    final faceDetected = await _detect(inputImage);
    // Detection is async; the widget may have been popped meanwhile.
    if (!mounted) {
      return;
    }
    setState(() {
      this.faceDetected = faceDetected;
    });
  }

  // this function is for detecting on ios because checking the file type returned on ios
  // is not in a format the plugin can work with, so you have detect the face while working with the image
  // stream
  Future _processCameraImage(CameraImage image) async {
    // Concatenate every plane's bytes into one buffer for ML Kit.
    final WriteBuffer allBytes = WriteBuffer();
    for (final Plane plane in image.planes) {
      allBytes.putUint8List(plane.bytes);
    }
    final bytes = allBytes.done().buffer.asUint8List();
    final Size imageSize =
        Size(image.width.toDouble(), image.height.toDouble());
    // assumes portrait orientation on iOS — TODO confirm for rotated devices.
    const imageRotation = InputImageRotation.rotation90deg;
    final inputImageFormat =
        InputImageFormatValue.fromRawValue(image.format.raw) ??
            InputImageFormat.nv21;
    final planeData = image.planes.map(
      (Plane plane) {
        return InputImagePlaneMetadata(
          bytesPerRow: plane.bytesPerRow,
          height: plane.height,
          width: plane.width,
        );
      },
    ).toList();
    final inputImageData = InputImageData(
      size: imageSize,
      imageRotation: imageRotation,
      inputImageFormat: inputImageFormat,
      planeData: planeData,
    );
    final inputImage =
        InputImage.fromBytes(bytes: bytes, inputImageData: inputImageData);
    final faceDetected = await _detect(inputImage);
    if (!mounted) {
      return;
    }
    setState(() {
      this.faceDetected = faceDetected;
    });
  }
}
/// Paints a dark scrim over the camera preview with a circular viewfinder
/// cutout, outlined in cyan, centred on the canvas.
class CameraShape extends CustomPainter {
  /// Radius of the viewfinder circle in logical pixels.
  ///
  /// Generalized from the original hard-coded 170; `CameraShape()` without
  /// arguments is unchanged.
  final double radius;

  CameraShape({this.radius = 170});

  @override
  void paint(Canvas canvas, Size size) {
    final Offset center = Offset(size.width / 2, size.height / 2);
    // Scrim: a full-canvas rectangle with the circle subtracted. The
    // rectangle is wound opposite to the oval, so the default non-zero
    // fill rule leaves the circle transparent.
    canvas.drawPath(
        Path()
          ..moveTo(0, 0)
          ..lineTo(0, size.height)
          ..lineTo(size.width, size.height)
          ..lineTo(size.width, 0)
          ..close()
          ..moveTo(center.dx, center.dy)
          ..addOval(Rect.fromCircle(center: center, radius: radius))
          ..close(),
        Paint()..color = Colors.black54);
    // Cyan outline around the cutout.
    canvas.drawPath(
      Path()
        ..moveTo(center.dx, center.dy)
        ..addOval(Rect.fromCircle(center: center, radius: radius))
        ..close(),
      Paint()
        ..color = Colors.cyan
        ..style = PaintingStyle.stroke,
    );
  }

  @override
  bool shouldRepaint(covariant CameraShape oldDelegate) {
    // Repaint only when the configurable radius changes.
    return oldDelegate.radius != radius;
  }
}
/// Clips its child to a circle near the top of the available area.
class CameraClip extends CustomClipper<Rect> {
  /// Radius of the clip circle in logical pixels.
  ///
  /// Generalized from the original hard-coded 150; `CameraClip()` without
  /// arguments is unchanged.
  final double radius;

  CameraClip({this.radius = 150});

  @override
  Rect getClip(Size size) {
    // NOTE(review): the centre's y-coordinate derives from the WIDTH
    // (size.width / 3), not the height — preserved as-is, but confirm
    // this is intentional.
    return Rect.fromCircle(
        center: Offset(size.width / 2, size.width / 3), radius: radius);
  }

  @override
  bool shouldReclip(covariant CameraClip oldClipper) {
    // Reclip only when the configurable radius changes.
    return oldClipper.radius != radius;
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment