Skip to content

Instantly share code, notes, and snippets.

@psobko
Created October 31, 2013 19:54
Show Gist options
  • Save psobko/7255999 to your computer and use it in GitHub Desktop.
Save psobko/7255999 to your computer and use it in GitHub Desktop.
Rough VideoPreview + Image Capture from AVCaptureSession
// Capture pipeline for barcode scanning plus still-image grabs.
@property (strong, nonatomic) AVCaptureDevice* device;   // default video (camera) device
@property (strong, nonatomic) AVCaptureDeviceInput* input;   // wraps `device` for the session
@property (strong, nonatomic) AVCaptureMetadataOutput* output;   // barcode metadata output — NOTE(review): the setup code below creates a *local* `output` instead of assigning this property; confirm which is intended
@property (strong, nonatomic) AVCaptureSession* session;   // owns the input/output graph
@property (strong, nonatomic) AVCaptureVideoPreviewLayer* preview;   // live camera preview layer
@property (strong, nonatomic) AVCaptureStillImageOutput* imgIO;   // JPEG still-image output used by -capture
// --- Capture session setup: camera input -> barcode metadata + JPEG still output,
// --- with a live preview layer laid over the table view.
self.session = [[AVCaptureSession alloc] init];
// Low preset is enough for barcode scanning and keeps still captures small.
self.session.sessionPreset = AVCaptureSessionPresetLow;

self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device
                                                   error:&error];
if (!self.input)
{
    NSLog(@"Error: %@", error);
    return;
}
[self.session addInput:self.input];

// Turn on point autofocus for middle of view.
// BUG FIX: lockForConfiguration: can fail (device busy) — check its return value,
// and setFocusPointOfInterest: throws on devices without point-focus support.
if ([self.device lockForConfiguration:&error])
{
    if ([self.device isFocusPointOfInterestSupported])
    {
        [self.device setFocusPointOfInterest:CGPointMake(0.5, 0.5)];
        [self.device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
    }
    [self.device unlockForConfiguration];
}
else
{
    NSLog(@"Error: %@", error);
}

// Add the metadata output device.
// BUG FIX: keep the output in the declared property (the original used a local
// that shadowed self.output) and hook up the delegate, otherwise scanned
// barcodes are never delivered anywhere.
self.output = [[AVCaptureMetadataOutput alloc] init];
[self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[self.session addOutput:self.output];

// metadataObjectTypes must be a subset of availableMetadataObjectTypes or
// AVFoundation throws an exception — filter the requested types first.
NSArray *wantedTypes = @[AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeUPCECode];
NSMutableArray *supportedTypes = [NSMutableArray array];
for (NSString *type in wantedTypes)
{
    if ([self.output.availableMetadataObjectTypes containsObject:type])
    {
        [supportedTypes addObject:type];
    }
}
self.output.metadataObjectTypes = supportedTypes;

self.preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.preview.frame = self.tableView.bounds;
self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
UIView *newView = [[UIView alloc] initWithFrame:self.tableView.frame];
[self.view addSubview:newView];
// BUG FIX: the original called insertSublayer:above:newView.layer — a layer
// cannot be positioned relative to its own superlayer; just add it.
[newView.layer addSublayer:self.preview];

// Still image (JPEG) output used by -capture.
self.imgIO = [[AVCaptureStillImageOutput alloc] init];
[self.imgIO setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[self.session addOutput:self.imgIO];

[self.session startRunning];

// BUG FIX: rectOfInterest takes normalized metadata coordinates (0..1), not view
// points — convert the on-screen rect through the preview layer once running.
self.output.rectOfInterest = [self.preview metadataOutputRectOfInterestForRect:self.tableView.bounds];
//-------------------------------
// Captures a single still JPEG frame from the running session and shows it
// in self.vImage.
// BUG FIXES vs. original:
//  * the completion-handler signature was truncated ("NSError *e" with no
//    closing paren) — a syntax error;
//  * it referenced an undeclared `stillImageOutput` — use the self.imgIO
//    property declared above;
//  * the handler runs on an unspecified queue, so the UIKit update is now
//    dispatched to the main queue;
//  * a nil videoConnection is now reported instead of being passed to capture.
-(void)capture
{
    // Locate the video connection feeding the still-image output.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.imgIO.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    if (!videoConnection)
    {
        NSLog(@"No video connection available for still image capture");
        return;
    }
    NSLog(@"about to request a capture from: %@", self.imgIO);
    [self.imgIO captureStillImageAsynchronouslyFromConnection:videoConnection
                                            completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (!imageSampleBuffer)
        {
            NSLog(@"Still image capture failed: %@", error);
            return;
        }
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments)
        {
            // Do something with the attachments.
            NSLog(@"attachments: %@", exifAttachments);
        }
        else
        {
            NSLog(@"no attachments");
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        // Hop to the main queue for the UIKit property update.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.vImage.image = image;
        });
    }];
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment