//
//  CCamera.h
//  CCamera
//
//  Created by Jonathan Wight on 7/12/12.
//  Copyright (c) 2012 Jonathan Wight. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <UIKit/UIKit.h>
@interface CCamera : NSObject

@property (readwrite, nonatomic, assign) AVCaptureDevicePosition captureDevicePosition;
@property (readwrite, nonatomic, strong) NSString *preset;
@property (readonly, nonatomic, strong) AVCaptureDevice *captureDevice;
@property (readonly, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

+ (CCamera *)sharedInstance;

- (void)startRunning;
- (void)stopRunning;

- (CGSize)size;

- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock;
- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock;
- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock;
- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock;
- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock;

@end
// #### SNIP HERE ##############################################################
//
//  CCamera.m
//  Camera
//
//  Created by Jonathan Wight on 7/12/12.
//  Copyright (c) 2012 Jonathan Wight. All rights reserved.
//

#import "CCamera.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
@interface CCamera ()
@property (readwrite, nonatomic, strong) AVCaptureSession *captureSession;
@property (readwrite, nonatomic, strong) AVCaptureDevice *captureDevice;
@property (readwrite, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (readwrite, nonatomic, strong) AVCaptureStillImageOutput *imageOutput;
@end
#pragma mark -

@implementation CCamera

static CCamera *gSharedInstance = nil;

+ (CCamera *)sharedInstance
{
    static dispatch_once_t sOnceToken = 0;
    dispatch_once(&sOnceToken, ^{
        gSharedInstance = [[CCamera alloc] init];
    });
    return(gSharedInstance);
}
- (id)init
{
    if ((self = [super init]) != nil)
    {
        _captureDevicePosition = AVCaptureDevicePositionUnspecified;
        _preset = AVCaptureSessionPresetPhoto;
    }
    return(self);
}

- (void)dealloc
{
    [_captureSession stopRunning];
}
- (AVCaptureDevice *)captureDevice
{
    if (_captureDevice == nil)
    {
        if (self.captureDevicePosition == AVCaptureDevicePositionUnspecified)
        {
            _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        }
        else
        {
            for (AVCaptureDevice *theDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
            {
                if (theDevice.position == self.captureDevicePosition)
                {
                    _captureDevice = theDevice;
                    break;
                }
            }
        }
    }
    return(_captureDevice);
}
- (AVCaptureVideoPreviewLayer *)previewLayer
{
    if (_previewLayer == nil)
    {
        _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    }
    return(_previewLayer);
}
- (void)startRunning
{
    NSError *theError = nil;

    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = self.preset;

    AVCaptureDeviceInput *theCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&theError];
    // Guard against a nil input (no camera, or the device could not be opened);
    // passing nil to -addInput: would raise an exception.
    if (theCaptureDeviceInput != nil && [self.captureSession canAddInput:theCaptureDeviceInput])
    {
        [self.captureSession addInput:theCaptureDeviceInput];
    }

    self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.imageOutput.outputSettings = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
    };
    if ([self.captureSession canAddOutput:self.imageOutput])
    {
        [self.captureSession addOutput:self.imageOutput];
    }

    [self.captureSession startRunning];
}
- (void)stopRunning
{
    [self.captureSession stopRunning];

    self.captureDevice = nil;
    self.captureSession = nil;
    self.imageOutput = nil;
    self.previewLayer = nil;
}
- (CGSize)size
{
    // Captures one still frame and blocks (spinning the main run loop) until the
    // encoded size of the frame is known. Intended to be called from the main thread.
    AVCaptureConnection *theConnection = [self.imageOutput.connections objectAtIndex:0];

    __block BOOL theFinishedFlag = NO;
    __block CGSize theSize = CGSizeZero;
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL)
        {
            CVImageBufferRef theImageBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer);
            theSize = CVImageBufferGetEncodedSize(theImageBuffer);
        }
        theFinishedFlag = YES;
    }];

    while (theFinishedFlag == NO)
    {
        [[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
    }

    return(theSize);
}
#pragma mark -

- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    AVCaptureConnection *theConnection = [self.imageOutput.connections objectAtIndex:0];
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        inCompletionBlock(imageDataSampleBuffer, error);
    }];
}
- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCMSampleBuffer:^(CMSampleBufferRef sampleBuffer, NSError *error) {
        CVImageBufferRef theImageBuffer = NULL;
        if (sampleBuffer != NULL)
        {
            theImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        }
        inCompletionBlock(theImageBuffer, error);
    }];
}
- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCVImageBuffer:^(CVImageBufferRef imageBuffer, NSError *error) {
        CIImage *theImage = nil;
        if (imageBuffer != NULL)
        {
            theImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
        }
        inCompletionBlock(theImage, error);
    }];
}
- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCIImage:^(CIImage *image, NSError *error) {
        CGImageRef theCGImage = NULL;
        if (image != nil)
        {
            NSDictionary *theOptions = @{
                // TODO
            };
            CIContext *theCIContext = [CIContext contextWithOptions:theOptions];
            theCGImage = [theCIContext createCGImage:image fromRect:image.extent];
        }
        inCompletionBlock(theCGImage, error);
        CGImageRelease(theCGImage);
    }];
}
- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCIImage:^(CIImage *image, NSError *error) {
        UIImage *theUIImage = nil;
        if (image != nil)
        {
            theUIImage = [UIImage imageWithCIImage:image];
        }
        inCompletionBlock(theUIImage, error);
    }];
}
@end
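
// #### SNIP HERE ##############################################################
//
// Usage sketch (not part of the original gist): one plausible way to wire CCamera
// into a view controller -- start the session, attach the preview layer, and grab a
// still UIImage. The ExampleCameraViewController class, its previewView property,
// and the takePhoto action are assumptions for illustration only.
//
// ExampleCameraViewController.m

#import <UIKit/UIKit.h>
#import "CCamera.h"

@interface ExampleCameraViewController : UIViewController
@property (nonatomic, strong) UIView *previewView;    // assumed container for the preview layer
@end

@implementation ExampleCameraViewController

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];

    CCamera *theCamera = [CCamera sharedInstance];
    theCamera.captureDevicePosition = AVCaptureDevicePositionBack;

    // -startRunning creates the capture session, so attach the preview layer afterwards;
    // the lazily-created previewLayer is bound to whatever session exists at that moment.
    [theCamera startRunning];
    theCamera.previewLayer.frame = self.previewView.bounds;
    [self.previewView.layer addSublayer:theCamera.previewLayer];
}

- (void)takePhoto
{
    [[CCamera sharedInstance] captureStillUIImage:^(UIImage *image, NSError *error) {
        if (image != nil)
        {
            // Use or save the captured image here.
            NSLog(@"Captured image of size %@", NSStringFromCGSize(image.size));
        }
        else
        {
            NSLog(@"Capture failed: %@", error);
        }
    }];
}

@end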