Created
September 16, 2014 20:35
-
-
Save HeidiHansen/0f91ac3e9ec7e5bd9d86 to your computer and use it in GitHub Desktop.
11. THCameraViewController.m
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#import "THCameraViewController.h" | |
#import "THCameraButton.h" | |
// Converts an angle in degrees to radians (used by the image-rotation stub).
#define DegreesToRadians(x) ((x) * M_PI / 180.0)
// Private state for THCameraViewController.
@interface THCameraViewController ()

// Container view that hosts the live camera preview layer.
@property (strong, nonatomic) UIView *videoPreview;

// Horizontal slack left over per side after sizing the preview.
// NOTE(review): never written or read in this file — presumably used by a
// category or intended for future layout work; confirm before removing.
@property (nonatomic) CGFloat excessSpacePerSide;

@end
@implementation THCameraViewController | |
// Builds the black preview container and sets the initial camera selection.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Bug fix: the original hard-coded a 320x568 (iPhone 5) frame; size the
    // preview from the controller's view so it adapts to other screens.
    self.videoPreview = [[UIView alloc] initWithFrame:self.view.bounds];
    self.videoPreview.backgroundColor = [UIColor blackColor];
    // Keep the camera feed from spilling outside the preview area.
    self.videoPreview.clipsToBounds = YES;
    [self.view addSubview:self.videoPreview];

    // Removed the frame/bounds debug NSLogs that were left in production code.

    // Default to the front-facing camera; -toggleBetweenCameras: flips this.
    self.frontCamera = YES;
}
// Creates the shutter button (once) and spins up the capture session.
- (void)viewDidAppear:(BOOL)animated
{
    // Bug fix: the original never called super, which UIKit requires for
    // view-lifecycle overrides.
    [super viewDidAppear:animated];

    // Bug fix: viewDidAppear: can fire repeatedly (e.g. after dismissing a
    // modal); the original added a brand-new button on every appearance,
    // stacking duplicates. Create it only once.
    if (!self.takePhotoButton) {
        self.takePhotoButton = [[THCameraButton alloc] initWithFrame:CGRectMake(125, 485, 70, 70)];
        [self.takePhotoButton addTarget:self
                                 action:@selector(takePhotoTapped:)
                       forControlEvents:UIControlEventTouchUpInside];
        [self.view addSubview:self.takePhotoButton];
    }

    [self initializeCamera];
}
// Nothing to release here beyond what UIKit reclaims automatically.
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}
// Captures a still frame from the AVCaptureSession video feed and hands the
// resulting UIImage to -processImage: for scaling/cropping.
- (void)capImage
{
    // Locate the connection that carries video into the still-image output.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    // Bug fix: passing a nil connection to the capture call raises
    // NSInvalidArgumentException (e.g. when the output was nil'd out after a
    // previous capture); bail out instead of crashing.
    if (!videoConnection) {
        return;
    }

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                       completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        // Bug fix: the original silently ignored capture failures.
        if (imageSampleBuffer == NULL) {
            if (error) {
                NSLog(@"Still image capture failed: %@", error);
            }
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        [self processImage:[UIImage imageWithData:imageData]];
    }];
}
// Builds an AVCaptureSession that streams live video into the preview view
// and attaches the JPEG still-image output consumed by -capImage.
// NOTE(review): a new session is created on every call (initial appearance
// and each camera toggle) and the previous one is never stopped. Consider
// retaining the session in a property and calling -stopRunning before
// rebuilding — left as-is here because the session is intentionally local
// in the original design.
- (void)initializeCamera
{
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Live preview layer, sized to fill the controller's view.
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    previewLayer.backgroundColor = [UIColor clearColor].CGColor;
    self.view.backgroundColor = [UIColor clearColor];

    [[self.view layer] setMasksToBounds:YES];
    [previewLayer setFrame:[self.view bounds]];

    // Bug fix: the original configured this layer but the line attaching it
    // to the layer tree was commented out, so no video was ever visible.
    [self.videoPreview.layer addSublayer:previewLayer];

    // Bug fix: takePhotoButton is a subview of self.view, not videoPreview,
    // so the original bringSubviewToFront: on videoPreview was a no-op.
    [self.view bringSubviewToFront:self.takePhotoButton];

    // Find the front and back video devices.
    AVCaptureDevice *frontDevice = nil;
    AVCaptureDevice *backDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devices]) {
        if (![device hasMediaType:AVMediaTypeVideo]) {
            continue;
        }
        if ([device position] == AVCaptureDevicePositionBack) {
            backDevice = device;
        } else {
            frontDevice = device;
        }
    }

    // De-duplicated the original identical front/back branches. Bug fix:
    // -addInput: raises NSInvalidArgumentException when handed nil (e.g. on
    // the Simulator, which has no camera); the original ignored the error
    // and added the input unconditionally.
    AVCaptureDevice *selectedDevice = self.frontCamera ? frontDevice : backDevice;
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:selectedDevice
                                                                        error:&error];
    if (!input || ![session canAddInput:input]) {
        NSLog(@"ERROR: unable to open camera input: %@", error);
        return;
    }
    [session addInput:input];

    // JPEG still-image output used by -capImage.
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    if ([session canAddOutput:self.stillImageOutput]) {
        [session addOutput:self.stillImageOutput];
    }

    [session startRunning];
}
// Scales a captured photo to 320x400 points, crops the centre 200x400 strip,
// applies an (currently stubbed) orientation fix, and hands the result to
// the delegate.
- (void)processImage:(UIImage *)image
{
    // Robustness: upstream capture/decoding can legitimately yield nil.
    if (!image) {
        return;
    }

    // Bug fix: the plain UIGraphicsBeginImageContext always renders at 1x,
    // producing blurry output on Retina screens. Scale 0 uses the device's
    // native scale.
    UIGraphicsBeginImageContextWithOptions(CGSizeMake(320, 400), NO, 0);
    [image drawInRect:CGRectMake(0, 0, 320, 400)];
    UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    // Crop the centre strip. CGImageCreateWithImageInRect works in pixels,
    // so the point-based rect must be multiplied by the image scale.
    CGFloat scale = smallImage.scale;
    CGRect cropRect = CGRectMake(60 * scale, 0, 200 * scale, 400 * scale);
    CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
    UIImage *croppedImage = [UIImage imageWithCGImage:imageRef
                                                scale:scale
                                          orientation:smallImage.imageOrientation];
    CGImageRelease(imageRef);

    // NOTE(review): nil-ing the output here means a second capture only works
    // after the camera is re-initialized (e.g. by toggling cameras).
    // Preserved as-is to avoid changing the capture lifecycle.
    self.stillImageOutput = nil;

    // Map the device orientation to a rotation. (The rotation method is a
    // stub, so these calls currently have no visible effect.)
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationLandscapeLeft:
            [self adjustImageOrientationByDegrees:-90];
            break;
        case UIDeviceOrientationLandscapeRight:
            [self adjustImageOrientationByDegrees:90];
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            [self adjustImageOrientationByDegrees:-180];
            break;
        case UIDeviceOrientationPortrait:
            [self adjustImageOrientationByDegrees:0];
            break;
        default:
            break;
    }

    [self.delegate takePhotoTapped:croppedImage];
}
// Stub: intended to rotate the displayed image by the given number of
// degrees. The animation code was disabled by the original author and is
// preserved below for reference; the method is currently a no-op.
- (void)adjustImageOrientationByDegrees:(NSInteger)degrees
{
    //[UIView beginAnimations:@"rotate" context:nil];
    //[UIView setAnimationDuration:0.5];
    //self.imageFeed.transform = CGAffineTransformMakeRotation(DegreesToRadians(degrees));
    //[UIView commitAnimations];
}
// Switches between the front and rear cameras based on the segmented
// control (segment 0 = front) and rebuilds the capture session.
- (IBAction)toggleBetweenCameras:(id)sender
{
    self.frontCamera = (self.frontBackToggle.selectedSegmentIndex == 0);
    [self initializeCamera];
}
// Shutter-button action: kicks off a still-image capture.
- (void)takePhotoTapped:(id)sender
{
    [self capImage];
}
@end |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment