//
//  ViewController.m
//  views3
//
//  Created by Srihari Sridharan.
//  Copyright (c) 2013 Srihari Sridharan. All rights reserved.
//
#import "ViewController.h"
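// NOTE (assumption): this file is compiled as Objective-C++ and relies on headers not
// shown here. The OpenCV headers (e.g. #import <opencv2/opencv.hpp>), the NinevehGL
// headers, and the cv/std using-directives are assumed to be pulled in through
// ViewController.h or the project's prefix header, along with the instance variables
// referenced below (graphicsView, matPhoto, faceRect, reset, _mesh, _camera,
// _position, _distance, _rotate).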
/*=========================OPENCV DECLARATIONS=============================*/
// Cascade file selector (swap in another cascade file if required)
NSString* const kFaceCascadeName = @"haarcascade_frontalface_alt";
#ifdef __cplusplus
CascadeClassifier face_cascade;
#endif
const int HaarOptions = CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_DO_ROUGH_SEARCH;
/*=========================================================================*/
@interface ViewController ()
{
    UIImageView *imageView2; //Background image view
    UIView *mView;           //Main view whose children are imageView2 and graphicsView
}
/*=====================================================================*/
@end
@implementation ViewController
@synthesize image; //Background image, received from the camera controller
-(void) loadView
{
    mView = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    self.view = mView;
    mView.autoresizingMask = UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
    CGRect screenBounds = [[UIScreen mainScreen] bounds];
    NSLog(@"%@", NSStringFromCGRect(screenBounds)); //Log the screen resolution
    /*=========================IMAGEVIEW INITIALISATION=============================*/
    /*Uncomment the following line to use a custom image from file as the background, for testing purposes*/
//  image = [UIImage imageNamed:@"raptor.jpeg"];
    //Initialise imageView2 with the background image
    imageView2 = [[UIImageView alloc] initWithImage:image];
    [imageView2 setFrame:[[UIScreen mainScreen] bounds]];
//  imageView2.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
//  imageView2.contentMode = UIViewContentModeScaleAspectFit;
    [self.view addSubview:imageView2];
    /*=========================OPENGL INITIALIZATION=============================*/
    nglGlobalColorFormat(NGLColorFormatRGBA);
    nglGlobalFlush();
    graphicsView = [[NGLView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    graphicsView.autoresizingMask = UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;
//  graphicsView.contentMode = UIViewContentModeScaleAspectFit;
    graphicsView.backgroundColor = [UIColor clearColor];
    graphicsView.delegate = self;
    //Enable multitouch in the graphics view
    graphicsView.multipleTouchEnabled = YES;
    //Reset switch: while reset == 0 the graphics layer is placed from the initial OpenCV detection;
    //afterwards the mesh position is driven by touch input (see drawView)
    reset = 0;
    /*=========================OPENCV DETECTION=============================*/
    NSString *faceCascadePath = [[NSBundle mainBundle] pathForResource:kFaceCascadeName
                                                                ofType:@"xml"];
    if (!face_cascade.load([faceCascadePath UTF8String])) {
        NSLog(@"Failed to load cascade file at %@", faceCascadePath);
    }
    matPhoto = [self cvMatFromUIImage:image];
    vector<cv::Rect> faces;
    Mat frame_gray;
    //Greyscale conversion and histogram equalisation
    cvtColor(matPhoto, frame_gray, CV_BGRA2GRAY);
    equalizeHist(frame_gray, frame_gray);
    face_cascade.detectMultiScale(frame_gray, faces, 1.1, 2, HaarOptions, cv::Size(20, 20));
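    // detectMultiScale parameters above: scale factor 1.1 (image pyramid step),
    // minNeighbors 2 (overlapping detections required to keep a face), HaarOptions
    // (biggest-object / rough-search flags from the old C API), and a 20x20 pixel
    // minimum face size. With CV_HAAR_FIND_BIGGEST_OBJECT the detector typically
    // returns at most one rectangle, which is why keeping only the last rectangle
    // in the loop below is acceptable here.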
    //Draw rectangles around detected faces (uncomment the rectangle call if wanted)
    for(unsigned int i = 0; i < faces.size(); ++i) {
        NSLog(@"%u", i);
//      rectangle(matPhoto, cv::Point(faces[i].x, faces[i].y), cv::Point(faces[i].x + faces[i].width, faces[i].y + faces[i].height), cv::Scalar(255,255,255));
        faceRect = [self faceToCGRect:faces[i]]; //faceRect ends up holding the last detected face
    }
    NSLog(@"%@", NSStringFromCGRect(faceRect)); //Face bounds in image coordinates
    CGRect modifiedFaceBounds = faceRect;
    //Scale the detected face from image coordinates to the screen frame
    modifiedFaceBounds.size.height = (imageView2.bounds.size.width/image.size.width)*modifiedFaceBounds.size.height;
    modifiedFaceBounds.size.width  = (imageView2.bounds.size.width/image.size.width)*modifiedFaceBounds.size.width;
    modifiedFaceBounds.origin.x    = (imageView2.bounds.size.width/image.size.width)*modifiedFaceBounds.origin.x;
    modifiedFaceBounds.origin.y    = (imageView2.bounds.size.height/image.size.height)*modifiedFaceBounds.origin.y;
    faceRect = modifiedFaceBounds;
    NSLog(@"Modified Face Bounds : %@", NSStringFromCGRect(modifiedFaceBounds));
    //Shift from UIImage coordinates (origin top-left) to NGL coordinates (origin at screen centre)
    faceRect.origin.x = -(imageView2.bounds.size.width/2 - faceRect.origin.x);
//  faceRect.origin.y = -faceRect.origin.y;
    faceRect.origin.y = (imageView2.bounds.size.height/2 - faceRect.origin.y);
    //faceRect now holds x,y coordinates relative to an origin at the centre of the screen
    NSLog(@"NGL Coordinates: %@", NSStringFromCGRect(faceRect));
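    // Worked example (illustrative numbers, not from the original): on a 320x480 point
    // screen, a scaled face origin of (100, 120) maps to
    //   x = -(320/2 - 100) = -60   (negative x is left of centre)
    //   y =  (480/2 - 120) =  120  (positive y is above centre)
    // so the face rectangle is now expressed about the screen centre, matching the
    // coordinate convention used for the mesh placement in drawView.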
    //Update imageView2 with the (possibly annotated) photo
    imageView2.image = [self UIImageFromCVMat:matPhoto];
    //Add graphicsView on top of the main view
    [self.view addSubview:graphicsView];
}
//Euclidean distance between two touch points (used for the pinch gesture)
- (float) distanceFromPoint:(CGPoint)pointA toPoint:(CGPoint)pointB
{
    float xD = fabs(pointA.x - pointB.x);
    float yD = fabs(pointA.y - pointB.y);
    return sqrt(xD*xD + yD*yD);
}
//The main OpenGL render callback; all NinevehGL operations should be carried out here.
//drawView is called by NinevehGL on every frame of the render loop.
-(void) drawView
{
    //Make the mesh look at the camera
    [_mesh lookAtObject:_camera];
    //When rendering the graphics layer for the first time, place the mesh on the detected face
    if(reset == 0)
    {
        _mesh.x = ((0.8/graphicsView.bounds.size.width) * (faceRect.origin.x + (faceRect.size.width/2)));
        _mesh.y = ((0.8/graphicsView.bounds.size.height) * (faceRect.origin.y - 1.5*faceRect.size.height));
    }
    //Graphics control using touch
    _mesh.x += _position.x * 0.001;
    _mesh.y -= _position.y * 0.001;
    _mesh.z += _distance;
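    // NOTE (assumption): _rotate is computed from the two-finger gesture in touchesMoved
    // but is not applied anywhere in this file. If NGLObject3D exposes a rotateZ property
    // in degrees (as in NinevehGL's object API), it could be consumed here in the same
    // incremental style, e.g.:
    //   _mesh.rotateZ += _rotate * 180.0 / M_PI;
    //   _rotate = 0.0;
    // This is an illustrative sketch, not part of the original gist.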
    _position.x = 0.0;
    _position.y = 0.0;
    _distance = 0.0;
    reset = 1; //After the first frame, stop snapping the mesh to the detected face
    [_camera drawCamera];
}
- (void)viewDidLoad
{
    [super viewDidLoad];
    //Once the view has loaded, set up the graphics objects;
    //rendering itself runs continuously via drawView
    graphicsView.contentScaleFactor = [[UIScreen mainScreen] scale];
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:kNGLMeshCentralizeYes, kNGLMeshKeyCentralize, @"0.3", kNGLMeshKeyNormalize, nil];
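    // The settings dictionary above uses NinevehGL mesh keys: kNGLMeshKeyCentralize
    // re-centres the mesh around its own pivot, and kNGLMeshKeyNormalize rescales it
    // (here to 0.3 units, passed as a string as NinevehGL expects). The exact scaling
    // semantics are as documented by NinevehGL; the value 0.3 is the author's choice.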
    _mesh = [[NGLMesh alloc] initWithFile:@"chaintu.obj" settings:settings delegate:nil];
    _camera = [[NGLCamera alloc] initWithMeshes:_mesh, nil];
}
- (void) touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touchA, *touchB;
    CGPoint pointA, pointB;
    // Pan gesture.
    if ([touches count] == 1)
    {
        NSLog(@"%lu", (unsigned long)[touches count]);
        touchA = [[touches allObjects] objectAtIndex:0];
        pointA = [touchA locationInView:self.view];
        pointB = [touchA previousLocationInView:self.view];
        _position.x = (pointA.x - pointB.x);
        _position.y = (pointA.y - pointB.y);
    }
    // Pinch and two-finger rotation gestures.
    else if ([touches count] == 2)
    {
        NSLog(@"%lu", (unsigned long)[touches count]);
        touchA = [[touches allObjects] objectAtIndex:0];
        touchB = [[touches allObjects] objectAtIndex:1];
        // Current distance between the two touches.
        pointA = [touchA locationInView:self.view];
        pointB = [touchB locationInView:self.view];
        float currDistance = [self distanceFromPoint:pointA toPoint:pointB];
        // Previous distance between the two touches.
        pointA = [touchA previousLocationInView:self.view];
        pointB = [touchB previousLocationInView:self.view];
        float prevDistance = [self distanceFromPoint:pointA toPoint:pointB];
        // Rotation: change in the angle of the line joining the two touches.
        CGPoint previousPoint1 = [touchA previousLocationInView:nil];
        CGPoint previousPoint2 = [touchB previousLocationInView:nil];
        CGFloat previousAngle = atan2(previousPoint2.y - previousPoint1.y, previousPoint2.x - previousPoint1.x);
        CGPoint currentPoint1 = [touchA locationInView:nil];
        CGPoint currentPoint2 = [touchB locationInView:nil];
        CGFloat currentAngle = atan2(currentPoint2.y - currentPoint1.y, currentPoint2.x - currentPoint1.x);
        _rotate = (currentAngle - previousAngle);
        _distance = (currDistance - prevDistance) * 0.005;
    }
}
//Debug helper: draws a white border around the given frame (not called elsewhere in this file)
-(void)addSubViewWithFrame:(CGRect)frame
{
    UIView* highlitView = [[UIView alloc] initWithFrame:frame];
    highlitView.layer.borderWidth = 1;
    highlitView.layer.borderColor = [[UIColor whiteColor] CGColor];
//  highlitView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    [imageView2 addSubview:highlitView];
}
//Convert an OpenCV cv::Rect into a CGRect
- (CGRect)faceToCGRect:(cv::Rect)face
{
    CGRect rect;
    rect.origin.x = face.x;
    rect.origin.y = face.y;
    rect.size.width = face.width;
    rect.size.height = face.height;
    return rect;
}
- (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,      // Pointer to data
                                                    cols,            // Width of bitmap
                                                    rows,            // Height of bitmap
                                                    8,               // Bits per component
                                                    cvMat.step[0],   // Bytes per row
                                                    colorSpace,      // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    // colorSpace comes from a Get function, so it is not owned here and must not be released.
    return cvMat;
}
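// NOTE (assumption): with kCGImageAlphaNoneSkipLast and the default byte order, the
// bytes written by CGBitmapContextCreate above are laid out R,G,B,X, so the cv::Mat
// produced is effectively RGBA rather than BGRA. The CV_BGRA2GRAY conversion used in
// loadView still yields a usable grey image (only the R/B luminance weights swap),
// but CV_RGBA2GRAY would match the actual channel order more precisely.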
-(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                 //width
                                        cvMat.rows,                 //height
                                        8,                          //bits per component
                                        8 * cvMat.elemSize(),       //bits per pixel
                                        cvMat.step[0],              //bytesPerRow
                                        colorSpace,                 //colorspace
                                        kCGImageAlphaNone|kCGBitmapByteOrderDefault, //bitmap info
                                        provider,                   //CGDataProviderRef
                                        NULL,                       //decode
                                        false,                      //should interpolate
                                        kCGRenderingIntentDefault   //intent
                                        );
    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
@end