@michaeleisel
Created July 18, 2013 01:43
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "TTVideo.h"
#define BYTES_PER_PIXEL 4
@interface TTVideo ()
// Redeclared as readwrite so that we can write to the property and still be atomic with external readers.
@property (readwrite, nonatomic) Float64 videoFrameRate;
@property (readwrite, nonatomic) CMVideoDimensions videoDimensions;
@property (readwrite, nonatomic) CMVideoCodecType videoType;
@property (readwrite, getter=isRecording) BOOL recording;
@property (readwrite, nonatomic) AVCaptureVideoOrientation videoOrientation;
@end
@implementation TTVideo
@synthesize videoFrameRate, videoDimensions, videoType;
@synthesize referenceOrientation;
@synthesize videoOrientation;
@synthesize recording;
- (id) initWithPreviewView:(UIView*)previewView delegate:(id<TTVideoProcessingDelegate>)delegate
{
if (self = [super init]) {
self.delegate = delegate;
previousSecondTimestamps = [[NSMutableArray alloc] init];
referenceOrientation = UIDeviceOrientationPortrait;
[self setupCaptureSession];
self.previewView = previewView;
[self setupPreviewLayer]; //must come after previewView and captureSession are set up
for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if (device.position == AVCaptureDevicePositionFront) {
self.frontCamera = device;
} else {
self.backCamera = device;
}
}
}
return self;
}
#pragma mark Utilities
- (void) calculateFramerateAtTimestamp:(CMTime) timestamp
{
[previousSecondTimestamps addObject:[NSValue valueWithCMTime:timestamp]];
CMTime oneSecond = CMTimeMake( 1, 1 );
CMTime oneSecondAgo = CMTimeSubtract( timestamp, oneSecond );
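// Prune timestamps older than one second so the remaining count approximates the
// instantaneous frame rate, then average it with the previous value to smooth jitter.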
while( CMTIME_COMPARE_INLINE( [[previousSecondTimestamps objectAtIndex:0] CMTimeValue], <, oneSecondAgo ) )
[previousSecondTimestamps removeObjectAtIndex:0];
Float64 newRate = (Float64) [previousSecondTimestamps count];
self.videoFrameRate = (self.videoFrameRate + newRate) / 2;
}
- (void)removeFile:(NSURL *)fileURL
{
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString *filePath = [fileURL path];
if ([fileManager fileExistsAtPath:filePath]) {
NSError *error;
BOOL success = [fileManager removeItemAtPath:filePath error:&error];
if (!success)
[self showError:error];
}
}
- (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGFloat angle = 0.0;
switch (orientation) {
case AVCaptureVideoOrientationPortrait:
angle = 0.0;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
angle = M_PI;
break;
case AVCaptureVideoOrientationLandscapeRight:
angle = -M_PI_2;
break;
case AVCaptureVideoOrientationLandscapeLeft:
angle = M_PI_2;
break;
default:
break;
}
return angle;
}
- (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGAffineTransform transform = CGAffineTransformIdentity;
// Calculate offsets from an arbitrary reference orientation (portrait)
CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation];
CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.videoOrientation];
// Find the difference in angle between the passed in orientation and the current video orientation
CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
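// Example: buffers captured in portrait (offset 0) being transformed for a
// landscape-left reference orientation (offset +M_PI_2) yield a +90 degree rotation.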
transform = CGAffineTransformMakeRotation(angleOffset);
return transform;
}
#pragma mark Recording
- (void)saveMovieToCameraRoll
{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:movieURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
[self showError:error];
else
[self removeFile:movieURL];
dispatch_async(movieWritingQueue, ^{
recordingWillBeStopped = NO;
self.recording = NO;
[self.delegate recordingDidStopWithFileURL:movieURL];
});
}];
}
- (void) writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType
{
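// Lazily start the writing session at the first buffer's presentation time; after
// that, buffers are appended only while the writer is in the Writing state and the
// corresponding input is ready for more data.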
if ( assetWriter.status == AVAssetWriterStatusUnknown ) {
if ([assetWriter startWriting]) {
[assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
}
else {
[self showError:[assetWriter error]];
}
}
if ( assetWriter.status == AVAssetWriterStatusWriting ) {
if (mediaType == AVMediaTypeVideo) {
if (assetWriterVideoIn.readyForMoreMediaData) {
if (![assetWriterVideoIn appendSampleBuffer:sampleBuffer]) {
[self showError:[assetWriter error]];
}
}
}
else if (mediaType == AVMediaTypeAudio) {
if (assetWriterAudioIn.readyForMoreMediaData) {
if (![assetWriterAudioIn appendSampleBuffer:sampleBuffer]) {
[self showError:[assetWriter error]];
}
}
}
}
}
- (BOOL) setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription
{
const AudioStreamBasicDescription *currentASBD = CMAudioFormatDescriptionGetStreamBasicDescription(currentFormatDescription);
size_t aclSize = 0;
const AudioChannelLayout *currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(currentFormatDescription, &aclSize);
NSData *currentChannelLayoutData = nil;
// AVChannelLayoutKey must be specified, but if we don't know any better give an empty data and let AVAssetWriter decide.
if ( currentChannelLayout && aclSize > 0 )
currentChannelLayoutData = [NSData dataWithBytes:currentChannelLayout length:aclSize];
else
currentChannelLayoutData = [NSData data];
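// AAC compression at 64 kbit/s per channel, preserving the source's sample rate
// and channel count.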
NSDictionary *audioCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:currentASBD->mSampleRate], AVSampleRateKey,
[NSNumber numberWithInt:64000], AVEncoderBitRatePerChannelKey,
[NSNumber numberWithInteger:currentASBD->mChannelsPerFrame], AVNumberOfChannelsKey,
currentChannelLayoutData, AVChannelLayoutKey,
nil];
if ([assetWriter canApplyOutputSettings:audioCompressionSettings forMediaType:AVMediaTypeAudio]) {
assetWriterAudioIn = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
assetWriterAudioIn.expectsMediaDataInRealTime = YES;
if ([assetWriter canAddInput:assetWriterAudioIn])
[assetWriter addInput:assetWriterAudioIn];
else {
NSLog(@"Couldn't add asset writer audio input.");
return NO;
}
}
else {
NSLog(@"Couldn't apply audio output settings.");
return NO;
}
return YES;
}
- (BOOL) setupAssetWriterVideoInput:(CMFormatDescriptionRef)currentFormatDescription
{
float bitsPerPixel;
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(currentFormatDescription);
int numPixels = dimensions.width * dimensions.height;
int bitsPerSecond;
// Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
if ( numPixels < (640 * 480) )
bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
else
bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
bitsPerSecond = numPixels * bitsPerPixel;
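// For example, 1280 x 720 (921,600 pixels) at 11.4 bits per pixel works out to
// roughly 10.5 Mbit/s of average video bitrate.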
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInteger:dimensions.width], AVVideoWidthKey,
[NSNumber numberWithInteger:dimensions.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:bitsPerSecond], AVVideoAverageBitRateKey,
[NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
if ([assetWriter canApplyOutputSettings:videoCompressionSettings forMediaType:AVMediaTypeVideo]) {
assetWriterVideoIn = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
assetWriterVideoIn.expectsMediaDataInRealTime = YES;
assetWriterVideoIn.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation];
if ([assetWriter canAddInput:assetWriterVideoIn])
[assetWriter addInput:assetWriterVideoIn];
else {
NSLog(@"Couldn't add asset writer video input.");
return NO;
}
}
else {
NSLog(@"Couldn't apply video output settings.");
return NO;
}
return YES;
}
-(void)startRecording
{
dispatch_async(movieWritingQueue, ^{
[self startRecordingOnMovieQueue];
});
}
- (void) startRecordingOnMovieQueue
{
if ( recordingWillBeStarted || self.recording )
return;
// Build a unique temporary path (<tmp>/<random>-Movie.MOV); the original line relied on
// string-concatenation helper categories that are not part of Foundation.
movieURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%u-Movie.MOV", arc4random()]]];
recordingWillBeStarted = YES;
// recordingDidStart is called from captureOutput:didOutputSampleBuffer:fromConnection: once the asset writer is setup
[self.delegate recordingWillStart];
// Remove the file if one with the same name already exists
[self removeFile:movieURL];
// Create an asset writer
NSError *error;
assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:(NSString *)kUTTypeQuickTimeMovie error:&error];
if (error)
[self showError:error];
}
- (void) stopRecordingAndRestart:(BOOL)shouldRestart
{
dispatch_async(movieWritingQueue, ^{
if ( recordingWillBeStopped || (self.recording == NO) )
return;
self.recording = NO;
recordingWillBeStopped = YES;
self.readyToRecordVideo = NO;
//readyToRecordAudio = NO;
[self.delegate recordingWillStop];
AVAssetWriter *writer = assetWriter;
[assetWriter finishWritingWithCompletionHandler:^{
[self.delegate recordingDidStopWithFileURL: writer.outputURL];
recordingWillBeStopped = NO;
}];
if(shouldRestart)
[self startRecordingOnMovieQueue];
});
}
#pragma mark Processing
- (void)processPixelBuffer: (CVImageBufferRef)pixelBuffer
{
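// Zeroes the green channel of every pixel in the (BGRA) buffer, i.e. the "rosy"
// effect from Apple's RosyWriter sample that this class is based on.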
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
int bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
int bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
unsigned char *pixel = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for( int row = 0; row < bufferHeight; row++ ) {
for( int column = 0; column < bufferWidth; column++ ) {
pixel[1] = 0; // De-green (the second byte of each BGRA pixel is the green channel)
pixel += BYTES_PER_PIXEL;
}
}
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
}
#pragma mark Capture
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef) sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
if ( connection == videoConnection ) {
// Get framerate
CMTime timestamp = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
[self calculateFramerateAtTimestamp:timestamp];
// Get frame dimensions (for onscreen display)
if (self.videoDimensions.width == 0 && self.videoDimensions.height == 0)
self.videoDimensions = CMVideoFormatDescriptionGetDimensions( formatDescription );
// Get buffer type
if ( self.videoType == 0 )
self.videoType = CMFormatDescriptionGetMediaSubType( formatDescription );
//CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Synchronously process the pixel buffer to de-green it.
//[self processPixelBuffer:pixelBuffer];
// Enqueue it for preview. This is a shallow queue, so if image processing is taking too long,
// we'll drop this frame for preview (this keeps preview latency low).
/*OSStatus err = CMBufferQueueEnqueue(previewBufferQueue, sampleBuffer);
if ( !err ) {
dispatch_async(dispatch_get_main_queue(), ^{
CMSampleBufferRef sbuf = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(previewBufferQueue);
if (sbuf) {
CVImageBufferRef pixBuf = CMSampleBufferGetImageBuffer(sbuf);
[self.delegate pixelBufferReadyForDisplay:pixBuf];
CFRelease(sbuf);
}
});
}*/
}
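// Treat any non-audio connection as the current video connection; switching camera
// inputs replaces the connection object, so this refreshes the cached reference
// (this appears to be the intent of the check below).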
if(connection != audioConnection){
videoConnection = connection;
}
CFRetain(sampleBuffer);
CFRetain(formatDescription);
dispatch_async(movieWritingQueue, ^{
if ( assetWriter ) {
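// Remember whether both inputs were configured before handling this buffer so the
// transition to "fully recording" can be detected below.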
BOOL wasReadyToRecord = (readyToRecordAudio && self.readyToRecordVideo);
if( ! wasReadyToRecord)
DDLogError(@"was not ready to record");
if (connection == videoConnection) {
// Initialize the video input if this is not done yet
if (!self.readyToRecordVideo){
self.readyToRecordVideo = [self setupAssetWriterVideoInput:formatDescription];
if( ! self.readyToRecordVideo)
DDLogError(@"fail ready to record video");
else
DDLogError(@"setup record video success");
}
// Write video data to file
if (self.readyToRecordVideo && readyToRecordAudio)
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
}
else if (connection == audioConnection) {
// Initialize the audio input if this is not done yet
if (!readyToRecordAudio){
readyToRecordAudio = [self setupAssetWriterAudioInput:formatDescription];
if( ! readyToRecordAudio)
DDLogError(@"fail ready to record audio");
else
DDLogError(@"setup record audio success");
}
// Write audio data to file
if (readyToRecordAudio && self.readyToRecordVideo){
/*if(self.volumeLevel > .99)
[self applyVolumeLevelToSampleBuffer: sampleBuffer]; //originally this was working with CMBlockBuffer, see Recording.m if anything goes wrong*/
[self writeSampleBuffer: sampleBuffer ofType: AVMediaTypeAudio];
}
}
BOOL isReadyToRecord = (readyToRecordAudio && self.readyToRecordVideo);
if ( !wasReadyToRecord && isReadyToRecord ) {
DDLogError(@"resetting recording");
recordingWillBeStarted = NO;
self.recording = YES;
[self.delegate recordingDidStart];
}
} else {
NSLog(@"no asset writer");
}
CFRelease(sampleBuffer);
CFRelease(formatDescription);
});
}
-(CMBlockBufferRef) applyVolumeLevelToSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(sampleBuffer);
unsigned long current = 0,
total = CMBlockBufferGetDataLength(buffer);
while(current < total) {
// Get a pointer to the next contiguous region of audio data; bail out if the call
// fails. Note that `samples` initially receives the region's length in bytes.
short *data;
unsigned long samples;
if (CMBlockBufferGetDataPointer(buffer, current, &samples, NULL, (char **)&data))
break;
// Advance the byte offset past this region, then convert its length from bytes to
// a count of 16-bit samples.
current += samples;
samples /= sizeof(*data);
// Attenuate all contained samples.
for (unsigned int i = 0; i < samples; i++) {
data[i] = self.volumeLevel * data[i];
}
}
return buffer;
}
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
if ([device position] == position)
return device;
return nil;
}
- (AVCaptureDevice *)audioDevice
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
if ([devices count] > 0)
return [devices objectAtIndex:0];
return nil;
}
- (BOOL) setupCaptureSession
{
/*
Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
is used to deliver both audio and video buffers, and our video processing consistently takes
too long, the delivery queue can back up, resulting in audio being dropped.
When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
*/
/*
* Create capture session
*/
captureSession = [[AVCaptureSession alloc] init];
[captureSession beginConfiguration];
captureSession.sessionPreset = AVCaptureSessionPresetLow;
[captureSession commitConfiguration];
AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:[self audioDevice] error:nil];
if ([captureSession canAddInput:audioIn])
[captureSession addInput:audioIn];
AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
dispatch_queue_t audioCaptureQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
[audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
if ([captureSession canAddOutput:audioOut])
[captureSession addOutput:audioOut];
audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:[self videoDeviceWithPosition:AVCaptureDevicePositionBack] error:nil];
if ([captureSession canAddInput: videoIn])
[captureSession addInput: videoIn];
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
videoOut.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
[videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
if ([captureSession canAddOutput:videoOut])
[captureSession addOutput:videoOut];
videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
if(videoConnection.isVideoStabilizationSupported)
[videoConnection setEnablesVideoStabilizationWhenAvailable:YES];
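// videoMinFrameDuration is the reciprocal of the maximum allowed frame rate;
// CMTimeMakeWithSeconds(INT_MAX, 10) is an extremely long minimum frame duration,
// so capture would be heavily throttled unless the connection clamps the value.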
CMTime maxDuration = CMTimeMakeWithSeconds(INT_MAX, 10);
videoConnection.videoMinFrameDuration = maxDuration;
self.videoOrientation = [videoConnection videoOrientation];
return YES;
}
-(AVCaptureDevicePosition)cameraPosition
{
for(AVCaptureDeviceInput *input in captureSession.inputs){
if(input.device == self.frontCamera)
return AVCaptureDevicePositionFront;
if(input.device == self.backCamera)
return AVCaptureDevicePositionBack;
}
assert(NO);
return 0;
}
-(AVCaptureDeviceInput*)videoIn
{
for(AVCaptureDeviceInput *input in captureSession.inputs)
if(input.device == self.frontCamera || input.device == self.backCamera)
return input;
return nil;
}
-(void)setCameraPosition:(AVCaptureDevicePosition)cameraPosition
{
dispatch_async(movieWritingQueue, ^{
if(cameraPosition == self.cameraPosition)
return;
[captureSession beginConfiguration];
[captureSession removeInput: self.videoIn];
AVCaptureDevice *newDevice = (cameraPosition == AVCaptureDevicePositionFront) ? self.frontCamera : self.backCamera;
NSError *error;
AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error: &error];
assert( ! error);
[captureSession addInput: newInput];
[captureSession commitConfiguration];
//[self setupAssetWriterVideoInput: newInput];
self.readyToRecordVideo = NO;
});
}
-(void)setMuted:(BOOL)muted
{
}
-(BOOL)torchActive
{
return self.videoIn.device.torchActive;
}
-(void)setTorchActive:(BOOL)torchActive
{
if(torchActive == self.torchActive)
return;
NSError *error;
[self.videoIn.device lockForConfiguration: &error];
AVCaptureTorchMode newMode = torchActive ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
[self.videoIn.device setTorchMode: newMode];
[self.videoIn.device unlockForConfiguration];
assert( ! error);
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
DDLogError(@"DROPPED BUFFER FROM CONNECTION %@, %@", connection, sampleBuffer);
}
- (void) setupAndStartCaptureSession
{
// Create a shallow queue for buffers going to the display for preview.
OSStatus err = CMBufferQueueCreate(kCFAllocatorDefault, 1, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), &previewBufferQueue);
if (err)
[self showError:[NSError errorWithDomain:NSOSStatusErrorDomain code:err userInfo:nil]];
// Create serial queue for movie writing
movieWritingQueue = dispatch_queue_create("Movie Writing Queue", DISPATCH_QUEUE_SERIAL);
if ( !captureSession )
[self setupCaptureSession];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionStoppedRunningNotification:) name:AVCaptureSessionDidStopRunningNotification object:captureSession];
if ( !captureSession.isRunning )
[captureSession startRunning];
}
-(void)setupPreviewLayer
{
assert(self.previewView && captureSession);
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession: captureSession];
self.previewLayer.frame = self.previewView.bounds;
[self.previewView.layer addSublayer:self.previewLayer];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//@@R fix this
if (self.previewLayer.connection.isVideoOrientationSupported) {
self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}
}
- (void) pauseCaptureSession
{
if ( captureSession.isRunning )
[captureSession stopRunning];
}
- (void) resumeCaptureSession
{
if ( !captureSession.isRunning )
[captureSession startRunning];
}
- (void)captureSessionStoppedRunningNotification:(NSNotification *)notification
{
dispatch_async(movieWritingQueue, ^{
if ( [self isRecording] ) {
[self stopRecordingAndRestart: NO];
}
});
}
- (void) stopAndTearDownCaptureSession
{
[self.previewLayer removeFromSuperlayer];
[captureSession stopRunning];
if (captureSession)
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureSessionDidStopRunningNotification object:captureSession];
captureSession = nil;
if (previewBufferQueue) {
CFRelease(previewBufferQueue);
previewBufferQueue = NULL;
}
if (movieWritingQueue) {
movieWritingQueue = NULL;
}
}
#pragma mark Error Handling
- (void)showError:(NSError *)error
{
CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^(void) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
message:[error localizedFailureReason]
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
});
}
@end
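// Usage sketch (not part of the original gist; the previewContainer property and the
// delegate conformance of the calling controller are assumptions): an object that
// conforms to TTVideoProcessingDelegate might drive this class roughly like so:
//
//   TTVideo *video = [[TTVideo alloc] initWithPreviewView:self.previewContainer delegate:self];
//   [video setupAndStartCaptureSession];
//   [video startRecording];
//   // ... later ...
//   [video stopRecordingAndRestart:NO];
//   [video stopAndTearDownCaptureSession];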