@maerlyn5
Last active July 21, 2016 14:56
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!(_capturing && _videoCaptureConsumer)) {
        return;
    }
    CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVReturn lock = CVPixelBufferLockBaseAddress(imageBuffer, 0);
    _videoFrame.timestamp = time;
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    if (width != _captureWidth || height != _captureHeight) {
        [self updateCaptureFormatWithWidth:width height:height];
    }
    _videoFrame.format.imageWidth = width;
    _videoFrame.format.imageHeight = height;
    CMTime minFrameDuration;
    if (SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"7.0")) {
        minFrameDuration = _videoInput.device.activeVideoMinFrameDuration;
    } else {
        AVCaptureConnection *conn = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
        minFrameDuration = conn.videoMinFrameDuration;
    }
    _videoFrame.format.estimatedFramesPerSecond = minFrameDuration.timescale / minFrameDuration.value;
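    // e.g. a minimum frame duration of 1/30 s (value = 1, timescale = 30) gives an estimate of 30 fps.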
    // TODO: how do we measure this from AVFoundation?
    _videoFrame.format.estimatedCaptureDelay = 100;
    [_videoFrame clearPlanes];
    _videoFrame.orientation = [self currentDeviceOrientation];
    OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
    if (pixelFormat == '420v' || pixelFormat == '420f') {
        assert(CVPixelBufferGetPlaneCount(imageBuffer) == 2);
        // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = '420v',
        // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange  = '420f',
        // Bi-Planar Component Y'CbCr 8-bit 4:2:0, video-range (luma = [16,235], chroma = [16,240]).
        // Bi-Planar Component Y'CbCr 8-bit 4:2:0, full-range (luma = [0,255], chroma = [1,255]).
        // Note: CVPixelBufferGetBaseAddress() on a planar buffer points to a big-endian
        // CVPlanarPixelBufferInfo_YCbCrBiPlanar struct, so each plane is addressed here via
        // CVPixelBufferGetBaseAddressOfPlane(); the Y data makes up the first two-thirds of the
        // combined Y + CbCr payload.
        size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        void *baseAddress0 = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        size_t bytesPerRow1 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
        size_t height1 = CVPixelBufferGetHeightOfPlane(imageBuffer, 1);
        void *baseAddress1 = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
        size_t grayscaleBufferSize = height * bytesPerRow;
        size_t cbCrBufferSize = height1 * bytesPerRow1;
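        // For 4:2:0 bi-planar data the CbCr plane is height/2 rows of interleaved Cb/Cr pairs,
        // so cbCrBufferSize works out to roughly half of grayscaleBufferSize (modulo row padding).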
        void *grayscaleBufferWithUVTackedON = malloc(grayscaleBufferSize + cbCrBufferSize);
        if (grayscaleBufferWithUVTackedON == NULL) {
            NSLog(@"Allocation error");
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }
        memset(grayscaleBufferWithUVTackedON, 0, grayscaleBufferSize + cbCrBufferSize);
        memcpy(grayscaleBufferWithUVTackedON, baseAddress0, grayscaleBufferSize);
        // I should be able to pass the CbCr plane in as the second pointer.
        memcpy((uint8_t *)grayscaleBufferWithUVTackedON + grayscaleBufferSize, baseAddress1, cbCrBufferSize);
        // Allocate something that SHOULD fail when passed in.
        void *bsPointer = malloc(1);
        // Pass in the Y plane (with the CbCr data appended).
        [_videoFrame.planes addPointer:grayscaleBufferWithUVTackedON];
        // Pass in the bogus pointer (this SHOULD cause consumeFrame to crash).
        [_videoFrame.planes addPointer:bsPointer]; // This line should read: [_videoFrame.planes addPointer:baseAddress1];
        [_videoCaptureConsumer consumeFrame:_videoFrame];
        free(grayscaleBufferWithUVTackedON);
        free(bsPointer);
    } else {
        NSLog(@"Fatal error! Unknown pixel format");
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
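
For context, a minimal sketch of the capture setup this delegate method assumes; the output variable, queue label, and choice of the full-range format are illustrative assumptions, not taken from the original class:

// Hypothetical setup: a video data output configured for one of the
// bi-planar 4:2:0 pixel formats ('420f' / '420v') handled above.
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
[videoOutput setSampleBufferDelegate:self
                               queue:dispatch_queue_create("video.capture", DISPATCH_QUEUE_SERIAL)];

As the inline comment above notes, the non-crashing variant of the hand-off would pass baseAddress1 (or the appended CbCr copy at grayscaleBufferWithUVTackedON + grayscaleBufferSize) as the second plane pointer instead of bsPointer.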