xeye - combine side-by-side 3D video into red/blue glasses format
// Written March 26th 2013
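//
// Reads a side-by-side 3D movie, blacks out the left half of each frame,
// and folds the left eye's view into the red channel of the right half,
// producing output watchable with red/cyan anaglyph glasses. Audio is
// passed through untouched. As a quick test, the effect only runs on
// frames 2000-3000, and writing stops at frame 3000.
//
// Build (roughly; assumes the file is saved as xeye.m):
//   clang xeye.m -fobjc-arc -framework Cocoa -framework AVFoundation \
//       -framework CoreMedia -framework CoreVideo -o xeye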
#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
int main(int argc, char **argv) { @autoreleasepool {
    NSLog(@"Hello world");

    NSError *err;
    NSURL *inURL = [NSURL fileURLWithPath:@"/Users/william/Desktop/Hobbit3d.mp4"];
    NSURL *outURL = [NSURL fileURLWithPath:@"/Users/william/Desktop/Hobbit3d.Converted.mov"];

    AVAsset *asset = [AVAsset assetWithURL:inURL];
    assert(asset);

    // Construct reader
    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&err];
    if (!reader) { NSLog(@"Couldn't create reader: %@", err); }
    assert(reader);

    NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];

    // Add audio output to reader (the mix output combines all audio tracks)
    AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
    assert(audioReaderOutput);
    assert([reader canAddOutput:audioReaderOutput]);
    [reader addOutput:audioReaderOutput];

    // Add video track output to reader, decoding to 32BGRA so the anaglyph
    // pass below can treat each pixel as four bytes: B, G, R, A
    NSDictionary *videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
    AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTracks[0] outputSettings:videoSettings];
    assert(videoReaderOutput);
    assert([reader canAddOutput:videoReaderOutput]);
    [reader addOutput:videoReaderOutput];

    // -----
    // Construct writer
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outURL fileType:AVFileTypeQuickTimeMovie error:&err];
    if (!writer) { NSLog(@"Couldn't create writer: %@", err); }
    assert(writer);

    // Add audio track input to writer
    // AudioChannelLayout channelLayout;
    // memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    // channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    // NSDictionary *outputSettings = @{
    //     AVFormatIDKey: @(kAudioFormatLinearPCM),
    //     AVSampleRateKey: @44100.0,
    //     AVNumberOfChannelsKey: @2,
    //     AVChannelLayoutKey: [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],
    //     AVLinearPCMBitDepthKey: @16,
    //     AVLinearPCMIsNonInterleaved: @NO,
    //     AVLinearPCMIsFloatKey: @NO,
    //     AVLinearPCMIsBigEndianKey: @NO
    // };
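    // Nil output settings mean pass-through: source audio samples are copied
    // into the new file unmodified, so the commented-out PCM settings above
    // go unused.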
    AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
    assert(audioWriterInput);
    assert([writer canAddInput:audioWriterInput]);
    [writer addInput:audioWriterInput];

    // Add video track input to writer
    // NSDictionary *videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
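    // Nil settings are pass-through here too; since the reader decodes to
    // 32BGRA, the modified frames are written uncompressed (expect a big file).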
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
    assert(videoWriterInput);
    assert([writer canAddInput:videoWriterInput]);
    [writer addInput:videoWriterInput];

    // -----
    // Begin!
    [writer startWriting];
    [reader startReading];

    AVAssetTrack *soundTrack = audioTracks[0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
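    // Start the writer's session at t = 0, expressed in the audio track's timescale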
    [writer startSessionAtSourceTime:startTime];

    BOOL audioFinished = NO;
    BOOL videoFinished = NO;
    int videoFrame = 0;

    // Pump both writer inputs: whenever one can accept more data, copy the
    // next sample buffer across from the corresponding reader output.
    @try {
        while (reader.status == AVAssetReaderStatusReading) {
            if (!audioFinished && audioWriterInput.readyForMoreMediaData) {
                CMSampleBufferRef buffer = [audioReaderOutput copyNextSampleBuffer];
                if (buffer) {
                    [audioWriterInput appendSampleBuffer:buffer];
                    CFRelease(buffer);
                } else {
                    audioFinished = YES;
                }
            }
            if (!videoFinished && videoWriterInput.readyForMoreMediaData) {
                videoFrame++;
                CMSampleBufferRef buffer = [videoReaderOutput copyNextSampleBuffer];
                if (buffer) {
                    @autoreleasepool {
                        if (videoFrame == 2000) {
                            NSLog(@"Beginning effect");
                        } else if (videoFrame == 3000) {
                            NSLog(@"Finished with effect");
                            CFRelease(buffer); // release before bailing out early
                            break;
                        }
                        if (videoFrame > 2000 && videoFrame < 3000) {
                            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(buffer);
                            CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)imageBuffer;
                            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
                            uint8_t *pixels = CVPixelBufferGetBaseAddress(pixelBuffer);
                            size_t width = CVPixelBufferGetWidth(pixelBuffer);
                            size_t height = CVPixelBufferGetHeight(pixelBuffer);
                            size_t bpr = CVPixelBufferGetBytesPerRow(pixelBuffer);
                            size_t bpp = bpr / width; // 4 bytes per pixel for 32BGRA
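                            // The frame is side-by-side 3D: left eye on the left
                            // half, right eye on the right half. For each pixel in
                            // the right half, fold the matching left-eye pixel into
                            // the red channel (seen through the red lens, typically
                            // the left one), keep the right eye's green and blue
                            // (the cyan lens), and black out the left half.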
                            for (size_t y = 0; y < height; y++) {
                                for (size_t x = width / 2; x < width; x++) {
                                    size_t right = y * bpr + x * bpp;
                                    size_t left = right - (width / 2) * bpp;
                                    uint8_t leftB = pixels[left];
                                    uint8_t leftG = pixels[left + 1];
                                    pixels[left] = 0;
                                    pixels[left + 1] = 0;
                                    pixels[left + 2] = 0;
                                    pixels[right + 2] = (uint8_t)((0.7 * leftG) + (0.3 * leftB));
                                }
                            }
                            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                            [videoWriterInput appendSampleBuffer:buffer];
                            // Note: imageBuffer comes from a Get function, so it is
                            // not owned here and must not be CFReleased.
                        } else {
                            [videoWriterInput appendSampleBuffer:buffer];
                        }
                        CFRelease(buffer);
                    }
                } else {
                    videoFinished = YES;
                }
            }
        }
    } @catch (NSException *ex) {
        NSLog(@"boom! %@", ex);
    }
NSLog(@"Finishing up.");
[writer finishWriting];
[reader cancelReading];
NSLog(@"Done");
return 0;
}