Read a video with AVAssetReader, filter each frame with Core Image, and re-encode it with AVAssetWriter.
import Foundation
import AVFoundation
import CoreImage
import CoreMedia   // CMTime, CMSampleBuffer
import CoreVideo   // CVPixelBuffer, pixel format constants
import OpenGLES    // EAGLContext backing the CIContext
// Anything that can produce a sequence of frames (currently unused in this gist).
protocol SequenceReadable {
    func readNext() -> CIImage
}

// A sticker that composites itself over a background image (currently unused in this gist).
protocol YMSticky {
    func stickOn(background: YMImage) -> YMImage
}
// A sample filter that runs CIHatchedScreen over the incoming frame.
struct SampleSticky: YMFilter {
    func filterOn(background: YMImage) -> YMImage {
        // let params = [kCIInputImageKey: sticky, kCIInputBackgroundImageKey: background]
        // let filter = CIFilter(name: "CISourceOverCompositing", withInputParameters: params)
        // let filter = CIFilter(name: "CIGaussianBlur")
        let filter = CIFilter(name: "CIHatchedScreen")
        filter.setDefaults() // set defaults first so they don't overwrite the input image
        filter.setValue(background, forKey: kCIInputImageKey)
        return filter.outputImage
    }
}
typealias YMImage = CIImage

protocol YMFilter {
    func filterOn(source: YMImage) -> YMImage
}
extension YMImage {
    // Applies each filter in order, feeding the output of one into the next.
    func filterWith(filters: [YMFilter]) -> YMImage {
        var temp = self
        for filter in filters {
            temp = filter.filterOn(temp)
        }
        return temp
    }
}
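
// Usage sketch (not from the original gist): `demoFilterChain` is a hypothetical
// helper showing how filterWith composes YMFilters. Append more filters to the
// array to stack effects; each one receives the previous filter's output.
func demoFilterChain(source: YMImage) -> YMImage {
    return source.filterWith([SampleSticky()])
}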
// Decode frames as 32-bit BGRA so CIImage can wrap the pixel buffers directly.
let PX_BUFFER_OPTS = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)]
// Sequentially pulls decoded frames from an asset's first video track as CIImages.
final class YMVideoSeqReader {
    let videoOutput: AVAssetReaderTrackOutput
    let reader: AVAssetReader
    let nominalFrameRate: Float

    init(asset: AVAsset) {
        var error: NSError?
        reader = AVAssetReader(asset: asset, error: &error)
        let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
        videoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: PX_BUFFER_OPTS)
        reader.addOutput(videoOutput)
        nominalFrameRate = videoTrack.nominalFrameRate
        reader.startReading()
        assert(reader.status != .Failed, "reader failed to start: \(reader.error)")
    }

    // Returns the next decoded frame, or nil once the track is exhausted.
    func next() -> YMImage? {
        if let sb = videoOutput.copyNextSampleBuffer() {
            let pxbuffer = CMSampleBufferGetImageBuffer(sb)
            return YMImage(CVPixelBuffer: pxbuffer)
        }
        return nil
    }
}
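
// Usage sketch (not from the original gist): `demoReadAllFrames` is a hypothetical
// helper that drains every decoded frame from an asset; in practice each CIImage
// would be handed to a filter chain or renderer instead of just counted.
func demoReadAllFrames(asset: AVAsset) -> Int {
    let reader = YMVideoSeqReader(asset: asset)
    var count = 0
    while let frame = reader.next() {
        count += 1 // `frame` is a BGRA-backed CIImage ready for filtering
    }
    return count
}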
final class YMVideoWriter {
    let glContext: EAGLContext
    let ciContext: CIContext
    let writer: AVAssetWriter

    // Removes any existing file at outputFileURL, then creates a fresh MPEG-4 writer.
    class func setupWriter(outputFileURL: NSURL) -> AVAssetWriter {
        let fileManager = NSFileManager.defaultManager()
        if fileManager.fileExistsAtPath(outputFileURL.path!) {
            fileManager.removeItemAtURL(outputFileURL, error: nil)
        }
        var error: NSError?
        let writer = AVAssetWriter(URL: outputFileURL, fileType: AVFileTypeMPEG4, error: &error)
        assert(error == nil, "initializing the video writer should not fail: \(error)")
        return writer
    }

    let videoSize: CGSize
    var videoWidth: NSNumber {
        return NSNumber(float: Float(videoSize.width))
    }
    var videoHeight: NSNumber {
        return NSNumber(float: Float(videoSize.height))
    }
    var videoOutputSettings: [String: NSObject] {
        return [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: videoWidth,
            AVVideoHeightKey: videoHeight
        ]
    }
    var sourcePixelBufferAttributes: [String: NSObject] {
        return [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32BGRA),
            kCVPixelBufferWidthKey as String: videoWidth,
            kCVPixelBufferHeightKey as String: videoHeight
        ]
    }

    var videoInput: AVAssetWriterInput!
    var writerInputAdapter: AVAssetWriterInputPixelBufferAdaptor!
    let render: YMCompositionRender

    // Creating a YMVideoWriter removes the file at outputFileURL if it already exists.
    init(outputFileURL: NSURL, render: YMCompositionRender, videoSize: CGSize = CGSize(width: 640.0, height: 640.0)) {
        self.render = render
        self.videoSize = videoSize
        glContext = EAGLContext(API: .OpenGLES2)
        ciContext = CIContext(EAGLContext: glContext)
        writer = YMVideoWriter.setupWriter(outputFileURL)
        videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoOutputSettings)
        writer.addInput(videoInput)
        writerInputAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributes)
        writer.startWriting()
        writer.startSessionAtSourceTime(kCMTimeZero)
    }
    // Closes the session at the last written frame's timestamp, then finalizes the file.
    private func finishWriting(completion: () -> ()) {
        videoInput.markAsFinished()
        writer.endSessionAtSourceTime(lastTime)
        writer.finishWritingWithCompletionHandler(completion)
    }

    private var lastTime: CMTime = kCMTimeZero
    private var inputQueue = dispatch_queue_create("writequeue.kaipai.tv", DISPATCH_QUEUE_SERIAL)

    // Renders the image into a pooled pixel buffer; may fail if the pool has no free buffers.
    private func write(image: YMImage, withPresentationTime time: CMTime) {
        lastTime = time
        println("write image at time \(CMTimeGetSeconds(time))")
        var unmanaged: Unmanaged<CVPixelBufferRef>?
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, writerInputAdapter.pixelBufferPool, &unmanaged)
        let pxbuffer = unmanaged!.takeRetainedValue()
        ciContext.render(image, toCVPixelBuffer: pxbuffer)
        writerInputAdapter.appendPixelBuffer(pxbuffer, withPresentationTime: time)
    }
    // Pulls frames from the render as fast as the writer input can accept them.
    func startRender() {
        videoInput.requestMediaDataWhenReadyOnQueue(inputQueue, usingBlock: { [unowned self] () -> Void in
            while self.videoInput.readyForMoreMediaData {
                if let (image, time) = self.render.next() {
                    self.write(image, withPresentationTime: time)
                } else {
                    self.finishWriting({ () -> () in
                        println("finish writing")
                    })
                    break
                }
            }
        })
    }
}
// Combines a frame reader with a filter chain and assigns presentation timestamps.
final class YMCompositionRender {
    let filters: [YMFilter]
    let reader: YMVideoSeqReader
    var frameCount = 0

    init(asset: AVAsset, filters: [YMFilter]) {
        self.filters = filters
        self.reader = YMVideoSeqReader(asset: asset)
    }

    // Returns the next filtered frame, timestamped at frameCount / nominalFrameRate
    // seconds on a 600 * frameRate timescale.
    func next() -> (YMImage, CMTime)? {
        if let frame = reader.next() {
            let frameRate = reader.nominalFrameRate
            let presentationTime = CMTimeMake(Int64(frameCount * 600), Int32(600 * frameRate))
            let image = frame.filterWith(filters)
            frameCount += 1
            return (image, presentationTime)
        }
        return nil
    }
}
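
// End-to-end sketch (not from the original gist): `demoTranscode`, `inputURL`,
// and `outputURL` are hypothetical. It reads a video, runs every frame through
// SampleSticky, and re-encodes the result as H.264 MPEG-4. startRender() is
// asynchronous, so keep the returned writer alive until "finish writing" is logged.
func demoTranscode(inputURL: NSURL, outputURL: NSURL) -> YMVideoWriter {
    let asset = AVURLAsset(URL: inputURL, options: nil)
    let render = YMCompositionRender(asset: asset, filters: [SampleSticky()])
    let writer = YMVideoWriter(outputFileURL: outputURL, render: render)
    writer.startRender()
    return writer
}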