Swift 3 | macOS | Write NSImage(s) to a movie file. Modified from http://stackoverflow.com/a/36297656/1275125
import AppKit
import AVFoundation

class MovieWriter: NSObject {
    func writeImagesAsMovie(_ allImages: [NSImage], videoPath: String, videoSize: CGSize, videoFPS: Int32) {
        // Create AVAssetWriter to write video
        guard let assetWriter = createAssetWriter(videoPath, size: videoSize) else {
            print("Error converting images to video: AVAssetWriter not created")
            return
        }

        // If here, AVAssetWriter exists, so create AVAssetWriterInputPixelBufferAdaptor
        let writerInput = assetWriter.inputs.filter { $0.mediaType == AVMediaTypeVideo }.first!
        let sourceBufferAttributes: [String: AnyObject] = [
            kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB) as AnyObject,
            kCVPixelBufferWidthKey as String: videoSize.width as AnyObject,
            kCVPixelBufferHeightKey as String: videoSize.height as AnyObject,
        ]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourceBufferAttributes)

        // Start writing session
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: kCMTimeZero)
        if pixelBufferAdaptor.pixelBufferPool == nil {
            print("Error converting images to video: pixelBufferPool nil after starting session")
            return
        }

        // -- Create queue for <requestMediaDataWhenReadyOnQueue>
        let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])

        // -- Set video parameters
        let frameDuration = CMTimeMake(1, videoFPS)
        var frameCount = 0

        // -- Add images to video
        let numImages = allImages.count
        writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
            // Append images to the video, but only while the input is ready for more data
            while writerInput.isReadyForMoreMediaData && frameCount < numImages {
                let lastFrameTime = CMTimeMake(Int64(frameCount), videoFPS)
                let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

                if !self.appendPixelBufferForImageAtURL(allImages[frameCount], pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                    print("Error converting images to video: AVAssetWriterInputPixelBufferAdaptor failed to append pixel buffer")
                    return
                }

                frameCount += 1
            }

            // No more images to add? End the video.
            if frameCount >= numImages {
                writerInput.markAsFinished()
                assetWriter.finishWriting {
                    if assetWriter.error != nil {
                        print("Error converting images to video: \(assetWriter.error)")
                    } else {
                        print("Converted images to movie @ \(videoPath)")
                    }
                }
            }
        })
    }
    func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
        // Convert <path> to a file URL
        let pathURL = URL(fileURLWithPath: path)

        // Return a new asset writer, or nil on failure
        do {
            // Create asset writer
            let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileTypeMPEG4)

            // Define settings for video input
            let videoSettings: [String: AnyObject] = [
                AVVideoCodecKey: AVVideoCodecH264 as AnyObject,
                AVVideoWidthKey: size.width as AnyObject,
                AVVideoHeightKey: size.height as AnyObject,
            ]

            // Add video input to writer
            let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            newWriter.add(assetWriterVideoInput)

            // Return writer
            print("Created asset writer for \(size.width)x\(size.height) video")
            return newWriter
        } catch {
            print("Error creating asset writer: \(error)")
            return nil
        }
    }
    func appendPixelBufferForImageAtURL(_ image: NSImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
        var appendSucceeded = false

        autoreleasepool {
            if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
                let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity: 1)
                let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
                    kCFAllocatorDefault,
                    pixelBufferPool,
                    pixelBufferPointer
                )

                if let pixelBuffer = pixelBufferPointer.pointee, status == 0 {
                    fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)
                    appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    pixelBufferPointer.deinitialize()
                } else {
                    NSLog("Error: Failed to allocate pixel buffer from pool")
                }

                pixelBufferPointer.deallocate(capacity: 1)
            }
        }

        return appendSucceeded
    }
    func fillPixelBufferFromImage(_ image: NSImage, pixelBuffer: CVPixelBuffer) {
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

        // Create CGBitmapContext backed by the pixel buffer's memory
        let context = CGContext(
            data: pixelData,
            width: Int(image.size.width),
            height: Int(image.size.height),
            bitsPerComponent: 8,
            bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
            space: rgbColorSpace,
            bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
        )!

        // Draw image into context
        let drawCGRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
        var drawRect = NSRectFromCGRect(drawCGRect)
        let cgImage = image.cgImage(forProposedRect: &drawRect, context: nil, hints: nil)!
        context.draw(cgImage, in: CGRect(x: 0.0, y: 0.0, width: image.size.width, height: image.size.height))

        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
    }
}
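For reference, a minimal usage sketch (not part of the gist): the frame array, output path, and 30 fps rate below are placeholder values, and writing finishes asynchronously in the requestMediaDataWhenReady callback, so keep the process alive until the completion message is printed.

// Sketch only; assumes the MovieWriter class above is in scope.
let frames: [NSImage] = []   // fill with the NSImages you want to encode
let outputPath = NSTemporaryDirectory() + "output.mp4"
let writer = MovieWriter()
writer.writeImagesAsMovie(frames, videoPath: outputPath, videoSize: CGSize(width: 1280, height: 720), videoFPS: 30)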
Fixed it by following this answer: https://stackoverflow.com/questions/44192861/uiimage-to-cvpixelbuffer-memory-issue
For your code, I changed the UIImage array parameter to a Data array, and only convert each Data to a UIImage inside the fillPixelBufferFromImage method. That way each image is released immediately after the method is called.
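Roughly, the commenter's idea looks like the sketch below. This is not part of the original gist: the commenter works with UIImage on iOS, so it is translated here to this gist's NSImage, and the Data-based signatures are assumed modifications of the methods above.

// Sketch of the commenter's fix: keep frames as Data and only decode an image
// inside fillPixelBufferFromImage, so the decoded image is released right away.
func fillPixelBufferFromImage(_ imageData: Data, pixelBuffer: CVPixelBuffer) {
    autoreleasepool {
        guard let image = NSImage(data: imageData) else { return }
        // ... lock the buffer and draw `image` exactly as in the original method ...
    }
}
// writeImagesAsMovie(_:videoPath:videoSize:videoFPS:) would then take [Data] instead of
// [NSImage] and pass allImages[frameCount] (now Data) down to appendPixelBufferForImageAtURL.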
I got this error: "pixelBufferPool nil after starting session".
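(Not confirmed in this thread, but one common cause of a nil pixelBufferPool is that startWriting() fails, for example because a file already exists at videoPath, which AVAssetWriter will not overwrite. A sketch of how to narrow it down, using the same names as the gist:)

// Sketch: remove any stale output file, then surface the writer's own error if the pool is still nil.
try? FileManager.default.removeItem(atPath: videoPath)
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: kCMTimeZero)
if pixelBufferAdaptor.pixelBufferPool == nil {
    print("pixelBufferPool nil; writer status: \(assetWriter.status.rawValue), error: \(String(describing: assetWriter.error))")
    return
}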
Did you get a "Cannot allocate memory" error when converting a lot of images? Thanks.