import Foundation
import AVFoundation
import ReplayKit
import UIKit

class RPScreenWriter {
    // Write video
    var videoOutputURL: URL
    var videoWriter: AVAssetWriter?
    var videoInput: AVAssetWriterInput?
    // Write audio
    var audioOutputURL: URL
    var audioWriter: AVAssetWriter?
    var micAudioInput: AVAssetWriterInput?
    var appAudioInput: AVAssetWriterInput?

    var isVideoWritingFinished = false
    var isAudioWritingFinished = false
    var isPaused: Bool = false

    var sessionStartTime: CMTime = .zero
    var currentTime: CMTime = .zero {
        didSet {
            print("currentTime => \(currentTime.seconds)")
            didUpdateSeconds?(currentTime.seconds)
        }
    }
    var didUpdateSeconds: ((Double) -> ())?

    init() {
        let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
        self.videoOutputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("RPScreenWriterVideo.mp4"))
        self.audioOutputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("RPScreenWriterAudio.mp4"))
        removeURLsIfNeeded()
    }
    func removeURLsIfNeeded() {
        do {
            try FileManager.default.removeItem(at: self.videoOutputURL)
            try FileManager.default.removeItem(at: self.audioOutputURL)
        } catch {}
    }
    func setUpWriter() {
        do {
            videoWriter = try AVAssetWriter(outputURL: self.videoOutputURL, fileType: .mp4)
        } catch let writerError as NSError {
            print("Error opening video file \(writerError)")
        }
        let videoSettings = [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
            AVVideoWidthKey : UIScreen.main.bounds.width * 2,
            AVVideoHeightKey : (UIScreen.main.bounds.height - (UIApplication.shared.statusBarFrame.height + 80) * 2) * 2
        ] as [String : Any]
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        if let videoInput = self.videoInput,
           let canAddInput = videoWriter?.canAdd(videoInput),
           canAddInput {
            videoWriter?.add(videoInput)
        } else {
            print("couldn't add video input")
        }
        do {
            audioWriter = try AVAssetWriter(outputURL: self.audioOutputURL, fileType: .mp4)
        } catch let writerError as NSError {
            print("Error opening audio file \(writerError)")
        }
        // Note: 6-channel AAC-HE is rejected by the encoder on some devices and
        // throws NSInvalidArgumentException at AVAssetWriterInput init (see the
        // comments below); a stereo kAudioFormatMPEG4AAC configuration is safer.
        var channelLayout = AudioChannelLayout()
        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
        let audioOutputSettings = [
            AVNumberOfChannelsKey : 6,
            AVFormatIDKey : kAudioFormatMPEG4AAC_HE,
            AVSampleRateKey : 44100,
            AVChannelLayoutKey : NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout))
        ] as [String : Any]
        appAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        if let appAudioInput = self.appAudioInput,
           let canAddInput = audioWriter?.canAdd(appAudioInput),
           canAddInput {
            audioWriter?.add(appAudioInput)
        } else {
            print("couldn't add app audio input")
        }
        micAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        if let micAudioInput = self.micAudioInput,
           let canAddInput = audioWriter?.canAdd(micAudioInput),
           canAddInput {
            audioWriter?.add(micAudioInput)
        } else {
            print("couldn't add mic audio input")
        }
    }
    func writeBuffer(_ cmSampleBuffer: CMSampleBuffer, rpSampleType: RPSampleBufferType) {
        if self.videoWriter == nil {
            self.setUpWriter()
        }
        guard let videoWriter = self.videoWriter,
              let audioWriter = self.audioWriter,
              !isPaused else {
            return
        }
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(cmSampleBuffer)
        switch rpSampleType {
        case .video:
            if videoWriter.status == .unknown {
                // Start the session at the timestamp of the first video buffer.
                if videoWriter.startWriting() {
                    print("video writing started")
                    self.sessionStartTime = presentationTimeStamp
                    videoWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if videoWriter.status == .writing {
                if let isReadyForMoreMediaData = videoInput?.isReadyForMoreMediaData,
                   isReadyForMoreMediaData {
                    // Elapsed time relative to the session start drives didUpdateSeconds.
                    self.currentTime = CMTimeSubtract(presentationTimeStamp, self.sessionStartTime)
                    if let appendInput = videoInput?.append(cmSampleBuffer),
                       !appendInput {
                        print("couldn't write video buffer")
                    }
                }
            }
        case .audioApp:
            if audioWriter.status == .unknown {
                if audioWriter.startWriting() {
                    print("audio writing started")
                    audioWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if audioWriter.status == .writing {
                if let isReadyForMoreMediaData = appAudioInput?.isReadyForMoreMediaData,
                   isReadyForMoreMediaData {
                    if let appendInput = appAudioInput?.append(cmSampleBuffer),
                       !appendInput {
                        print("couldn't write app audio buffer")
                    }
                }
            }
        case .audioMic:
            if audioWriter.status == .unknown {
                if audioWriter.startWriting() {
                    print("audio writing started")
                    audioWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if audioWriter.status == .writing {
                if let isReadyForMoreMediaData = micAudioInput?.isReadyForMoreMediaData,
                   isReadyForMoreMediaData {
                    if let appendInput = micAudioInput?.append(cmSampleBuffer),
                       !appendInput {
                        print("couldn't write mic audio buffer")
                    }
                }
            }
        @unknown default:
            break
        }
    }
    func finishWriting(completionHandler handler: @escaping (URL?, Error?) -> Void) {
        // Combines the separate video and audio files into a single .mp4.
        func merge() {
            let mergeComposition = AVMutableComposition()
            let videoAsset = AVAsset(url: self.videoOutputURL)
            let videoTracks = videoAsset.tracks(withMediaType: .video)
            print(videoAsset.duration.seconds)
            guard let videoTrack = videoTracks.first else {
                // No video track was written; nothing to merge.
                removeURLsIfNeeded()
                handler(nil, nil)
                return
            }
            let videoCompositionTrack = mergeComposition.addMutableTrack(withMediaType: .video,
                                                                         preferredTrackID: kCMPersistentTrackID_Invalid)
            do {
                try videoCompositionTrack?.insertTimeRange(CMTimeRange(start: .zero, end: videoAsset.duration),
                                                           of: videoTrack,
                                                           at: .zero)
            } catch let error {
                removeURLsIfNeeded()
                handler(nil, error)
                return
            }
            videoCompositionTrack?.preferredTransform = videoTrack.preferredTransform
            let audioAsset = AVAsset(url: self.audioOutputURL)
            let audioTracks = audioAsset.tracks(withMediaType: .audio)
            print(audioAsset.duration.seconds)
            for audioTrack in audioTracks {
                let audioCompositionTrack = mergeComposition.addMutableTrack(withMediaType: .audio,
                                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
                do {
                    try audioCompositionTrack?.insertTimeRange(CMTimeRange(start: .zero, end: audioAsset.duration),
                                                               of: audioTrack,
                                                               at: .zero)
                } catch let error {
                    print(error)
                }
            }
            let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
            let outputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("RPScreenWriterMergeVideo.mp4"))
            do {
                try FileManager.default.removeItem(at: outputURL)
            } catch {}
            let exportSession = AVAssetExportSession(asset: mergeComposition,
                                                     presetName: AVAssetExportPresetHighestQuality)
            exportSession?.outputFileType = .mp4
            exportSession?.shouldOptimizeForNetworkUse = true
            exportSession?.outputURL = outputURL
            exportSession?.exportAsynchronously {
                self.removeURLsIfNeeded()
                if let error = exportSession?.error {
                    handler(nil, error)
                } else {
                    handler(exportSession?.outputURL, nil)
                }
            }
        }
        // Called after each writer finishes; merges once both are done.
        func completion() {
            if self.isVideoWritingFinished && self.isAudioWritingFinished {
                self.isVideoWritingFinished = false
                self.isAudioWritingFinished = false
                self.isPaused = false
                self.videoInput = nil
                self.videoWriter = nil
                self.appAudioInput = nil
                self.micAudioInput = nil
                self.audioWriter = nil
                merge()
            }
        }
        self.videoInput?.markAsFinished()
        self.videoWriter?.finishWriting {
            self.isVideoWritingFinished = true
            completion()
        }
        self.appAudioInput?.markAsFinished()
        self.micAudioInput?.markAsFinished()
        self.audioWriter?.finishWriting {
            self.isAudioWritingFinished = true
            completion()
        }
    }
}
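For anyone wiring this class up, here is a minimal usage sketch (not part of the original gist; the writer property and the error handling are illustrative only) feeding ReplayKit's capture buffers straight into RPScreenWriter:

let writer = RPScreenWriter()

RPScreenRecorder.shared().isMicrophoneEnabled = true
RPScreenRecorder.shared().startCapture(handler: { sampleBuffer, sampleType, error in
    guard error == nil else { return }
    // Every captured buffer (video, app audio, mic audio) goes to the writer.
    writer.writeBuffer(sampleBuffer, rpSampleType: sampleType)
}, completionHandler: { error in
    if let error = error {
        print("capture failed to start: \(error)")
    }
})

// Later, to stop recording and produce the merged file:
RPScreenRecorder.shared().stopCapture { error in
    writer.finishWriting { url, error in
        if let url = url {
            print("merged recording at \(url)")
        }
    }
}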
Actually, sorry, but on reading takashi1975's response I see it has a solution that addresses my problem. I've left my issue posted in case it helps others. Again, Viraj...thanks for your gist - the only full solution posted that I could find. Cheers!
@julestburt What was the solution for you? The link above no longer works.
Apologies for the delay in responding. And sad to see that link isn't working. I will upload a gist shortly; it needs a little cleanup first. I modified the code because the L & R audio were recorded as separate tracks, which resulted in captured screens not sharing the microphone channel when played back on Android or Windows...
Will you be uploading the gist? Thanks.
@julestburt Can you tell me about your solution? I'm having the same issue. Thanks.
@julestburt, having the same issue as well. @T2Je, have you been able to find any solutions? Thanks
@spiresweet Actually, I never figured out why setting 6 channels with the kAudioFormatMPEG4AAC_HE format in the AVAssetWriterInput outputSettings causes the crash.
I ran into this problem while doing video compression, and I never found a way to handle the 6-channel audio directly. Instead, I set the channel count to 2 and the format to kAudioFormatMPEG4AAC, and set the outputSettings of the AVAssetReaderTrackOutput to 2 channels as well, sidestepping the multi-channel problem entirely, and it no longer crashed.
Here is my video compressor project, hope it helps.
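For anyone reading along, T2Je's workaround amounts to replacing the 5.1 configuration in setUpWriter() with plain stereo AAC. A rough sketch (my untested reading of the comment above, with kAudioChannelLayoutTag_Stereo assumed as the matching layout tag):

var channelLayout = AudioChannelLayout()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
let audioOutputSettings = [
    AVNumberOfChannelsKey : 2,
    AVFormatIDKey : kAudioFormatMPEG4AAC,
    AVSampleRateKey : 44100,
    AVChannelLayoutKey : NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout))
] as [String : Any]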
Hello. I was implementing your gist - it seems to work very well, thank you. Yours seems to be the only full answer I found on how to record buffers directly from Apple's screen recorder. However, I've seen crashes on an iPad Air 2 (iOS 13) and an iPhone 11 (iOS 14). I wonder if you have any thoughts, as this code is very new to me? It runs on several other devices with no problem - iPhone5, iPhone 14, iPhoneX
The issue is a crash when setting up:

*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput initWithMediaType:outputSettings:sourceFormatHint:] 6 is not a valid channel count for Format ID 'aach'. Use kAudioFormatProperty_AvailableEncodeNumberChannels (<AudioToolbox/AudioFormat.h>) to enumerate available channel counts for a given format.'
*** First throw call stack:
(0x19a52a794 0x19a24cbcc 0x1a4756720 0x10086b6ac 0x10086b4fc 0x10086a038 0x10086b8e4 0x100843618 0x1008436c0 0x1c493f6f0 0x101b02338 0x101b03730 0x101b0a740 0x101b0b2e0 0x101b166c4 0x19a241b74 0x19a244740)
libc++abi.dylib: terminating with uncaught exception of type NSException
It happens when instantiating the AVAssetWriterInput, given this channel layout:
var channelLayout = AudioChannelLayout()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
Any thoughts/feedback would be much appreciated...Jules
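As the exception message suggests, the channel counts a device's encoder actually supports can be queried up front via kAudioFormatProperty_AvailableEncodeNumberChannels. A sketch of that check (untested; assumes the property's specifier is an AudioStreamBasicDescription with mFormatID filled in):

import AudioToolbox

var asbd = AudioStreamBasicDescription()
asbd.mFormatID = kAudioFormatMPEG4AAC_HE

// First ask how large the result array is, then fetch it.
var propSize: UInt32 = 0
AudioFormatGetPropertyInfo(kAudioFormatProperty_AvailableEncodeNumberChannels,
                           UInt32(MemoryLayout.size(ofValue: asbd)), &asbd, &propSize)

var channelCounts = [UInt32](repeating: 0, count: Int(propSize) / MemoryLayout<UInt32>.size)
AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
                       UInt32(MemoryLayout.size(ofValue: asbd)), &asbd, &propSize, &channelCounts)
print("channel counts supported for AAC-HE: \(channelCounts)")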