import UIKit
import AVFoundation

class ViewController: UIViewController {
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    self._setupCaptureSession()
                }
            }
        case .restricted:
            break
        case .denied:
            break
        case .authorized:
            _setupCaptureSession()
        @unknown default:
            break
        }
    }

    private var _captureSession: AVCaptureSession?
    private var _videoOutput: AVCaptureVideoDataOutput?
    private var _assetWriter: AVAssetWriter?
    private var _assetWriterInput: AVAssetWriterInput?
    private var _adapter: AVAssetWriterInputPixelBufferAdaptor?
    private var _filename = ""
    private var _time: Double = 0

    private func _setupCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .hd1920x1080
        guard
            let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .unspecified),
            let input = try? AVCaptureDeviceInput(device: device),
            session.canAddInput(input) else { return }
        session.beginConfiguration()
        session.addInput(input)
        session.commitConfiguration()

        let output = AVCaptureVideoDataOutput()
        guard session.canAddOutput(output) else { return }
        // Sample buffers are delivered on this serial queue, not the main thread.
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "com.yusuke024.video"))
        session.beginConfiguration()
        session.addOutput(output)
        session.commitConfiguration()

        DispatchQueue.main.async {
            let previewView = _PreviewView()
            previewView.videoPreviewLayer.session = session
            previewView.frame = self.view.bounds
            previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
            self.view.insertSubview(previewView, at: 0)
        }
        session.startRunning()
        _videoOutput = output
        _captureSession = session
    }

    private enum _CaptureState {
        case idle, start, capturing, end
    }
    private var _captureState = _CaptureState.idle

    @IBAction func capture(_ sender: Any) {
        // Toggle recording; the writer setup/teardown happens in the
        // sample-buffer delegate so it runs on the capture queue.
        switch _captureState {
        case .idle:
            _captureState = .start
        case .capturing:
            _captureState = .end
        default:
            break
        }
    }
}

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
        switch _captureState {
        case .start:
            // Set up recorder
            _filename = UUID().uuidString
            let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mov")
            let writer = try! AVAssetWriter(outputURL: videoPath, fileType: .mov)
            let settings = _videoOutput!.recommendedVideoSettingsForAssetWriter(writingTo: .mov)
            let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings) // e.g. [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: 1920, AVVideoHeightKey: 1080]
            input.mediaTimeScale = 600
            input.expectsMediaDataInRealTime = true
            input.transform = CGAffineTransform(rotationAngle: .pi / 2)
            let adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: nil)
            if writer.canAdd(input) {
                writer.add(input)
            }
            writer.startWriting()
            writer.startSession(atSourceTime: .zero)
            _assetWriter = writer
            _assetWriterInput = input
            _adapter = adapter
            _captureState = .capturing
            // Remember the first frame's timestamp so appended frames start at zero.
            _time = timestamp
        case .capturing:
            // Append each frame, offset so the file's timeline starts at zero.
            if _assetWriterInput?.isReadyForMoreMediaData == true {
                let time = CMTime(seconds: timestamp - _time, preferredTimescale: CMTimeScale(600))
                _adapter?.append(CMSampleBufferGetImageBuffer(sampleBuffer)!, withPresentationTime: time)
            }
        case .end:
            guard _assetWriterInput?.isReadyForMoreMediaData == true, _assetWriter!.status != .failed else { break }
            let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mov")
            _assetWriterInput?.markAsFinished()
            _assetWriter?.finishWriting { [weak self] in
                self?._captureState = .idle
                self?._assetWriter = nil
                self?._assetWriterInput = nil
                DispatchQueue.main.async {
                    let activity = UIActivityViewController(activityItems: [url], applicationActivities: nil)
                    self?.present(activity, animated: true, completion: nil)
                }
            }
        default:
            break
        }
    }
}

// Backing the view with AVCaptureVideoPreviewLayer lets the session render
// straight into it.
private class _PreviewView: UIView {
    override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }
}
The video begins with a 1.2-second delay.
I had an even longer delay, so I moved everything related to recording into a class, initialized that class asynchronously on a different thread, and started writing new frames from the instant the object was ready. Note that in this approach two different delegate calls might be using the shared state variables at once, because the delegate fires around 30-60 calls per second (directly related to the frame rate).
I had no way around it, because I was using Metal to render the content on screen and I needed at least one texture to initialize the class, so I had to do it that way.
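For what it's worth, here is a minimal sketch of guarding that shared state with a serial queue (the type and queue label are hypothetical, not from the gist):

import Foundation

// The record button (main thread) and the sample-buffer delegate
// (capture queue) both touch the capture state, so funnel every
// read/write through one serial queue.
final class CaptureStateBox {
    enum State { case idle, start, capturing, end }
    private let queue = DispatchQueue(label: "recorder.state") // hypothetical label
    private var state: State = .idle

    func read() -> State { queue.sync { state } }
    func write(_ newState: State) { queue.sync { state = newState } }
}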
Yes, initializing the classes asynchronously on a custom high-priority queue boosts startup speed.
I also found an odd hack to combat this; I don't know why it works, but I'm getting startup times of about 0.1 seconds.
Adding an audio input is what makes this happen. I wonder why.
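In terms of the gist's _setupCaptureSession(), that hack would look something like the fragment below (an assumption about what the comment means, not code from the thread):

// Reported hack: also attach the microphone as a session input.
// Requires NSMicrophoneUsageDescription in Info.plist.
if let mic = AVCaptureDevice.default(for: .audio),
   let micInput = try? AVCaptureDeviceInput(device: mic),
   session.canAddInput(micInput) {
    session.beginConfiguration()
    session.addInput(micInput)
    session.commitConfiguration()
}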
Hi, could you please share the code?
Was anybody able to find a solution for this? I am currently working on integrating timestamp labels into the live capture (video output).
Can you post your exact issue and your code attempts? I can help you if you provide the specifics.
I am using captureOutput(_:didOutput:from:) to capture video. I am trying to add the timestamp to the buffer output from the above code.
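One possible approach (a sketch, not something from this thread): configure the data output to deliver BGRA frames and burn the timestamp into each pixel buffer before handing it to the adapter. The helper below is hypothetical.

// Requires the data output to deliver BGRA frames, e.g.:
// output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String:
//                         kCVPixelFormatType_32BGRA]
private func drawTimestamp(on pixelBuffer: CVPixelBuffer, seconds: Double) {
    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }
    let height = CVPixelBufferGetHeight(pixelBuffer)
    guard let context = CGContext(
        data: CVPixelBufferGetBaseAddress(pixelBuffer),
        width: CVPixelBufferGetWidth(pixelBuffer),
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
            | CGBitmapInfo.byteOrder32Little.rawValue) else { return }
    // Flip into UIKit's coordinate space so text draws right side up.
    context.translateBy(x: 0, y: CGFloat(height))
    context.scaleBy(x: 1, y: -1)
    UIGraphicsPushContext(context)
    defer { UIGraphicsPopContext() }
    let label = String(format: "%.2f s", seconds) as NSString
    label.draw(at: CGPoint(x: 20, y: 20), withAttributes: [
        .font: UIFont.monospacedDigitSystemFont(ofSize: 48, weight: .bold),
        .foregroundColor: UIColor.white
    ])
}

You would call it from captureOutput(_:didOutput:from:) right before appending, e.g. drawTimestamp(on: CMSampleBufferGetImageBuffer(sampleBuffer)!, seconds: timestamp - _time).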
How do you guys manage audio recording with a microphone using AVAssetWriter?
I too am unable to output any audio, and as a new programmer I'm admittedly lost on the proper implementation. Any guidance would be extremely appreciated.
Following the above example, I have a front-facing live-preview camera that starts/ends recording, with an applied CIFilter, on a button tap (i.e. Instagram Stories).
Added my Stack Overflow post for reference to the applicable code.
Using the delegate method captureOutput(_:didOutput:from:) you get two types of output; you can check whether a given buffer is audio data or video data. The asset writer takes both audio and video inputs, so provide the respective input for each, and it usually takes a little experimentation to get it working. Don't use a preview screen if you want to do it this way; try a Metal renderer to get the finest control.
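A minimal sketch of that routing, assuming the session already has an AVCaptureVideoDataOutput and an AVCaptureAudioDataOutput delivering to the same delegate queue (the Recorder class and its method names are illustrative, not from the gist):

import AVFoundation

final class Recorder: NSObject,
    AVCaptureVideoDataOutputSampleBufferDelegate,
    AVCaptureAudioDataOutputSampleBufferDelegate {

    private var writer: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var sessionStarted = false

    func start(url: URL, videoOutput: AVCaptureVideoDataOutput) throws {
        let writer = try AVAssetWriter(outputURL: url, fileType: .mov)

        let video = AVAssetWriterInput(mediaType: .video,
            outputSettings: videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov))
        video.expectsMediaDataInRealTime = true

        // Explicit AAC settings; AVCaptureAudioDataOutput's
        // recommendedAudioSettingsForAssetWriter(writingTo:) is an alternative.
        let audio = AVAssetWriterInput(mediaType: .audio, outputSettings: [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44_100
        ])
        audio.expectsMediaDataInRealTime = true

        if writer.canAdd(video) { writer.add(video) }
        if writer.canAdd(audio) { writer.add(audio) }
        writer.startWriting()

        self.writer = writer
        videoInput = video
        audioInput = audio
    }

    // One method satisfies both the video and the audio delegate protocols.
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let writer = writer, writer.status == .writing else { return }
        if !sessionStarted {
            // Start the file's timeline at the first buffer of either type.
            writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
            sessionStarted = true
        }
        // Route the buffer to the matching input.
        if output is AVCaptureVideoDataOutput, videoInput?.isReadyForMoreMediaData == true {
            videoInput?.append(sampleBuffer)
        } else if output is AVCaptureAudioDataOutput, audioInput?.isReadyForMoreMediaData == true {
            audioInput?.append(sampleBuffer)
        }
    }

    func finish(completion: @escaping () -> Void) {
        videoInput?.markAsFinished()
        audioInput?.markAsFinished()
        writer?.finishWriting(completionHandler: completion)
    }
}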
Hi,
I want to add two AVAssetWriterInputs for two different videos, to record both videos on the same screen: one video on top and the other below it. Is there any way to record like this using AVAssetWriter?
Please help.
Yes, it is possible. You will have to play around with CALayer in video compositing and add all the audio tracks you want to export with; make sure to specify no audio if no tracks are involved. I am kind of busy right now, otherwise I would have attached sample code for you. You can add multiple CALayers on the base video layer and they will all be baked into a single video.
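As a rough illustration (a sketch under the assumption that both clips already exist on disk; every name here is hypothetical), two clips can be stacked top/bottom with AVMutableVideoComposition layer instructions and exported as one file:

import AVFoundation

// Stack two clips vertically into one video. Audio tracks are omitted for
// brevity, matching the "specify no audio if no tracks" advice above.
func stackVideos(topURL: URL, bottomURL: URL, outputURL: URL,
                 completion: @escaping () -> Void) {
    let composition = AVMutableComposition()
    let renderSize = CGSize(width: 1080, height: 1920) // two 1080x960 halves
    var layerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
    var duration = CMTime.zero

    for (index, asset) in [AVURLAsset(url: topURL), AVURLAsset(url: bottomURL)].enumerated() {
        guard let source = asset.tracks(withMediaType: .video).first,
              let track = composition.addMutableTrack(withMediaType: .video,
                  preferredTrackID: kCMPersistentTrackID_Invalid) else { continue }
        try? track.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration),
                                   of: source, at: .zero)
        duration = max(duration, asset.duration)

        // Scale each clip into its half of the frame; the second clip is
        // shifted down into the lower half.
        let size = source.naturalSize
        let scale = min(renderSize.width / size.width, (renderSize.height / 2) / size.height)
        var transform = CGAffineTransform(scaleX: scale, y: scale)
        if index == 1 {
            transform = transform.concatenating(
                CGAffineTransform(translationX: 0, y: renderSize.height / 2))
        }
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        layerInstruction.setTransform(transform, at: .zero)
        layerInstructions.append(layerInstruction)
    }

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: duration)
    instruction.layerInstructions = layerInstructions

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = renderSize
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.instructions = [instruction]

    guard let export = AVAssetExportSession(asset: composition,
        presetName: AVAssetExportPresetHighestQuality) else { return }
    export.outputURL = outputURL
    export.outputFileType = .mov
    export.videoComposition = videoComposition
    export.exportAsynchronously(completionHandler: completion)
}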
I am using AVKit's VideoPlayer and passing in the URL to display the video; however, the video is just black. Any ideas why? Thank you so much for the help!
Check a few things:
If it's a network URL, ensure it plays in a browser (corrupted video encoding can be a reason too).
If it's a local URL, ensure the file extension is correct.
If both are correct, check your view hierarchy, and fix your constraints or resizing masks just in case.
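For the view-hierarchy check, a minimal known-good playback setup (a sketch; url would be the "<uuid>.mov" file the gist writes to the Documents directory):

import AVKit
import UIKit

// Present a stock player for a local file; if this plays, the recording is
// fine and the problem is in your own player view setup.
func playLocalVideo(at url: URL, from presenter: UIViewController) {
    let controller = AVPlayerViewController()
    controller.player = AVPlayer(url: url)
    presenter.present(controller, animated: true) {
        controller.player?.play()
    }
}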
Thank you for the speedy reply! It turns out it was just an issue with the video settings. Thanks again!
This code is getting 6 to 7 frames per second; I want 30 fps, i.e. captureOutput(_:didOutput:from:) should get called 30 times per second. Is there any way?
I guess the device has issues with the frame rate, or you might have configured a preset that is too heavy to process that many frames at once. The delegate is called directly from the AVCaptureSession.
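One standard knob worth checking (an assumption about the cause, not something confirmed in this thread) is the device's active frame duration:

import AVFoundation

// Pin the capture device to a fixed frame rate; call after the session is
// configured. `device` is the AVCaptureDevice the gist already unwraps.
func lockFrameRate(of device: AVCaptureDevice, to fps: Int32) {
    do {
        try device.lockForConfiguration()
        device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: fps)
        device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: fps)
        device.unlockForConfiguration()
    } catch {
        print("Could not lock device for configuration: \(error)")
    }
}

Also keep the work done inside the delegate light: late frames are dropped when alwaysDiscardsLateVideoFrames is true, which is the default.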
Hey, thanks for sharing that code.
The initialization takes ages for me; it introduces up to 5 seconds of delay on an iPhone 11 Pro (in debug mode). In release it's a bit faster, but still far from what the default movie output can achieve.
Any tips?
Same issue. @mrousavy, did you find a solution?