import SwiftUI
import Foundation
import CoreGraphics
import AVFoundation
import VideoToolbox
import ComposableArchitecture

struct ContentView: View {
  let store: Store<CameraState, CameraAction>

  var body: some View {
    WithViewStore(self.store) { viewStore in
      if let image = viewStore.feed {
        GeometryReader { geometry in
          Image(decorative: image, scale: 1.0, orientation: .upMirrored)
            .resizable()
            .scaledToFill()
            .frame(
              width: geometry.size.width,
              height: geometry.size.height,
              alignment: .center
            )
            .clipped()
        }
      } else {
        Color.black.onAppear {
          viewStore.send(.open)
        }
      }
    }
  }
}

public struct CameraState: Equatable {
  var feed: CGImage?
  var isRecording: Bool
}

enum CameraAction: Equatable {
  case open
  case start
  case receive(CGImage)
  case authorizationResponse(AVAuthorizationStatus)
}

struct CameraClient {
  var requestAuthorization: @Sendable () async -> AVAuthorizationStatus
  var startFeed: @Sendable (AVCaptureSession, AVCaptureVideoDataOutput, DispatchQueue) async -> AsyncStream<CGImage>
}

// Bridges AVCaptureVideoDataOutput's delegate callbacks into an AsyncStream of CGImages.
private final class Delegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
  let continuation: AsyncStream<CGImage>.Continuation

  init(continuation: AsyncStream<CGImage>.Continuation) {
    self.continuation = continuation
  }

  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let imageBuffer = sampleBuffer.imageBuffer else { return }
    var image: CGImage?
    VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &image)
    if let image = image {
      self.continuation.yield(image)
    }
  }
}

private final actor Camera {
  var delegate: Delegate?

  func startFeed(_ session: AVCaptureSession, _ output: AVCaptureVideoDataOutput, _ queue: DispatchQueue) async -> AsyncStream<CGImage> {
    // The stream's build closure runs immediately, so `self.delegate` is set before
    // the deferred block registers it as the output's sample buffer delegate.
    defer {
      session.beginConfiguration()
      output.setSampleBufferDelegate(self.delegate, queue: queue)
      session.commitConfiguration()
    }
    return AsyncStream<CGImage>(bufferingPolicy: .bufferingNewest(1)) { continuation in
      self.delegate = Delegate(continuation: continuation)
    }
  }
}

extension CameraClient {
  static var live: Self {
    let camera = Camera()
    return Self(
      requestAuthorization: {
        // Prompt for access on first launch; otherwise report the existing status.
        // Only reading `authorizationStatus` would leave the app stuck at `.notDetermined`.
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
          return await AVCaptureDevice.requestAccess(for: .video) ? .authorized : .denied
        case let status:
          return status
        }
      },
      startFeed: { session, output, queue in
        await camera.startFeed(session, output, queue)
      }
    )
  }
}
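
// A hypothetical test/preview value (not part of the original gist): it reports
// authorization immediately and yields no frames, so SwiftUI previews and reducer
// tests can run without touching real capture hardware. The name `preview` and its
// behavior are assumptions, not an API of ComposableArchitecture or AVFoundation.
extension CameraClient {
  static let preview = Self(
    requestAuthorization: { .authorized },
    startFeed: { _, _, _ in
      AsyncStream<CGImage> { continuation in
        continuation.finish()
      }
    }
  )
}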

struct CameraEnvironment {
  var cameraClient: CameraClient
  var session = AVCaptureSession()
  var sessionQueue = DispatchQueue(label: "com.demo.camera", qos: .userInitiated, autoreleaseFrequency: .workItem)
  var videoOutput = AVCaptureVideoDataOutput()
}

let reducer = Reducer<CameraState, CameraAction, CameraEnvironment> { state, action, environment in
  switch action {
  case .open:
    return .run { send in
      let status = await environment.cameraClient.requestAuthorization()
      await send(.authorizationResponse(status))
      guard status == .authorized else {
        return
      }
      await send(.start)
      for await frame in await environment.cameraClient.startFeed(environment.session, environment.videoOutput, environment.sessionQueue) {
        await send(.receive(frame))
      }
    }

  case .start:
    return .fireAndForget {
      // Configure the capture session on its dedicated queue, off the main thread.
      environment.sessionQueue.async {
        environment.session.beginConfiguration()
        defer {
          environment.session.commitConfiguration()
          environment.session.startRunning()
        }
        let device = AVCaptureDevice.default(
          .builtInWideAngleCamera,
          for: .video,
          position: .back
        )
        guard let camera = device else {
          // TODO: Handle error
          fatalError()
        }
        do {
          let cameraInput = try AVCaptureDeviceInput(device: camera)
          if environment.session.canAddInput(cameraInput) {
            environment.session.addInput(cameraInput)
          } else {
            // TODO: Handle error
            fatalError()
          }
        } catch {
          // TODO: Handle error
          fatalError()
        }
        if environment.session.canAddOutput(environment.videoOutput) {
          environment.session.addOutput(environment.videoOutput)
          environment.videoOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
          ]
          let videoConnection = environment.videoOutput.connection(with: .video)
          videoConnection?.videoOrientation = .portrait
        } else {
          // TODO: Handle error
          fatalError()
        }
      }
    }

  case .receive(let frame):
    state.feed = frame
    // Buffer is not being released.
    return .none

  case .authorizationResponse(let status):
    // TODO: Handle response
    switch status {
    default:
      return .none
    }
  }
}
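
// A minimal entry point sketch (not part of the original gist), assuming the types
// above are compiled into a single app target; the name `CameraDemoApp` is made up.
// The app's Info.plist also needs an NSCameraUsageDescription entry, or iOS will
// terminate the process the first time the capture session is started.
@main
struct CameraDemoApp: App {
  var body: some Scene {
    WindowGroup {
      ContentView(
        store: Store(
          initialState: CameraState(feed: nil, isRecording: false),
          reducer: reducer,
          environment: CameraEnvironment(cameraClient: .live)
        )
      )
    }
  }
}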