Live record and stream the audio buffer with a Combine PassthroughSubject
//
//  StreamRecorder.swift
//  pesos
//
//  Created by Gru-2019015 on 2022/08/26.
//

import AVFoundation
import Combine
import Foundation
extension AVAudioPCMBuffer {
    /// Copies the samples from channel 0 as raw little-endian Int16 bytes.
    func dataPCMInt16() -> Data {
        let channelCount = 1 // the converted PCM buffer is mono, so only channel 0 is read
        let channels = UnsafeBufferPointer(start: int16ChannelData, count: channelCount)
        // Use frameLength (frames actually written by the converter) rather than
        // frameCapacity, so stale samples past the converted output are not included.
        let ch0Data = NSData(bytes: channels[0], length: Int(frameLength * format.streamDescription.pointee.mBytesPerFrame))
        return ch0Data as Data
    }
}
// Refer to: https://stackoverflow.com/questions/42885443/ios-record-audio-and-split-it-into-files-in-real-time
class AudioStreamRecorder: ObservableObject {
    private var logger = newLogger(category: "stream-audio-engine")
    let engine: AVAudioEngine
    @Published var isRecording = false

    /// Converted PCM chunks are published here for downstream consumers.
    let bufferPublisher = PassthroughSubject<Data, Never>()

    var chunkFrames: AVAudioFrameCount = 0
    var chunkFileNumber: Int = 0
    var converter: AVAudioConverter?
    var convertBuffer: AVAudioPCMBuffer?

    // Keep these settings in sync with the backend, which uses the following configuration:
    //   sampleRate       = 48000
    //   sampleSizeInBits = 16
    //   channels         = 1
    //   frameSize        = 2
    //   frameRate        = 44100
    //   bigEndian        = false
    // AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 48000, 16, 1, 2, 44100, false);
    // AudioFormat(AudioFormat.Encoding encoding, float sampleRate, int sampleSizeInBits, int channels, int frameSize, float frameRate, boolean bigEndian)
    let targetFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: false)
    init() {
        self.engine = AVAudioEngine()
        setup()
    }

    func setup() {
        let input = engine.inputNode
        let bus = 0
        let inputFormat = input.inputFormat(forBus: bus)
        let bufferSize = 512
        // Tap the microphone input in its native format; conversion happens in the callback.
        input.installTap(onBus: bus, bufferSize: AVAudioFrameCount(bufferSize), format: inputFormat, block: recordCallback)
    }
    func recordCallback(buffer: AVAudioPCMBuffer, time: AVAudioTime) {
        // Lazily create the converter on the first buffer, because the tap's
        // input format is only known once audio starts flowing.
        if converter == nil {
            convertBuffer = AVAudioPCMBuffer(pcmFormat: targetFormat!, frameCapacity: buffer.frameCapacity)
            convertBuffer?.frameLength = convertBuffer!.frameCapacity
            converter = AVAudioConverter(from: buffer.format, to: convertBuffer!.format)
            converter?.sampleRateConverterAlgorithm = AVSampleRateConverterAlgorithm_Normal
            converter?.sampleRateConverterQuality = .max
        }

        // Hand the tapped buffer to the converter whenever it asks for input.
        let inputBlock: AVAudioConverterInputBlock = { _, outStatus in
            outStatus.pointee = AVAudioConverterInputStatus.haveData
            return buffer
        }

        guard let convertBuffer = convertBuffer else { return }
        var error: NSError?
        _ = converter!.convert(to: convertBuffer, error: &error, withInputFrom: inputBlock)
        if let error = error {
            logger.error("Failed to convert audio to the target format: \(error.localizedDescription)")
        } else {
            // Send the converted chunk to the output stream.
            bufferPublisher.send(convertBuffer.dataPCMInt16())
        }
    }
    public func startRecording() {
        if engine.isRunning {
            logger.error("engine is already running")
            return
        }
        do {
            try engine.start()
            isRecording = true
        } catch {
            logger.error("Failed to start engine: \(error.localizedDescription)")
        }
    }

    public func stopRecording() {
        engine.stop()
        isRecording = false
    }
}
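For context, here is a minimal sketch of how the published chunks might be consumed. The StreamUploader type, the WebSocket endpoint, and the URL below are assumptions for illustration only; the gist itself just exposes bufferPublisher and leaves the downstream transport open.

import Combine
import Foundation

// Sketch of a downstream consumer (assumed, not part of the gist):
// forwards every converted PCM chunk over a WebSocket as a binary frame.
final class StreamUploader {
    private var cancellables = Set<AnyCancellable>()
    private let socket: URLSessionWebSocketTask

    init(recorder: AudioStreamRecorder, url: URL) {
        socket = URLSession.shared.webSocketTask(with: url)
        socket.resume()

        // Each Data value emitted by the tap callback becomes one WebSocket message.
        recorder.bufferPublisher
            .sink { [weak self] chunk in
                self?.socket.send(.data(chunk)) { error in
                    if let error = error {
                        print("send failed: \(error.localizedDescription)")
                    }
                }
            }
            .store(in: &cancellables)
    }
}

// Usage (hypothetical endpoint; on iOS you would also configure AVAudioSession
// and request microphone permission before starting):
// let recorder = AudioStreamRecorder()
// let uploader = StreamUploader(recorder: recorder, url: URL(string: "wss://example.com/audio")!)
// recorder.startRecording()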