Created
June 17, 2017 00:14
-
-
Save tfwio/6f5970eb654f363cc5fadcd6df3bac6e to your computer and use it in GitHub Desktop.
An abstraction over NAudio's audio-device handling, as implemented in a particular (private) ds2 library.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* tfwxo * 1/19/2016 * 2:10 AM */
using System;
using System.Linq;
using System.Threading;
using NAudio.Wave;
using on.drumsynth2;
namespace on.audio
{
/// <summary>
/// Identifies which NAudio output driver backs an audio device.
/// </summary>
public enum EngineType
{
    /// <summary>ASIO driver output.</summary>
    ASIO = 1,
    /// <summary>DirectSound output.</summary>
    DirectSound = 2,
    /// <summary>Legacy WaveOut output.</summary>
    WaveOut = 3,
    /// <summary>WASAPI shared-mode output.</summary>
    WASAPI = 4,
    /// <summary>The default engine — an alias for <see cref="DirectSound"/>.</summary>
    Default = DirectSound,
}
// blank placeholder used only to detect type-info. | |
public interface GeneralModel { | |
/// Parent | |
AudioDevice AudioDevice { get; set; } | |
} | |
/// <summary>
/// Contract for an audio playback device backed by one of the NAudio output
/// engines (see <see cref="EngineType"/>). Disposable: owns the engine instance.
/// </summary>
public interface IAudioDevice : IDisposable
{
    //IWavePlayer AudioEngine; // private member object.
    /// <summary>Engine type most recently activated by Setup().</summary>
    EngineType AudioEngineType { get; set; }
    /// <summary>Attached model object, if any.</summary>
    GeneralModel Model { get; set; }
    /// <summary>Starts playback (or re-primes the synth when already playing).</summary>
    void Play();
    /// <summary>Stops playback.</summary>
    void Stop();
    /// <summary>Stops the running engine (if any) and re-creates it as <paramref name="engineType"/>.</summary>
    void SwitchAudioEngine(EngineType engineType, int latency=80, int deviceID=0);
    // make private IAudioDevice.Setup
    /// <summary>(Re)creates the underlying engine. NOTE(review): author intends to make this private (see comment above).</summary>
    void Setup(EngineType engineType, int latency=80, int deviceID=0);
    // make private
    /// <summary>Handler for the engine's PlaybackStopped event.</summary>
    void OnAudioDeviceStopped(object sender, StoppedEventArgs e);
    /// <summary>Handler for the synth's end-of-program event.</summary>
    void OnStoppedHandler(object sender, EventArgs e);
}
/// <summary>
/// Wraps an NAudio <see cref="IWavePlayer"/> (ASIO / DirectSound / WaveOut /
/// WASAPI) together with the drum-synth sample provider, handling engine
/// selection, start/stop, and teardown.
/// </summary>
public class AudioDevice : IAudioDevice
{
    // Delay (ms) between retries while waiting for the engine to stop in Dispose().
    const int default_dispose_failed_timeout = 400;
    // Upper bound on Dispose() stop-retries so a wedged driver cannot hang us forever.
    const int max_dispose_retries = 25;

    // Active NAudio output; null until IAudioDevice.Setup() has run.
    IWavePlayer AudioEngine;

    /// <summary>Sample provider feeding the engine; created lazily in Setup().</summary>
    public DsProvider Synth { get; set; }

    /// <summary>Attached model object (see <see cref="GeneralModel"/>).</summary>
    public GeneralModel Model { get; set; }

    // disable once ConvertToConstant.Local
    // Output device index. NOTE(review): fixed at 0 — the deviceID parameter of
    // Setup()/SwitchAudioEngine() is never copied here; confirm intent before
    // wiring user device selection through.
    int DeviceID = 0;

    /// <summary>
    /// Beats per minute. Forwards the new value to the synth's pattern clock
    /// when the synth exists; otherwise only the local value is kept.
    /// (Author's note: "This should not be here and/or never should be used
    /// in this fashion.")
    /// </summary>
    internal double BPM {
        get { return bpm; }
        set {
            bpm = value;
            if (Synth != null) Synth.Pattern.Clock.SetBpm(bpm);
            else Console.WriteLine ("ABORTED: BUFFER NOT SET.");
        }
    }
    internal double bpm = 126D;

    /// <summary>
    /// Sets <see cref="DesiredLatency"/> from a buffer size in samples,
    /// assuming the drum-synth's fixed 44100 Hz sample rate.
    /// </summary>
    internal int DesiredSamplesPerBuffer { set { DesiredLatency = (int)Math.Round(value / 44100D * 1000D); } }

    // Target latency in milliseconds. NOTE(review): the latency parameter of
    // Setup()/SwitchAudioEngine() is accepted but never applied here — confirm
    // intended behavior before honoring it (defaults differ: 80 vs 300).
    internal int DesiredLatency = 300;

    // Buffer size in bytes; ASIO reports its own sample count via the
    // project's Field32 reflection helper, other engines derive from latency.
    int BufferSize { get { return (AudioEngine is AsioOut) ? AudioEngine.Field32("nbSamples") : MsToBytes(DesiredLatency); } }

    /// <summary>Engine type most recently activated by Setup().</summary>
    public EngineType AudioEngineType { get; set; } = EngineType.Default;

    // Millisecond-to-byte conversion adapted from NAudio's internal buffering
    // code; the result is rounded down to a whole BlockAlign multiple so a
    // buffer never splits a sample frame.
    int MsToBytes(int ms) {
        int num = ms * (Synth.WaveFormat.AverageBytesPerSecond / 1000);
        return num - num % Synth.WaveFormat.BlockAlign;
    }

    /// <summary>
    /// Starts playback. When already playing, re-initializes the synth buffer
    /// (if a model is attached) instead of restarting the engine.
    /// No-op when no engine has been set up yet.
    /// </summary>
    public void Play()
    {
        if (AudioEngine == null) return;
        if (AudioEngine.PlaybackState == PlaybackState.Playing) {
            if (Model != null) Synth.Initialize (bpm, BufferSize);
            return;
        }
        AudioEngine.Play();
    }

    /// <summary>Stops playback; no-op when no engine exists or it is already stopped.</summary>
    public void Stop()
    {
        if (AudioEngine == null) return;
        if (AudioEngine.PlaybackState == PlaybackState.Stopped) {
            Console.WriteLine("Already stopped...");
            return;
        }
        AudioEngine.Stop();
    }

    /// <summary>
    /// Attaches a form model and pushes its selected preset into the synth
    /// pattern; optionally forces a synth re-initialization.
    /// Not part of <see cref="IAudioDevice"/> (called from MainWindow.SelectProgram).
    /// </summary>
    virtual public void InitializePgm(DsFormModel dsynth, bool forceInit = false)
    {
        Model = dsynth;
        var model = Model as DsFormModel;
        Synth.Pattern.Preset [Synth.Pattern.SelectedIndex] = model.SelectedPreset;
        if (forceInit) Synth.Initialize (bpm, BufferSize);
    }

    public AudioDevice()
    {
    }

    /// <summary>
    /// Switches to another engine. If the current engine is still running,
    /// Setup() is deferred until its PlaybackStopped event fires; otherwise
    /// Setup() runs immediately.
    /// </summary>
    /// <param name="engineType">Engine to switch to.</param>
    /// <param name="latency">Requested latency in ms (see NOTE on DesiredLatency).</param>
    /// <param name="deviceID">Requested device index (see NOTE on DeviceID).</param>
    public void SwitchAudioEngine(EngineType engineType, int latency = 80, int deviceID = 0)
    {
        if (AudioEngine != null && AudioEngine.PlaybackState != PlaybackState.Stopped)
        {
            // Re-create the engine only after the driver confirms it stopped.
            AudioEngine.PlaybackStopped += (o, s) => (this as IAudioDevice).Setup(engineType, latency, deviceID);
            AudioEngine.Stop();
        }
        else (this as IAudioDevice).Setup(engineType, latency, deviceID);
    }

    /// <summary>
    /// Tears down any existing engine, creates the requested one, and wires
    /// the synth provider into it. Unknown engine values fall back to
    /// DirectSound so <see cref="AudioEngine"/> is never left null.
    /// NOTE(review): latency/deviceID parameters are accepted but not applied;
    /// the DesiredLatency/DeviceID fields are used instead — confirm intent.
    /// </summary>
    void IAudioDevice.Setup(EngineType engineType, int latency = 80, int deviceID = 0)
    {
        if (AudioEngine != null)
        {
            AudioEngine.Dispose();
            AudioEngine = null;
            Thread.Sleep(500); // give the driver time to release the device
        }
        switch (engineType)
        {
            case EngineType.ASIO:
                // BUGFIX: original read "AudioEngine = AudioEngine = new AsioOut(DeviceID);"
                AudioEngine = new AsioOut(DeviceID);
                break;
            case EngineType.WaveOut:
                var waveOut = new WaveOut();
                waveOut.DesiredLatency = DesiredLatency * 2;
                waveOut.DeviceNumber = DeviceID;
                AudioEngine = waveOut;
                break;
            case EngineType.WASAPI:
                AudioEngine = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, DesiredLatency);
                break;
            case EngineType.DirectSound: // EngineType.Default shares this value (2)
            default:
                // BUGFIX: original switch had no default arm; an unexpected
                // value left AudioEngine null and the code below threw NRE.
                AudioEngine = new DirectSoundOut (NAudio.Wave.DirectSoundOut.Devices.First().Guid, DesiredLatency);
                break;
        }
        AudioEngineType = engineType;
        AudioEngine.PlaybackStopped += (this as IAudioDevice).OnAudioDeviceStopped;
        if (Synth == null)
        {
            Synth = new DsProvider();
            Synth.EndOfProgram += (this as IAudioDevice).OnStoppedHandler;
        }
        AudioEngine.Init(Synth);
        Console.WriteLine ("ENGINE STARTED");
    }

    /// <summary>Logs engine-stop notifications from NAudio.</summary>
    void IAudioDevice.OnAudioDeviceStopped(object sender, StoppedEventArgs e) {
        Console.WriteLine("ENGINE STOPPED");
    }

    /// <summary>Logs end-of-program notifications from the synth.</summary>
    void IAudioDevice.OnStoppedHandler(object sender, EventArgs e) {
        Console.WriteLine("ENGINE STOP REQUEST");
    }

    #region IDisposable implementation
    /// <summary>
    /// Stops the engine (retrying a bounded number of times) and disposes it.
    /// BUGFIX: the original called AudioEngine.Dispose() outside the null
    /// guard, throwing NullReferenceException when no engine was ever created;
    /// its retry loop was also unbounded if Stop() kept throwing.
    /// </summary>
    public void Dispose()
    {
        if (AudioEngine == null) return;
        System.Diagnostics.Debug.Print("BEGIN: WAITING TO DISPOSE AUDIO-PLAYER...");
        int retries = 0;
        while (AudioEngine.PlaybackState != PlaybackState.Stopped && retries++ < max_dispose_retries)
        {
            try {
                System.Diagnostics.Debug.Print("Waiting for audio idle...");
                AudioEngine.Stop();
            } catch (Exception) {
                System.Diagnostics.Debug.Print("ERROR: Waiting for audio idle...");
                //throw;
            }
            Thread.Sleep(default_dispose_failed_timeout);
        }
        AudioEngine.Dispose();
        AudioEngine = null;
    }
    #endregion
}
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment