Test with windows audio API
#define NOMINMAX
#include "scoped_resource.h" // scope-guard helper; provides make_scoped_resource_unchecked (not standard C++)
#include <Audioclient.h>
#include <atlbase.h>
#include <audiopolicy.h>
#include <mmdeviceapi.h>
#include <chrono>
#include <climits> // CHAR_BIT
#include <cmath>
#include <codecvt>
#include <cstdlib>
#include <cstring> // std::memcpy
#include <iomanip>
#include <iostream>
#include <limits>
#include <random>
#include <stdexcept>
#include <string>
#include <thread>
// using reference_time_duration = std::chrono::duration<
//     REFERENCE_TIME, std::ratio<1, 10000000> >; // IAudioClient::Initialize
//                                                // uses units of 100
//                                                // nanoseconds
typedef std::chrono::duration< ::REFERENCE_TIME, std::ratio<1, 10000000> >
    reference_time_duration; // IAudioClient::Initialize uses units of 100
                             // nanoseconds
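// Convert a failed HRESULT into an exception so every API call below can be
// checked in a single line.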
void verify_hresult(::HRESULT hresult, const char *error) {
  if (FAILED(hresult)) {
    throw std::runtime_error(error);
  }
}
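// Stream a GUID (e.g. the WAVEFORMATEXTENSIBLE SubFormat printed in main) in
// its registry-style string form.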
std::ostream &operator<<(std::ostream &os, ::GUID const &guid) {
  ::OLECHAR *s;
  verify_hresult(StringFromCLSID(guid, &s), "StringFromCLSID failed");
  static std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t>
      convert;
  os << convert.to_bytes(s);
  ::CoTaskMemFree(s);
  return os;
}
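// Fill `frames` interleaved stereo frames with a test signal: a sine carrier
// frequency-modulated so that it sweeps between 20 Hz and 30 Hz once per
// second, written as 32-bit float samples (left and right get the same value).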
void fill_buffer(::BYTE *buffer, ::UINT32 frames, ::WAVEFORMATEX *format) {
  static ::UINT32 frames_processed = 0;
  static_assert(sizeof(int) * CHAR_BIT == 32, "we expect 32 bit ints");
  if (format->wBitsPerSample != 32) {
    throw std::runtime_error("wrong number of bits per sample");
  }
  if (format->nChannels != 2) {
    throw std::runtime_error("wrong number of channels");
  }
  for (::UINT32 i = 0; i < frames; ++i) {
    ::UINT32 frames_index = frames_processed + i;
    double t = frames_index / static_cast<double>(format->nSamplesPerSec);
    const double min_frequency = 20.0;
    const double max_frequency = 30.0;
    const double modulation_frequency = 1.0;
    const double base_frequency = (max_frequency + min_frequency) / 2.0;
    const double pi_2 = 2.0 * 3.1415926535;
    double sample = std::sin(pi_2 * t * base_frequency +
                             std::sin(pi_2 * t * modulation_frequency) *
                                 (max_frequency - min_frequency) / 2.0 /
                                 modulation_frequency);
    // Instantaneous frequency of the FM signal; computed for reference only,
    // it does not affect the generated samples.
    double instantaneous_frequency =
        std::fabs(base_frequency + std::cos(pi_2 * modulation_frequency * t) *
                                       ((max_frequency - min_frequency) / 2.0));
    (void)instantaneous_frequency;
    double amplitude = 0.125;
    float computed_sample[2] = {
        static_cast<float>(sample * amplitude),
        static_cast<float>(sample * amplitude)}; // left, right
    std::memcpy(buffer, computed_sample, sizeof computed_sample);
    buffer += sizeof computed_sample;
  }
  frames_processed += frames;
}
int main() try {
  // Initialize COM (CoInitialize/CoUninitialize bracket the whole program)
  auto init = std::make_scoped_resource_unchecked(::CoInitialize(nullptr),
                                                  [](::HRESULT hresult) {
    if (SUCCEEDED(hresult)) {
      ::CoUninitialize();
    }
  });
  verify_hresult(init.get(), "CoInitialize failed");
  // Get audio client
  ATL::CComPtr< ::IMMDeviceEnumerator> enumerator;
  verify_hresult(enumerator.CoCreateInstance(__uuidof(::MMDeviceEnumerator)),
                 "CoCreateInstance failed");
  ATL::CComPtr< ::IMMDevice> audio_endpoint;
  verify_hresult(enumerator->GetDefaultAudioEndpoint(::eRender, ::eConsole,
                                                     &audio_endpoint),
                 "GetDefaultAudioEndpoint failed");
  ATL::CComPtr< ::IAudioClient> audio_client;
  verify_hresult(
      audio_endpoint->Activate(__uuidof(::IAudioClient), CLSCTX_ALL, nullptr,
                               reinterpret_cast<void **>(&audio_client)),
      "IMMDevice::Activate failed");
  // Get audio client format
  ::WAVEFORMATEX *format = nullptr;
  verify_hresult(audio_client->GetMixFormat(&format), "GetMixFormat failed");
  std::cout << format->nChannels << " channels\n";
  std::cout << format->nSamplesPerSec << " Hz\n";
  std::cout << format->wBitsPerSample << " bits\n";
  std::cout << format->nBlockAlign << " bytes per frame\n";
  if (format->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
    ::WAVEFORMATEXTENSIBLE *ex =
        reinterpret_cast< ::WAVEFORMATEXTENSIBLE *>(format);
    std::cout << "Format GUID " << ex->SubFormat << '\n';
    std::cout << std::hex << ex->dwChannelMask << std::dec << '\n';
  }
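  // GetMixFormat returned the format the shared-mode engine uses internally,
  // so it is passed unchanged to Initialize below; the buffer size actually
  // granted is then read back with GetBufferSize.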
  // Initialize buffer
  reference_time_duration requested_duration = std::chrono::milliseconds(20);
  verify_hresult(audio_client->Initialize(::AUDCLNT_SHAREMODE_SHARED, 0,
                                          requested_duration.count(), 0,
                                          format, nullptr),
                 "IAudioClient::Initialize failed");
  UINT32 buffer_frames;
  verify_hresult(audio_client->GetBufferSize(&buffer_frames),
                 "IAudioClient::GetBufferSize failed");
  // Get audio render client
  ATL::CComPtr< ::IAudioRenderClient> audio_render_client;
  verify_hresult(
      audio_client->GetService(__uuidof(::IAudioRenderClient),
                               reinterpret_cast<void **>(&audio_render_client)),
      "GetService failed");
  // Begin rendering audio into buffer
  BYTE *audio_buffer;
  verify_hresult(audio_render_client->GetBuffer(buffer_frames, &audio_buffer),
                 "GetBuffer failed");
  // GetBufferSize reports the capacity in audio frames, so a full buffer
  // plays for buffer_frames / nSamplesPerSec seconds.
  auto total_buffer_duration = std::chrono::nanoseconds(
      std::nano::den * buffer_frames / format->nSamplesPerSec);
  {
    // Pre-fill the whole buffer before starting the stream; the scope guard
    // makes sure ReleaseBuffer runs even if fill_buffer throws.
    auto buffer_releaser = std::make_scoped_resource_unchecked(
        0, [audio_render_client, buffer_frames](int) {
      auto hr = audio_render_client->ReleaseBuffer(buffer_frames, 0);
      if (FAILED(hr)) {
        std::cerr << "ReleaseBuffer failed\n";
      }
    });
    fill_buffer(audio_buffer, buffer_frames, format);
  }
  verify_hresult(audio_client->Start(), "Start failed");
  auto stopper = std::make_scoped_resource_unchecked(0, [audio_client](int) {
    auto hr = audio_client->Stop();
    if (FAILED(hr)) {
      std::cerr << "Stop failed\n";
    }
  });
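  // Render for up to one hour: sleep roughly half a buffer period, ask how
  // many frames are still queued (GetCurrentPadding), and refill only the
  // space that has already been played.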
  auto finished_time =
      std::chrono::high_resolution_clock::now() + std::chrono::hours(1);
  while (std::chrono::high_resolution_clock::now() < finished_time) {
    std::this_thread::sleep_for(total_buffer_duration / 2);
    UINT32 unplayed_frames;
    verify_hresult(audio_client->GetCurrentPadding(&unplayed_frames),
                   "GetCurrentPadding failed");
    UINT32 buffer_frames_available_for_writing =
        buffer_frames - unplayed_frames;
    verify_hresult(audio_render_client->GetBuffer(
                       buffer_frames_available_for_writing, &audio_buffer),
                   "GetBuffer failed");
    auto buffer_releaser = std::make_scoped_resource_unchecked(
        0, [audio_render_client, buffer_frames_available_for_writing](int) {
      auto hr = audio_render_client->ReleaseBuffer(
          buffer_frames_available_for_writing, 0);
      if (FAILED(hr)) {
        std::cerr << "ReleaseBuffer failed\n";
      }
    });
    fill_buffer(audio_buffer, buffer_frames_available_for_writing, format);
  }
} catch (std::exception &e) {
  std::cerr << "Fatal error: " << e.what() << '\n';
  std::exit(EXIT_FAILURE);
}
Hi,
I'm new to WASAPI programming. Can you tell me what this example does?