
Add WASAPI event-driven buffer sync and keep the Sleep-based sync.

[workitem:6340]
Alexandre Mutel, 17 years ago, commit 0b534236b8
  1. NAudio/CoreAudioApi/AudioClient.cs (24 changes)
  2. NAudio/CoreAudioApi/Interfaces/IAudioClient.cs (3 changes)
  3. NAudio/Wave/WaveOutputs/WasapiOut.cs (93 changes)

NAudio/CoreAudioApi/AudioClient.cs (24 changes)

@@ -1,4 +1,5 @@
using System;
using System.Threading;
using NAudio.CoreAudioApi.Interfaces;
using System.Runtime.InteropServices;
using NAudio.Wave;
@@ -79,6 +80,17 @@ namespace NAudio.CoreAudioApi
}
}
/// <summary>
/// Gets the stream latency (must initialize first)
/// </summary>
public long StreamLatency
{
get
{
return audioClientInterface.GetStreamLatency();
}
}
/// <summary>
/// Gets the current padding (must initialize first)
/// </summary>
@@ -215,6 +227,15 @@ namespace NAudio.CoreAudioApi
audioClientInterface.Stop();
}
/// <summary>
/// Sets the event handle used for buffer synchronization.
/// </summary>
/// <param name="eventWaitHandle">The wait handle to set up</param>
public void SetEventHandle(EventWaitHandle eventWaitHandle)
{
audioClientInterface.SetEventHandle(eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
}
/// <summary>
/// Resets the audio stream
/// Reset is a control method that the client calls to reset a stopped audio stream.
@@ -233,9 +254,6 @@ namespace NAudio.CoreAudioApi
#region IDisposable Members
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
if (audioClientInterface != null)

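For illustration, a minimal sketch of how a caller would use the two members added above (StreamLatency and SetEventHandle), relying only on the AudioClient surface shown in this commit; the class name EventSyncSetupSketch, the method name, and the outputFormat argument are hypothetical, not part of this commit.

using System;
using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

class EventSyncSetupSketch
{
    static void ConfigureEventSync(AudioClient audioClient, WaveFormat outputFormat)
    {
        // Shared-mode event callback: pass 0 for both buffer duration and periodicity,
        // let the audio engine choose, then read the effective latency back.
        audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.EventCallback,
            0, 0, outputFormat, Guid.Empty);

        // StreamLatency is a REFERENCE_TIME in 100-ns units; divide by 10,000 for milliseconds.
        int latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);

        // Register an auto-reset event that WASAPI signals whenever a buffer becomes available.
        EventWaitHandle frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
        audioClient.SetEventHandle(frameEventWaitHandle);

        audioClient.Start();
        // A render loop would now wait on frameEventWaitHandle before writing the next block of frames.
        Console.WriteLine("Effective latency: {0} ms", latencyMilliseconds);
    }
}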
NAudio/CoreAudioApi/Interfaces/IAudioClient.cs (3 changes)

@@ -25,7 +25,8 @@ namespace NAudio.CoreAudioApi.Interfaces
/// </summary>
int GetBufferSize(out uint bufferSize);
int GetStreamLatency(out long streamLatency);
[return: MarshalAs(UnmanagedType.I8)]
long GetStreamLatency();
int GetCurrentPadding(out int currentPadding);
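The REFERENCE_TIME surfaced by the new GetStreamLatency() declaration is expressed in 100-nanosecond units, the same resolution as TimeSpan ticks. A hypothetical helper for converting it (not part of this commit):

using System;

static class ReferenceTime
{
    // REFERENCE_TIME and TimeSpan ticks are both 100-ns units, so the conversion is 1:1.
    public static TimeSpan ToTimeSpan(long referenceTime)
    {
        return TimeSpan.FromTicks(referenceTime);
    }

    // Milliseconds, matching the latencyMilliseconds = StreamLatency / 10000 computation in WasapiOut.
    public static int ToMilliseconds(long referenceTime)
    {
        return (int)(referenceTime / 10000);
    }
}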

NAudio/Wave/WaveOutputs/WasapiOut.cs (93 changes)

@@ -19,6 +19,8 @@ namespace NAudio.Wave
int latencyMilliseconds;
int bufferFrameCount;
int bytesPerFrame;
bool isUsingEventSync;
EventWaitHandle frameEventWaitHandle;
byte[] readBuffer;
PlaybackState playbackState;
Thread playThread;
@@ -33,19 +35,21 @@ namespace NAudio.Wave
/// <param name="shareMode">ShareMode - shared or exclusive</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(AudioClientShareMode shareMode, int latency) :
this(GetDefaultAudioEndpoint(), shareMode, latency)
this(GetDefaultAudioEndpoint(), shareMode, true, latency)
{
}
static MMDevice GetDefaultAudioEndpoint()
/// <summary>
/// WASAPI Out using default audio endpoint
/// </summary>
/// <param name="shareMode">ShareMode - shared or exclusive</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(AudioClientShareMode shareMode, bool useEventSync, int latency) :
this(GetDefaultAudioEndpoint(), shareMode, useEventSync, latency)
{
if (Environment.OSVersion.Version.Major < 6)
{
throw new NotSupportedException("WASAPI supported only on Windows Vista and above");
}
MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
return enumerator.GetDefaultAudioEndpoint(DataFlow.Render,Role.Console);
}
/// <summary>
@@ -53,14 +57,27 @@ namespace NAudio.Wave
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode"></param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency"></param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, int latency)
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
this.audioClient = device.AudioClient;
this.shareMode = shareMode;
this.isUsingEventSync = useEventSync;
this.latencyMilliseconds = latency;
}
static MMDevice GetDefaultAudioEndpoint()
{
if (Environment.OSVersion.Version.Major < 6)
{
throw new NotSupportedException("WASAPI supported only on Windows Vista and above");
}
MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
return enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);
}
private void PlayThread()
{
ResamplerDmoStream resamplerDmoStream = null;
@@ -80,16 +97,32 @@ namespace NAudio.Wave
readBuffer = new byte[bufferFrameCount * bytesPerFrame];
FillBuffer(bufferFrameCount);
// Create WaitHandle for sync
WaitHandle[] waitHandles = new WaitHandle[] { frameEventWaitHandle };
audioClient.Start();
while (playbackState != PlaybackState.Stopped)
{
// Sleep for half the buffer duration.
Thread.Sleep(latencyMilliseconds / 2);
if (playbackState == PlaybackState.Playing)
// If using event sync, wait for a notification from the AudioClient; otherwise sleep for half the latency
int indexHandle = 0;
if (isUsingEventSync) {
indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
} else {
Thread.Sleep(latencyMilliseconds / 2);
}
// Refill only if still playing and the wait did not time out
if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
{
// See how much buffer space is available.
int numFramesPadding = audioClient.CurrentPadding;
int numFramesPadding = 0;
if (isUsingEventSync) {
// In exclusive mode the full buffer (bufferFrameCount = audioClient.BufferSize) is available on each event, so padding is 0; shared mode still queries CurrentPadding
numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
} else {
numFramesPadding = audioClient.CurrentPadding;
}
int numFramesAvailable = bufferFrameCount - numFramesPadding;
if (numFramesAvailable > 0)
{
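To make the frame bookkeeping in the loop above concrete, here is a worked example with illustrative numbers only (a 48 kHz, 16-bit stereo shared-mode stream with a 20 ms effective latency; none of these values come from the commit itself):

// 16-bit stereo: 2 channels * 2 bytes per sample = 4 bytes per frame.
int bytesPerFrame = 2 * 2;
int sampleRate = 48000;
int latencyMilliseconds = 20;
int bufferFrameCount = sampleRate * latencyMilliseconds / 1000;  // 960 frames in the WASAPI buffer
int numFramesPadding = 240;                                      // frames still queued (reported by CurrentPadding in shared mode)
int numFramesAvailable = bufferFrameCount - numFramesPadding;    // 720 frames can be written this pass
int bytesToRead = numFramesAvailable * bytesPerFrame;            // 2880 bytes pulled from the source stream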
@@ -251,8 +284,38 @@
}
this.sourceStream = waveStream;
audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
outputFormat, Guid.Empty);
// If using event sync, initialization differs depending on the share mode
if (isUsingEventSync)
{
// Init Shared or Exclusive
if (shareMode == AudioClientShareMode.Shared)
{
// With EventCallBack and Shared, both latencies must be set to 0
audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
outputFormat, Guid.Empty);
// Get back the effective latency from AudioClient
latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
}
else
{
// With EventCallback and Exclusive, both latencies must be equal
audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
outputFormat, Guid.Empty);
}
// Create the Wait Event Handle
frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
audioClient.SetEventHandle(frameEventWaitHandle);
}
else
{
// Normal (sleep-based) setup, identical for both share modes
audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
outputFormat, Guid.Empty);
}
// Get the RenderClient
renderClient = audioClient.AudioRenderClient;
}
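A minimal usage sketch for the new constructor overload, assuming the usual IWavePlayer surface (Init, Play, PlaybackState, Dispose); the file name and the 200 ms latency are placeholders:

using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

class Program
{
    static void Main()
    {
        using (WaveStream reader = new Mp3FileReader("audio.mp3"))
        using (WasapiOut wasapiOut = new WasapiOut(AudioClientShareMode.Shared, true, 200))
        {
            // true selects the new event-driven sync; pass false to keep the original Sleep-based loop.
            wasapiOut.Init(reader);
            wasapiOut.Play();
            while (wasapiOut.PlaybackState == PlaybackState.Playing)
            {
                Thread.Sleep(100);
            }
        }
    }
}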
