introduce GetInOutLatencyMs to query the actual latency from the sound interface

This commit is contained in:
Volker Fischer 2013-12-17 20:52:38 +00:00
parent d089e7d308
commit 3fc6ccc8aa
4 changed files with 50 additions and 9 deletions

View file

@ -1229,16 +1229,33 @@ int CClient::EstimatedOverallDelay ( const int iPingTimeMs )
static_cast<double> ( GetSockBufNumFrames() +
GetServerSockBufNumFrames() ) / 2;
// we assume that we have two period sizes for the input and one for the
// output, therefore we have "3 *" instead of "2 *" (for input and output)
// the actual sound card buffer size, also consider delay introduced by
// sound card conversion buffer by using
// "GetSndCrdConvBufAdditionalDelayMonoBlSize"
const double dTotalSoundCardDelayMs =
( 3 * GetSndCrdActualMonoBlSize() +
GetSndCrdConvBufAdditionalDelayMonoBlSize() ) *
// consider delay introduced by the sound card conversion buffer by using
// "GetSndCrdConvBufAdditionalDelayMonoBlSize()"
double dTotalSoundCardDelayMs = GetSndCrdConvBufAdditionalDelayMonoBlSize() *
1000 / SYSTEM_SAMPLE_RATE_HZ;
// try to get the actual input/output sound card delay from the audio
// interface; by definition, a returned value of 0 means it is not available
const double dSoundCardInputOutputLatencyMs = Sound.GetInOutLatencyMs();
if ( dSoundCardInputOutputLatencyMs == 0.0 )
{
// use an alternative approach for estimating the sound card delay:
//
// we assume that we have two period sizes for the input and one for the
// output, therefore we have "3 *" instead of "2 *" (for input and output)
// the actual sound card buffer size (the conversion buffer delay is
// already added above via "GetSndCrdConvBufAdditionalDelayMonoBlSize()")
dTotalSoundCardDelayMs +=
( 3 * GetSndCrdActualMonoBlSize() ) *
1000 / SYSTEM_SAMPLE_RATE_HZ;
}
else
{
// add the actual sound card latency in ms
dTotalSoundCardDelayMs += dSoundCardInputOutputLatencyMs;
}
// network packets are of the same size as the audio packets per definition
// if no sound card conversion buffer is used
const double dDelayToFillNetworkPacketsMs =

View file

@ -77,6 +77,8 @@ public:
virtual int GetLeftOutputChannel() { return 0; }
virtual int GetRightOutputChannel() { return 1; }
virtual double GetInOutLatencyMs() { return 0.0; } // "0.0" means no latency is available
virtual void OpenDriverSetup() {}
bool IsRunning() const { return bRun; }

View file

@ -363,6 +363,24 @@ int CSound::Init ( const int iNewPrefMonoBufferSize )
2 /* in/out */ * NUM_IN_OUT_CHANNELS /* stereo */,
iASIOBufferSizeMono, &asioCallbacks );
// query the latency of the driver
long lInputLatency = 0;
long lOutputLatency = 0;
if ( ASIOGetLatencies ( &lInputLatency, &lOutputLatency ) != ASE_NotPresent )
{
// add the input and output latencies (returned in number of
// samples) and calculate the time in ms
dInOutLatencyMs =
( static_cast<double> ( lInputLatency ) + lOutputLatency ) /
SYSTEM_SAMPLE_RATE_HZ * 1000;
}
else
{
// no latency available
dInOutLatencyMs = 0.0;
}
// check whether the driver requires the ASIOOutputReady() optimization
// (can be used by the driver to reduce output latency by one block)
bASIOPostOutput = ( ASIOOutputReady() == ASE_OK );
@ -402,7 +420,8 @@ CSound::CSound ( void (*fpNewCallback) ( CVector<int16_t>& psData, void* arg ),
vSelectedInputChannels ( NUM_IN_OUT_CHANNELS ),
vSelectedOutputChannels ( NUM_IN_OUT_CHANNELS ),
lNumInChan ( 0 ),
lNumOutChan ( 0 )
lNumOutChan ( 0 ),
dInOutLatencyMs ( 0.0 ) // "0.0" means that no latency value is available
{
int i;

View file

@ -72,6 +72,8 @@ public:
virtual int GetLeftOutputChannel() { return vSelectedOutputChannels[0]; }
virtual int GetRightOutputChannel() { return vSelectedOutputChannels[1]; }
virtual double GetInOutLatencyMs() { return dInOutLatencyMs; }
protected:
virtual QString LoadAndInitializeDriver ( int iIdx );
virtual void UnloadCurrentDriver();
@ -85,6 +87,7 @@ protected:
long lNumInChan;
long lNumOutChan;
double dInOutLatencyMs;
CVector<int> vSelectedInputChannels;
CVector<int> vSelectedOutputChannels;