fix for delay measurement

Volker Fischer 2009-03-08 08:31:55 +00:00
parent 2f8d8fbd7e
commit 40fda55929
2 changed files with 8 additions and 9 deletions


@@ -279,13 +279,12 @@ void CClientSettingsDlg::OnPingTimeResult ( int iPingTime )
     const int iTotalJitterBufferDelayMS = MIN_SERVER_BLOCK_DURATION_MS *
         ( 2 /* buffer at client and server */ * pClient->GetSockBufSize() ) / 2;
-    // TODO consider sound card interface block size
-    const int iTotalSoundCardDelayMS = 0;
-//    const int iTotalSoundCardDelayMS = 2 * MIN_SERVER_BLOCK_DURATION_MS +
-//        MIN_SERVER_BLOCK_DURATION_MS * ( pClient->GetSndInterface()->GetInNumBuf() +
-//        pClient->GetSndInterface()->GetOutNumBuf() ) / 2;
+    // we assume that we have two period sizes for the input and one for the
+    // output, therefore we have "3 *" instead of "2 *" (for input and output)
+    // the actual sound card buffer size
+    const int iTotalSoundCardDelayMS =
+        3 * pClient->GetSndCrdActualMonoBlSize() *
+        1000 / SND_CRD_SAMPLE_RATE;
     const int iDelayToFillNetworkPackets =
         ( pClient->GetNetwBufSizeOut() + pClient->GetAudioBlockSizeIn() ) *
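
To make the new estimate concrete, here is a minimal standalone sketch of the same arithmetic. The mono block size of 128 samples and the 48 kHz sample rate are assumptions for illustration only; in the real code these values come from pClient->GetSndCrdActualMonoBlSize() and the SND_CRD_SAMPLE_RATE constant, neither of which is visible in this diff.

#include <iostream>

int main()
{
    // assumed values for the example only (hypothetical, not taken from this commit)
    const int iMonoBlockSize = 128;   // sound card mono block size in samples
    const int iSampleRate    = 48000; // stands in for SND_CRD_SAMPLE_RATE, in Hz

    // two period sizes for the input plus one for the output give the factor 3,
    // mirroring the expression introduced in the hunk above
    const int iTotalSoundCardDelayMS =
        3 * iMonoBlockSize * 1000 / iSampleRate;

    std::cout << "sound card delay: " << iTotalSoundCardDelayMS << " ms" << std::endl;
    return 0;
}

With these assumed numbers the factor of three yields 8 ms of sound card delay, which the surrounding code presumably combines with the jitter buffer delay and the network packet fill delay visible in the context lines to form the overall delay estimate.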


@@ -50,7 +50,7 @@ public:
     CSocket ( CChannel* pNewChannel, const quint16 iPortNumber ) :
         pChannel( pNewChannel ), bIsClient ( true ) { Init ( iPortNumber ); }
     CSocket ( CChannelSet* pNewChannelSet, QObject* pNServP, const quint16 iPortNumber ) :
-        pChannelSet(pNewChannelSet), pServer ( pNServP ), bIsClient ( false )
+        pChannelSet ( pNewChannelSet ), pServer ( pNServP ), bIsClient ( false )
         { Init ( iPortNumber ); }
     virtual ~CSocket() {}
@@ -65,7 +65,7 @@ protected:
     CVector<unsigned char> vecbyRecBuf;
     CHostAddress RecHostAddr;
-    CChannel* pChannel; // for client
+    CChannel* pChannel; // for client
     CChannelSet* pChannelSet; // for server
     QObject* pServer;