/******************************************************************************\
 * Copyright (c) 2004-2020
 *
 * Author(s):
 *  Volker Fischer
 *
 ******************************************************************************
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation; either version 2 of the License, or (at your option) any later
 * version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 *
\******************************************************************************/

#include "client.h"


/* Implementation *************************************************************/
CClient::CClient ( const quint16  iPortNumber,
                   const QString& strConnOnStartupAddress,
                   const int      iCtrlMIDIChannel,
                   const bool     bNoAutoJackConnect,
                   const QString& strNClientName ) :
    vstrIPAddress ( MAX_NUM_SERVER_ADDR_ITEMS, "" ),
    ChannelInfo (),
    vecStoredFaderTags ( MAX_NUM_STORED_FADER_SETTINGS, "" ),
    vecStoredFaderLevels ( MAX_NUM_STORED_FADER_SETTINGS, AUD_MIX_FADER_MAX ),
    vecStoredPanValues ( MAX_NUM_STORED_FADER_SETTINGS, AUD_MIX_PAN_MAX / 2 ),
    vecStoredFaderIsSolo ( MAX_NUM_STORED_FADER_SETTINGS, false ),
    vecStoredFaderIsMute ( MAX_NUM_STORED_FADER_SETTINGS, false ),
    iNewClientFaderLevel ( 100 ),
    bConnectDlgShowAllMusicians ( true ),
    strClientName ( strNClientName ),
    vecWindowPosMain (), // empty array
    vecWindowPosSettings (), // empty array
    vecWindowPosChat (), // empty array
    vecWindowPosProfile (), // empty array
    vecWindowPosConnect (), // empty array
    bWindowWasShownSettings ( false ),
    bWindowWasShownChat ( false ),
    bWindowWasShownProfile ( false ),
    bWindowWasShownConnect ( false ),
    Channel ( false ), /* we need a client channel -> "false" */
    CurOpusEncoder ( nullptr ),
    CurOpusDecoder ( nullptr ),
    eAudioCompressionType ( CT_OPUS ),
    iCeltNumCodedBytes ( OPUS_NUM_BYTES_MONO_LOW_QUALITY ),
    iOPUSFrameSizeSamples ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES ),
    eAudioQuality ( AQ_NORMAL ),
    eAudioChannelConf ( CC_MONO ),
    iNumAudioChannels ( 1 ),
    bIsInitializationPhase ( true ),
    bMuteOutStream ( false ),
    dMuteOutStreamGain ( 1.0 ),
    Socket ( &Channel, iPortNumber ),
    Sound ( AudioCallback, this, iCtrlMIDIChannel, bNoAutoJackConnect, strNClientName ),
    iAudioInFader ( AUD_FADER_IN_MIDDLE ),
    bReverbOnLeftChan ( false ),
    iReverbLevel ( 0 ),
    iSndCrdPrefFrameSizeFactor ( FRAME_SIZE_FACTOR_DEFAULT ),
    iSndCrdFrameSizeFactor ( FRAME_SIZE_FACTOR_DEFAULT ),
    bSndCrdConversionBufferRequired ( false ),
    iSndCardMonoBlockSizeSamConvBuff ( 0 ),
    bFraSiFactPrefSupported ( false ),
    bFraSiFactDefSupported ( false ),
    bFraSiFactSafeSupported ( false ),
    eGUIDesign ( GD_ORIGINAL ),
    bDisplayChannelLevels ( true ),
    bEnableOPUS64 ( false ),
    bJitterBufferOK ( true ),
    strCentralServerAddress ( "" ),
    eCentralServerAddressType ( AT_DEFAULT ),
    iServerSockBufNumFrames ( DEF_NET_BUF_SIZE_NUM_BL ),
    pSignalHandler ( CSignalHandler::getSingletonP() )
{
    int iOpusError;

    OpusMode = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
                                         DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES,
                                         &iOpusError );

    Opus64Mode = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
                                           SYSTEM_FRAME_SIZE_SAMPLES,
                                           &iOpusError );
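
    // Note on the two modes created above: the "legacy" OpusMode is set up for
    // DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES per frame while Opus64Mode uses
    // SYSTEM_FRAME_SIZE_SAMPLES. Assuming the usual project constants of a
    // 48 kHz sample rate and a 64 sample system frame, this corresponds to
    // 128 samples (~2.67 ms) per coded frame for OpusMode and 64 samples
    // (~1.33 ms) for Opus64Mode, i.e. the OPUS64 path halves the codec block
    // delay at the cost of a higher packet rate.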

    // init audio encoders and decoders
    OpusEncoderMono     = opus_custom_encoder_create ( OpusMode,   1, &iOpusError ); // mono encoder legacy
    OpusDecoderMono     = opus_custom_decoder_create ( OpusMode,   1, &iOpusError ); // mono decoder legacy
    OpusEncoderStereo   = opus_custom_encoder_create ( OpusMode,   2, &iOpusError ); // stereo encoder legacy
    OpusDecoderStereo   = opus_custom_decoder_create ( OpusMode,   2, &iOpusError ); // stereo decoder legacy
    Opus64EncoderMono   = opus_custom_encoder_create ( Opus64Mode, 1, &iOpusError ); // mono encoder OPUS64
    Opus64DecoderMono   = opus_custom_decoder_create ( Opus64Mode, 1, &iOpusError ); // mono decoder OPUS64
    Opus64EncoderStereo = opus_custom_encoder_create ( Opus64Mode, 2, &iOpusError ); // stereo encoder OPUS64
    Opus64DecoderStereo = opus_custom_decoder_create ( Opus64Mode, 2, &iOpusError ); // stereo decoder OPUS64

    // we require a constant bit rate
    opus_custom_encoder_ctl ( OpusEncoderMono,     OPUS_SET_VBR ( 0 ) );
    opus_custom_encoder_ctl ( OpusEncoderStereo,   OPUS_SET_VBR ( 0 ) );
    opus_custom_encoder_ctl ( Opus64EncoderMono,   OPUS_SET_VBR ( 0 ) );
    opus_custom_encoder_ctl ( Opus64EncoderStereo, OPUS_SET_VBR ( 0 ) );

    // for 64 samples frame size we have to adjust the PLC behavior to avoid loud artifacts
    opus_custom_encoder_ctl ( Opus64EncoderMono,   OPUS_SET_PACKET_LOSS_PERC ( 35 ) );
    opus_custom_encoder_ctl ( Opus64EncoderStereo, OPUS_SET_PACKET_LOSS_PERC ( 35 ) );

    // we want as low delay as possible
    opus_custom_encoder_ctl ( OpusEncoderMono,     OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
    opus_custom_encoder_ctl ( OpusEncoderStereo,   OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
    opus_custom_encoder_ctl ( Opus64EncoderMono,   OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
    opus_custom_encoder_ctl ( Opus64EncoderStereo, OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );

    // set encoder low complexity for legacy 128 samples frame size
    opus_custom_encoder_ctl ( OpusEncoderMono,   OPUS_SET_COMPLEXITY ( 1 ) );
    opus_custom_encoder_ctl ( OpusEncoderStereo, OPUS_SET_COMPLEXITY ( 1 ) );


    // Connections -------------------------------------------------------------
    // connections for the protocol mechanism
    QObject::connect ( &Channel, &CChannel::MessReadyForSending,
                       this, &CClient::OnSendProtMessage );

    QObject::connect ( &Channel, &CChannel::DetectedCLMessage,
                       this, &CClient::OnDetectedCLMessage );

    QObject::connect ( &Channel, &CChannel::ReqJittBufSize,
                       this, &CClient::OnReqJittBufSize );

    QObject::connect ( &Channel, &CChannel::JittBufSizeChanged,
                       this, &CClient::OnJittBufSizeChanged );

    QObject::connect ( &Channel, &CChannel::ReqChanInfo,
                       this, &CClient::OnReqChanInfo );

    QObject::connect ( &Channel, &CChannel::ConClientListMesReceived,
                       this, &CClient::ConClientListMesReceived );

    QObject::connect ( &Channel, &CChannel::Disconnected,
                       this, &CClient::Disconnected );

    QObject::connect ( &Channel, &CChannel::NewConnection,
                       this, &CClient::OnNewConnection );

    QObject::connect ( &Channel, &CChannel::ChatTextReceived,
                       this, &CClient::ChatTextReceived );

    QObject::connect ( &Channel, &CChannel::ClientIDReceived,
                       this, &CClient::ClientIDReceived );

    QObject::connect ( &Channel, &CChannel::MuteStateHasChangedReceived,
                       this, &CClient::MuteStateHasChangedReceived );

    QObject::connect ( &Channel, &CChannel::LicenceRequired,
                       this, &CClient::LicenceRequired );

    QObject::connect ( &Channel, &CChannel::VersionAndOSReceived,
                       this, &CClient::VersionAndOSReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLMessReadyForSending,
                       this, &CClient::OnSendCLProtMessage );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLServerListReceived,
                       this, &CClient::CLServerListReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLConnClientsListMesReceived,
                       this, &CClient::CLConnClientsListMesReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingReceived,
                       this, &CClient::OnCLPingReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingWithNumClientsReceived,
                       this, &CClient::OnCLPingWithNumClientsReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLDisconnection,
                       this, &CClient::OnCLDisconnection );

#ifdef ENABLE_CLIENT_VERSION_AND_OS_DEBUGGING
    QObject::connect ( &ConnLessProtocol, &CProtocol::CLVersionAndOSReceived,
                       this, &CClient::CLVersionAndOSReceived );
#endif

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLChannelLevelListReceived,
                       this, &CClient::CLChannelLevelListReceived );

    // other
    QObject::connect ( &Sound, &CSound::ReinitRequest,
                       this, &CClient::OnSndCrdReinitRequest );

    QObject::connect ( &Sound, &CSound::ControllerInFaderLevel,
                       this, &CClient::ControllerInFaderLevel );

    QObject::connect ( &Socket, &CHighPrioSocket::InvalidPacketReceived,
                       this, &CClient::OnInvalidPacketReceived );

    QObject::connect ( pSignalHandler, &CSignalHandler::HandledSignal,
                       this, &CClient::OnHandledSignal );


    // start the socket (it is important to start the socket after all
    // initializations and connections)
    Socket.Start();

    // do an immediate start if a server address is given
    if ( !strConnOnStartupAddress.isEmpty() )
    {
        SetServerAddr ( strConnOnStartupAddress );
        Start();
    }
}

void CClient::OnSendProtMessage ( CVector<uint8_t> vecMessage )
{
    // the protocol instructs us to send this message,
    // send it through the network
    Socket.SendPacket ( vecMessage, Channel.GetAddress() );
}

void CClient::OnSendCLProtMessage ( CHostAddress     InetAddr,
                                    CVector<uint8_t> vecMessage )
{
    // the protocol instructs us to send this message,
    // send it through the network
    Socket.SendPacket ( vecMessage, InetAddr );
}

void CClient::OnInvalidPacketReceived ( CHostAddress RecHostAddr )
{
    // the message could not be parsed, check if the packet comes
    // from the server we are currently connected to -> if yes, send a
    // disconnect message since the server may not know that we
    // are not connected anymore
    if ( Channel.GetAddress() == RecHostAddr )
    {
        ConnLessProtocol.CreateCLDisconnection ( RecHostAddr );
    }
}

void CClient::OnDetectedCLMessage ( CVector<uint8_t> vecbyMesBodyData,
                                    int              iRecID,
                                    CHostAddress     RecHostAddr )
{
    // connection less messages are always processed
    ConnLessProtocol.ParseConnectionLessMessageBody ( vecbyMesBodyData,
                                                      iRecID,
                                                      RecHostAddr );
}

void CClient::OnJittBufSizeChanged ( int iNewJitBufSize )
{
    // we received a jitter buffer size changed message from the server,
    // only apply this value if auto jitter buffer size is enabled
    if ( GetDoAutoSockBufSize() )
    {
        // Note: Do not use the "SetServerSockBufNumFrames" function for setting
        // the new server jitter buffer size since that would send a message
        // back to the server, which is incorrect here.
        iServerSockBufNumFrames = iNewJitBufSize;
    }
}

void CClient::OnNewConnection()
{
    // a new connection was successfully initiated, send our channel info and
    // request the connected clients list
    Channel.SetRemoteInfo ( ChannelInfo );

    // We have to send a connected clients list request since it can happen
    // that we had just connected to the server and then disconnected but
    // the server still thinks that we are connected (the server is still
    // waiting for the channel time-out). If we now connect again, we would
    // not get the list because the server does not know about a new connection.
    // The same problem exists for the jitter buffer message.
    Channel.CreateReqConnClientsList();
    CreateServerJitterBufferMessage();

    // send opt-in / opt-out for channel level updates
    Channel.CreateReqChannelLevelListMes ( bDisplayChannelLevels );
}

void CClient::CreateServerJitterBufferMessage()
{
    // per definition in the client: if auto jitter buffer is enabled, both
    // the client and the server shall use an auto jitter buffer
    if ( GetDoAutoSockBufSize() )
    {
        // in case auto jitter buffer size is enabled, we have to transmit a
        // special value
        Channel.CreateJitBufMes ( AUTO_NET_BUF_SIZE_FOR_PROTOCOL );
    }
    else
    {
        Channel.CreateJitBufMes ( GetServerSockBufNumFrames() );
    }
}

void CClient::OnCLPingReceived ( CHostAddress InetAddr,
                                 int          iMs )
{
    // make sure we are running and the server address is correct
    if ( IsRunning() && ( InetAddr == Channel.GetAddress() ) )
    {
        // take care of wrap arounds (if wrapping, do not use the result)
        const int iCurDiff = EvaluatePingMessage ( iMs );

        if ( iCurDiff >= 0 )
        {
            emit PingTimeReceived ( iCurDiff );
        }
    }
}

void CClient::OnCLPingWithNumClientsReceived ( CHostAddress InetAddr,
                                               int          iMs,
                                               int          iNumClients )
{
    // take care of wrap arounds (if wrapping, do not use the result)
    const int iCurDiff = EvaluatePingMessage ( iMs );

    if ( iCurDiff >= 0 )
    {
        emit CLPingTimeWithNumClientsReceived ( InetAddr,
                                                iCurDiff,
                                                iNumClients );
    }
}

int CClient::PreparePingMessage()
{
    // transmit the current precise time (in ms)
    return PreciseTime.elapsed();
}

int CClient::EvaluatePingMessage ( const int iMs )
{
    // calculate difference between received time in ms and current time in ms
    return PreciseTime.elapsed() - iMs;
}
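
/*
    Example of the ping scheme implemented by the two functions above (a
    hypothetical walk-through, not part of the protocol specification):
    PreparePingMessage() samples the local elapsed-time counter, e.g. 100000 ms,
    and that value travels to the server and back inside the ping message.
    When the reply arrives, EvaluatePingMessage() returns
    PreciseTime.elapsed() - iMs, e.g. 100023 - 100000 = 23 ms round trip.
    If the counter wrapped around in between, the difference becomes negative,
    which is why the callers only use results >= 0.
*/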

void CClient::SetCentralServerAddressType ( const ECSAddType eNCSAT )
{
    if ( eCentralServerAddressType != eNCSAT )
    {
        // update the type and emit a message to update the server list, too
        eCentralServerAddressType = eNCSAT;
        emit CentralServerAddressTypeChanged();
    }
}

void CClient::SetDoAutoSockBufSize ( const bool bValue )
{
    // first, set the new value in the channel object
    Channel.SetDoAutoSockBufSize ( bValue );

    // inform the server about the change
    CreateServerJitterBufferMessage();
}

void CClient::SetRemoteChanGain ( const int    iId,
                                  const double dGain,
                                  const bool   bIsMyOwnFader )
{
    // if this gain is for my own channel, apply the value for the Mute Myself function
    if ( bIsMyOwnFader )
    {
        dMuteOutStreamGain = dGain;
    }

    Channel.SetRemoteChanGain ( iId, dGain );
}

bool CClient::SetServerAddr ( QString strNAddr )
{
    CHostAddress HostAddress;

    if ( NetworkUtil().ParseNetworkAddress ( strNAddr,
                                             HostAddress ) )
    {
        // apply the address to the channel
        Channel.SetAddress ( HostAddress );

        return true;
    }
    else
    {
        return false; // invalid address
    }
}

bool CClient::GetAndResetbJitterBufferOKFlag()
{
    // get the socket buffer put status flag and reset it
    const bool bSocketJitBufOKFlag = Socket.GetAndResetbJitterBufferOKFlag();

    if ( !bJitterBufferOK )
    {
        // Our jitter buffer get status is not OK, so the overall status of the
        // jitter buffer is also not OK (we do not have to consider the status
        // of the socket buffer put status flag).

        // reset the flag before returning from the function
        bJitterBufferOK = true;
        return false;
    }

    // The jitter buffer get status (our own flag) is OK, so the final status
    // now depends on the jitter buffer put status flag from the socket,
    // since per definition the jitter buffer status is only OK if both the
    // put and the get status are OK.
    return bSocketJitBufOKFlag;
}

void CClient::SetDisplayChannelLevels ( const bool bNDCL )
{
    bDisplayChannelLevels = bNDCL;

    // tell any connected server about the change
    Channel.CreateReqChannelLevelListMes ( bDisplayChannelLevels );
}

void CClient::SetSndCrdPrefFrameSizeFactor ( const int iNewFactor )
{
    // first check the new input parameter
    if ( ( iNewFactor == FRAME_SIZE_FACTOR_PREFERRED ) ||
         ( iNewFactor == FRAME_SIZE_FACTOR_DEFAULT ) ||
         ( iNewFactor == FRAME_SIZE_FACTOR_SAFE ) )
    {
        // init with the new parameter; if the client was running, stop it
        // first and restart it after the new initialization
        const bool bWasRunning = Sound.IsRunning();
        if ( bWasRunning )
        {
            Sound.Stop();
        }

        // set new parameter
        iSndCrdPrefFrameSizeFactor = iNewFactor;

        // init with the new block size index parameter
        Init();

        if ( bWasRunning )
        {
            // restart client
            Sound.Start();
        }
    }
}

void CClient::SetEnableOPUS64 ( const bool eNEnableOPUS64 )
{
    // init with the new parameter; if the client was running, stop it
    // first and restart it after the new initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    // set new parameter
    bEnableOPUS64 = eNEnableOPUS64;
    Init();

    if ( bWasRunning )
    {
        Sound.Start();
    }
}

void CClient::SetAudioQuality ( const EAudioQuality eNAudioQuality )
{
    // init with the new parameter; if the client was running, stop it
    // first and restart it after the new initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    // set new parameter
    eAudioQuality = eNAudioQuality;
    Init();

    if ( bWasRunning )
    {
        Sound.Start();
    }
}

void CClient::SetAudioChannels ( const EAudChanConf eNAudChanConf )
{
    // init with the new parameter; if the client was running, stop it
    // first and restart it after the new initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    // set new parameter
    eAudioChannelConf = eNAudChanConf;
    Init();

    if ( bWasRunning )
    {
        Sound.Start();
    }
}

QString CClient::SetSndCrdDev ( const int iNewDev )
{
    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    const QString strReturn = Sound.SetDev ( iNewDev );

    // init again because the actual sound card buffer size might
    // have changed on the new device
    Init();

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }

    return strReturn;
}

void CClient::SetSndCrdLeftInputChannel ( const int iNewChan )
{
    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    Sound.SetLeftInputChannel ( iNewChan );
    Init();

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }
}

void CClient::SetSndCrdRightInputChannel ( const int iNewChan )
{
    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    Sound.SetRightInputChannel ( iNewChan );
    Init();

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }
}

void CClient::SetSndCrdLeftOutputChannel ( const int iNewChan )
{
    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    Sound.SetLeftOutputChannel ( iNewChan );
    Init();

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }
}

void CClient::SetSndCrdRightOutputChannel ( const int iNewChan )
{
    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    Sound.SetRightOutputChannel ( iNewChan );
    Init();

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }
}

void CClient::OnSndCrdReinitRequest ( int iSndCrdResetType )
{
    // in older Qt versions, enums cannot easily be used in signals without
    // registering them -> workaround: we use the int type and cast to the enum
    const ESndCrdResetType eSndCrdResetType =
        static_cast<ESndCrdResetType> ( iSndCrdResetType );

    // if the client was running, stop it first and restart it after the new
    // initialization
    const bool bWasRunning = Sound.IsRunning();
    if ( bWasRunning )
    {
        Sound.Stop();
    }

    // perform the reinit request as indicated by the request type parameter
    if ( eSndCrdResetType != RS_ONLY_RESTART )
    {
        if ( eSndCrdResetType != RS_ONLY_RESTART_AND_INIT )
        {
            // reinit the driver if requested
            // (we use the currently selected driver)
            Sound.SetDev ( Sound.GetDev() );
        }

        // init the client object (must always be performed if the driver
        // was changed)
        Init();
    }

    if ( bWasRunning )
    {
        // restart client
        Sound.Start();
    }
}

void CClient::OnHandledSignal ( int sigNum )
{
#ifdef _WIN32
    // Windows does not actually get OnHandledSignal triggered
    QCoreApplication::instance()->exit();
    Q_UNUSED ( sigNum )
#else
    switch ( sigNum )
    {
    case SIGINT:
    case SIGTERM:
        // this should trigger OnAboutToQuit
        QCoreApplication::instance()->exit();
        break;

    default:
        break;
    }
#endif
}

void CClient::Start()
{
    // init object
    Init();

    // enable channel
    Channel.SetEnable ( true );

    // start audio interface
    Sound.Start();
}

void CClient::Stop()
{
    // stop audio interface
    Sound.Stop();

    // disable channel
    Channel.SetEnable ( false );

    // wait for approx. 100 ms to make sure no audio packet is still in the
    // network queue causing the channel to be reconnected right after having
    // received the disconnect message (seems not to gain much, disconnect is
    // still not working reliably)
    QTime DieTime = QTime::currentTime().addMSecs ( 100 );
    while ( QTime::currentTime() < DieTime )
    {
        // exclude user input events because if we use AllEvents, it happens
        // that if the user initiates a connection and disconnection quickly
        // (e.g. quickly pressing enter five times), the software can get into
        // an unknown state
        QCoreApplication::processEvents (
            QEventLoop::ExcludeUserInputEvents, 100 );
    }

    // Send the disconnect message to the server (since we disable our protocol
    // receive mechanism with the next command, we do not evaluate any
    // response from the server; we just hope that the message reaches the
    // server and, if not, the old behaviour, a time-out, disconnects the
    // connection anyway).
    ConnLessProtocol.CreateCLDisconnection ( Channel.GetAddress() );

    // reset current signal level and LEDs
    bJitterBufferOK = true;
    SignalLevelMeter.Reset();
}

void CClient::Init()
{
    // check if the possible frame size factors are supported
    const int iFraSizePreffered = SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_PREFERRED;
    const int iFraSizeDefault   = SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_DEFAULT;
    const int iFraSizeSafe      = SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_SAFE;

    bFraSiFactPrefSupported = ( Sound.Init ( iFraSizePreffered ) == iFraSizePreffered );
    bFraSiFactDefSupported  = ( Sound.Init ( iFraSizeDefault ) == iFraSizeDefault );
    bFraSiFactSafeSupported = ( Sound.Init ( iFraSizeSafe ) == iFraSizeSafe );
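
    // The three probes above ask the sound interface whether it can run with
    // each candidate block size. Assuming the usual project constants
    // (SYSTEM_FRAME_SIZE_SAMPLES = 64 samples at 48 kHz and frame size factors
    // of 1, 2 and 4), this corresponds to buffers of 64, 128 and 256 samples,
    // i.e. roughly 1.3 ms, 2.7 ms and 5.3 ms per block.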

    // translate the block size index into the actual block size
    const int iPrefMonoFrameSize = iSndCrdPrefFrameSizeFactor * SYSTEM_FRAME_SIZE_SAMPLES;

    // get the actual sound card buffer size using the preferred size
    iMonoBlockSizeSam = Sound.Init ( iPrefMonoFrameSize );

    // Calculate the current sound card frame size factor. In case
    // the current mono block size is not a multiple of the system
    // frame size, we have to use a sound card conversion buffer.
    if ( ( ( iMonoBlockSizeSam == ( SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_PREFERRED ) ) && bEnableOPUS64 ) ||
         ( iMonoBlockSizeSam == ( SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_DEFAULT ) ) ||
         ( iMonoBlockSizeSam == ( SYSTEM_FRAME_SIZE_SAMPLES * FRAME_SIZE_FACTOR_SAFE ) ) )
    {
        // regular case: one of our predefined buffer sizes is available
        iSndCrdFrameSizeFactor = iMonoBlockSizeSam / SYSTEM_FRAME_SIZE_SAMPLES;

        // no sound card conversion buffer required
        bSndCrdConversionBufferRequired = false;
    }
    else
    {
        // An unsupported sound card buffer size is currently used -> we have
        // to use a conversion buffer. Per definition we use the smallest buffer
        // size as the current frame size.

        // store the actual sound card buffer size (stereo)
        bSndCrdConversionBufferRequired  = true;
        iSndCardMonoBlockSizeSamConvBuff = iMonoBlockSizeSam;

        // overwrite the block size factor by using one frame
        iSndCrdFrameSizeFactor = 1;
    }

    // select the OPUS frame size mode depending on the current mono block size
    if ( bSndCrdConversionBufferRequired )
    {
        if ( ( iSndCardMonoBlockSizeSamConvBuff < DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES ) && bEnableOPUS64 )
        {
            iMonoBlockSizeSam     = SYSTEM_FRAME_SIZE_SAMPLES;
            eAudioCompressionType = CT_OPUS64;
        }
        else
        {
            iMonoBlockSizeSam     = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;
            eAudioCompressionType = CT_OPUS;
        }
    }
    else
    {
        if ( iMonoBlockSizeSam < DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES )
        {
            eAudioCompressionType = CT_OPUS64;
        }
        else
        {
            // since we use double frame size for OPUS, we have to adjust the frame size factor
            iSndCrdFrameSizeFactor /= 2;
            eAudioCompressionType   = CT_OPUS;
        }
    }
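
    /*
        A brief walk-through of the selection above (the concrete numbers
        assume the usual 64 sample system frame at 48 kHz):
        - the sound card delivers 64 samples and OPUS64 is enabled -> CT_OPUS64,
          one coded frame per sound card block (iSndCrdFrameSizeFactor = 1);
        - the sound card delivers 256 samples -> CT_OPUS with 128 sample frames,
          so the factor is halved from 4 to 2 coded frames per block;
        - the sound card delivers an unsupported size, e.g. 220 samples ->
          conversion buffer path, iSndCrdFrameSizeFactor is forced to 1 and the
          processing block is set to 64 or 128 samples as selected above.
    */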

    // inits for audio coding
    if ( eAudioCompressionType == CT_OPUS )
    {
        iOPUSFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;

        if ( eAudioChannelConf == CC_MONO )
        {
            CurOpusEncoder    = OpusEncoderMono;
            CurOpusDecoder    = OpusDecoderMono;
            iNumAudioChannels = 1;

            switch ( eAudioQuality )
            {
            case AQ_LOW:    iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_LOW_QUALITY_DBLE_FRAMESIZE;    break;
            case AQ_NORMAL: iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_NORMAL_QUALITY_DBLE_FRAMESIZE; break;
            case AQ_HIGH:   iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_HIGH_QUALITY_DBLE_FRAMESIZE;   break;
            }
        }
        else
        {
            CurOpusEncoder    = OpusEncoderStereo;
            CurOpusDecoder    = OpusDecoderStereo;
            iNumAudioChannels = 2;

            switch ( eAudioQuality )
            {
            case AQ_LOW:    iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_LOW_QUALITY_DBLE_FRAMESIZE;    break;
            case AQ_NORMAL: iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_NORMAL_QUALITY_DBLE_FRAMESIZE; break;
            case AQ_HIGH:   iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_HIGH_QUALITY_DBLE_FRAMESIZE;   break;
            }
        }
    }
    else /* CT_OPUS64 */
    {
        iOPUSFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;

        if ( eAudioChannelConf == CC_MONO )
        {
            CurOpusEncoder    = Opus64EncoderMono;
            CurOpusDecoder    = Opus64DecoderMono;
            iNumAudioChannels = 1;

            switch ( eAudioQuality )
            {
            case AQ_LOW:    iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_LOW_QUALITY;    break;
            case AQ_NORMAL: iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_NORMAL_QUALITY; break;
            case AQ_HIGH:   iCeltNumCodedBytes = OPUS_NUM_BYTES_MONO_HIGH_QUALITY;   break;
            }
        }
        else
        {
            CurOpusEncoder    = Opus64EncoderStereo;
            CurOpusDecoder    = Opus64DecoderStereo;
            iNumAudioChannels = 2;

            switch ( eAudioQuality )
            {
            case AQ_LOW:    iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_LOW_QUALITY;    break;
            case AQ_NORMAL: iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_NORMAL_QUALITY; break;
            case AQ_HIGH:   iCeltNumCodedBytes = OPUS_NUM_BYTES_STEREO_HIGH_QUALITY;   break;
            }
        }
    }

    // calculate the stereo (two channel) buffer size
    iStereoBlockSizeSam = 2 * iMonoBlockSizeSam;

    vecCeltData.Init ( iCeltNumCodedBytes );
    vecZeros.Init ( iStereoBlockSizeSam, 0 );
    vecsStereoSndCrdMuteStream.Init ( iStereoBlockSizeSam );

    dMuteOutStreamGain = 1.0;

    opus_custom_encoder_ctl ( CurOpusEncoder,
                              OPUS_SET_BITRATE (
                                  CalcBitRateBitsPerSecFromCodedBytes (
                                      iCeltNumCodedBytes, iOPUSFrameSizeSamples ) ) );
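
    // Since the encoders run in constant bit rate mode, the target bit rate
    // fully determines the coded frame size. Assuming the helper computes
    // iCeltNumCodedBytes * 8 * SYSTEM_SAMPLE_RATE_HZ / iOPUSFrameSizeSamples
    // (bits per frame divided by the frame duration), e.g. a hypothetical
    // 100 coded bytes per 128 sample frame at 48 kHz would correspond to
    // 100 * 8 * 48000 / 128 = 300000 bit/s.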

    // inits for network and channel
    vecbyNetwData.Init ( iCeltNumCodedBytes );

    // set the channel network properties
    Channel.SetAudioStreamProperties ( eAudioCompressionType,
                                       iCeltNumCodedBytes,
                                       iSndCrdFrameSizeFactor,
                                       iNumAudioChannels );

    // init reverberation
    AudioReverbL.Init ( SYSTEM_SAMPLE_RATE_HZ );
    AudioReverbR.Init ( SYSTEM_SAMPLE_RATE_HZ );

    // init the sound card conversion buffers
    if ( bSndCrdConversionBufferRequired )
    {
        // inits for the conversion buffer (the size of the conversion buffer must
        // be the sum of the input/output sizes, which is the worst case fill level)
        const int iSndCardStereoBlockSizeSamConvBuff = 2 * iSndCardMonoBlockSizeSamConvBuff;
        const int iConBufSize = iStereoBlockSizeSam + iSndCardStereoBlockSizeSamConvBuff;

        SndCrdConversionBufferIn.Init ( iConBufSize );
        SndCrdConversionBufferOut.Init ( iConBufSize );
        vecDataConvBuf.Init ( iStereoBlockSizeSam );

        // the output conversion buffer must be filled with the inner
        // block size for initialization (this is the latency which is
        // introduced by the conversion buffer) to avoid buffer underruns
        SndCrdConversionBufferOut.Put ( vecZeros, iStereoBlockSizeSam );
    }
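
    /*
        Worked example of the conversion buffer sizing (hypothetical numbers):
        with a sound card that delivers 220 mono samples per callback and an
        inner processing block of 128 mono samples, the stereo sizes are 440
        and 256 samples, so iConBufSize = 256 + 440 = 696 stereo samples.
        Pre-filling the output buffer with one inner block (256 zero samples)
        adds that one block of latency but guarantees that the first callback
        can always read a full sound card block back out.
    */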

    // reset the initialization phase flag
    bIsInitializationPhase = true;
}

void CClient::AudioCallback ( CVector<int16_t>& psData, void* arg )
{
    // get the pointer to the object
    CClient* pMyClientObj = static_cast<CClient*> ( arg );

    // process audio data
    pMyClientObj->ProcessSndCrdAudioData ( psData );

/*
// TEST do a soundcard jitter measurement
static CTimingMeas JitterMeas ( 1000, "test2.dat" );
JitterMeas.Measure();
*/
}

void CClient::ProcessSndCrdAudioData ( CVector<int16_t>& vecsStereoSndCrd )
{
    // check if a conversion buffer is required or not
    if ( bSndCrdConversionBufferRequired )
    {
        // add the new sound card block to the conversion buffer
        SndCrdConversionBufferIn.Put ( vecsStereoSndCrd, vecsStereoSndCrd.Size() );

        // process all available blocks of data
        while ( SndCrdConversionBufferIn.GetAvailData() >= iStereoBlockSizeSam )
        {
            // get one block of data for processing
            SndCrdConversionBufferIn.Get ( vecDataConvBuf, iStereoBlockSizeSam );

            // process audio data
            ProcessAudioDataIntern ( vecDataConvBuf );

            SndCrdConversionBufferOut.Put ( vecDataConvBuf, iStereoBlockSizeSam );
        }

        // get the processed sound card block out of the conversion buffer
        SndCrdConversionBufferOut.Get ( vecsStereoSndCrd, vecsStereoSndCrd.Size() );
    }
    else
    {
        // regular case: no conversion buffer required,
        // process the audio data directly
        ProcessAudioDataIntern ( vecsStereoSndCrd );
    }
}
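
/*
    Example of the conversion buffer flow above (hypothetical 220 sample
    callbacks, 128 sample inner blocks, interleaved stereo): each callback
    puts 440 samples into SndCrdConversionBufferIn; the while loop then
    consumes as many complete 256 sample blocks as are available (one block
    after the first callback, two after the second, and so on) and pushes the
    processed blocks into SndCrdConversionBufferOut, from which the full 440
    sample callback buffer is finally read back. The zero pre-fill done in
    Init() is what makes this first read possible.
*/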

void CClient::ProcessAudioDataIntern ( CVector<int16_t>& vecsStereoSndCrd )
{
    int            i, j, iUnused;
    unsigned char* pCurCodedData;


    // Transmit signal ---------------------------------------------------------
    // update the stereo signal level meter
    SignalLevelMeter.Update ( vecsStereoSndCrd );

    // add the reverberation effect if activated
    if ( iReverbLevel != 0 )
    {
        // calculate the reverberation level factor
        const double dRevLev = static_cast<double> ( iReverbLevel ) / AUD_REVERB_MAX / 4;

        if ( eAudioChannelConf == CC_STEREO )
        {
            // for stereo, always apply the reverberation effect on both channels
            for ( i = 0; i < iStereoBlockSizeSam; i += 2 )
            {
                // both channels (stereo)
                AudioReverbL.ProcessSample ( vecsStereoSndCrd[i], vecsStereoSndCrd[i + 1], dRevLev );
            }
        }
        else
        {
            // mono and mono-in/stereo-out mode
            if ( bReverbOnLeftChan )
            {
                for ( i = 0; i < iStereoBlockSizeSam; i += 2 )
                {
                    // left channel
                    int16_t sRightDummy = 0; // has to be 0 for mono reverb
                    AudioReverbL.ProcessSample ( vecsStereoSndCrd[i], sRightDummy, dRevLev );
                }
            }
            else
            {
                for ( i = 1; i < iStereoBlockSizeSam; i += 2 )
                {
                    // right channel
                    int16_t sRightDummy = 0; // has to be 0 for mono reverb
                    AudioReverbR.ProcessSample ( vecsStereoSndCrd[i], sRightDummy, dRevLev );
                }
            }
        }
    }

    // mix both signals depending on the fading setting, convert
    // from double to short
    if ( iAudioInFader == AUD_FADER_IN_MIDDLE )
    {
        // no action is required if the fader is in the middle and stereo is used
        if ( eAudioChannelConf != CC_STEREO )
        {
            // mix the channels together (store the result in the first half of the vector)
            for ( i = 0, j = 0; i < iMonoBlockSizeSam; i++, j += 2 )
            {
                // For the sum, make sure we have more bits available (cast to
                // int32); after the normalization by 2, the result fits into
                // the old size again, so the cast to int16 is safe.
                vecsStereoSndCrd[i] = static_cast<int16_t> (
                    ( static_cast<int32_t> ( vecsStereoSndCrd[j] ) + vecsStereoSndCrd[j + 1] ) / 2 );
            }
        }
    }
    else
    {
        if ( eAudioChannelConf == CC_STEREO )
        {
            // stereo
            const double dAttFactStereo = static_cast<double> (
                AUD_FADER_IN_MIDDLE - abs ( AUD_FADER_IN_MIDDLE - iAudioInFader ) ) / AUD_FADER_IN_MIDDLE;

            if ( iAudioInFader > AUD_FADER_IN_MIDDLE )
            {
                for ( i = 0, j = 0; i < iMonoBlockSizeSam; i++, j += 2 )
                {
                    // attenuation on the right channel
                    vecsStereoSndCrd[j + 1] = Double2Short ( dAttFactStereo * vecsStereoSndCrd[j + 1] );
                }
            }
            else
            {
                for ( i = 0, j = 0; i < iMonoBlockSizeSam; i++, j += 2 )
                {
                    // attenuation on the left channel
                    vecsStereoSndCrd[j] = Double2Short ( dAttFactStereo * vecsStereoSndCrd[j] );
                }
            }
        }
        else
        {
            // Mono and mono-in/stereo-out mode: make sure that in the middle
            // position the two channels are amplified by 1/2; if the fader is
            // set fully to one channel, that channel gets an amplification of 1.
            const double dAttFactMono = static_cast<double> (
                AUD_FADER_IN_MIDDLE - abs ( AUD_FADER_IN_MIDDLE - iAudioInFader ) ) / AUD_FADER_IN_MIDDLE / 2;

            const double dAmplFactMono = 0.5 + static_cast<double> (
                abs ( AUD_FADER_IN_MIDDLE - iAudioInFader ) ) / AUD_FADER_IN_MIDDLE / 2;

            if ( iAudioInFader > AUD_FADER_IN_MIDDLE )
            {
                for ( i = 0, j = 0; i < iMonoBlockSizeSam; i++, j += 2 )
                {
                    // attenuation on the right channel (store the result in the
                    // first half of the vector)
                    vecsStereoSndCrd[i] = Double2Short (
                        dAmplFactMono * vecsStereoSndCrd[j] +
                        dAttFactMono * vecsStereoSndCrd[j + 1] );
                }
            }
            else
            {
                for ( i = 0, j = 0; i < iMonoBlockSizeSam; i++, j += 2 )
                {
                    // attenuation on the left channel (store the result in the
                    // first half of the vector)
                    vecsStereoSndCrd[i] = Double2Short (
                        dAmplFactMono * vecsStereoSndCrd[j + 1] +
                        dAttFactMono * vecsStereoSndCrd[j] );
                }
            }
        }
    }
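
    /*
        Pan factor sanity check for the mono case above: in the middle position
        abs ( AUD_FADER_IN_MIDDLE - iAudioInFader ) is 0, so dAttFactMono and
        dAmplFactMono are both 0.5 and the two inputs are simply averaged; with
        the fader fully on one side the factors become 0 and 1, so only the
        selected input channel is transmitted. The two weights always sum to 1,
        so the weighted sum of two int16 samples stays within the int16 range.
    */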

    // Support for the mono-in/stereo-out mode: Per definition this mode works in
    // full stereo mode at the transmission level. The only thing which is done
    // is to mix both sound card inputs together and then put this signal on
    // both stereo channels to be transmitted to the server.
    if ( eAudioChannelConf == CC_MONO_IN_STEREO_OUT )
    {
        // copy the mono data into the stereo sound card buffer (note that since
        // the input and output is the same buffer, we have to start from the end
        // so as not to overwrite input values)
        for ( i = iMonoBlockSizeSam - 1, j = iStereoBlockSizeSam - 2; i >= 0; i--, j -= 2 )
        {
            vecsStereoSndCrd[j] = vecsStereoSndCrd[j + 1] = vecsStereoSndCrd[i];
        }
    }

    for ( i = 0; i < iSndCrdFrameSizeFactor; i++ )
    {
        // OPUS encoding
        if ( CurOpusEncoder != nullptr )
        {
            if ( bMuteOutStream )
            {
                iUnused = opus_custom_encode ( CurOpusEncoder,
                                               &vecZeros[i * iNumAudioChannels * iOPUSFrameSizeSamples],
                                               iOPUSFrameSizeSamples,
                                               &vecCeltData[0],
                                               iCeltNumCodedBytes );
            }
            else
            {
                iUnused = opus_custom_encode ( CurOpusEncoder,
                                               &vecsStereoSndCrd[i * iNumAudioChannels * iOPUSFrameSizeSamples],
                                               iOPUSFrameSizeSamples,
                                               &vecCeltData[0],
                                               iCeltNumCodedBytes );
            }
        }

        // send the coded audio through the network
        Channel.PrepAndSendPacket ( &Socket,
                                    vecCeltData,
                                    iCeltNumCodedBytes );
    }
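
    /*
        Indexing note for the encode loop above: the sound card block may hold
        several codec frames, and the offset
        i * iNumAudioChannels * iOPUSFrameSizeSamples selects frame i within
        the interleaved buffer. For example (hypothetical stereo OPUS64 setup
        with a 128 sample sound card block): iNumAudioChannels = 2,
        iOPUSFrameSizeSamples = 64 and iSndCrdFrameSizeFactor = 2, so frame 0
        starts at interleaved sample 0 and frame 1 at sample 128, each
        producing one network packet.
    */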


    // Receive signal ----------------------------------------------------------
    // in case of a muted stream, store the local data
    if ( bMuteOutStream )
    {
        vecsStereoSndCrdMuteStream = vecsStereoSndCrd;
    }

    for ( i = 0; i < iSndCrdFrameSizeFactor; i++ )
    {
        // receive a new block
        const bool bReceiveDataOk =
            ( Channel.GetData ( vecbyNetwData, iCeltNumCodedBytes ) == GS_BUFFER_OK );

        // get the pointer to the coded data and manage the flags
        if ( bReceiveDataOk )
        {
            pCurCodedData = &vecbyNetwData[0];

            // on any valid received packet, we clear the initialization phase flag
            bIsInitializationPhase = false;
        }
        else
        {
            // for lost packets, use a null pointer as coded input data
            pCurCodedData = nullptr;

            // invalidate the buffer OK status flag
            bJitterBufferOK = false;
        }

        // OPUS decoding
        if ( CurOpusDecoder != nullptr )
        {
            iUnused = opus_custom_decode ( CurOpusDecoder,
                                           pCurCodedData,
                                           iCeltNumCodedBytes,
                                           &vecsStereoSndCrd[i * iNumAudioChannels * iOPUSFrameSizeSamples],
                                           iOPUSFrameSizeSamples );
        }
    }

/*
// TEST
// fid=fopen('c:\\temp\test2.dat','r');x=fread(fid,'int16');fclose(fid);
static FILE* pFileDelay = fopen("c:\\temp\\test2.dat", "wb");
short sData[2];
for (i = 0; i < iMonoBlockSizeSam; i++)
{
    sData[0] = (short) vecsStereoSndCrd[i];
    fwrite(&sData, size_t(2), size_t(1), pFileDelay);
}
fflush(pFileDelay);
*/

    // for a muted stream we have to add our local data here
    if ( bMuteOutStream )
    {
        for ( i = 0; i < iStereoBlockSizeSam; i++ )
        {
            vecsStereoSndCrd[i] = Double2Short (
                vecsStereoSndCrd[i] + vecsStereoSndCrdMuteStream[i] * dMuteOutStreamGain );
        }
    }

    // check that the channel is connected and that we are not in the initialization phase
    if ( Channel.IsConnected() && ( !bIsInitializationPhase ) )
    {
        if ( eAudioChannelConf == CC_MONO )
        {
            // copy the mono data into the stereo sound card buffer (note that since
            // the input and output is the same buffer, we have to start from the end
            // so as not to overwrite input values)
            for ( i = iMonoBlockSizeSam - 1, j = iStereoBlockSizeSam - 2; i >= 0; i--, j -= 2 )
            {
                vecsStereoSndCrd[j] = vecsStereoSndCrd[j + 1] = vecsStereoSndCrd[i];
            }
        }
    }
    else
    {
        // if not connected, clear the data
        vecsStereoSndCrd.Reset ( 0 );
    }

    // update the socket buffer size
    Channel.UpdateSocketBufferSize();

    Q_UNUSED ( iUnused )
}

int CClient::EstimatedOverallDelay ( const int iPingTimeMs )
{
    const double dSystemBlockDurationMs = static_cast<double> ( iOPUSFrameSizeSamples ) /
                                          SYSTEM_SAMPLE_RATE_HZ * 1000;

    // If the jitter buffers are set effectively, i.e. they are exactly the
    // size of the network jitter, then the delay of the buffer is the buffer
    // length. Since that is usually not the case and the buffers are usually
    // a bit larger than necessary, we introduce a compensation factor.
    // Consider the jitter buffer on the client and on the server side, too.
    const double dTotalJitterBufferDelayMs = dSystemBlockDurationMs *
        static_cast<double> ( GetSockBufNumFrames() +
                              GetServerSockBufNumFrames() ) * 0.7;
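
    // For example (hypothetical values): with 128 sample blocks at 48 kHz
    // (dSystemBlockDurationMs ~ 2.67 ms) and 4 jitter buffer frames on both
    // the client and the server side, this term evaluates to
    // 2.67 * ( 4 + 4 ) * 0.7 ~ 15 ms.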

    // consider the delay introduced by the sound card conversion buffer by using
    // "GetSndCrdConvBufAdditionalDelayMonoBlSize()"
    double dTotalSoundCardDelayMs = GetSndCrdConvBufAdditionalDelayMonoBlSize() *
        1000 / SYSTEM_SAMPLE_RATE_HZ;

    // try to get the actual input/output sound card delay from the audio
    // interface; per definition it is not available if 0 is returned
    const double dSoundCardInputOutputLatencyMs = Sound.GetInOutLatencyMs();

    if ( dSoundCardInputOutputLatencyMs == 0.0 )
    {
        // Use an alternative approach for estimating the sound card delay:
        // we assume that we have two period sizes for the input and one for the
        // output, therefore we use "3 *" instead of "2 *" (for input and output)
        // times the actual sound card buffer size.
        dTotalSoundCardDelayMs +=
            ( 3 * GetSndCrdActualMonoBlSize() ) *
            1000 / SYSTEM_SAMPLE_RATE_HZ;
    }
    else
    {
        // add the actual sound card latency in ms
        dTotalSoundCardDelayMs += dSoundCardInputOutputLatencyMs;
    }

    // network packets are of the same size as the audio packets per definition
    // if no sound card conversion buffer is used
    const double dDelayToFillNetworkPacketsMs =
        GetSystemMonoBlSize() * 1000 / SYSTEM_SAMPLE_RATE_HZ;

    // the additional OPUS delay at small frame sizes is half a frame size
    const double dAdditionalAudioCodecDelayMs = dSystemBlockDurationMs / 2;

    const double dTotalBufferDelayMs =
        dDelayToFillNetworkPacketsMs +
        dTotalJitterBufferDelayMs +
        dTotalSoundCardDelayMs +
        dAdditionalAudioCodecDelayMs;

    return MathUtils::round ( dTotalBufferDelayMs + iPingTimeMs );
}