/******************************************************************************\
 * Copyright (c) 2004-2020
 *
 * Author(s):
 *  Volker Fischer
 *
 ******************************************************************************
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation; either version 2 of the License, or (at your option) any later
 * version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 *
\******************************************************************************/

#include "server.h"

// CHighPrecisionTimer implementation ******************************************
#ifdef _WIN32
CHighPrecisionTimer::CHighPrecisionTimer ( const bool bNewUseDoubleSystemFrameSize ) :
    bUseDoubleSystemFrameSize ( bNewUseDoubleSystemFrameSize )
{
    // add some error checking, the high precision timer implementation only
    // supports 64 and 128 samples frame size at 48 kHz sampling rate
#if ( SYSTEM_FRAME_SIZE_SAMPLES != 64 ) && ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES != 128 )
# error "Only system frame size of 64 and 128 samples is supported by this module"
#endif
#if ( SYSTEM_SAMPLE_RATE_HZ != 48000 )
# error "Only a system sample rate of 48 kHz is supported by this module"
#endif

    // QT only supports a minimum timer resolution of 1 ms but for our
    // server we require a timer interval of 2.666 ms for 128 samples
    // frame size at 48 kHz sampling rate.
    // To support this interval, we use a timer with 2 ms resolution for 128
    // samples frame size and 1 ms resolution for 64 samples frame size.
    // Then we fire the actual frame timer if the error to the actual
    // required interval is minimum.
    veciTimeOutIntervals.Init ( 3 );

    // for 128 sample frame size at 48 kHz sampling rate with 2 ms timer resolution:
    // actual intervals:  0.0  2.666  5.333  8.0
    // quantized to 2 ms: 0    2      6      8 (0)
    // for 64 sample frame size at 48 kHz sampling rate with 1 ms timer resolution:
    // actual intervals:  0.0  1.333  2.666  4.0
    // quantized to 1 ms: 0    1      3      4 (0)
    veciTimeOutIntervals[0] = 0;
    veciTimeOutIntervals[1] = 1;
    veciTimeOutIntervals[2] = 0;
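
    // Illustrative arithmetic for the interval pattern above (not part of the
    // original logic, just a worked example): with the 2 ms timer, three audio
    // frames of 2.666 ms span 8 ms = 4 timer ticks, so OnTimer() fires the
    // frame timeout after 1, 2 and 1 ticks (counter targets 0, 1, 0). With the
    // 1 ms timer, three 1.333 ms frames span 4 ms = 4 ticks, giving the same
    // 0, 1, 0 counter pattern, which is why one interval vector serves both
    // frame sizes.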

    // connect timer timeout signal
    QObject::connect ( &Timer, &QTimer::timeout,
        this, &CHighPrecisionTimer::OnTimer );
}

void CHighPrecisionTimer::Start()
{
    // reset position pointer and counter
    iCurPosInVector  = 0;
    iIntervalCounter = 0;

    if ( bUseDoubleSystemFrameSize )
    {
        // start internal timer with 2 ms resolution for 128 samples frame size
        Timer.start ( 2 );
    }
    else
    {
        // start internal timer with 1 ms resolution for 64 samples frame size
        Timer.start ( 1 );
    }
}

void CHighPrecisionTimer::Stop()
{
    // stop timer
    Timer.stop();
}

void CHighPrecisionTimer::OnTimer()
{
    // check if the maximum number of high precision timer intervals is
    // finished
    if ( veciTimeOutIntervals[iCurPosInVector] == iIntervalCounter )
    {
        // reset interval counter
        iIntervalCounter = 0;

        // go to next position in vector, take care of wrap around
        iCurPosInVector++;
        if ( iCurPosInVector == veciTimeOutIntervals.Size() )
        {
            iCurPosInVector = 0;
        }

        // minimum time error to actual required timer interval is reached,
        // emit signal for server
        emit timeout();
    }
    else
    {
        // next high precision timer interval
        iIntervalCounter++;
    }
}

#else // Mac and Linux

CHighPrecisionTimer::CHighPrecisionTimer ( const bool bUseDoubleSystemFrameSize ) :
    bRun ( false )
{
    // calculate delay in ns
    uint64_t iNsDelay;

    if ( bUseDoubleSystemFrameSize )
    {
        iNsDelay = ( (uint64_t) DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * 1000000000 ) /
                   (uint64_t) SYSTEM_SAMPLE_RATE_HZ; // in ns
    }
    else
    {
        iNsDelay = ( (uint64_t) SYSTEM_FRAME_SIZE_SAMPLES * 1000000000 ) /
                   (uint64_t) SYSTEM_SAMPLE_RATE_HZ; // in ns
    }
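
    // Worked numbers for the delays above (illustrative only): at 48 kHz,
    // 128 samples give 128 * 1e9 / 48000 = 2666666 ns (~2.67 ms) and
    // 64 samples give 1333333 ns (~1.33 ms) between frame timeouts.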

#if defined ( __APPLE__ ) || defined ( __MACOSX )
    // calculate delay in mach absolute time
    struct mach_timebase_info timeBaseInfo;
    mach_timebase_info ( &timeBaseInfo );

    Delay = ( iNsDelay * (uint64_t) timeBaseInfo.denom ) /
            (uint64_t) timeBaseInfo.numer;
#else
    // set delay
    Delay = iNsDelay;
#endif
}

void CHighPrecisionTimer::Start()
{
    // only start if not already running
    if ( !bRun )
    {
        // set run flag
        bRun = true;

        // set initial end time
#if defined ( __APPLE__ ) || defined ( __MACOSX )
        NextEnd = mach_absolute_time() + Delay;
#else
        clock_gettime ( CLOCK_MONOTONIC, &NextEnd );

        NextEnd.tv_nsec += Delay;
        if ( NextEnd.tv_nsec >= 1000000000L )
        {
            NextEnd.tv_sec++;
            NextEnd.tv_nsec -= 1000000000L;
        }
#endif

        // start thread
        QThread::start ( QThread::TimeCriticalPriority );
    }
}

void CHighPrecisionTimer::Stop()
{
    // set flag so that thread can leave the main loop
    bRun = false;

    // give thread some time to terminate
    wait ( 5000 );
}

void CHighPrecisionTimer::run()
{
    // loop until the thread shall be terminated
    while ( bRun )
    {
        // call processing routine by firing signal

        // TODO by emitting a signal we leave the high priority thread -> maybe use some
        // other connection type to have something like a true callback, e.g.
        // "Qt::DirectConnection" -> Can this work?

        emit timeout();

        // now wait until the next buffer shall be processed (we
        // use the "increment method" to make sure we do not introduce
        // a timing drift)
#if defined ( __APPLE__ ) || defined ( __MACOSX )
        mach_wait_until ( NextEnd );

        NextEnd += Delay;
#else
        clock_nanosleep ( CLOCK_MONOTONIC,
                          TIMER_ABSTIME,
                          &NextEnd,
                          NULL );

        NextEnd.tv_nsec += Delay;
        if ( NextEnd.tv_nsec >= 1000000000L )
        {
            NextEnd.tv_sec++;
            NextEnd.tv_nsec -= 1000000000L;
        }
#endif
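
        // Note on the "increment method" (illustrative): the absolute deadline
        // NextEnd is always advanced by the nominal Delay (e.g. ~1.33 ms for
        // 64-sample frames), independent of when the thread actually woke up,
        // so occasional late wake-ups do not accumulate into long-term drift.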
    }
}
#endif


// CServer implementation ******************************************************
CServer::CServer ( const int          iNewMaxNumChan,
                   const int          iMaxDaysHistory,
                   const QString&     strLoggingFileName,
                   const quint16      iPortNumber,
                   const QString&     strHTMLStatusFileName,
                   const QString&     strHistoryFileName,
                   const QString&     strServerNameForHTMLStatusFile,
                   const QString&     strCentralServer,
                   const QString&     strServerInfo,
                   const QString&     strNewWelcomeMessage,
                   const QString&     strRecordingDirName,
                   const bool         bNCentServPingServerInList,
                   const bool         bNDisconnectAllClientsOnQuit,
                   const bool         bNUseDoubleSystemFrameSize,
                   const ELicenceType eNLicenceType ) :
    bUseDoubleSystemFrameSize   ( bNUseDoubleSystemFrameSize ),
    iMaxNumChannels             ( iNewMaxNumChan ),
    Socket                      ( this, iPortNumber ),
    Logging                     ( iMaxDaysHistory ),
    iFrameCount                 ( 0 ),
    JamRecorder                 ( strRecordingDirName ),
    bEnableRecording            ( false ),
    bWriteStatusHTMLFile        ( false ),
    HighPrecisionTimer          ( bNUseDoubleSystemFrameSize ),
    ServerListManager           ( iPortNumber,
                                  strCentralServer,
                                  strServerInfo,
                                  iNewMaxNumChan,
                                  bNCentServPingServerInList,
                                  &ConnLessProtocol ),
    bAutoRunMinimized           ( false ),
    strWelcomeMessage           ( strNewWelcomeMessage ),
    eLicenceType                ( eNLicenceType ),
    bDisconnectAllClientsOnQuit ( bNDisconnectAllClientsOnQuit ),
    pSignalHandler              ( CSignalHandler::getSingletonP() )
{
    int iOpusError;
    int i;

    // create OPUS encoder/decoder for each channel (must be done before
    // enabling the channels), create a mono and stereo encoder/decoder
    // for each channel
    for ( i = 0; i < iMaxNumChannels; i++ )
    {
        // init OPUS -----------------------------------------------------------
        OpusMode[i] = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
                                                DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES,
                                                &iOpusError );

        Opus64Mode[i] = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
                                                  SYSTEM_FRAME_SIZE_SAMPLES,
                                                  &iOpusError );

        // init audio encoders and decoders
        OpusEncoderMono[i]     = opus_custom_encoder_create ( OpusMode[i],   1, &iOpusError ); // mono encoder legacy
        OpusDecoderMono[i]     = opus_custom_decoder_create ( OpusMode[i],   1, &iOpusError ); // mono decoder legacy
        OpusEncoderStereo[i]   = opus_custom_encoder_create ( OpusMode[i],   2, &iOpusError ); // stereo encoder legacy
        OpusDecoderStereo[i]   = opus_custom_decoder_create ( OpusMode[i],   2, &iOpusError ); // stereo decoder legacy
        Opus64EncoderMono[i]   = opus_custom_encoder_create ( Opus64Mode[i], 1, &iOpusError ); // mono encoder OPUS64
        Opus64DecoderMono[i]   = opus_custom_decoder_create ( Opus64Mode[i], 1, &iOpusError ); // mono decoder OPUS64
        Opus64EncoderStereo[i] = opus_custom_encoder_create ( Opus64Mode[i], 2, &iOpusError ); // stereo encoder OPUS64
        Opus64DecoderStereo[i] = opus_custom_decoder_create ( Opus64Mode[i], 2, &iOpusError ); // stereo decoder OPUS64

        // we require a constant bit rate
        opus_custom_encoder_ctl ( OpusEncoderMono[i],     OPUS_SET_VBR ( 0 ) );
        opus_custom_encoder_ctl ( OpusEncoderStereo[i],   OPUS_SET_VBR ( 0 ) );
        opus_custom_encoder_ctl ( Opus64EncoderMono[i],   OPUS_SET_VBR ( 0 ) );
        opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_VBR ( 0 ) );

        // for 64 samples frame size we have to adjust the PLC behavior to avoid loud artifacts
        opus_custom_encoder_ctl ( Opus64EncoderMono[i],   OPUS_SET_PACKET_LOSS_PERC ( 35 ) );
        opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_PACKET_LOSS_PERC ( 35 ) );

        // we want as low delay as possible
        opus_custom_encoder_ctl ( OpusEncoderMono[i],     OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
        opus_custom_encoder_ctl ( OpusEncoderStereo[i],   OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
        opus_custom_encoder_ctl ( Opus64EncoderMono[i],   OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
        opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );

        // set encoder low complexity for legacy 128 samples frame size
        opus_custom_encoder_ctl ( OpusEncoderMono[i],   OPUS_SET_COMPLEXITY ( 1 ) );
        opus_custom_encoder_ctl ( OpusEncoderStereo[i], OPUS_SET_COMPLEXITY ( 1 ) );

        // init double-to-normal frame size conversion buffers -----------------
        // use worst case memory initialization to avoid allocating memory in
        // the time-critical thread
        DoubleFrameSizeConvBufIn[i].Init  ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
        DoubleFrameSizeConvBufOut[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
    }
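
    // Illustrative summary (not an original comment): the loop above sets up
    // two independent OPUS custom codecs per channel, the legacy mode with
    // 128-sample frames (~2.67 ms at 48 kHz) and the OPUS64 mode with 64-sample
    // frames (~1.33 ms), each in a mono and a stereo variant, so a client can
    // switch compression type or channel count without any allocation at runtime.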

    // define colors for chat window identifiers
    vstrChatColors.Init ( 6 );
    vstrChatColors[0] = "mediumblue";
    vstrChatColors[1] = "red";
    vstrChatColors[2] = "darkorchid";
    vstrChatColors[3] = "green";
    vstrChatColors[4] = "maroon";
    vstrChatColors[5] = "coral";

    // set the server frame size
    if ( bUseDoubleSystemFrameSize )
    {
        iServerFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;
    }
    else
    {
        iServerFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;
    }

    // To avoid audio glitches, no memory must be allocated in the entire
    // realtime timer audio processing routine including ProcessData. Since we
    // do not know the required sizes for the vectors, we allocate memory for
    // the worst case here:

    // we always use stereo audio buffers (which is the worst case)
    vecsSendData.Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );

    // allocate worst case memory for the temporary vectors
    vecChanIDsCurConChan.Init          ( iMaxNumChannels );
    vecvecdGains.Init                  ( iMaxNumChannels );
    vecvecdPannings.Init               ( iMaxNumChannels );
    vecvecsData.Init                   ( iMaxNumChannels );
    vecNumAudioChannels.Init           ( iMaxNumChannels );
    vecNumFrameSizeConvBlocks.Init     ( iMaxNumChannels );
    vecUseDoubleSysFraSizeConvBuf.Init ( iMaxNumChannels );
    vecAudioComprType.Init             ( iMaxNumChannels );

    for ( i = 0; i < iMaxNumChannels; i++ )
    {
        // init vectors storing information of all channels
        vecvecdGains[i].Init    ( iMaxNumChannels );
        vecvecdPannings[i].Init ( iMaxNumChannels );

        // we always use stereo audio buffers (see "vecsSendData")
        vecvecsData[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
    }

    // allocate worst case memory for the coded data
    vecbyCodedData.Init ( MAX_SIZE_BYTES_NETW_BUF );

    // allocate worst case memory for the channel levels
    vecChannelLevels.Init ( iMaxNumChannels );

    // enable history graph (if requested)
    if ( !strHistoryFileName.isEmpty() )
    {
        Logging.EnableHistory ( strHistoryFileName );
    }

    // enable logging (if requested)
    if ( !strLoggingFileName.isEmpty() )
    {
        // in case the history is enabled and a logging file name is
        // given, parse the logging file for old entries which are then
        // added to the history on software startup
        if ( !strHistoryFileName.isEmpty() )
        {
            Logging.ParseLogFile ( strLoggingFileName );
        }

        Logging.Start ( strLoggingFileName );
    }

    // HTML status file writing
    if ( !strHTMLStatusFileName.isEmpty() )
    {
        QString strCurServerNameForHTMLStatusFile = strServerNameForHTMLStatusFile;

        // if the server name is empty, substitute a default name
        if ( strCurServerNameForHTMLStatusFile.isEmpty() )
        {
            strCurServerNameForHTMLStatusFile = "[server address]";
        }

        // (the static cast to integer of the port number is required so that it
        // works correctly under Linux)
        StartStatusHTMLFileWriting ( strHTMLStatusFileName,
                                     strCurServerNameForHTMLStatusFile + ":" +
                                     QString().number ( static_cast<int> ( iPortNumber ) ) );
    }

    // enable jam recording (if requested) - kicks off the thread
    if ( !strRecordingDirName.isEmpty() )
    {
        bRecorderInitialised = JamRecorder.Init ( this, iServerFrameSizeSamples );
        SetEnableRecording ( bRecorderInitialised );
    }

    // enable all channels (for the server, all channels must be enabled for the
    // entire life time of the software)
    for ( i = 0; i < iMaxNumChannels; i++ )
    {
        vecChannels[i].SetEnable ( true );
    }


    // Connections -------------------------------------------------------------
    // connect timer timeout signal
    QObject::connect ( &HighPrecisionTimer, &CHighPrecisionTimer::timeout,
        this, &CServer::OnTimer );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLMessReadyForSending,
        this, &CServer::OnSendCLProtMessage );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingReceived,
        this, &CServer::OnCLPingReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingWithNumClientsReceived,
        this, &CServer::OnCLPingWithNumClientsReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLRegisterServerReceived,
        this, &CServer::OnCLRegisterServerReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLUnregisterServerReceived,
        this, &CServer::OnCLUnregisterServerReceived );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqServerList,
        this, &CServer::OnCLReqServerList );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLRegisterServerResp,
        this, &CServer::OnCLRegisterServerResp );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLSendEmptyMes,
        this, &CServer::OnCLSendEmptyMes );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLDisconnection,
        this, &CServer::OnCLDisconnection );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqVersionAndOS,
        this, &CServer::OnCLReqVersionAndOS );

    QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqConnClientsList,
        this, &CServer::OnCLReqConnClientsList );

    QObject::connect ( &ServerListManager, &CServerListManager::SvrRegStatusChanged,
        this, &CServer::SvrRegStatusChanged );

    QObject::connect ( &JamRecorder, &recorder::CJamRecorder::RecordingSessionStarted,
        this, &CServer::RecordingSessionStarted );

    QObject::connect ( QCoreApplication::instance(), &QCoreApplication::aboutToQuit,
        this, &CServer::OnAboutToQuit );

    QObject::connect ( pSignalHandler, &CSignalHandler::HandledSignal,
        this, &CServer::OnHandledSignal );

    connectChannelSignalsToServerSlots<MAX_NUM_CHANNELS>();

    // start the socket (it is important to start the socket after all
    // initializations and connections)
    Socket.Start();
}

template<unsigned int slotId>
inline void CServer::connectChannelSignalsToServerSlots()
{
    int iCurChanID = slotId - 1;

    void ( CServer::* pOnSendProtMessCh )( CVector<uint8_t> ) =
        &CServerSlots<slotId>::OnSendProtMessCh;

    void ( CServer::* pOnReqConnClientsListCh )() =
        &CServerSlots<slotId>::OnReqConnClientsListCh;

    void ( CServer::* pOnChatTextReceivedCh )( QString ) =
        &CServerSlots<slotId>::OnChatTextReceivedCh;

    void ( CServer::* pOnMuteStateHasChangedCh )( int, bool ) =
        &CServerSlots<slotId>::OnMuteStateHasChangedCh;

    void ( CServer::* pOnServerAutoSockBufSizeChangeCh )( int ) =
        &CServerSlots<slotId>::OnServerAutoSockBufSizeChangeCh;

    // send message
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::MessReadyForSending,
        this, pOnSendProtMessCh );

    // request connected clients list
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::ReqConnClientsList,
        this, pOnReqConnClientsListCh );

    // channel info has changed
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::ChanInfoHasChanged,
        this, &CServer::CreateAndSendChanListForAllConChannels );

    // chat text received
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::ChatTextReceived,
        this, pOnChatTextReceivedCh );

    // other mute state has changed
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::MuteStateHasChanged,
        this, pOnMuteStateHasChangedCh );

    // auto socket buffer size change
    QObject::connect ( &vecChannels[iCurChanID], &CChannel::ServerAutoSockBufSizeChange,
        this, pOnServerAutoSockBufSizeChangeCh );

    connectChannelSignalsToServerSlots<slotId - 1>();
}

template<>
inline void CServer::connectChannelSignalsToServerSlots<0>() {}
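
// Illustrative note on the template recursion above: calling
// connectChannelSignalsToServerSlots<MAX_NUM_CHANNELS>() connects channel
// MAX_NUM_CHANNELS - 1 to the per-channel slots of CServerSlots<slotId> and
// then recurses with slotId - 1, e.g. <3>() wires channel 2, <2>() channel 1,
// <1>() channel 0, until the empty <0>() specialization terminates the
// recursion at compile time.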

void CServer::CreateAndSendJitBufMessage ( const int iCurChanID,
                                           const int iNNumFra )
{
    vecChannels[iCurChanID].CreateJitBufMes ( iNNumFra );
}

void CServer::SendProtMessage ( int iChID, CVector<uint8_t> vecMessage )
{
    // the protocol asks us to send the message,
    // send it through the network
    Socket.SendPacket ( vecMessage, vecChannels[iChID].GetAddress() );
}

void CServer::OnNewConnection ( int          iChID,
                                CHostAddress RecHostAddr )
{
    // inform the client about its own ID at the server (note that this
    // must be the first message to be sent for a new connection)
    vecChannels[iChID].CreateClientIDMes ( iChID );

    // on a new connection we query the network transport properties for the
    // audio packets (to use the correct network block size and audio
    // compression properties, etc.)
    vecChannels[iChID].CreateReqNetwTranspPropsMes();

    // this is a new connection, query the jitter buffer size we shall use
    // for this client (note that at the same time on a new connection the
    // client sends the jitter buffer size by default but maybe we have
    // reached a state where this did not happen because of network trouble,
    // client or server thinks that the connection was still active, etc.)
    vecChannels[iChID].CreateReqJitBufMes();

    // A new client connected to the server, so the channel list
    // at all clients has to be updated. This is done by sending
    // a channel name request to the client which causes a channel
    // name message to be transmitted to the server. If the server
    // receives this message, the channel list will be updated
    // automatically (implicitly).
    //
    // Usually it is not required to send the channel list to the
    // client currently connecting since it automatically requests
    // the channel list on a new connection (as a result, it will
    // usually get the list twice, which has no impact on functionality
    // but slightly increases the network load). But in case the
    // client thinks it is still connected while the server was
    // restarted, it is important that we send the channel list
    // at this place.
    vecChannels[iChID].CreateReqChanInfoMes();

    // send welcome message (if enabled)
    if ( !strWelcomeMessage.isEmpty() )
    {
        // create formatted server welcome message and send it just to
        // the client which just connected to the server
        const QString strWelcomeMessageFormated =
            "<b>Server Welcome Message:</b> " + strWelcomeMessage;

        vecChannels[iChID].CreateChatTextMes ( strWelcomeMessageFormated );
    }

    // send licence request message (if enabled)
    if ( eLicenceType != LT_NO_LICENCE )
    {
        vecChannels[iChID].CreateLicReqMes ( eLicenceType );
    }

    // send version info (for, e.g., feature activation in the client)
    vecChannels[iChID].CreateVersionAndOSMes();

    // reset the conversion buffers
    DoubleFrameSizeConvBufIn[iChID].Reset();
    DoubleFrameSizeConvBufOut[iChID].Reset();

    // logging of new connected channel
    Logging.AddNewConnection ( RecHostAddr.InetAddr );
}

void CServer::OnServerFull ( CHostAddress RecHostAddr )
{
    // inform the calling client that no channel is free
    ConnLessProtocol.CreateCLServerFullMes ( RecHostAddr );
}

void CServer::OnSendCLProtMessage ( CHostAddress     InetAddr,
                                    CVector<uint8_t> vecMessage )
{
    // the protocol asks us to send the message,
    // send it through the network
    Socket.SendPacket ( vecMessage, InetAddr );
}

void CServer::OnProtcolCLMessageReceived ( int              iRecID,
                                           CVector<uint8_t> vecbyMesBodyData,
                                           CHostAddress     RecHostAddr )
{
    // connection less messages are always processed
    ConnLessProtocol.ParseConnectionLessMessageBody ( vecbyMesBodyData,
                                                      iRecID,
                                                      RecHostAddr );
}

void CServer::OnCLDisconnection ( CHostAddress InetAddr )
{
    // check if the given address is actually a client which is connected to
    // this server, if yes, disconnect it
    const int iCurChanID = FindChannel ( InetAddr );

    if ( iCurChanID != INVALID_CHANNEL_ID )
    {
        vecChannels[iCurChanID].Disconnect();
    }
}

void CServer::OnAboutToQuit()
{
    // if enabled, disconnect all clients on quit
    if ( bDisconnectAllClientsOnQuit )
    {
        Mutex.lock();
        {
            for ( int i = 0; i < iMaxNumChannels; i++ )
            {
                if ( vecChannels[i].IsConnected() )
                {
                    ConnLessProtocol.CreateCLDisconnection ( vecChannels[i].GetAddress() );
                }
            }
        }
        Mutex.unlock(); // release mutex
    }

    Stop();

    // if server was registered at the central server, unregister on shutdown
    if ( GetServerListEnabled() )
    {
        UnregisterSlaveServer();
    }
}

void CServer::OnHandledSignal ( int sigNum )
{
    // show the signal number on the command line (note that this does not work for the Windows command line)
    // TODO we should use the ConsoleWriterFactory() instead of qDebug()
    qDebug() << "OnHandledSignal: " << sigNum;

#ifdef _WIN32
    // Windows does not actually get OnHandledSignal triggered
    QCoreApplication::instance()->exit();
    Q_UNUSED ( sigNum )
#else
    switch ( sigNum )
    {
    case SIGUSR1:
        RequestNewRecording();
        break;

    case SIGUSR2:
        SetEnableRecording ( !bEnableRecording );
        break;

    case SIGINT:
    case SIGTERM:
        // this should trigger OnAboutToQuit
        QCoreApplication::instance()->exit();
        break;

    default:
        break;
    }
#endif
}

void CServer::RequestNewRecording()
{
    if ( bRecorderInitialised && bEnableRecording )
    {
        emit RestartRecorder();
    }
}

void CServer::SetEnableRecording ( bool bNewEnableRecording )
{
    if ( bRecorderInitialised )
    {
        // note that this block is executed even if the value does not appear
        // to change, to make sure the requested state is actually applied
        bEnableRecording = bNewEnableRecording;

#if QT_VERSION >= QT_VERSION_CHECK(5, 5, 0)
        // TODO we should use the ConsoleWriterFactory() instead of qInfo()
        qInfo() << "Recording state " << ( bEnableRecording ? "enabled" : "disabled" );
#endif

        if ( !bEnableRecording )
        {
            emit StopRecorder();
        }
        else if ( !IsRunning() )
        {
            // This dirty hack is for the GUI. It doesn't care.
            emit StopRecorder();
        }
    }
}

void CServer::Start()
{
    // only start if not already running
    if ( !IsRunning() )
    {
        // start timer
        HighPrecisionTimer.Start();

        // emit start signal
        emit Started();
    }
}

void CServer::Stop()
{
    // Under Mac we have the problem that the timer shutdown might
    // take some time and therefore we get a lot of "server stopped"
    // entries in the log. The following condition shall prevent this.
    // For the other OSs this should not hurt either.
    if ( IsRunning() )
    {
        // stop timer
        HighPrecisionTimer.Stop();

        // logging (add "server stopped" logging entry)
        Logging.AddServerStopped();

        // emit stopped signal
        emit Stopped();
    }
}

void CServer::OnTimer()
{
/*
static CTimingMeas JitterMeas ( 1000, "test2.dat" ); JitterMeas.Measure(); // TEST do a timer jitter measurement
*/
    // Get data from all connected clients -------------------------------------
    // some inits
    int  iUnused;
    int  iNumClients               = 0; // init connected client counter
    bool bChannelIsNowDisconnected = false;
    bool bUpdateChannelLevels      = false;
    bool bSendChannelLevels        = false;

    // Make put and get calls thread safe. Do not forget to unlock mutex
    // afterwards!
    Mutex.lock();
    {
        // first, get number and IDs of connected channels
        for ( int i = 0; i < iMaxNumChannels; i++ )
        {
            if ( vecChannels[i].IsConnected() )
            {
                // add ID and increment counter (note that the vector length is
                // according to the worst case scenario, if the number of
                // connected clients is less, only a subset of elements of this
                // vector are actually used and the others are dummy elements)
                vecChanIDsCurConChan[iNumClients] = i;
                iNumClients++;
            }
        }

        // process connected channels
        for ( int i = 0; i < iNumClients; i++ )
        {
            int                iClientFrameSizeSamples = 0; // initialize to avoid a compiler warning
            OpusCustomDecoder* CurOpusDecoder;
            unsigned char*     pCurCodedData;

            // get actual ID of current channel
            const int iCurChanID = vecChanIDsCurConChan[i];

            // get and store number of audio channels and compression type
            vecNumAudioChannels[i] = vecChannels[iCurChanID].GetNumAudioChannels();
            vecAudioComprType[i]   = vecChannels[iCurChanID].GetAudioCompressionType();

            // get info about required frame size conversion properties
            vecUseDoubleSysFraSizeConvBuf[i] = ( !bUseDoubleSystemFrameSize && ( vecAudioComprType[i] == CT_OPUS ) );

            if ( bUseDoubleSystemFrameSize && ( vecAudioComprType[i] == CT_OPUS64 ) )
            {
                vecNumFrameSizeConvBlocks[i] = 2;
            }
            else
            {
                vecNumFrameSizeConvBlocks[i] = 1;
            }

            // update conversion buffer size (nothing will happen if the size stays the same)
            if ( vecUseDoubleSysFraSizeConvBuf[i] )
            {
                DoubleFrameSizeConvBufIn[iCurChanID].SetBufferSize  ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
                DoubleFrameSizeConvBufOut[iCurChanID].SetBufferSize ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
            }

            // select the opus decoder and raw audio frame length
            if ( vecAudioComprType[i] == CT_OPUS )
            {
                iClientFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;

                if ( vecNumAudioChannels[i] == 1 )
                {
                    CurOpusDecoder = OpusDecoderMono[iCurChanID];
                }
                else
                {
                    CurOpusDecoder = OpusDecoderStereo[iCurChanID];
                }
            }
            else if ( vecAudioComprType[i] == CT_OPUS64 )
            {
                iClientFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;

                if ( vecNumAudioChannels[i] == 1 )
                {
                    CurOpusDecoder = Opus64DecoderMono[iCurChanID];
                }
                else
                {
                    CurOpusDecoder = Opus64DecoderStereo[iCurChanID];
                }
            }
            else
            {
                CurOpusDecoder = nullptr;
            }

            // get gains of all connected channels
            for ( int j = 0; j < iNumClients; j++ )
            {
                // The second index of "vecvecdGains" does not represent
                // the channel ID! Therefore we have to use
                // "vecChanIDsCurConChan" to query the IDs of the currently
                // connected channels
                vecvecdGains[i][j] = vecChannels[iCurChanID].GetGain ( vecChanIDsCurConChan[j] );

                // consider audio fade-in
                vecvecdGains[i][j] *= vecChannels[vecChanIDsCurConChan[j]].GetFadeInGain();

                // panning
                vecvecdPannings[i][j] = vecChannels[iCurChanID].GetPan ( vecChanIDsCurConChan[j] );
            }
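
            // Illustrative reading of the indexing above: vecvecdGains[i][j] is
            // the fader gain that the i-th connected client (channel iCurChanID)
            // has set for the j-th connected client, additionally scaled by the
            // source client's fade-in gain; vecvecdPannings[i][j] is the
            // corresponding pan position for that source in this client's mix.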

            // flag for updating channel levels (if at least one client wants it)
            if ( vecChannels[iCurChanID].ChannelLevelsRequired() )
            {
                bUpdateChannelLevels = true;
            }

            // If the server frame size is smaller than the received OPUS frame size, we need a conversion
            // buffer which stores the large buffer.
            // Note that we have a shortcut here. If the conversion buffer is not needed, the boolean flag
            // is false and the Get() function is not called at all. Therefore if the buffer is not needed
            // we do not spend any time in the function but go directly inside the if condition.
            if ( ( vecUseDoubleSysFraSizeConvBuf[i] == 0 ) ||
                 !DoubleFrameSizeConvBufIn[iCurChanID].Get ( vecvecsData[i], SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] ) )
            {
                // get current number of OPUS coded bytes
                const int iCeltNumCodedBytes = vecChannels[iCurChanID].GetNetwFrameSize();

                for ( int iB = 0; iB < vecNumFrameSizeConvBlocks[i]; iB++ )
                {
                    // get data
                    const EGetDataStat eGetStat = vecChannels[iCurChanID].GetData ( vecbyCodedData, iCeltNumCodedBytes );

                    // if channel was just disconnected, set flag that connected
                    // client list is sent to all other clients
                    // and emit the client disconnected signal
                    if ( eGetStat == GS_CHAN_NOW_DISCONNECTED )
                    {
                        if ( bEnableRecording )
                        {
                            emit ClientDisconnected ( iCurChanID ); // TODO do this outside the mutex lock?
                        }

                        bChannelIsNowDisconnected = true;
                    }

                    // get pointer to coded data
                    if ( eGetStat == GS_BUFFER_OK )
                    {
                        pCurCodedData = &vecbyCodedData[0];
                    }
                    else
                    {
                        // for lost packets use null pointer as coded input data
                        pCurCodedData = nullptr;
                    }

                    // OPUS decode received data stream
                    if ( CurOpusDecoder != nullptr )
                    {
                        iUnused = opus_custom_decode ( CurOpusDecoder,
                                                       pCurCodedData,
                                                       iCeltNumCodedBytes,
                                                       &vecvecsData[i][iB * SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i]],
                                                       iClientFrameSizeSamples );
                    }
                }

                // a new large frame is ready, if the conversion buffer is required, put it in the buffer
                // and read out the small frame size immediately for further processing
                if ( vecUseDoubleSysFraSizeConvBuf[i] != 0 )
                {
                    DoubleFrameSizeConvBufIn[iCurChanID].PutAll ( vecvecsData[i] );
                    DoubleFrameSizeConvBufIn[iCurChanID].Get ( vecvecsData[i], SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
                }
            }
        }
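
        // Illustrative walk-through of the frame size conversion (assumptions
        // about the buffer helper marked as such): if the server runs at 64
        // samples per tick but a legacy CT_OPUS client sends 128-sample frames,
        // every second tick Get() presumably finds no buffered small frame, a
        // full 128-sample frame is then decoded, PutAll() stores it and Get()
        // immediately returns the first 64-sample half; on the following tick
        // Get() succeeds directly and delivers the remaining half without
        // touching the decoder.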

        // a channel is now disconnected, take action on it
        if ( bChannelIsNowDisconnected )
        {
            // update channel list for all currently connected clients
            CreateAndSendChanListForAllConChannels();
        }
    }
    Mutex.unlock(); // release mutex


    // Process data ------------------------------------------------------------
    // Check if at least one client is connected. If not, stop server until
    // one client is connected.
    if ( iNumClients > 0 )
    {
        // calculate levels for all connected clients
        if ( bUpdateChannelLevels )
        {
            bSendChannelLevels = CreateLevelsForAllConChannels ( iNumClients,
                                                                 vecNumAudioChannels,
                                                                 vecvecsData,
                                                                 vecChannelLevels );
        }

        for ( int i = 0; i < iNumClients; i++ )
        {
            int                iClientFrameSizeSamples = 0; // initialize to avoid a compiler warning
            OpusCustomEncoder* CurOpusEncoder;

            // get actual ID of current channel
            const int iCurChanID = vecChanIDsCurConChan[i];

            // get number of audio channels of current channel
            const int iCurNumAudChan = vecNumAudioChannels[i];

            // export the audio data for recording purpose
            if ( bEnableRecording )
            {
                emit AudioFrame ( iCurChanID,
                                  vecChannels[iCurChanID].GetName(),
                                  vecChannels[iCurChanID].GetAddress(),
                                  iCurNumAudChan,
                                  vecvecsData[i] );
            }

            // generate a separate mix for each channel
            // actual processing of audio data -> mix
            ProcessData ( vecvecsData,
                          vecvecdGains[i],
                          vecvecdPannings[i],
                          vecNumAudioChannels,
                          vecsSendData,
                          iCurNumAudChan,
                          iNumClients );

            // get current number of CELT coded bytes
            const int iCeltNumCodedBytes = vecChannels[iCurChanID].GetNetwFrameSize();

            // select the opus encoder and raw audio frame length
            if ( vecAudioComprType[i] == CT_OPUS )
            {
                iClientFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;

                if ( vecNumAudioChannels[i] == 1 )
                {
                    CurOpusEncoder = OpusEncoderMono[iCurChanID];
                }
                else
                {
                    CurOpusEncoder = OpusEncoderStereo[iCurChanID];
                }
            }
            else if ( vecAudioComprType[i] == CT_OPUS64 )
            {
                iClientFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;

                if ( vecNumAudioChannels[i] == 1 )
                {
                    CurOpusEncoder = Opus64EncoderMono[iCurChanID];
                }
                else
                {
                    CurOpusEncoder = Opus64EncoderStereo[iCurChanID];
                }
            }
            else
            {
                CurOpusEncoder = nullptr;
            }

            // If the server frame size is smaller than the sent OPUS frame size, we need a conversion
            // buffer which stores the large buffer.
            // Note that we have a shortcut here. If the conversion buffer is not needed, the boolean flag
            // is false and the Put() function is not called at all. Therefore if the buffer is not needed
            // we do not spend any time in the function but go directly inside the if condition.
            if ( ( vecUseDoubleSysFraSizeConvBuf[i] == 0 ) ||
                 DoubleFrameSizeConvBufOut[iCurChanID].Put ( vecsSendData, SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] ) )
            {
                if ( vecUseDoubleSysFraSizeConvBuf[i] != 0 )
                {
                    // get the large frame from the conversion buffer
                    DoubleFrameSizeConvBufOut[iCurChanID].GetAll ( vecsSendData, DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
                }

                for ( int iB = 0; iB < vecNumFrameSizeConvBlocks[i]; iB++ )
                {
                    // OPUS encoding
                    if ( CurOpusEncoder != nullptr )
                    {
                        // TODO find a better place than this: the setting does not change all the time
                        // so for speed optimization it would be better to set it only if the network
                        // frame size is changed
                        opus_custom_encoder_ctl ( CurOpusEncoder,
                                                  OPUS_SET_BITRATE ( CalcBitRateBitsPerSecFromCodedBytes ( iCeltNumCodedBytes, iClientFrameSizeSamples ) ) );
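
                        // Illustrative numbers for the bit rate request above
                        // (assuming CalcBitRateBitsPerSecFromCodedBytes maps the
                        // fixed packet size to bits per second): e.g. 22 coded
                        // bytes per 64-sample frame at 48 kHz corresponds to
                        // 22 * 8 bits every 1.333 ms, i.e. roughly 132 kbps.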

                        iUnused = opus_custom_encode ( CurOpusEncoder,
                                                       &vecsSendData[iB * SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i]],
                                                       iClientFrameSizeSamples,
                                                       &vecbyCodedData[0],
                                                       iCeltNumCodedBytes );
                    }

                    // send separate mix to current clients
                    vecChannels[iCurChanID].PrepAndSendPacket ( &Socket,
                                                                vecbyCodedData,
                                                                iCeltNumCodedBytes );
                }

                // update socket buffer size
                vecChannels[iCurChanID].UpdateSocketBufferSize();

                // send channel levels
                if ( bSendChannelLevels && vecChannels[iCurChanID].ChannelLevelsRequired() )
                {
                    ConnLessProtocol.CreateCLChannelLevelListMes ( vecChannels[iCurChanID].GetAddress(),
                                                                   vecChannelLevels,
                                                                   iNumClients );
                }
            }
        }
    }
    else
    {
        // Disable server if no clients are connected. In this case the server
        // does not consume any significant CPU when no client is connected.
        Stop();
    }

    Q_UNUSED ( iUnused )
}

/// @brief Mix all audio data from all clients together.
void CServer::ProcessData ( const CVector<CVector<int16_t> >& vecvecsData,
                            const CVector<double>&            vecdGains,
                            const CVector<double>&            vecdPannings,
                            const CVector<int>&               vecNumAudioChannels,
                            CVector<int16_t>&                 vecsOutData,
                            const int                         iCurNumAudChan,
                            const int                         iNumClients )
{
    int i, j, k;

    // init return vector with zeros since we mix all channels on that vector
    vecsOutData.Reset ( 0 );
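
    // Illustrative note (Double2Short semantics assumed): the mix is
    // accumulated in double precision and converted back to int16_t with
    // Double2Short on every addition, which is expected to clamp the sum to
    // the valid 16-bit range so that many loud sources cannot wrap around.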

    // distinguish between stereo and mono mode
    if ( iCurNumAudChan == 1 )
    {
        // Mono target channel -------------------------------------------------
        for ( j = 0; j < iNumClients; j++ )
        {
            // get a reference to the audio data and gain of the current client
            const CVector<int16_t>& vecsData = vecvecsData[j];
            const double            dGain    = vecdGains[j];

            // if channel gain is 1, avoid multiplication for speed optimization
            if ( dGain == static_cast<double> ( 1.0 ) )
            {
                if ( vecNumAudioChannels[j] == 1 )
                {
                    // mono
                    for ( i = 0; i < iServerFrameSizeSamples; i++ )
                    {
                        vecsOutData[i] = Double2Short (
                            static_cast<double> ( vecsOutData[i] ) + vecsData[i] );
                    }
                }
                else
                {
                    // stereo: apply stereo-to-mono attenuation
                    for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
                    {
                        vecsOutData[i] =
                            Double2Short ( vecsOutData[i] +
                            ( static_cast<double> ( vecsData[k] ) + vecsData[k + 1] ) / 2 );
                    }
                }
            }
            else
            {
                if ( vecNumAudioChannels[j] == 1 )
                {
                    // mono
                    for ( i = 0; i < iServerFrameSizeSamples; i++ )
                    {
                        vecsOutData[i] = Double2Short (
                            vecsOutData[i] + vecsData[i] * dGain );
                    }
                }
                else
                {
                    // stereo: apply stereo-to-mono attenuation
                    for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
                    {
                        vecsOutData[i] =
                            Double2Short ( vecsOutData[i] + dGain *
                            ( static_cast<double> ( vecsData[k] ) + vecsData[k + 1] ) / 2 );
                    }
                }
            }
        }
    }
    else
    {
        // Stereo target channel -----------------------------------------------
        for ( j = 0; j < iNumClients; j++ )
        {
            // get a reference to the audio data and gain/pan of the current client
            const CVector<int16_t>& vecsData = vecvecsData[j];
            const double            dGain    = vecdGains[j];
            const double            dPan     = vecdPannings[j];

            // calculate combined gain/pan for each stereo channel where we define
            // the panning such that center equals full gain for both channels
            const double dGainL = std::min ( 0.5, 1 - dPan ) * 2 * dGain;
            const double dGainR = std::min ( 0.5, dPan ) * 2 * dGain;
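
            // Worked examples for the pan law above (illustrative): dPan = 0.5
            // (center) gives dGainL = dGainR = dGain; dPan = 0 (hard left)
            // gives dGainL = dGain and dGainR = 0; dPan = 1 (hard right)
            // gives dGainL = 0 and dGainR = dGain.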

            // if the channel gain is 1, avoid the multiplication for speed optimization
            if ( ( dGainL == static_cast<double> ( 1.0 ) ) && ( dGainR == static_cast<double> ( 1.0 ) ) )
            {
                if ( vecNumAudioChannels[j] == 1 )
                {
                    // mono: copy the same mono data into both output stereo channels
                    for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
                    {
                        // left channel
                        vecsOutData[k] = Double2Short (
                            static_cast<double> ( vecsOutData[k] ) + vecsData[i] );

                        // right channel
                        vecsOutData[k + 1] = Double2Short (
                            static_cast<double> ( vecsOutData[k + 1] ) + vecsData[i] );
                    }
                }
                else
                {
                    // stereo
                    for ( i = 0; i < ( 2 * iServerFrameSizeSamples ); i++ )
                    {
                        vecsOutData[i] = Double2Short (
                            static_cast<double> ( vecsOutData[i] ) + vecsData[i] );
                    }
                }
            }
            else
            {
                if ( vecNumAudioChannels[j] == 1 )
                {
                    // mono: copy the same mono data into both output stereo channels
                    for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
                    {
                        // left/right channel
                        vecsOutData[k]     = Double2Short ( vecsOutData[k] + vecsData[i] * dGainL );
                        vecsOutData[k + 1] = Double2Short ( vecsOutData[k + 1] + vecsData[i] * dGainR );
                    }
                }
                else
                {
                    // stereo
                    for ( i = 0; i < ( 2 * iServerFrameSizeSamples ); i += 2 )
                    {
                        // left/right channel
                        vecsOutData[i]     = Double2Short ( vecsOutData[i] + vecsData[i] * dGainL );
                        vecsOutData[i + 1] = Double2Short ( vecsOutData[i + 1] + vecsData[i + 1] * dGainR );
                    }
                }
            }
        }
    }
}
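
// Note on Double2Short(): it is assumed (defined elsewhere, e.g. in the utility
// header) to clamp the double-precision mix sum to the valid int16_t range before
// the narrowing conversion, roughly along these lines:
//
//   inline short Double2Short ( const double dInput )
//   {
//       // hard clipping avoids integer wrap-around when many clients are mixed
//       if ( dInput >= _MAXSHORT ) { return _MAXSHORT; }
//       if ( dInput <= _MINSHORT ) { return _MINSHORT; }
//       return static_cast<short> ( dInput );
//   }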


CVector<CChannelInfo> CServer::CreateChannelList()
{
    CVector<CChannelInfo> vecChanInfo ( 0 );

    // look for connected channels
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( vecChannels[i].IsConnected() )
        {
            // append channel ID, IP address and channel info to the list
            vecChanInfo.Add ( CChannelInfo (
                i, // ID
                QHostAddress ( QHostAddress::Null ).toIPv4Address(), // use an invalid IP address (for privacy reasons, #316)
                vecChannels[i].GetChanInfo() ) );
        }
    }

    return vecChanInfo;
}
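
// Note on the IP field above: QHostAddress ( QHostAddress::Null ).toIPv4Address()
// yields 0, so every list entry a client receives carries 0.0.0.0 instead of the
// real peer address. An entry therefore only exposes the channel ID and the
// user-provided channel info; this is the privacy measure referenced by #316.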


void CServer::CreateAndSendChanListForAllConChannels()
{
    // create channel list
    CVector<CChannelInfo> vecChanInfo ( CreateChannelList() );

    // now send connected channels list to all connected clients
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( vecChannels[i].IsConnected() )
        {
            // send message
            vecChannels[i].CreateConClientListMes ( vecChanInfo );
        }
    }

    // create status HTML file if enabled
    if ( bWriteStatusHTMLFile )
    {
        WriteHTMLChannelList();
    }
}


void CServer::CreateAndSendChanListForThisChan ( const int iCurChanID )
{
    // create channel list
    CVector<CChannelInfo> vecChanInfo ( CreateChannelList() );

    // now send connected channels list to the channel with the ID "iCurChanID"
    vecChannels[iCurChanID].CreateConClientListMes ( vecChanInfo );
}


void CServer::CreateAndSendChatTextForAllConChannels ( const int      iCurChanID,
                                                       const QString& strChatText )
{
    // Create message which is sent to all connected clients -------------------
    // get the name of the sending client
    QString ChanName = vecChannels[iCurChanID].GetName();

    // add the current time and the name of the client at the beginning of the
    // message text and colorize the prefix per channel
    QString sCurColor = vstrChatColors[iCurChanID % vstrChatColors.Size()];

    const QString strActualMessageText =
        "<font color=""" + sCurColor + """>(" +
        QTime::currentTime().toString ( "hh:mm:ss AP" ) + ") <b>" + ChanName +
        "</b></font> " + strChatText;


    // Send chat text to all connected clients ---------------------------------
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( vecChannels[i].IsConnected() )
        {
            // send message
            vecChannels[i].CreateChatTextMes ( strActualMessageText );
        }
    }
}
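
// Example of a generated chat line (hypothetical name and color; the time format
// follows QTime::toString ( "hh:mm:ss AP" )):
//
//   <font color=mediumseagreen>(03:12:45 PM) <b>Alice</b></font> hello everyone
//
// The adjacent string literals "" in the expression above concatenate to nothing,
// so the color value is emitted without surrounding quotes, which Qt's rich-text
// handling accepts in practice.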


void CServer::CreateOtherMuteStateChanged ( const int  iCurChanID,
                                            const int  iOtherChanID,
                                            const bool bIsMuted )
{
    if ( vecChannels[iOtherChanID].IsConnected() )
    {
        // send message
        vecChannels[iOtherChanID].CreateMuteStateHasChangedMes ( iCurChanID, bIsMuted );
    }
}


int CServer::GetFreeChan()
{
    // look for a free channel
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( !vecChannels[i].IsConnected() )
        {
            return i;
        }
    }

    // no free channel found, return invalid ID
    return INVALID_CHANNEL_ID;
}


int CServer::GetNumberOfConnectedClients()
{
    int iNumConnClients = 0;

    // check all possible channels for connection status
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( vecChannels[i].IsConnected() )
        {
            // this channel is connected, increment counter
            iNumConnClients++;
        }
    }

    return iNumConnClients;
}


int CServer::FindChannel ( const CHostAddress& CheckAddr )
{
    CHostAddress InetAddr;

    // check all possible channels to see if the IP is already in use
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        // "GetAddress" provides a valid address and returns true only if the
        // channel is connected
        if ( vecChannels[i].GetAddress ( InetAddr ) )
        {
            // IP found, return channel number
            if ( InetAddr == CheckAddr )
            {
                return i;
            }
        }
    }

    // IP not found, return invalid ID
    return INVALID_CHANNEL_ID;
}
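
// Note: the CHostAddress comparison above is assumed to match both the IP
// address and the UDP port, so several clients behind the same NAT (same
// public IP, different ports) map to distinct channels.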


void CServer::OnProtcolMessageReceived ( int              iRecCounter,
                                         int              iRecID,
                                         CVector<uint8_t> vecbyMesBodyData,
                                         CHostAddress     RecHostAddr )
{
    Mutex.lock();
    {
        // find the channel with the received address
        const int iCurChanID = FindChannel ( RecHostAddr );

        // if the channel exists, apply the protocol message to the channel
        if ( iCurChanID != INVALID_CHANNEL_ID )
        {
            vecChannels[iCurChanID].PutProtcolData ( iRecCounter,
                                                     iRecID,
                                                     vecbyMesBodyData,
                                                     RecHostAddr );
        }
    }
    Mutex.unlock();
}
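
// Note: the braces between Mutex.lock() and Mutex.unlock() only provide visual
// scoping of the protected section; the lock itself is released by the explicit
// unlock call. A QMutexLocker on the stack would give the same protection with
// RAII semantics.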


bool CServer::PutAudioData ( const CVector<uint8_t>& vecbyRecBuf,
                             const int               iNumBytesRead,
                             const CHostAddress&     HostAdr,
                             int&                    iCurChanID )
{
    bool bNewConnection = false; // init return value
    bool bChanOK        = true;  // init with ok, might be overwritten

    Mutex.lock();
    {
        // Get channel ID ------------------------------------------------------
        // check if the sender address is already assigned to a channel
        iCurChanID = FindChannel ( HostAdr );

        if ( iCurChanID == INVALID_CHANNEL_ID )
        {
            // a new client is calling, look for a free channel
            iCurChanID = GetFreeChan();

            if ( iCurChanID != INVALID_CHANNEL_ID )
            {
                // initialize the current channel by storing the calling host address
                vecChannels[iCurChanID].SetAddress ( HostAdr );

                // reset channel info
                vecChannels[iCurChanID].ResetInfo();

                // reset the channel gains of the current channel and, at the same
                // time, reset the gains of this channel ID for all other channels
                for ( int i = 0; i < iMaxNumChannels; i++ )
                {
                    vecChannels[iCurChanID].SetGain ( i, 1.0 );

                    // other channels (we do not distinguish the case
                    // i == iCurChanID for simplicity)
                    vecChannels[i].SetGain ( iCurChanID, 1.0 );
                }
            }
            else
            {
                // no free channel available
                bChanOK = false;
            }
        }

        // Put received audio data in jitter buffer ----------------------------
        if ( bChanOK )
        {
            // put packet in socket buffer
            if ( vecChannels[iCurChanID].PutAudioData ( vecbyRecBuf,
                                                        iNumBytesRead,
                                                        HostAdr ) == PS_NEW_CONNECTION )
            {
                // in case we have a new connection, return this information
                bNewConnection = true;
            }
        }
    }
    Mutex.unlock();

    // report whether a new connection was established
    return bNewConnection;
}
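
// Typical call sequence for a previously unknown sender (summary of the code
// above): FindChannel() fails, GetFreeChan() assigns the lowest free channel,
// the peer address is stored and the channel info and all gains involving this
// channel are reset to 1.0; once the channel-level PutAudioData() reports
// PS_NEW_CONNECTION, the function returns true so the caller can trigger the
// new-connection handling (e.g. sending the current channel list).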


void CServer::GetConCliParam ( CVector<CHostAddress>& vecHostAddresses,
                               CVector<QString>&      vecsName,
                               CVector<int>&          veciJitBufNumFrames,
                               CVector<int>&          veciNetwFrameSizeFact )
{
    CHostAddress InetAddr;

    // init return values
    vecHostAddresses.Init      ( iMaxNumChannels );
    vecsName.Init              ( iMaxNumChannels );
    veciJitBufNumFrames.Init   ( iMaxNumChannels );
    veciNetwFrameSizeFact.Init ( iMaxNumChannels );

    // check all possible channels
    for ( int i = 0; i < iMaxNumChannels; i++ )
    {
        if ( vecChannels[i].GetAddress ( InetAddr ) )
        {
            // get requested data
            vecHostAddresses[i]      = InetAddr;
            vecsName[i]              = vecChannels[i].GetName();
            veciJitBufNumFrames[i]   = vecChannels[i].GetSockBufNumFrames();
            veciNetwFrameSizeFact[i] = vecChannels[i].GetNetwFrameSizeFact();
        }
    }
}


void CServer::StartStatusHTMLFileWriting ( const QString& strNewFileName,
                                           const QString& strNewServerNameWithPort )
{
    // set important parameters
    strServerHTMLFileListName = strNewFileName;
    strServerNameWithPort     = strNewServerNameWithPort;

    // set flag
    bWriteStatusHTMLFile = true;

    // write initial file
    WriteHTMLChannelList();
}


void CServer::WriteHTMLChannelList()
{
    // prepare file and stream
    QFile serverFileListFile ( strServerHTMLFileListName );

    if ( !serverFileListFile.open ( QIODevice::WriteOnly | QIODevice::Text ) )
    {
        return;
    }

    QTextStream streamFileOut ( &serverFileListFile );
    streamFileOut << strServerNameWithPort << endl << "<ul>" << endl;

    // write the list depending on the number of connected clients
    if ( GetNumberOfConnectedClients() == 0 )
    {
        // no clients are connected -> empty server
        streamFileOut << "  No client connected" << endl;
    }
    else
    {
        // write an entry for each connected client
        for ( int i = 0; i < iMaxNumChannels; i++ )
        {
            if ( vecChannels[i].IsConnected() )
            {
                streamFileOut << "  <li>" << vecChannels[i].GetName() << "</li>" << endl;
            }
        }
    }

    // finish list
    streamFileOut << "</ul>" << endl;
}
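
// Example of the generated status file contents (hypothetical server name and
// client names):
//
//   jamulus.example.com:22124
//   <ul>
//     <li>Alice</li>
//     <li>Bob</li>
//   </ul>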


void CServer::customEvent ( QEvent* pEvent )
{
    if ( pEvent->type() == QEvent::User + 11 )
    {
        const int iMessType = ( (CCustomEvent*) pEvent )->iMessType;

        switch ( iMessType )
        {
        case MS_PACKET_RECEIVED:
            // wake up the server if a packet was received;
            // if the server is still running, the call to Start() will have no effect
            Start();
            break;
        }
    }
}


/// @brief Compute frame peak level for each client
bool CServer::CreateLevelsForAllConChannels ( const int                        iNumClients,
                                              const CVector<int>&              vecNumAudioChannels,
                                              const CVector<CVector<int16_t> > vecvecsData,
                                              CVector<uint16_t>&               vecLevelsOut )
{
    int  i, j, k;
    bool bLevelsWereUpdated = false;

    // low frequency updates: only recompute the levels after
    // CHANNEL_LEVEL_UPDATE_INTERVAL frames have passed
    if ( iFrameCount > CHANNEL_LEVEL_UPDATE_INTERVAL )
    {
        iFrameCount        = 0;
        bLevelsWereUpdated = true;

        // init return vector with zeros since we mix all channels on that vector
        vecLevelsOut.Reset ( 0 );

        for ( j = 0; j < iNumClients; j++ )
        {
            // get a reference to the audio data
            const CVector<int16_t>& vecsData = vecvecsData[j];

            double dCurLevel = 0.0;

            if ( vecNumAudioChannels[j] == 1 )
            {
                // mono: only every third sample is inspected to save CPU time
                for ( i = 0; i < iServerFrameSizeSamples; i += 3 )
                {
                    dCurLevel = std::max ( dCurLevel, fabs ( static_cast<double> ( vecsData[i] ) ) );
                }
            }
            else
            {
                // stereo: apply stereo-to-mono attenuation (again only every third sample pair)
                for ( i = 0, k = 0; i < iServerFrameSizeSamples; i += 3, k += 6 )
                {
                    const double dMix = ( static_cast<double> ( vecsData[k] ) + vecsData[k + 1] ) / 2;
                    dCurLevel         = std::max ( dCurLevel, fabs ( dMix ) );
                }
            }

            // smoothing: simple peak hold with a decay of one half per update
            const int iChId = vecChanIDsCurConChan[j];
            dCurLevel       = std::max ( dCurLevel, vecChannels[iChId].GetPrevLevel() * 0.5 );
            vecChannels[iChId].SetPrevLevel ( dCurLevel );

            // logarithmic measure
            double dCurSigLevel = CStereoSignalLevelMeter::CalcLogResult ( dCurLevel );

            // map to signal level meter
            dCurSigLevel -= LOW_BOUND_SIG_METER;
            dCurSigLevel *= NUM_STEPS_LED_BAR / ( UPPER_BOUND_SIG_METER - LOW_BOUND_SIG_METER );

            if ( dCurSigLevel < 0 )
            {
                dCurSigLevel = 0;
            }

            vecLevelsOut[j] = static_cast<uint16_t> ( ceil ( dCurSigLevel ) );
        }
    }

    // increment the frame counter needed for the low frequency update trigger
    iFrameCount++;

    if ( bUseDoubleSystemFrameSize )
    {
        // an additional increment is needed for the double frame size to arrive
        // at the same time interval
        iFrameCount++;
    }

    return bLevelsWereUpdated;
}
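
// Worked example for the level mapping above (assumed values, for illustration
// only: LOW_BOUND_SIG_METER = -50 dB, UPPER_BOUND_SIG_METER = 0 dB,
// NUM_STEPS_LED_BAR = 8; CalcLogResult() is assumed to return the peak in dBFS):
// a peak of -20 dBFS maps to
//   dCurSigLevel = ( -20 - ( -50 ) ) * 8 / ( 0 - ( -50 ) ) = 30 * 8 / 50 = 4.8
// and ceil ( 4.8 ) = 5, i.e. 5 of the 8 meter steps are reported for this client.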