/******************************************************************************\
* Copyright (c) 2004-2020
*
* Author(s):
* Volker Fischer
*
******************************************************************************
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*
\******************************************************************************/
#include "server.h"
// CHighPrecisionTimer implementation ******************************************
#ifdef _WIN32
CHighPrecisionTimer::CHighPrecisionTimer ( const bool bNewUseDoubleSystemFrameSize ) :
bUseDoubleSystemFrameSize ( bNewUseDoubleSystemFrameSize )
{
// some error checking: the high precision timer implementation only
// supports 64 and 128 samples frame sizes at a 48 kHz sampling rate
#if ( SYSTEM_FRAME_SIZE_SAMPLES != 64 ) && ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES != 128 )
# error "Only system frame size of 64 and 128 samples is supported by this module"
#endif
#if ( SYSTEM_SAMPLE_RATE_HZ != 48000 )
# error "Only a system sample rate of 48 kHz is supported by this module"
#endif
// Qt only supports a minimum timer resolution of 1 ms, but the server
// requires a timer interval of 2.666 ms for a 128 samples frame size
// at 48 kHz sampling rate.
// To approximate this interval, we use a timer with 2 ms resolution for
// 128 samples frame size and 1 ms resolution for 64 samples frame size.
// The actual frame timer is then fired whenever the error to the
// required interval is at its minimum.
veciTimeOutIntervals.Init ( 3 );
// for 128 sample frame size at 48 kHz sampling rate with 2 ms timer resolution:
// actual intervals:  0.0 2.666 5.333 8.0
// quantized to 2 ms: 0   2     6     8 (0)
// for 64 sample frame size at 48 kHz sampling rate with 1 ms timer resolution:
// actual intervals:  0.0 1.333 2.666 4.0
// quantized to 1 ms: 0   1     3     4 (0)
veciTimeOutIntervals[0] = 0;
veciTimeOutIntervals[1] = 1;
veciTimeOutIntervals[2] = 0;
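// (the interval counts follow from the quantized firing times: with the
// 2 ms timer the frames are due after 1, 2 and 1 timer ticks, and with
// the 1 ms timer likewise after 1, 2 and 1 ticks, hence the pattern
// 0, 1, 0 where each entry is the number of extra ticks to wait)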
// connect timer timeout signal
QObject::connect ( &Timer, &QTimer::timeout,
this, &CHighPrecisionTimer::OnTimer );
}
void CHighPrecisionTimer::Start()
{
// reset position pointer and counter
iCurPosInVector = 0;
iIntervalCounter = 0;
if ( bUseDoubleSystemFrameSize )
{
// start internal timer with 2 ms resolution for 128 samples frame size
Timer.start ( 2 );
}
else
{
// start internal timer with 1 ms resolution for 64 samples frame size
Timer.start ( 1 );
}
}
void CHighPrecisionTimer::Stop()
{
// stop timer
Timer.stop();
}
void CHighPrecisionTimer::OnTimer()
{
// check if the required number of base timer intervals has elapsed
if ( veciTimeOutIntervals[iCurPosInVector] == iIntervalCounter )
{
// reset interval counter
iIntervalCounter = 0;
// go to next position in vector, take care of wrap around
iCurPosInVector++;
if ( iCurPosInVector == veciTimeOutIntervals.Size() )
{
iCurPosInVector = 0;
}
// minimum time error to actual required timer interval is reached,
// emit signal for server
emit timeout();
}
else
{
// next high precision timer interval
iIntervalCounter++;
}
}
#else // Mac and Linux
CHighPrecisionTimer::CHighPrecisionTimer ( const bool bUseDoubleSystemFrameSize ) :
bRun ( false )
{
// calculate delay in ns
uint64_t iNsDelay;
if ( bUseDoubleSystemFrameSize )
{
iNsDelay = ( (uint64_t) DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * 1000000000 ) /
(uint64_t) SYSTEM_SAMPLE_RATE_HZ; // in ns
}
else
{
iNsDelay = ( (uint64_t) SYSTEM_FRAME_SIZE_SAMPLES * 1000000000 ) /
(uint64_t) SYSTEM_SAMPLE_RATE_HZ; // in ns
}
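// (e.g. for the double frame size: 128 * 10^9 / 48000 = 2666666 ns,
// i.e. the 2.666 ms frame interval; for 64 samples it is 1333333 ns)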
#if defined ( __APPLE__ ) || defined ( __MACOSX )
// calculate delay in mach absolute time
struct mach_timebase_info timeBaseInfo;
mach_timebase_info ( &timeBaseInfo );
Delay = ( iNsDelay * (uint64_t) timeBaseInfo.denom ) /
(uint64_t) timeBaseInfo.numer;
#else
// set delay
Delay = iNsDelay;
#endif
}
void CHighPrecisionTimer::Start()
{
// only start if not already running
if ( !bRun )
{
// set run flag
bRun = true;
// set initial end time
#if defined ( __APPLE__ ) || defined ( __MACOSX )
NextEnd = mach_absolute_time() + Delay;
#else
clock_gettime ( CLOCK_MONOTONIC, &NextEnd );
NextEnd.tv_nsec += Delay;
if ( NextEnd.tv_nsec >= 1000000000L )
{
NextEnd.tv_sec++;
NextEnd.tv_nsec -= 1000000000L;
}
#endif
// start thread
QThread::start ( QThread::TimeCriticalPriority );
}
}
void CHighPrecisionTimer::Stop()
{
// set flag so that thread can leave the main loop
bRun = false;
// give thread some time to terminate
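// (the run() loop checks bRun after each frame period, so the thread
// exits on its next wakeup; wait() blocks for at most five seconds)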
wait ( 5000 );
}
void CHighPrecisionTimer::run()
{
// loop until the thread shall be terminated
while ( bRun )
{
// call processing routine by firing a signal
// TODO by emitting a signal we leave the high priority thread -> maybe use some
// other connection type to have something like a true callback, e.g.
// "Qt::DirectConnection" -> Can this work?
emit timeout();
// now wait until the next buffer shall be processed (we
// use the "increment method" to make sure we do not introduce
// a timing drift)
#if defined ( __APPLE__ ) || defined ( __MACOSX )
mach_wait_until ( NextEnd );
NextEnd += Delay;
#else
clock_nanosleep ( CLOCK_MONOTONIC,
TIMER_ABSTIME,
&NextEnd,
NULL );
NextEnd.tv_nsec += Delay;
if ( NextEnd.tv_nsec >= 1000000000L )
{
NextEnd.tv_sec++;
NextEnd.tv_nsec -= 1000000000L;
}
#endif
}
}
#endif
// CServer implementation ******************************************************
CServer::CServer ( const int iNewMaxNumChan,
const int iMaxDaysHistory,
const QString& strLoggingFileName,
const quint16 iPortNumber,
const QString& strHTMLStatusFileName,
const QString& strHistoryFileName,
const QString& strServerNameForHTMLStatusFile,
const QString& strCentralServer,
const QString& strServerInfo,
const QString& strNewWelcomeMessage,
const QString& strRecordingDirName,
const bool bNCentServPingServerInList,
const bool bNDisconnectAllClientsOnQuit,
const bool bNUseDoubleSystemFrameSize,
const ELicenceType eNLicenceType ) :
bUseDoubleSystemFrameSize ( bNUseDoubleSystemFrameSize ),
iMaxNumChannels ( iNewMaxNumChan ),
Socket ( this, iPortNumber ),
Logging ( iMaxDaysHistory ),
iFrameCount ( 0 ),
bWriteStatusHTMLFile ( false ),
HighPrecisionTimer ( bNUseDoubleSystemFrameSize ),
ServerListManager ( iPortNumber,
strCentralServer,
strServerInfo,
iNewMaxNumChan,
bNCentServPingServerInList,
&ConnLessProtocol ),
bAutoRunMinimized ( false ),
eLicenceType ( eNLicenceType ),
bDisconnectAllClientsOnQuit ( bNDisconnectAllClientsOnQuit ),
pSignalHandler ( CSignalHandler::getSingletonP() )
{
int iOpusError;
int i;
// create a mono and a stereo OPUS encoder/decoder for each channel
// (must be done before enabling the channels)
for ( i = 0; i < iMaxNumChannels; i++ )
{
// init OPUS -----------------------------------------------------------
OpusMode[i] = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES,
&iOpusError );
Opus64Mode[i] = opus_custom_mode_create ( SYSTEM_SAMPLE_RATE_HZ,
SYSTEM_FRAME_SIZE_SAMPLES,
&iOpusError );
// init audio encoders and decoders
OpusEncoderMono[i] = opus_custom_encoder_create ( OpusMode[i], 1, &iOpusError ); // mono encoder legacy
OpusDecoderMono[i] = opus_custom_decoder_create ( OpusMode[i], 1, &iOpusError ); // mono decoder legacy
OpusEncoderStereo[i] = opus_custom_encoder_create ( OpusMode[i], 2, &iOpusError ); // stereo encoder legacy
OpusDecoderStereo[i] = opus_custom_decoder_create ( OpusMode[i], 2, &iOpusError ); // stereo decoder legacy
Opus64EncoderMono[i] = opus_custom_encoder_create ( Opus64Mode[i], 1, &iOpusError ); // mono encoder OPUS64
Opus64DecoderMono[i] = opus_custom_decoder_create ( Opus64Mode[i], 1, &iOpusError ); // mono decoder OPUS64
Opus64EncoderStereo[i] = opus_custom_encoder_create ( Opus64Mode[i], 2, &iOpusError ); // stereo encoder OPUS64
Opus64DecoderStereo[i] = opus_custom_decoder_create ( Opus64Mode[i], 2, &iOpusError ); // stereo decoder OPUS64
// we require a constant bit rate
opus_custom_encoder_ctl ( OpusEncoderMono[i], OPUS_SET_VBR ( 0 ) );
opus_custom_encoder_ctl ( OpusEncoderStereo[i], OPUS_SET_VBR ( 0 ) );
opus_custom_encoder_ctl ( Opus64EncoderMono[i], OPUS_SET_VBR ( 0 ) );
opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_VBR ( 0 ) );
// for 64 samples frame size we have to adjust the PLC behavior to avoid loud artifacts
opus_custom_encoder_ctl ( Opus64EncoderMono[i], OPUS_SET_PACKET_LOSS_PERC ( 35 ) );
opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_PACKET_LOSS_PERC ( 35 ) );
// we want as low delay as possible
opus_custom_encoder_ctl ( OpusEncoderMono[i], OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
opus_custom_encoder_ctl ( OpusEncoderStereo[i], OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
opus_custom_encoder_ctl ( Opus64EncoderMono[i], OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
opus_custom_encoder_ctl ( Opus64EncoderStereo[i], OPUS_SET_APPLICATION ( OPUS_APPLICATION_RESTRICTED_LOWDELAY ) );
// set encoder low complexity for legacy 128 samples frame size
opus_custom_encoder_ctl ( OpusEncoderMono[i], OPUS_SET_COMPLEXITY ( 1 ) );
opus_custom_encoder_ctl ( OpusEncoderStereo[i], OPUS_SET_COMPLEXITY ( 1 ) );
// init double-to-normal frame size conversion buffers -----------------
// use worst case memory initialization to avoid allocating memory in
// the time-critical thread
DoubleFrameSizeConvBufIn[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
DoubleFrameSizeConvBufOut[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
}
// define colors for chat window identifiers
vstrChatColors.Init ( 6 );
vstrChatColors[0] = "mediumblue";
vstrChatColors[1] = "red";
vstrChatColors[2] = "darkorchid";
vstrChatColors[3] = "green";
vstrChatColors[4] = "maroon";
vstrChatColors[5] = "coral";
// set the server frame size
if ( bUseDoubleSystemFrameSize )
{
iServerFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;
}
else
{
iServerFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;
}
// To avoid audio glitches, no memory must be allocated in the entire
// realtime timer audio processing routine, including ProcessData. Since
// we do not know the required sizes for the vectors, we allocate memory
// for the worst case here:
// allocate worst case memory for the temporary vectors
vecChanIDsCurConChan.Init ( iMaxNumChannels );
vecvecdGains.Init ( iMaxNumChannels );
vecvecdPannings.Init ( iMaxNumChannels );
vecvecsData.Init ( iMaxNumChannels );
vecvecsSendData.Init ( iMaxNumChannels );
vecvecbyCodedData.Init ( iMaxNumChannels );
vecNumAudioChannels.Init ( iMaxNumChannels );
vecNumFrameSizeConvBlocks.Init ( iMaxNumChannels );
vecUseDoubleSysFraSizeConvBuf.Init ( iMaxNumChannels );
vecAudioComprType.Init ( iMaxNumChannels );
for ( i = 0; i < iMaxNumChannels; i++ )
{
// init vectors storing information of all channels
vecvecdGains[i].Init ( iMaxNumChannels );
vecvecdPannings[i].Init ( iMaxNumChannels );
// we always use stereo audio buffers (which is the worst case)
vecvecsData[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
// (note that we only allocate iMaxNumChannels buffers for the send
// and coded data because of the OMP implementation)
vecvecsSendData[i].Init ( 2 /* stereo */ * DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES /* worst case buffer size */ );
// allocate worst case memory for the coded data
vecvecbyCodedData[i].Init ( MAX_SIZE_BYTES_NETW_BUF );
}
// allocate worst case memory for the channel levels
vecChannelLevels.Init ( iMaxNumChannels );
// enable history graph (if requested)
if ( !strHistoryFileName.isEmpty() )
{
Logging.EnableHistory ( strHistoryFileName );
}
// enable logging (if requested)
if ( !strLoggingFileName.isEmpty() )
{
// in case the history is enabled and a logging file name is
// given, parse the logging file for old entries which are then
// added to the history on software startup
if ( !strHistoryFileName.isEmpty() )
{
Logging.ParseLogFile ( strLoggingFileName );
}
Logging.Start ( strLoggingFileName );
}
// HTML status file writing
if ( !strHTMLStatusFileName.isEmpty() )
{
QString strCurServerNameForHTMLStatusFile = strServerNameForHTMLStatusFile;
// if server name is empty, substitute a default name
if ( strCurServerNameForHTMLStatusFile.isEmpty() )
{
strCurServerNameForHTMLStatusFile = "[server address]";
}
// (the static cast to integer of the port number is required so that it
// works correctly under Linux)
StartStatusHTMLFileWriting ( strHTMLStatusFileName,
strCurServerNameForHTMLStatusFile + ":" +
QString::number ( static_cast<int> ( iPortNumber ) ) );
}
// manage welcome message: if the welcome message is a valid link to a local
// file, the content of that file is used as the welcome message (#361)
SetWelcomeMessage ( strNewWelcomeMessage ); // first use given text, may be overwritten
if ( QFileInfo ( strNewWelcomeMessage ).exists() )
{
QFile file ( strNewWelcomeMessage );
if ( file.open ( QIODevice::ReadOnly | QIODevice::Text ) )
{
// use entire file content for the welcome message
SetWelcomeMessage ( file.readAll() );
}
}
// enable jam recording (if requested) - this kicks off the recorder
// thread (note that the jam recorder needs the frame size, which is
// passed to it in the SetRecordingDir() function)
SetRecordingDir ( strRecordingDirName );
// enable all channels (for the server, all channels must be enabled
// for the entire lifetime of the software)
for ( i = 0; i < iMaxNumChannels; i++ )
{
vecChannels[i].SetEnable ( true );
}
// Connections -------------------------------------------------------------
// connect timer timeout signal
QObject::connect ( &HighPrecisionTimer, &CHighPrecisionTimer::timeout,
this, &CServer::OnTimer );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLMessReadyForSending,
this, &CServer::OnSendCLProtMessage );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingReceived,
this, &CServer::OnCLPingReceived );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLPingWithNumClientsReceived,
this, &CServer::OnCLPingWithNumClientsReceived );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLRegisterServerReceived,
this, &CServer::OnCLRegisterServerReceived );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLRegisterServerExReceived,
this, &CServer::OnCLRegisterServerExReceived );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLUnregisterServerReceived,
this, &CServer::OnCLUnregisterServerReceived );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqServerList,
this, &CServer::OnCLReqServerList );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLRegisterServerResp,
this, &CServer::OnCLRegisterServerResp );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLSendEmptyMes,
this, &CServer::OnCLSendEmptyMes );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLDisconnection,
this, &CServer::OnCLDisconnection );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqVersionAndOS,
this, &CServer::OnCLReqVersionAndOS );
QObject::connect ( &ConnLessProtocol, &CProtocol::CLReqConnClientsList,
this, &CServer::OnCLReqConnClientsList );
QObject::connect ( &ServerListManager, &CServerListManager::SvrRegStatusChanged,
this, &CServer::SvrRegStatusChanged );
QObject::connect ( &JamController, &recorder::CJamController::RestartRecorder,
this, &CServer::RestartRecorder );
QObject::connect ( &JamController, &recorder::CJamController::StopRecorder,
this, &CServer::StopRecorder );
QObject::connect ( &JamController, &recorder::CJamController::RecordingSessionStarted,
this, &CServer::RecordingSessionStarted );
QObject::connect ( &JamController, &recorder::CJamController::EndRecorderThread,
this, &CServer::EndRecorderThread );
QObject::connect ( this, &CServer::Stopped,
&JamController, &recorder::CJamController::Stopped );
QObject::connect ( this, &CServer::ClientDisconnected,
&JamController, &recorder::CJamController::ClientDisconnected );
qRegisterMetaType<CVector<int16_t>> ( "CVector<int16_t>" );
QObject::connect ( this, &CServer::AudioFrame,
&JamController, &recorder::CJamController::AudioFrame );
QObject::connect ( QCoreApplication::instance(), &QCoreApplication::aboutToQuit,
this, &CServer::OnAboutToQuit );
QObject::connect ( pSignalHandler, &CSignalHandler::HandledSignal,
this, &CServer::OnHandledSignal );
connectChannelSignalsToServerSlots<MAX_NUM_CHANNELS>();
// start the socket (it is important to start the socket after all
// initializations and connections)
Socket.Start();
}
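// The following template recursively instantiates the signal/slot
// connections for all channels at compile time: the initial call with
// MAX_NUM_CHANNELS connects channel MAX_NUM_CHANNELS - 1 and recurses
// with slotId - 1 until the specialization for 0 terminates the chain.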
template<unsigned int slotId>
inline void CServer::connectChannelSignalsToServerSlots()
{
int iCurChanID = slotId - 1;
void ( CServer::* pOnSendProtMessCh )( CVector<uint8_t> ) =
&CServerSlots<slotId>::OnSendProtMessCh;
void ( CServer::* pOnReqConnClientsListCh )() =
&CServerSlots<slotId>::OnReqConnClientsListCh;
void ( CServer::* pOnChatTextReceivedCh )( QString ) =
&CServerSlots<slotId>::OnChatTextReceivedCh;
void ( CServer::* pOnMuteStateHasChangedCh )( int, bool ) =
&CServerSlots<slotId>::OnMuteStateHasChangedCh;
void ( CServer::* pOnServerAutoSockBufSizeChangeCh )( int ) =
&CServerSlots<slotId>::OnServerAutoSockBufSizeChangeCh;
// send message
QObject::connect ( &vecChannels[iCurChanID], &CChannel::MessReadyForSending,
this, pOnSendProtMessCh );
// request connected clients list
QObject::connect ( &vecChannels[iCurChanID], &CChannel::ReqConnClientsList,
this, pOnReqConnClientsListCh );
// channel info has changed
QObject::connect ( &vecChannels[iCurChanID], &CChannel::ChanInfoHasChanged,
this, &CServer::CreateAndSendChanListForAllConChannels );
// chat text received
QObject::connect ( &vecChannels[iCurChanID], &CChannel::ChatTextReceived,
this, pOnChatTextReceivedCh );
// other mute state has changed
QObject::connect ( &vecChannels[iCurChanID], &CChannel::MuteStateHasChanged,
this, pOnMuteStateHasChangedCh );
// auto socket buffer size change
QObject::connect ( &vecChannels[iCurChanID], &CChannel::ServerAutoSockBufSizeChange,
this, pOnServerAutoSockBufSizeChangeCh );
connectChannelSignalsToServerSlots<slotId - 1>();
}
template<>
inline void CServer::connectChannelSignalsToServerSlots<0>() {}
void CServer::CreateAndSendJitBufMessage ( const int iCurChanID,
const int iNNumFra )
{
vecChannels[iCurChanID].CreateJitBufMes ( iNNumFra );
}
CServer::~CServer()
{
for ( int i = 0; i < iMaxNumChannels; i++ )
{
// free audio encoders and decoders
opus_custom_encoder_destroy ( OpusEncoderMono[i] );
opus_custom_decoder_destroy ( OpusDecoderMono[i] );
opus_custom_encoder_destroy ( OpusEncoderStereo[i] );
opus_custom_decoder_destroy ( OpusDecoderStereo[i] );
opus_custom_encoder_destroy ( Opus64EncoderMono[i] );
opus_custom_decoder_destroy ( Opus64DecoderMono[i] );
opus_custom_encoder_destroy ( Opus64EncoderStereo[i] );
opus_custom_decoder_destroy ( Opus64DecoderStereo[i] );
// free audio modes
opus_custom_mode_destroy ( OpusMode[i] );
opus_custom_mode_destroy ( Opus64Mode[i] );
}
}
void CServer::SendProtMessage ( int iChID, CVector<uint8_t> vecMessage )
{
// the protocol asks us to send this message,
// so send it through the network
Socket.SendPacket ( vecMessage, vecChannels[iChID].GetAddress() );
}
void CServer::OnNewConnection ( int iChID,
CHostAddress RecHostAddr )
{
// inform the client about its own ID at the server (note that this
// must be the first message to be sent for a new connection)
vecChannels[iChID].CreateClientIDMes ( iChID );
// on a new connection we query the network transport properties for the
// audio packets (to use the correct network block size and audio
// compression properties, etc.)
vecChannels[iChID].CreateReqNetwTranspPropsMes();
// this is a new connection, query the jitter buffer size we shall use
// for this client (note that on a new connection the client sends its
// jitter buffer size by default, but we may have reached a state where
// this did not happen, e.g. because of network trouble, or because the
// client or server thinks the connection is still active)
vecChannels[iChID].CreateReqJitBufMes();
// A new client has connected to the server, so the channel list
// at all clients has to be updated. This is done by sending
// a channel name request to the client, which causes a channel
// name message to be transmitted to the server. When the server
// receives this message, the channel list is automatically
// updated (implicitly).
//
// Usually it is not required to send the channel list to the
// client currently connecting since it automatically requests
// the channel list on a new connection (as a result, it will
// usually get the list twice, which has no impact on functionality
// but slightly increases the network load). But in case the
// client thinks it is still connected while the server was
// restarted, it is important that we send the channel list
// at this place.
vecChannels[iChID].CreateReqChanInfoMes();
// send welcome message (if enabled)
MutexWelcomeMessage.lock();
{
if ( !strWelcomeMessage.isEmpty() )
{
// create formatted server welcome message and send it just to
// the client which just connected to the server
const QString strWelcomeMessageFormated =
"<b>Server Welcome Message:</b> " + strWelcomeMessage;
vecChannels[iChID].CreateChatTextMes ( strWelcomeMessageFormated );
}
}
MutexWelcomeMessage.unlock();
// send licence request message (if enabled)
if ( eLicenceType != LT_NO_LICENCE )
{
vecChannels[iChID].CreateLicReqMes ( eLicenceType );
}
// send version info (for, e.g., feature activation in the client)
vecChannels[iChID].CreateVersionAndOSMes();
// send recording state message on connection
vecChannels[iChID].CreateRecorderStateMes ( JamController.GetRecorderState() );
// reset the conversion buffers
DoubleFrameSizeConvBufIn[iChID].Reset();
DoubleFrameSizeConvBufOut[iChID].Reset();
// logging of new connected channel
Logging.AddNewConnection ( RecHostAddr.InetAddr );
}
void CServer::OnServerFull ( CHostAddress RecHostAddr )
{
// inform the calling client that no channel is free
ConnLessProtocol.CreateCLServerFullMes ( RecHostAddr );
}
void CServer::OnSendCLProtMessage ( CHostAddress InetAddr,
CVector<uint8_t> vecMessage )
{
// the protocol asks us to send this message,
// so send it through the network
Socket.SendPacket ( vecMessage, InetAddr );
}
void CServer::OnProtcolCLMessageReceived ( int iRecID,
CVector<uint8_t> vecbyMesBodyData,
CHostAddress RecHostAddr )
{
// connectionless messages are always processed
ConnLessProtocol.ParseConnectionLessMessageBody ( vecbyMesBodyData,
iRecID,
RecHostAddr );
}
void CServer::OnCLDisconnection ( CHostAddress InetAddr )
{
// check if the given address is actually a client connected to this
// server; if yes, disconnect it
const int iCurChanID = FindChannel ( InetAddr );
if ( iCurChanID != INVALID_CHANNEL_ID )
{
vecChannels[iCurChanID].Disconnect();
}
}
void CServer::OnAboutToQuit()
{
// if enabled, disconnect all clients on quit
if ( bDisconnectAllClientsOnQuit )
{
Mutex.lock();
{
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
ConnLessProtocol.CreateCLDisconnection ( vecChannels[i].GetAddress() );
}
}
}
Mutex.unlock(); // release mutex
}
Stop();
// if server was registered at the central server, unregister on shutdown
if ( GetServerListEnabled() )
{
UnregisterSlaveServer();
}
}
void CServer::OnHandledSignal ( int sigNum )
{
// show the signal number on the command line (note that this does not work for the Windows command line)
// TODO we should use the ConsoleWriterFactory() instead of qDebug()
qDebug() << "OnHandledSignal: " << sigNum;
#ifdef _WIN32
// Windows does not actually get OnHandledSignal triggered
QCoreApplication::instance()->exit();
Q_UNUSED ( sigNum )
#else
switch ( sigNum )
{
case SIGUSR1:
RequestNewRecording();
break;
case SIGUSR2:
SetEnableRecording ( !JamController.GetRecordingEnabled() );
break;
case SIGINT:
case SIGTERM:
// This should trigger OnAboutToQuit
QCoreApplication::instance()->exit();
break;
default:
break;
}
#endif
}
void CServer::Start()
{
// only start if not already running
if ( !IsRunning() )
{
// start timer
HighPrecisionTimer.Start();
// emit start signal
emit Started();
}
}
void CServer::Stop()
{
// On Mac we have the problem that the timer shutdown might take some
// time, causing a lot of "server stopped" entries in the log. The
// following condition prevents this and does not hurt on the other
// OSs either.
if ( IsRunning() )
{
// stop timer
HighPrecisionTimer.Stop();
// logging (add "server stopped" logging entry)
Logging.AddServerStopped();
// emit stopped signal
emit Stopped();
}
}
void CServer::OnTimer()
{
/*
static CTimingMeas JitterMeas ( 1000, "test2.dat" ); JitterMeas.Measure(); // TEST do a timer jitter measurement
*/
// Get data from all connected clients -------------------------------------
// some inits
int iUnused;
int iNumClients = 0; // init connected client counter
bool bChannelIsNowDisconnected = false;
bool bUpdateChannelLevels = false;
bool bSendChannelLevels = false;
// Make put and get calls thread safe. Do not forget to unlock mutex
// afterwards!
Mutex.lock();
{
// first, get number and IDs of connected channels
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// add ID and increment counter (note that the vector length is
// according to the worst case scenario, if the number of
// connected clients is less, only a subset of elements of this
// vector are actually used and the others are dummy elements)
vecChanIDsCurConChan[iNumClients] = i;
iNumClients++;
}
}
// process connected channels
for ( int i = 0; i < iNumClients; i++ )
{
int iClientFrameSizeSamples = 0; // initialize to avoid a compiler warning
OpusCustomDecoder* CurOpusDecoder;
unsigned char* pCurCodedData;
// get actual ID of current channel
const int iCurChanID = vecChanIDsCurConChan[i];
// get and store number of audio channels and compression type
vecNumAudioChannels[i] = vecChannels[iCurChanID].GetNumAudioChannels();
vecAudioComprType[i] = vecChannels[iCurChanID].GetAudioCompressionType();
// get info about required frame size conversion properties
vecUseDoubleSysFraSizeConvBuf[i] = ( !bUseDoubleSystemFrameSize && ( vecAudioComprType[i] == CT_OPUS ) );
if ( bUseDoubleSystemFrameSize && ( vecAudioComprType[i] == CT_OPUS64 ) )
{
vecNumFrameSizeConvBlocks[i] = 2;
}
else
{
vecNumFrameSizeConvBlocks[i] = 1;
}
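// In summary, the frame size conversion cases are:
// - server at 64 samples, client CT_OPUS (128 samples): buffer the
//   large frame via the conversion buffers updated below
// - server at 128 samples, client CT_OPUS64 (64 samples): process two
//   small blocks per server frame (vecNumFrameSizeConvBlocks = 2)
// - matching frame sizes: no conversion required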
// update conversion buffer size (nothing will happen if the size stays the same)
if ( vecUseDoubleSysFraSizeConvBuf[i] )
{
DoubleFrameSizeConvBufIn[iCurChanID].SetBufferSize ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
DoubleFrameSizeConvBufOut[iCurChanID].SetBufferSize ( DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
}
// select the opus decoder and raw audio frame length
if ( vecAudioComprType[i] == CT_OPUS )
{
iClientFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;
if ( vecNumAudioChannels[i] == 1 )
{
CurOpusDecoder = OpusDecoderMono[iCurChanID];
}
else
{
CurOpusDecoder = OpusDecoderStereo[iCurChanID];
}
}
else if ( vecAudioComprType[i] == CT_OPUS64 )
{
iClientFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;
if ( vecNumAudioChannels[i] == 1 )
{
CurOpusDecoder = Opus64DecoderMono[iCurChanID];
}
else
{
CurOpusDecoder = Opus64DecoderStereo[iCurChanID];
}
}
else
{
CurOpusDecoder = nullptr;
}
// get gains of all connected channels
for ( int j = 0; j < iNumClients; j++ )
{
// The second index of "vecvecdGains" does not represent
// the channel ID! Therefore we have to use
// "vecChanIDsCurConChan" to query the IDs of the currently
// connected channels
vecvecdGains[i][j] = vecChannels[iCurChanID].GetGain ( vecChanIDsCurConChan[j] );
// consider audio fade-in
vecvecdGains[i][j] *= vecChannels[vecChanIDsCurConChan[j]].GetFadeInGain();
// panning
vecvecdPannings[i][j] = vecChannels[iCurChanID].GetPan ( vecChanIDsCurConChan[j] );
}
// flag for updating channel levels (if at least one client wants it)
if ( vecChannels[iCurChanID].ChannelLevelsRequired() )
{
bUpdateChannelLevels = true;
}
// If the server frame size is smaller than the received OPUS frame size, we need a conversion
// buffer which stores the large frame.
// Note that we have a shortcut here: if the conversion buffer is not needed, the boolean flag
// is false and the Get() function is not called at all, so in that case we do not spend any
// time in the function but go directly into the if body.
if ( ( vecUseDoubleSysFraSizeConvBuf[i] == 0 ) ||
!DoubleFrameSizeConvBufIn[iCurChanID].Get ( vecvecsData[i], SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] ) )
{
// get current number of OPUS coded bytes
const int iCeltNumCodedBytes = vecChannels[iCurChanID].GetNetwFrameSize();
for ( int iB = 0; iB < vecNumFrameSizeConvBlocks[i]; iB++ )
{
// get data
const EGetDataStat eGetStat = vecChannels[iCurChanID].GetData ( vecvecbyCodedData[i], iCeltNumCodedBytes );
// if the channel was just disconnected, set the flag so that the
// connected client list is sent to all other clients, and emit the
// client disconnected signal
if ( eGetStat == GS_CHAN_NOW_DISCONNECTED )
{
if ( JamController.GetRecordingEnabled() )
{
emit ClientDisconnected ( iCurChanID ); // TODO do this outside the mutex lock?
}
bChannelIsNowDisconnected = true;
}
// get pointer to coded data
if ( eGetStat == GS_BUFFER_OK )
{
pCurCodedData = &vecvecbyCodedData[i][0];
}
else
{
// for lost packets use null pointer as coded input data
pCurCodedData = nullptr;
}
// OPUS decode received data stream
if ( CurOpusDecoder != nullptr )
{
iUnused = opus_custom_decode ( CurOpusDecoder,
pCurCodedData,
iCeltNumCodedBytes,
&vecvecsData[i][iB * SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i]],
iClientFrameSizeSamples );
}
}
// a new large frame is ready, if the conversion buffer is required, put it in the buffer
// and read out the small frame size immediately for further processing
if ( vecUseDoubleSysFraSizeConvBuf[i] != 0 )
{
DoubleFrameSizeConvBufIn[iCurChanID].PutAll ( vecvecsData[i] );
DoubleFrameSizeConvBufIn[iCurChanID].Get ( vecvecsData[i], SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
}
}
}
// a channel is now disconnected, take action on it
if ( bChannelIsNowDisconnected )
{
// update channel list for all currently connected clients
CreateAndSendChanListForAllConChannels();
}
}
Mutex.unlock(); // release mutex
// Process data ------------------------------------------------------------
// Check if at least one client is connected. If not, stop server until
// one client is connected.
if ( iNumClients > 0 )
{
// calculate levels for all connected clients
if ( bUpdateChannelLevels )
{
bSendChannelLevels = CreateLevelsForAllConChannels ( iNumClients,
vecNumAudioChannels,
vecvecsData,
vecChannelLevels );
}
#ifdef USE_OMP
// TODO This does not work as expected, the CPU is at high levels even if there is not much work
// to be done, so we have an issue using OMP in the OnTimer() function. Even if #pragma omp parallel for
// is used on a trivial for loop for testing, the CPU usage still goes to very high values -> What is
// the cause of this issue?
// NOTE Most probably it is the overhead of thread creation/destruction which causes this effect.
// See https://software.intel.com/content/www/us/en/develop/articles/performance-obstacles-for-threading-how-do-they-affect-openmp-code.html
// "[...] overhead numbers are high enough that it doesn't make sense to thread that code. In those cases, we're better off leaving the code in its original serial form."
# pragma omp parallel for
#endif
for ( int i = 0; i < iNumClients; i++ )
{
int iClientFrameSizeSamples = 0; // initialize to avoid a compiler warning
OpusCustomEncoder* CurOpusEncoder;
// get actual ID of current channel
const int iCurChanID = vecChanIDsCurConChan[i];
// get number of audio channels of current channel
const int iCurNumAudChan = vecNumAudioChannels[i];
// export the audio data for recording purpose
if ( JamController.GetRecordingEnabled() )
{
emit AudioFrame ( iCurChanID,
vecChannels[iCurChanID].GetName(),
vecChannels[iCurChanID].GetAddress(),
iCurNumAudChan,
vecvecsData[i] );
}
// generate a separate mix for each channel
// actual processing of audio data -> mix
ProcessData ( vecvecsData,
vecvecdGains[i],
vecvecdPannings[i],
vecNumAudioChannels,
vecvecsSendData[i],
iCurNumAudChan,
iNumClients );
// get current number of CELT coded bytes
const int iCeltNumCodedBytes = vecChannels[iCurChanID].GetNetwFrameSize();
// select the opus encoder and raw audio frame length
if ( vecAudioComprType[i] == CT_OPUS )
{
iClientFrameSizeSamples = DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES;
if ( vecNumAudioChannels[i] == 1 )
{
CurOpusEncoder = OpusEncoderMono[iCurChanID];
}
else
{
CurOpusEncoder = OpusEncoderStereo[iCurChanID];
}
}
else if ( vecAudioComprType[i] == CT_OPUS64 )
{
iClientFrameSizeSamples = SYSTEM_FRAME_SIZE_SAMPLES;
if ( vecNumAudioChannels[i] == 1 )
{
CurOpusEncoder = Opus64EncoderMono[iCurChanID];
}
else
{
CurOpusEncoder = Opus64EncoderStereo[iCurChanID];
}
}
else
{
CurOpusEncoder = nullptr;
}
// If the server frame size is smaller than the transmitted OPUS frame size, we need a conversion
// buffer which assembles the large frame.
// Note that we have a shortcut here: if the conversion buffer is not needed, the boolean flag
// is false and the Put() function is not called at all, so in that case we do not spend any
// time in the function but go directly into the if body.
if ( ( vecUseDoubleSysFraSizeConvBuf[i] == 0 ) ||
DoubleFrameSizeConvBufOut[iCurChanID].Put ( vecvecsSendData[i], SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] ) )
{
if ( vecUseDoubleSysFraSizeConvBuf[i] != 0 )
{
// get the large frame from the conversion buffer
DoubleFrameSizeConvBufOut[iCurChanID].GetAll ( vecvecsSendData[i], DOUBLE_SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i] );
}
for ( int iB = 0; iB < vecNumFrameSizeConvBlocks[i]; iB++ )
{
// OPUS encoding
if ( CurOpusEncoder != nullptr )
{
// TODO find a better place than this: the setting does not change all the time
// so for speed optimization it would be better to set it only if the network
// frame size is changed
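// (the bitrate follows directly from the packet size: e.g. a
// hypothetical packet of 22 coded bytes per 64 sample frame at 48 kHz
// corresponds to 22 * 8 bit * 750 frames/s = 132 kbps)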
opus_custom_encoder_ctl ( CurOpusEncoder,
OPUS_SET_BITRATE ( CalcBitRateBitsPerSecFromCodedBytes ( iCeltNumCodedBytes, iClientFrameSizeSamples ) ) );
iUnused = opus_custom_encode ( CurOpusEncoder,
&vecvecsSendData[i][iB * SYSTEM_FRAME_SIZE_SAMPLES * vecNumAudioChannels[i]],
iClientFrameSizeSamples,
&vecvecbyCodedData[i][0],
iCeltNumCodedBytes );
}
// send the separate mix to the current client
vecChannels[iCurChanID].PrepAndSendPacket ( &Socket,
vecvecbyCodedData[i],
iCeltNumCodedBytes );
}
// update socket buffer size
vecChannels[iCurChanID].UpdateSocketBufferSize();
// send channel levels
if ( bSendChannelLevels && vecChannels[iCurChanID].ChannelLevelsRequired() )
{
ConnLessProtocol.CreateCLChannelLevelListMes ( vecChannels[iCurChanID].GetAddress(),
vecChannelLevels,
iNumClients );
}
}
}
}
else
{
// Disable the server if no clients are connected so that it does not
// consume any significant CPU while idle.
Stop();
}
Q_UNUSED ( iUnused )
}
/// @brief Mix all audio data from all clients together.
void CServer::ProcessData ( const CVector<CVector<int16_t> >& vecvecsData,
const CVector<double>& vecdGains,
const CVector<double>& vecdPannings,
const CVector<int>& vecNumAudioChannels,
CVector<int16_t>& vecsOutData,
const int iCurNumAudChan,
const int iNumClients )
{
int i, j, k;
// init return vector with zeros since we mix all channels on that vector
vecsOutData.Reset ( 0 );
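// (note that Double2Short saturates the accumulated double value to the
// int16_t range, so an overflowing mix clips instead of wrapping around)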
// distinguish between stereo and mono mode
if ( iCurNumAudChan == 1 )
{
// Mono target channel -------------------------------------------------
for ( j = 0; j < iNumClients; j++ )
{
// get a reference to the audio data and gain of the current client
const CVector<int16_t>& vecsData = vecvecsData[j];
const double dGain = vecdGains[j];
// if channel gain is 1, avoid multiplication for speed optimization
if ( dGain == static_cast<double> ( 1.0 ) )
{
if ( vecNumAudioChannels[j] == 1 )
{
// mono
for ( i = 0; i < iServerFrameSizeSamples; i++ )
{
vecsOutData[i] = Double2Short (
static_cast<double> ( vecsOutData[i] ) + vecsData[i] );
}
}
else
{
// stereo: apply stereo-to-mono attenuation
for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
{
vecsOutData[i] =
Double2Short ( vecsOutData[i] +
( static_cast<double> ( vecsData[k] ) + vecsData[k + 1] ) / 2 );
}
}
}
else
{
if ( vecNumAudioChannels[j] == 1 )
{
// mono
for ( i = 0; i < iServerFrameSizeSamples; i++ )
{
vecsOutData[i] = Double2Short (
vecsOutData[i] + vecsData[i] * dGain );
}
}
else
{
// stereo: apply stereo-to-mono attenuation
for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
{
vecsOutData[i] =
Double2Short ( vecsOutData[i] + dGain *
( static_cast<double> ( vecsData[k] ) + vecsData[k + 1] ) / 2 );
}
}
}
}
}
else
{
// Stereo target channel -----------------------------------------------
for ( j = 0; j < iNumClients; j++ )
{
// get a reference to the audio data and gain/pan of the current client
const CVector<int16_t>& vecsData = vecvecsData[j];
const double dGain = vecdGains[j];
const double dPan = vecdPannings[j];
// calculate the combined gain/pan for each stereo channel, where the
// panning is defined such that the center position equals full gain
// for both channels
const double dGainL = MathUtils::GetLeftPan ( dPan, false ) * dGain;
const double dGainR = MathUtils::GetRightPan ( dPan, false ) * dGain;
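// (e.g. dPan = 0.5, i.e. the center position, yields dGainL = dGainR = dGain,
// while panning hard to one side only attenuates the opposite channel)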
// if channel gain is 1, avoid multiplication for speed optimization
if ( ( dGainL == static_cast<double> ( 1.0 ) ) && ( dGainR == static_cast<double> ( 1.0 ) ) )
{
if ( vecNumAudioChannels[j] == 1 )
{
// mono: copy same mono data in both out stereo audio channels
for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
{
// left channel
vecsOutData[k] = Double2Short (
static_cast<double> ( vecsOutData[k] ) + vecsData[i] );
// right channel
vecsOutData[k + 1] = Double2Short (
static_cast<double> ( vecsOutData[k + 1] ) + vecsData[i] );
}
}
else
{
// stereo
for ( i = 0; i < ( 2 * iServerFrameSizeSamples ); i++ )
{
vecsOutData[i] = Double2Short (
static_cast<double> ( vecsOutData[i] ) + vecsData[i] );
}
}
}
else
{
if ( vecNumAudioChannels[j] == 1 )
{
// mono: copy same mono data in both out stereo audio channels
for ( i = 0, k = 0; i < iServerFrameSizeSamples; i++, k += 2 )
{
// left/right channel
vecsOutData[k] = Double2Short ( vecsOutData[k] + vecsData[i] * dGainL );
vecsOutData[k + 1] = Double2Short ( vecsOutData[k + 1] + vecsData[i] * dGainR );
}
}
else
{
// stereo
for ( i = 0; i < ( 2 * iServerFrameSizeSamples ); i += 2 )
{
// left/right channel
vecsOutData[i] = Double2Short ( vecsOutData[i] + vecsData[i] * dGainL );
vecsOutData[i + 1] = Double2Short ( vecsOutData[i + 1] + vecsData[i + 1] * dGainR );
}
}
}
}
}
}
CVector<CChannelInfo> CServer::CreateChannelList()
{
CVector<CChannelInfo> vecChanInfo ( 0 );
// collect information about all connected channels
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// append channel ID, IP address and channel name to storing vectors
vecChanInfo.Add ( CChannelInfo (
i, // ID
QHostAddress ( QHostAddress::Null ).toIPv4Address(), // use invalid IP address (for privacy reasons, #316)
vecChannels[i].GetChanInfo() ) );
}
}
return vecChanInfo;
}
void CServer::CreateAndSendChanListForAllConChannels()
{
// create channel list
CVector<CChannelInfo> vecChanInfo ( CreateChannelList() );
// now send connected channels list to all connected clients
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// send message
vecChannels[i].CreateConClientListMes ( vecChanInfo );
}
}
// create status HTML file if enabled
if ( bWriteStatusHTMLFile )
{
WriteHTMLChannelList();
}
}
void CServer::CreateAndSendChanListForThisChan ( const int iCurChanID )
{
// create channel list
CVector<CChannelInfo> vecChanInfo ( CreateChannelList() );
// now send connected channels list to the channel with the ID "iCurChanID"
vecChannels[iCurChanID].CreateConClientListMes ( vecChanInfo );
}
void CServer::CreateAndSendChatTextForAllConChannels ( const int iCurChanID,
const QString& strChatText )
{
// Create message which is sent to all connected clients -------------------
// get client name, if name is empty, use IP address instead
QString ChanName = vecChannels[iCurChanID].GetName();
// add time and name of the client at the beginning of the message text and
// use different colors
QString sCurColor = vstrChatColors[iCurChanID % vstrChatColors.Size()];
const QString strActualMessageText =
"<font color=""" + sCurColor + """>(" +
QTime::currentTime().toString ( "hh:mm:ss AP" ) + ") <b>" +
ChanName.toHtmlEscaped() +
"</b></font> " + strChatText;
// Send chat text to all connected clients ---------------------------------
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// send message
vecChannels[i].CreateChatTextMes ( strActualMessageText );
}
}
}
void CServer::CreateAndSendRecorderStateForAllConChannels()
{
// get recorder state
ERecorderState eRecorderState = JamController.GetRecorderState();
// now send recorder state to all connected clients
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// send message
vecChannels[i].CreateRecorderStateMes ( eRecorderState );
}
}
}
void CServer::CreateOtherMuteStateChanged ( const int iCurChanID,
const int iOtherChanID,
const bool bIsMuted )
{
if ( vecChannels[iOtherChanID].IsConnected() )
{
// send message
vecChannels[iOtherChanID].CreateMuteStateHasChangedMes ( iCurChanID, bIsMuted );
}
}
int CServer::GetFreeChan()
{
// look for a free channel
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( !vecChannels[i].IsConnected() )
{
return i;
}
}
// no free channel found, return invalid ID
return INVALID_CHANNEL_ID;
}
int CServer::GetNumberOfConnectedClients()
{
int iNumConnClients = 0;
// check all possible channels for connection status
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
// this channel is connected, increment counter
iNumConnClients++;
}
}
return iNumConnClients;
}
int CServer::FindChannel ( const CHostAddress& CheckAddr )
{
CHostAddress InetAddr;
// check for all possible channels if IP is already in use
for ( int i = 0; i < iMaxNumChannels; i++ )
{
// "GetAddress" returns true and provides a valid address if the
// channel is connected
if ( vecChannels[i].GetAddress ( InetAddr ) )
{
// IP found, return channel number
if ( InetAddr == CheckAddr )
{
return i;
}
}
}
// IP not found, return invalid ID
return INVALID_CHANNEL_ID;
}
void CServer::OnProtcolMessageReceived ( int iRecCounter,
int iRecID,
CVector<uint8_t> vecbyMesBodyData,
CHostAddress RecHostAddr )
{
Mutex.lock();
{
// find the channel with the received address
const int iCurChanID = FindChannel ( RecHostAddr );
// if the channel exists, apply the protocol message to the channel
if ( iCurChanID != INVALID_CHANNEL_ID )
{
vecChannels[iCurChanID].PutProtcolData ( iRecCounter,
iRecID,
vecbyMesBodyData,
RecHostAddr );
}
}
Mutex.unlock();
}
bool CServer::PutAudioData ( const CVector<uint8_t>& vecbyRecBuf,
const int iNumBytesRead,
const CHostAddress& HostAdr,
int& iCurChanID )
{
bool bNewConnection = false; // init return value
bool bChanOK = true; // init with ok, might be overwritten
Mutex.lock();
{
// Get channel ID ------------------------------------------------------
// check address
iCurChanID = FindChannel ( HostAdr );
if ( iCurChanID == INVALID_CHANNEL_ID )
{
// a new client is calling, look for free channel
iCurChanID = GetFreeChan();
if ( iCurChanID != INVALID_CHANNEL_ID )
{
// initialize current channel by storing the calling host
// address
vecChannels[iCurChanID].SetAddress ( HostAdr );
// reset channel info
vecChannels[iCurChanID].ResetInfo();
// reset the channel gains of current channel, at the same
// time reset gains of this channel ID for all other channels
for ( int i = 0; i < iMaxNumChannels; i++ )
{
vecChannels[iCurChanID].SetGain ( i, 1.0 );
// other channels (we do not distinguish the case if
// i == iCurChanID for simplicity)
vecChannels[i].SetGain ( iCurChanID, 1.0 );
}
}
else
{
// no free channel available
bChanOK = false;
}
}
// Put received audio data in jitter buffer ----------------------------
if ( bChanOK )
{
// put packet in socket buffer
if ( vecChannels[iCurChanID].PutAudioData ( vecbyRecBuf,
iNumBytesRead,
HostAdr ) == PS_NEW_CONNECTION )
{
// in case we have a new connection return this information
bNewConnection = true;
}
}
}
Mutex.unlock();
// return whether a new connection has happened
return bNewConnection;
}
void CServer::GetConCliParam ( CVector<CHostAddress>& vecHostAddresses,
CVector<QString>& vecsName,
CVector<int>& veciJitBufNumFrames,
CVector<int>& veciNetwFrameSizeFact )
{
CHostAddress InetAddr;
// init return values
vecHostAddresses.Init ( iMaxNumChannels );
vecsName.Init ( iMaxNumChannels );
veciJitBufNumFrames.Init ( iMaxNumChannels );
veciNetwFrameSizeFact.Init ( iMaxNumChannels );
// check all possible channels
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].GetAddress ( InetAddr ) )
{
// get requested data
vecHostAddresses[i] = InetAddr;
vecsName[i] = vecChannels[i].GetName();
veciJitBufNumFrames[i] = vecChannels[i].GetSockBufNumFrames();
veciNetwFrameSizeFact[i] = vecChannels[i].GetNetwFrameSizeFact();
}
}
}
void CServer::SetEnableRecording ( bool bNewEnableRecording )
{
JamController.SetEnableRecording ( bNewEnableRecording, IsRunning() );
// the recording state may have changed, send recording state message
CreateAndSendRecorderStateForAllConChannels();
}
void CServer::SetWelcomeMessage ( const QString& strNWelcMess )
{
// we need a mutex to secure access
QMutexLocker locker ( &MutexWelcomeMessage );
strWelcomeMessage = strNWelcMess;
// restrict welcome message to maximum allowed length
strWelcomeMessage = strWelcomeMessage.left ( MAX_LEN_CHAT_TEXT );
}
void CServer::StartStatusHTMLFileWriting ( const QString& strNewFileName,
const QString& strNewServerNameWithPort )
{
// set important parameters
strServerHTMLFileListName = strNewFileName;
strServerNameWithPort = strNewServerNameWithPort;
// set flag
bWriteStatusHTMLFile = true;
// write initial file
WriteHTMLChannelList();
}
void CServer::WriteHTMLChannelList()
{
// prepare file and stream
QFile serverFileListFile ( strServerHTMLFileListName );
if ( !serverFileListFile.open ( QIODevice::WriteOnly | QIODevice::Text ) )
{
return;
}
QTextStream streamFileOut ( &serverFileListFile );
streamFileOut << strServerNameWithPort.toHtmlEscaped() << endl << "<ul>" << endl;
// depending on number of connected clients write list
if ( GetNumberOfConnectedClients() == 0 )
{
// no clients are connected -> empty server
streamFileOut << " No client connected" << endl;
}
else
{
// write entry for each connected client
for ( int i = 0; i < iMaxNumChannels; i++ )
{
if ( vecChannels[i].IsConnected() )
{
streamFileOut << " <li>" << vecChannels[i].GetName().toHtmlEscaped() << "</li>" << endl;
}
}
}
// finish list
streamFileOut << "</ul>" << endl;
}
void CServer::customEvent ( QEvent* pEvent )
{
if ( pEvent->type() == QEvent::User + 11 )
{
const int iMessType = ( (CCustomEvent*) pEvent )->iMessType;
switch ( iMessType )
{
case MS_PACKET_RECEIVED:
// wake up the server if a packet was received
// if the server is still running, the call to Start() will have
// no effect
Start();
break;
}
}
}
/// @brief Compute frame peak level for each client
bool CServer::CreateLevelsForAllConChannels ( const int iNumClients,
const CVector<int>& vecNumAudioChannels,
const CVector<CVector<int16_t> > vecvecsData,
CVector<uint16_t>& vecLevelsOut )
{
bool bLevelsWereUpdated = false;
// low frequency updates
if ( iFrameCount > CHANNEL_LEVEL_UPDATE_INTERVAL )
{
iFrameCount = 0;
bLevelsWereUpdated = true;
for ( int j = 0; j < iNumClients; j++ )
{
// update and get signal level for meter in dB for each channel
const double dCurSigLevelForMeterdB = vecChannels[vecChanIDsCurConChan[j]].
UpdateAndGetLevelForMeterdB ( vecvecsData[j],
iServerFrameSizeSamples,
vecNumAudioChannels[j] > 1 );
// map value to integer for transmission via the protocol (4 bit available)
vecLevelsOut[j] = static_cast<uint16_t> ( ceil ( dCurSigLevelForMeterdB ) );
}
}
// increment the frame counter needed for low frequency update trigger
iFrameCount++;
if ( bUseDoubleSystemFrameSize )
{
// additional increment needed for double frame size to get to the same time interval
iFrameCount++;
}
return bLevelsWereUpdated;
}