Added Oboe support code for Android audio. There are problems rendering
audio (the audio is dithered); more work is still required.
Simon Tomlinson 2020-05-11 07:36:46 +01:00
parent 4e721f37a9
commit c112b1e0fc
7 changed files with 521 additions and 309 deletions

View File

@@ -97,11 +97,136 @@ win32 {
LIBS += /usr/local/lib/libjack.dylib
}
} else:android {
# we want to compile with C++14
CONFIG += c++14
QT += androidextras
# enabled only for debugging on android devices
DEFINES += ANDROIDDEBUG
target.path = /tmp/your_executable # path on device
INSTALLS += target
HEADERS += android/sound.h
SOURCES += android/sound.cpp
LIBS += -lOpenSLES
ANDROID_PACKAGE_SOURCE_DIR = $$PWD/android
OTHER_FILES += android/AndroidManifest.xml
# when compiling for Android you need the Oboe library, which is included as a git submodule
# make sure you pull with submodules (git submodule update --init) so the latest Oboe sources are present
OBOE_SOURCES = libs/oboe/src/aaudio/AAudioLoader.cpp \
libs/oboe/src/aaudio/AudioStreamAAudio.cpp \
libs/oboe/src/common/AudioSourceCaller.cpp \
libs/oboe/src/common/AudioStream.cpp \
libs/oboe/src/common/AudioStreamBuilder.cpp \
libs/oboe/src/common/DataConversionFlowGraph.cpp \
libs/oboe/src/common/FilterAudioStream.cpp \
libs/oboe/src/common/FixedBlockAdapter.cpp \
libs/oboe/src/common/FixedBlockReader.cpp \
libs/oboe/src/common/FixedBlockWriter.cpp \
libs/oboe/src/common/LatencyTuner.cpp \
libs/oboe/src/common/QuirksManager.cpp \
libs/oboe/src/common/SourceFloatCaller.cpp \
libs/oboe/src/common/SourceI16Caller.cpp \
libs/oboe/src/common/StabilizedCallback.cpp \
libs/oboe/src/common/Trace.cpp \
libs/oboe/src/common/Utilities.cpp \
libs/oboe/src/common/Version.cpp \
libs/oboe/src/fifo/FifoBuffer.cpp \
libs/oboe/src/fifo/FifoController.cpp \
libs/oboe/src/fifo/FifoControllerBase.cpp \
libs/oboe/src/fifo/FifoControllerIndirect.cpp \
libs/oboe/src/flowgraph/ClipToRange.cpp \
libs/oboe/src/flowgraph/FlowGraphNode.cpp \
libs/oboe/src/flowgraph/ManyToMultiConverter.cpp \
libs/oboe/src/flowgraph/MonoToMultiConverter.cpp \
libs/oboe/src/flowgraph/RampLinear.cpp \
libs/oboe/src/flowgraph/SampleRateConverter.cpp \
libs/oboe/src/flowgraph/SinkFloat.cpp \
libs/oboe/src/flowgraph/SinkI16.cpp \
libs/oboe/src/flowgraph/SinkI24.cpp \
libs/oboe/src/flowgraph/SourceFloat.cpp \
libs/oboe/src/flowgraph/SourceI16.cpp \
libs/oboe/src/flowgraph/SourceI24.cpp \
libs/oboe/src/flowgraph/resampler/IntegerRatio.cpp \
libs/oboe/src/flowgraph/resampler/LinearResampler.cpp \
libs/oboe/src/flowgraph/resampler/MultiChannelResampler.cpp \
libs/oboe/src/flowgraph/resampler/PolyphaseResampler.cpp \
libs/oboe/src/flowgraph/resampler/PolyphaseResamplerMono.cpp \
libs/oboe/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp \
libs/oboe/src/flowgraph/resampler/SincResampler.cpp \
libs/oboe/src/flowgraph/resampler/SincResamplerStereo.cpp \
libs/oboe/src/opensles/AudioInputStreamOpenSLES.cpp \
libs/oboe/src/opensles/AudioOutputStreamOpenSLES.cpp \
libs/oboe/src/opensles/AudioStreamBuffered.cpp \
libs/oboe/src/opensles/AudioStreamOpenSLES.cpp \
libs/oboe/src/opensles/EngineOpenSLES.cpp \
libs/oboe/src/opensles/OpenSLESUtilities.cpp \
libs/oboe/src/opensles/OutputMixerOpenSLES.cpp
OBOE_HEADERS = libs/oboe/src/aaudio/AAudioLoader.h \
libs/oboe/src/aaudio/AudioStreamAAudio.h \
libs/oboe/src/common/AudioClock.h \
libs/oboe/src/common/AudioSourceCaller.h \
libs/oboe/src/common/DataConversionFlowGraph.h \
libs/oboe/src/common/FilterAudioStream.h \
libs/oboe/src/common/FixedBlockAdapter.h \
libs/oboe/src/common/FixedBlockReader.h \
libs/oboe/src/common/FixedBlockWriter.h \
libs/oboe/src/common/MonotonicCounter.h \
libs/oboe/src/common/OboeDebug.h \
libs/oboe/src/common/QuirksManager.h \
libs/oboe/src/common/SourceFloatCaller.h \
libs/oboe/src/common/SourceI16Caller.h \
libs/oboe/src/common/Trace.h \
libs/oboe/src/fifo/FifoBuffer.h \
libs/oboe/src/fifo/FifoController.h \
libs/oboe/src/fifo/FifoControllerBase.h \
libs/oboe/src/fifo/FifoControllerIndirect.h \
libs/oboe/src/flowgraph/ClipToRange.h \
libs/oboe/src/flowgraph/FlowGraphNode.h \
libs/oboe/src/flowgraph/ManyToMultiConverter.h \
libs/oboe/src/flowgraph/MonoToMultiConverter.h \
libs/oboe/src/flowgraph/RampLinear.h \
libs/oboe/src/flowgraph/SampleRateConverter.h \
libs/oboe/src/flowgraph/SinkFloat.h \
libs/oboe/src/flowgraph/SinkI16.h \
libs/oboe/src/flowgraph/SinkI24.h \
libs/oboe/src/flowgraph/SourceFloat.h \
libs/oboe/src/flowgraph/SourceI16.h \
libs/oboe/src/flowgraph/SourceI24.h \
libs/oboe/src/flowgraph/resampler/HyperbolicCosineWindow.h \
libs/oboe/src/flowgraph/resampler/IntegerRatio.h \
libs/oboe/src/flowgraph/resampler/LinearResampler.h \
libs/oboe/src/flowgraph/resampler/MultiChannelResampler.h \
libs/oboe/src/flowgraph/resampler/PolyphaseResampler.h \
libs/oboe/src/flowgraph/resampler/PolyphaseResamplerMono.h \
libs/oboe/src/flowgraph/resampler/PolyphaseResamplerStereo.h \
libs/oboe/src/flowgraph/resampler/SincResampler.h \
libs/oboe/src/flowgraph/resampler/SincResamplerStereo.h \
libs/oboe/src/opensles/AudioInputStreamOpenSLES.h \
libs/oboe/src/opensles/AudioOutputStreamOpenSLES.h \
libs/oboe/src/opensles/AudioStreamBuffered.h \
libs/oboe/src/opensles/AudioStreamOpenSLES.h \
libs/oboe/src/opensles/EngineOpenSLES.h \
libs/oboe/src/opensles/OpenSLESUtilities.h \
libs/oboe/src/opensles/OutputMixerOpenSLES.h
INCLUDEPATH_OBOE = libs/oboe/include/ \
libs/oboe/src/
DISTFILES_OBOE += libs/oboe/AUTHORS \
libs/oboe/CONTRIBUTING \
libs/oboe/LICENSE \
libs/oboe/README
INCLUDEPATH += $$INCLUDEPATH_OBOE
HEADERS += $$OBOE_HEADERS
SOURCES += $$OBOE_SOURCES
DISTFILES += $$DISTFILES_OBOE
} else:unix {
# we want to compile with C++11
QMAKE_CXXFLAGS += -std=c++11
@@ -248,6 +373,7 @@ HEADERS_OPUS = libs/opus/include/opus.h \
libs/opus/silk/float/SigProc_FLP.h
SOURCES += src/audiomixerboard.cpp \
android/androiddebug.cpp \
src/buffer.cpp \
src/channel.cpp \
src/chatdlg.cpp \
@@ -415,6 +541,12 @@ DISTFILES += ChangeLog \
COPYING \
INSTALL.md \
README.md \
android/build.gradle \
android/gradle/wrapper/gradle-wrapper.jar \
android/gradle/wrapper/gradle-wrapper.properties \
android/gradlew \
android/gradlew.bat \
android/res/values/libs.xml \
src/res/CLEDBlack.png \
src/res/CLEDBlackSmall.png \
src/res/CLEDDisabledSmall.png \

View File

@@ -1,41 +1,94 @@
<?xml version='1.0' encoding='utf-8'?>
<manifest android:versionName="1.0" android:installLocation="auto" package="org.qtproject.jamulus" android:versionCode="1" xmlns:android="http://schemas.android.com/apk/res/android">
<application android:label="Jamulus" android:name="org.qtproject.qt5.android.bindings.QtApplication">
<activity android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|locale|fontScale|keyboard|keyboardHidden|navigation" android:label="@string/app_name" android:screenOrientation="landscape" android:name="org.qtproject.qt5.android.bindings.QtActivity">
<?xml version="1.0"?>
<manifest package="org.qtproject.jamulus" xmlns:android="http://schemas.android.com/apk/res/android" android:versionName="1.0" android:versionCode="1" android:installLocation="auto">
<uses-sdk android:minSdkVersion="21" android:targetSdkVersion="28"/>
<!-- The following comment will be replaced upon deployment with default permissions based on the dependencies of the application.
Remove the comment if you do not require these default permissions. -->
<!-- %%INSERT_PERMISSIONS -->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<!-- The following comment will be replaced upon deployment with default features based on the dependencies of the application.
Remove the comment if you do not require these default features. -->
<!-- %%INSERT_FEATURES -->
<uses-feature android:name="android.hardware.microphone" android:required="true"/>
<uses-feature android:name="android.hardware.audio.output" android:required="true"/>
<supports-screens android:largeScreens="true" android:normalScreens="true" android:anyDensity="true" android:smallScreens="true"/>
<application android:hardwareAccelerated="true" android:name="org.qtproject.qt5.android.bindings.QtApplication" android:label="Jamulus" android:extractNativeLibs="true">
<activity android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|layoutDirection|locale|fontScale|keyboard|keyboardHidden|navigation|mcc|mnc|density" android:name="org.qtproject.qt5.android.bindings.QtActivity" android:label="@string/app_name" android:screenOrientation="landscape" android:launchMode="singleTop">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
<meta-data android:value="Jamulus" android:name="android.app.lib_name"/>
<meta-data android:resource="@array/qt_sources" android:name="android.app.qt_sources_resource_id"/>
<meta-data android:value="default" android:name="android.app.repository"/>
<meta-data android:resource="@array/qt_libs" android:name="android.app.qt_libs_resource_id"/>
<meta-data android:resource="@array/bundled_libs" android:name="android.app.bundled_libs_resource_id"/>
<!-- Application arguments -->
<!-- meta-data android:name="android.app.arguments" android:value="arg1 arg2 arg3"/ -->
<!-- Application arguments -->
<meta-data android:name="android.app.lib_name" android:value="Jamulus"/>
<meta-data android:name="android.app.qt_sources_resource_id" android:resource="@array/qt_sources"/>
<meta-data android:name="android.app.repository" android:value="default"/>
<meta-data android:name="android.app.qt_libs_resource_id" android:resource="@array/qt_libs"/>
<meta-data android:name="android.app.bundled_libs_resource_id" android:resource="@array/bundled_libs"/>
<!-- Deploy Qt libs as part of package -->
<meta-data android:value="-- %%BUNDLE_LOCAL_QT_LIBS%% --" android:name="android.app.bundle_local_qt_libs"/>
<meta-data android:resource="@array/bundled_in_lib" android:name="android.app.bundled_in_lib_resource_id"/>
<meta-data android:resource="@array/bundled_in_assets" android:name="android.app.bundled_in_assets_resource_id"/>
<meta-data android:name="android.app.bundle_local_qt_libs" android:value="-- %%BUNDLE_LOCAL_QT_LIBS%% --"/>
<!-- Run with local libs -->
<meta-data android:value="-- %%USE_LOCAL_QT_LIBS%% --" android:name="android.app.use_local_qt_libs"/>
<meta-data android:value="/data/local/tmp/qt/" android:name="android.app.libs_prefix"/>
<meta-data android:value="-- %%INSERT_LOCAL_LIBS%% --" android:name="android.app.load_local_libs"/>
<meta-data android:value="-- %%INSERT_LOCAL_JARS%% --" android:name="android.app.load_local_jars"/>
<meta-data android:value="-- %%INSERT_INIT_CLASSES%% --" android:name="android.app.static_init_classes"/>
<meta-data android:name="android.app.use_local_qt_libs" android:value="-- %%USE_LOCAL_QT_LIBS%% --"/>
<meta-data android:name="android.app.libs_prefix" android:value="/data/local/tmp/qt/"/>
<meta-data android:name="android.app.load_local_libs_resource_id" android:resource="@array/load_local_libs"/>
<meta-data android:name="android.app.load_local_jars" android:value="-- %%INSERT_LOCAL_JARS%% --"/>
<meta-data android:name="android.app.static_init_classes" android:value="-- %%INSERT_INIT_CLASSES%% --"/>
<!-- Used to specify custom system library path to run with local system libs -->
<!-- <meta-data android:name="android.app.system_libs_prefix" android:value="/system/lib/"/> -->
<!-- Messages maps -->
<meta-data android:value="@string/ministro_not_found_msg" android:name="android.app.ministro_not_found_msg"/>
<meta-data android:value="@string/ministro_needed_msg" android:name="android.app.ministro_needed_msg"/>
<meta-data android:value="@string/fatal_error_msg" android:name="android.app.fatal_error_msg"/>
<meta-data android:value="@string/unsupported_android_version" android:name="android.app.unsupported_android_version"/>
<!-- Messages maps -->
<!-- Splash screen -->
<meta-data android:resource="@layout/splash" android:name="android.app.splash_screen"/>
<!-- Orientation-specific (portrait/landscape) data is checked first. If not available for current orientation,
then android.app.splash_screen_drawable. For best results, use together with splash_screen_sticky and
use hideSplashScreen() with a fade-out animation from Qt Android Extras to hide the splash screen when you
are done populating your window with content. -->
<!-- meta-data android:name="android.app.splash_screen_drawable_portrait" android:resource="@drawable/logo_portrait" / -->
<!-- meta-data android:name="android.app.splash_screen_drawable_landscape" android:resource="@drawable/logo_landscape" / -->
<!-- meta-data android:name="android.app.splash_screen_drawable" android:resource="@drawable/logo"/ -->
<!-- meta-data android:name="android.app.splash_screen_sticky" android:value="true"/ -->
<!-- Splash screen -->
</activity>
<!-- Background running -->
<!-- Warning: changing this value to true may cause unexpected crashes if the
application still tries to draw after
"applicationStateChanged(Qt::ApplicationSuspended)"
signal is sent! -->
<meta-data android:name="android.app.background_running" android:value="false"/>
<!-- Background running -->
<!-- auto screen scale factor -->
<meta-data android:name="android.app.auto_screen_scale_factor" android:value="false"/>
<!-- auto screen scale factor -->
<!-- extract android style -->
<!-- available android:values:
* default - In most cases this will be the same as "full", but it can also be something else if needed, e.g., for compatibility reasons
* full - useful for QWidget & Quick Controls 1 apps
* minimal - useful for Quick Controls 2 apps, it is much faster than "full"
* none - useful for apps that don't use any of the above Qt modules
-->
<meta-data android:name="android.app.extract_android_style" android:value="default"/>
<!-- extract android style -->
</activity>
<!-- For adding service(s) please check: https://wiki.qt.io/AndroidServices -->
</application>
<uses-sdk android:targetSdkVersion="19" android:minSdkVersion="17"/>
<supports-screens android:normalScreens="true" android:smallScreens="true" android:largeScreens="true" android:anyDensity="true"/>
<!-- %%INSERT_PERMISSIONS -->
<!-- %%INSERT_FEATURES -->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>

android/androiddebug.cpp Normal file
View File

@@ -0,0 +1,45 @@
const char*const applicationName="Jamulus";
#ifdef ANDROIDDEBUG // set in the project (.pro) file for Android debug builds
#include <android/log.h>
#include <QString>
#include <QEvent>
#include <QDebug>
#include <stdio.h>
#include <math.h>
#include <string>
void myMessageHandler(QtMsgType type, const QMessageLogContext& context, const QString& msg)
{
QString report=msg;
if (context.file && !QString(context.file).isEmpty()) {
report+=" in file ";
report+=QString(context.file);
report+=" line ";
report+=QString::number(context.line);
}
if (context.function && !QString(context.function).isEmpty()) {
report+=" function ";
report+=QString(context.function);
}
// keep the QByteArray alive so the pointer remains valid for the log calls below
const QByteArray localBytes=report.toLocal8Bit();
const char*const local=localBytes.constData();
switch (type) {
case QtDebugMsg:
__android_log_write(ANDROID_LOG_DEBUG,applicationName,local);
break;
case QtInfoMsg:
__android_log_write(ANDROID_LOG_INFO,applicationName,local);
break;
case QtWarningMsg:
__android_log_write(ANDROID_LOG_WARN,applicationName,local);
break;
case QtCriticalMsg:
__android_log_write(ANDROID_LOG_ERROR,applicationName,local);
break;
case QtFatalMsg:
default:
__android_log_write(ANDROID_LOG_FATAL,applicationName,local);
abort();
}
}
#endif
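// A minimal usage sketch (not part of this commit; the helper name is hypothetical). The
// actual installation happens in the CSound constructor in android/sound.cpp, guarded by
// the same ANDROIDDEBUG define, so that qDebug()/qWarning() output reaches the Android log:
#ifdef ANDROIDDEBUG
static void installAndroidMessageHandler()
{
    // route all Qt logging (qDebug/qInfo/qWarning/...) to the Android system log
    qInstallMessageHandler(myMessageHandler);
}
#endif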

View File

@@ -23,255 +23,156 @@
\******************************************************************************/
#include "sound.h"
#include "androiddebug.cpp"
/* Implementation *************************************************************/
CSound::CSound ( void (*fpNewProcessCallback) ( CVector<short>& psData, void* arg ),
void* arg,
const int iCtrlMIDIChannel,
const bool bNoAutoJackConnect ) :
CSoundBase ( "OpenSL", true, fpNewProcessCallback, arg, iCtrlMIDIChannel, bNoAutoJackConnect )
{
pSound = this;
#ifdef ANDROIDDEBUG
qInstallMessageHandler(myMessageHandler);
#endif
}
void CSound::InitializeOpenSL()
void CSound::setupCommonStreamParams(oboe::AudioStreamBuilder *builder)
{
// set up stream formats for input and output
SLDataFormat_PCM inStreamFormat;
inStreamFormat.formatType = SL_DATAFORMAT_PCM;
inStreamFormat.numChannels = 1;
inStreamFormat.samplesPerSec = SL_SAMPLINGRATE_16;
inStreamFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
inStreamFormat.containerSize = 16;
inStreamFormat.channelMask = SL_SPEAKER_FRONT_CENTER;
inStreamFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
SLDataFormat_PCM outStreamFormat;
outStreamFormat.formatType = SL_DATAFORMAT_PCM;
outStreamFormat.numChannels = 2;
outStreamFormat.samplesPerSec = SYSTEM_SAMPLE_RATE_HZ * 1000; // unit is mHz
outStreamFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
outStreamFormat.containerSize = 16;
outStreamFormat.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
outStreamFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
// create the OpenSL root engine object
slCreateEngine ( &engineObject,
0,
nullptr,
0,
nullptr,
nullptr );
// realize the engine
(*engineObject)->Realize ( engineObject,
SL_BOOLEAN_FALSE );
// get the engine interface (required to create other objects)
(*engineObject)->GetInterface ( engineObject,
SL_IID_ENGINE,
&engine );
// create the main output mix
(*engine)->CreateOutputMix ( engine,
&outputMixObject,
0,
nullptr,
nullptr );
// realize the output mix
(*outputMixObject)->Realize ( outputMixObject,
SL_BOOLEAN_FALSE );
// configure the audio (data) source for input
SLDataLocator_IODevice micLocator;
micLocator.locatorType = SL_DATALOCATOR_IODEVICE;
micLocator.deviceType = SL_IODEVICE_AUDIOINPUT;
micLocator.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
micLocator.device = nullptr;
SLDataSource inDataSource;
inDataSource.pLocator = &micLocator;
inDataSource.pFormat = nullptr;
// configure the input buffer queue
SLDataLocator_AndroidSimpleBufferQueue inBufferQueue;
inBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
inBufferQueue.numBuffers = 2; // max number of buffers in queue
// configure the audio (data) sink for input
SLDataSink inDataSink;
inDataSink.pLocator = &inBufferQueue;
inDataSink.pFormat = &inStreamFormat;
// create the audio recorder
const SLInterfaceID recorderIds[] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
const SLboolean recorderReq[] = { SL_BOOLEAN_TRUE };
(*engine)->CreateAudioRecorder ( engine,
&recorderObject,
&inDataSource,
&inDataSink,
1,
recorderIds,
recorderReq );
// realize the audio recorder
(*recorderObject)->Realize ( recorderObject,
SL_BOOLEAN_FALSE );
// get the audio recorder interface
(*recorderObject)->GetInterface ( recorderObject,
SL_IID_RECORD,
&recorder );
// get the audio recorder simple buffer queue interface
(*recorderObject)->GetInterface ( recorderObject,
SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
&recorderSimpleBufQueue );
// register the audio input callback
(*recorderSimpleBufQueue)->RegisterCallback ( recorderSimpleBufQueue,
processInput,
this );
// configure the output buffer queue
SLDataLocator_AndroidSimpleBufferQueue outBufferQueue;
outBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
outBufferQueue.numBuffers = 2; // max number of buffers in queue
// configure the audio (data) source for output
SLDataSource outDataSource;
outDataSource.pLocator = &outBufferQueue;
outDataSource.pFormat = &outStreamFormat;
// configure the output mix
SLDataLocator_OutputMix outputMix;
outputMix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
outputMix.outputMix = outputMixObject;
// configure the audio (data) sink for output
SLDataSink outDataSink;
outDataSink.pLocator = &outputMix;
outDataSink.pFormat = nullptr;
// create the audio player
const SLInterfaceID playerIds[] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
const SLboolean playerReq[] = { SL_BOOLEAN_TRUE };
(*engine)->CreateAudioPlayer ( engine,
&playerObject,
&outDataSource,
&outDataSink,
1,
playerIds,
playerReq );
// realize the audio player
(*playerObject)->Realize ( playerObject,
SL_BOOLEAN_FALSE );
// get the audio player interface
(*playerObject)->GetInterface ( playerObject,
SL_IID_PLAY,
&player );
// get the audio player simple buffer queue interface
(*playerObject)->GetInterface ( playerObject,
SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
&playerSimpleBufQueue );
// register the audio output callback
(*playerSimpleBufQueue)->RegisterCallback ( playerSimpleBufQueue,
processOutput,
this );
// SHARED mode is requested here. EXCLUSIVE mode would give the lowest possible
// latency, but if it isn't available the builder falls back to SHARED mode anyway
builder->setCallback(this)
->setFormat(oboe::AudioFormat::Float)
->setSharingMode(oboe::SharingMode::Shared)
->setChannelCount(oboe::ChannelCount::Mono)
// ->setSampleRate(48000)
// ->setSampleRateConversionQuality(oboe::SampleRateConversionQuality::Medium)
->setPerformanceMode(oboe::PerformanceMode::None);
return;
}
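// A minimal alternative sketch (not part of this commit): how the builder could request the
// low-latency, exclusive path with an explicit 48 kHz rate (matching the SYSTEM_SAMPLE_RATE_HZ
// check later in this file). The function name is hypothetical; the setters are standard Oboe
// API, and Oboe falls back automatically if the device cannot satisfy Exclusive/LowLatency.
static void setupLowLatencyStreamParams ( oboe::AudioStreamBuilder* builder,
                                          oboe::AudioStreamCallback*  callback )
{
    builder->setCallback ( callback )
        ->setFormat ( oboe::AudioFormat::Float )
        ->setSharingMode ( oboe::SharingMode::Exclusive )
        ->setPerformanceMode ( oboe::PerformanceMode::LowLatency )
        ->setChannelCount ( oboe::ChannelCount::Mono )
        ->setSampleRate ( 48000 )
        ->setSampleRateConversionQuality ( oboe::SampleRateConversionQuality::Medium );
}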
void CSound::CloseOpenSL()
void CSound::openStreams()
{
// Create callback
mCallback = this;
//Setup output stream
oboe::AudioStreamBuilder inBuilder, outBuilder;
outBuilder.setDirection(oboe::Direction::Output);
setupCommonStreamParams(&outBuilder);
oboe::Result result = outBuilder.openManagedStream(mPlayStream);
if (result != oboe::Result::OK) {
return;
}
mPlayStream->setBufferSizeInFrames(pSound->iOpenSLBufferSizeStereo);
warnIfNotLowLatency(mPlayStream, "PlayStream");
printStreamDetails(mPlayStream);
//Setup input stream
inBuilder.setDirection(oboe::Direction::Input);
setupCommonStreamParams(&inBuilder);
result = inBuilder.openManagedStream(mRecordingStream);
if (result != oboe::Result::OK) {
closeStream(mPlayStream);
return;
}
mRecordingStream->setBufferSizeInFrames(pSound->iOpenSLBufferSizeStereo);
warnIfNotLowLatency(mRecordingStream, "RecordStream");
printStreamDetails(mRecordingStream);
}
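// openStreams() above returns silently when a builder call fails. A minimal sketch (not part
// of this commit) of surfacing the Oboe error text instead, following the CGenErr pattern
// already used in closeStream() below; the helper name is hypothetical:
static void checkOboeResult ( const oboe::Result result, const QString& strWhat )
{
    if ( result != oboe::Result::OK )
    {
        // convertToText() turns the Oboe result code into a readable string
        throw CGenErr ( QString ( "%1 failed: %2" )
                            .arg ( strWhat )
                            .arg ( oboe::convertToText ( result ) ) );
    }
}
// usage sketch: checkOboeResult ( outBuilder.openManagedStream ( mPlayStream ), "Opening the output stream" );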
void CSound::printStreamDetails(oboe::ManagedStream &stream)
{
QString sDirection = (stream->getDirection()==oboe::Direction::Input?"Input":"Output");
QString sFramesPerBurst = QString::number(stream->getFramesPerBurst());
QString sBufferSizeInFrames = QString::number(stream->getBufferSizeInFrames());
QString sBytesPerFrame = QString::number(stream->getBytesPerFrame());
QString sBytesPerSample = QString::number(stream->getBytesPerSample());
QString sBufferCapacityInFrames = QString::number(stream->getBufferCapacityInFrames());
QString sPerformanceMode = (stream->getPerformanceMode()==oboe::PerformanceMode::LowLatency?"LowLatency":"NotLowLatency");
QString sSharingMode = (stream->getSharingMode() == oboe::SharingMode::Exclusive?"Exclusive":"Shared");
QString sDeviceID = QString::number(stream->getDeviceId());
QString sSampleRate = QString::number(stream->getSampleRate());
QString sAudioFormat = (stream->getFormat()==oboe::AudioFormat::I16?"I16":"Float");
QString sFramesPerCallback = QString::number(stream->getFramesPerCallback());
//QString sSampleRateConversionQuality = (stream.getSampleRateConversionQuality()==oboe::SampleRateConversionQuality::
qInfo() << "Stream details: [sDirection: " << sDirection <<
", FramesPerBurst: " << sFramesPerBurst <<
", BufferSizeInFrames: " << sBufferSizeInFrames <<
", BytesPerFrame: " << sBytesPerFrame <<
", BytesPerSample: " << sBytesPerSample <<
", BufferCapacityInFrames: " << sBufferCapacityInFrames <<
", PerformanceMode: " << sPerformanceMode <<
", SharingMode: " << sSharingMode <<
", DeviceID: " << sDeviceID <<
", SampleRate: " << sSampleRate <<
", AudioFormat: " << sAudioFormat <<
", FramesPerCallback: " << sFramesPerCallback << "]";
}
void CSound::warnIfNotLowLatency(oboe::ManagedStream &stream, QString streamName) {
if (stream->getPerformanceMode() != oboe::PerformanceMode::LowLatency) {
QString latencyMode = (stream->getPerformanceMode()==oboe::PerformanceMode::None ? "None" : "Power Saving");
// make the problem visible in the log
qWarning() << streamName << "is NOT low latency (performance mode:" << latencyMode
           << "). Check the requested format, sample rate and channel count.";
// throw CGenErr ( tr ( "Stream is NOT low latency."
// "Check your requested format, sample rate and channel count." ) );
}
}
void CSound::closeStream(oboe::ManagedStream &stream)
{
if (stream) {
stream->requestStop();
oboe::Result result = stream->close();
if (result != oboe::Result::OK) {
throw CGenErr ( tr ( "Error closing stream: $s",
oboe::convertToText(result) ) );
}
stream.reset();
}
}
void CSound::closeStreams()
{
// clean up
(*recorderObject)->Destroy ( recorderObject );
(*playerObject)->Destroy ( playerObject );
(*outputMixObject)->Destroy ( outputMixObject );
(*engineObject)->Destroy ( engineObject );
closeStream(mRecordingStream);
closeStream(mPlayStream);
}
void CSound::Start()
{
InitializeOpenSL();
// TEST We have to supply the interface with initial buffers, otherwise
// the rendering will not start.
// Note that the number of buffers enqueued here must match the maximum
// numbers of buffers configured in the constructor of this class.
vecsTmpAudioSndCrdStereo.Reset ( 0 );
// enqueue initial buffers for record
(*recorderSimpleBufQueue)->Enqueue ( recorderSimpleBufQueue,
&vecsTmpAudioSndCrdStereo[0],
iOpenSLBufferSizeStereo * 2 /* 2 bytes */ );
(*recorderSimpleBufQueue)->Enqueue ( recorderSimpleBufQueue,
&vecsTmpAudioSndCrdStereo[0],
iOpenSLBufferSizeStereo * 2 /* 2 bytes */ );
// enqueue initial buffers for playback
(*playerSimpleBufQueue)->Enqueue ( playerSimpleBufQueue,
&vecsTmpAudioSndCrdStereo[0],
iOpenSLBufferSizeStereo * 2 /* 2 bytes */ );
(*playerSimpleBufQueue)->Enqueue ( playerSimpleBufQueue,
&vecsTmpAudioSndCrdStereo[0],
iOpenSLBufferSizeStereo * 2 /* 2 bytes */ );
// start the rendering
(*recorder)->SetRecordState ( recorder, SL_RECORDSTATE_RECORDING );
(*player)->SetPlayState ( player, SL_PLAYSTATE_PLAYING );
openStreams();
// call base class
CSoundBase::Start();
// finally start the streams so the callbacks begin; start with the input stream first
mRecordingStream->requestStart();
mPlayStream->requestStart();
}
void CSound::Stop()
{
// stop the audio stream
(*recorder)->SetRecordState ( recorder, SL_RECORDSTATE_STOPPED );
(*player)->SetPlayState ( player, SL_PLAYSTATE_STOPPED );
// clear the buffers
(*recorderSimpleBufQueue)->Clear ( recorderSimpleBufQueue );
(*playerSimpleBufQueue)->Clear ( playerSimpleBufQueue );
closeStreams();
// call base class
CSoundBase::Stop();
CloseOpenSL();
}
int CSound::Init ( const int iNewPrefMonoBufferSize )
{
// TODO make use of the following:
// String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
// String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
/*
// get the Audio IO DEVICE CAPABILITIES interface
SLAudioIODeviceCapabilitiesItf audioCapabilities;
(*engineObject)->GetInterface ( engineObject,
SL_IID_AUDIOIODEVICECAPABILITIES,
&audioCapabilities );
(*audioCapabilities)->QueryAudioInputCapabilities ( audioCapabilities,
inputDeviceIDs[i],
&audioInputDescriptor );
*/
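/* A hedged sketch (not part of this commit) of the TODO above, using Qt Android Extras to
   query the device's preferred output frames-per-buffer. It would require
   #include <QtAndroidExtras/QtAndroid> and <QtAndroidExtras/QAndroidJniObject>; the helper
   name is hypothetical and the JNI signatures are the standard android.media.AudioManager
   ones, but this is untested here.
static int getAndroidOutputFramesPerBuffer()
{
    // Context.getSystemService ( "audio" ) -> AudioManager
    QAndroidJniObject audioManager = QtAndroid::androidActivity().callObjectMethod (
        "getSystemService", "(Ljava/lang/String;)Ljava/lang/Object;",
        QAndroidJniObject::fromString ( "audio" ).object<jstring>() );
    // AudioManager.getProperty ( PROPERTY_OUTPUT_FRAMES_PER_BUFFER )
    QAndroidJniObject strFrames = audioManager.callObjectMethod (
        "getProperty", "(Ljava/lang/String;)Ljava/lang/String;",
        QAndroidJniObject::fromString ( "android.media.property.OUTPUT_FRAMES_PER_BUFFER" ).object<jstring>() );
    return strFrames.isValid() ? strFrames.toString().toInt() : 0;
}
*/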
// store buffer size
iOpenSLBufferSizeMono = iNewPrefMonoBufferSize;
iOpenSLBufferSizeMono = 512; // TEST: hard-coded for now instead of iNewPrefMonoBufferSize
// init base class
CSoundBase::Init ( iOpenSLBufferSizeMono );
@@ -282,7 +183,6 @@ SLAudioIODeviceCapabilitiesItf audioCapabilities;
// create memory for intermediate audio buffer
vecsTmpAudioSndCrdStereo.Init ( iOpenSLBufferSizeStereo );
// TEST
#if ( SYSTEM_SAMPLE_RATE_HZ != 48000 )
# error "Only a system sample rate of 48 kHz is supported by this module"
@@ -295,57 +195,105 @@ SLAudioIODeviceCapabilitiesItf audioCapabilities;
iModifiedInBufSize = iOpenSLBufferSizeMono / 3;
vecsTmpAudioInSndCrd.Init ( iModifiedInBufSize );
return iOpenSLBufferSizeMono;
}
void CSound::processInput ( SLAndroidSimpleBufferQueueItf bufferQueue,
void* instance )
// This is the main callback method, invoked when an audio stream is ready to publish data to an output stream
// or has received data on an input stream. As per the manual, be very careful not to do anything in this callback
// that can cause delays, such as sleeping, file processing or allocating memory
oboe::DataCallbackResult CSound::onAudioReady(oboe::AudioStream *oboeStream, void *audioData, int32_t numFrames)
{
CSound* pSound = static_cast<CSound*> ( instance );
// only process if we are running
if ( ! pSound->bRun )
{
return oboe::DataCallbackResult::Continue;
}
// only process if we are running
if ( !pSound->bRun )
// Need to modify the size of the buffer based on the numFrames requested in this callback.
// The buffer size can change from callback to callback on Android devices
int& iBufferSizeMono = pSound->iOpenSLBufferSizeMono;
// perform the processing for input and output
// QMutexLocker locker ( &pSound->Mutex );
// locker.mutex();
// This callback can be invoked for either the input or the output stream, at different times
if (oboeStream == pSound->mPlayStream.get() && audioData)
{
return;
float *floatData = static_cast<float *>(audioData);
// Zero out the incoming container array
memset(audioData, 0, sizeof(float) * numFrames * oboeStream->getChannelCount());
// Only copy data if we have data to copy, otherwise fill with silence
if (!pSound->vecsTmpAudioSndCrdStereo.empty())
{
for (int frmNum = 0; frmNum < numFrames; ++frmNum)
{
for (int channelNum = 0; channelNum < oboeStream->getChannelCount(); channelNum++)
{
// copy sample received from server into output buffer
// convert to 32 bit
const int32_t iCurSam = static_cast<int32_t> (
pSound->vecsTmpAudioSndCrdStereo [frmNum * oboeStream->getChannelCount() + channelNum] );
floatData[frmNum * oboeStream->getChannelCount() + channelNum] = (float) iCurSam/ _MAXSHORT;
}
}
}
else
{
// prime the output buffer with silence, writing from the start of the buffer
memset(audioData, 0, numFrames * oboeStream->getBytesPerFrame());
}
}
QMutexLocker locker ( &pSound->Mutex );
// enqueue the buffer for record
(*bufferQueue)->Enqueue ( bufferQueue,
&pSound->vecsTmpAudioInSndCrd[0],
pSound->iModifiedInBufSize * 2 /* 2 bytes */ );
// upsampling (without filtering) and channel management
pSound->vecsTmpAudioSndCrdStereo.Reset ( 0 );
for ( int i = 0; i < pSound->iModifiedInBufSize; i++ )
{
pSound->vecsTmpAudioSndCrdStereo[6 * i] =
pSound->vecsTmpAudioSndCrdStereo[6 * i + 1] =
pSound->vecsTmpAudioInSndCrd[i];
}
}
void CSound::processOutput ( SLAndroidSimpleBufferQueueItf bufferQueue,
void* instance )
{
CSound* pSound = static_cast<CSound*> ( instance );
// only process if we are running
if ( !pSound->bRun )
else if (oboeStream == pSound->mRecordingStream.get() && audioData)
{
return;
// First discard the initial input: the first 500 ms or so typically contain garbage
if (pSound->mCountCallbacksToDrain > 0)
{
// discard the input buffer
int32_t numBytes = numFrames * oboeStream->getBytesPerFrame();
memset(audioData, 0 /* value */, numBytes);
pSound->mCountCallbacksToDrain--;
}
// We're good to start recording now
// Take the data from the recording device output buffer and move
// it to the vector ready to send up to the server
float *floatData = static_cast<float *>(audioData);
// Copy recording data to internal vector
for (int frmNum = 0; frmNum < numFrames; ++frmNum)
{
for (int channelNum = 0; channelNum < oboeStream->getChannelCount(); channelNum++)
{
pSound->vecsTmpAudioSndCrdStereo [frmNum * oboeStream->getChannelCount() + channelNum] =
(short) ( floatData[frmNum * oboeStream->getChannelCount() + channelNum] * _MAXSHORT ); // scale to 16 bit first, then truncate
}
}
// Tell parent class that we've put some data ready to send to the server
pSound->ProcessCallback ( pSound->vecsTmpAudioSndCrdStereo );
}
QMutexLocker locker ( &pSound->Mutex );
// call processing callback function
pSound->ProcessCallback ( pSound->vecsTmpAudioSndCrdStereo );
// enqueue the buffer for playback
(*bufferQueue)->Enqueue ( bufferQueue,
&pSound->vecsTmpAudioSndCrdStereo[0],
pSound->iOpenSLBufferSizeStereo * 2 /* 2 bytes */ );
// locker.unlock();
return oboe::DataCallbackResult::Continue;
}
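// The float/short conversions above are easy to get wrong (note the parenthesised scaling in
// the recording path). A minimal sketch, not part of this commit, keeping both scalings in one
// place; the helper names are hypothetical, _MAXSHORT is the constant already used in this file:
static inline float sampleShortToFloat ( const short sSample )
{
    // map the 16 bit range roughly onto [-1.0, 1.0]
    return static_cast<float> ( sSample ) / _MAXSHORT;
}
static inline short sampleFloatToShort ( const float fSample )
{
    // scale up to the 16 bit range first, then truncate to short
    return static_cast<short> ( fSample * _MAXSHORT );
}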
//TODO better handling of stream closing errors
void CSound::onErrorAfterClose(oboe::AudioStream *oboeStream, oboe::Result result)
{
qDebug() << "CSound::onErrorAfterClose";
}
//TODO better handling of stream closing errors
void CSound::onErrorBeforeClose(oboe::AudioStream *oboeStream, oboe::Result result)
{
qDebug() << "CSound::onErrorBeforeClose";
}

View File

@@ -24,15 +24,18 @@
#pragma once
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
/* Deprecated, moving to Oboe
* #include <SLES/OpenSLES.h>
* #include <SLES/OpenSLES_Android.h> */
#include <oboe/Oboe.h>
#include <QMutex>
#include "soundbase.h"
#include "global.h"
#include <QDebug>
#include <android/log.h>
/* Classes ********************************************************************/
class CSound : public CSoundBase
class CSound : public CSoundBase, public oboe::AudioStreamCallback//, public IRenderableAudio, public IRestartable
{
public:
CSound ( void (*fpNewProcessCallback) ( CVector<short>& psData, void* arg ),
@@ -45,10 +48,30 @@ public:
virtual void Start();
virtual void Stop();
// Callbacks for Oboe
virtual oboe::DataCallbackResult onAudioReady(oboe::AudioStream *oboeStream, void *audioData, int32_t numFrames);
virtual void onErrorAfterClose(oboe::AudioStream *oboeStream, oboe::Result result);
virtual void onErrorBeforeClose(oboe::AudioStream *oboeStream, oboe::Result result);
// these variables should be protected but cannot since we want
// to access them from the callback function
CVector<short> vecsTmpAudioSndCrdStereo;
static void android_message_handler(QtMsgType type,
const QMessageLogContext &context,
const QString &message)
{
android_LogPriority priority = ANDROID_LOG_DEBUG;
switch (type) {
case QtDebugMsg: priority = ANDROID_LOG_DEBUG; break;
case QtInfoMsg: priority = ANDROID_LOG_INFO; break;
case QtWarningMsg: priority = ANDROID_LOG_WARN; break;
case QtCriticalMsg: priority = ANDROID_LOG_ERROR; break;
case QtFatalMsg: priority = ANDROID_LOG_FATAL; break;
}
__android_log_print(priority, "Qt", "%s", qPrintable(message));
};
// TEST
CVector<short> vecsTmpAudioInSndCrd;
int iModifiedInBufSize;
@@ -56,27 +79,25 @@ int iModifiedInBufSize;
int iOpenSLBufferSizeMono;
int iOpenSLBufferSizeStereo;
protected:
private:
void setupCommonStreamParams(oboe::AudioStreamBuilder *builder);
void printStreamDetails(oboe::ManagedStream &stream);
void openStreams();
void closeStreams();
void warnIfNotLowLatency(oboe::ManagedStream &stream, QString streamName);
void closeStream(oboe::ManagedStream &stream);
void InitializeOpenSL();
void CloseOpenSL();
oboe::ManagedStream mRecordingStream;
oboe::ManagedStream mPlayStream;
AudioStreamCallback *mCallback;
// callbacks
static void processInput ( SLAndroidSimpleBufferQueueItf bufferQueue,
void* instance );
// used to reach a state where the input buffer is
// empty and the garbage in the first 500ms or so is discarded
static constexpr int32_t kNumCallbacksToDrain = 10;
int32_t mCountCallbacksToDrain = kNumCallbacksToDrain;
static void processOutput ( SLAndroidSimpleBufferQueueItf bufferQueue,
void* instance );
SLObjectItf engineObject;
SLEngineItf engine;
SLObjectItf recorderObject;
SLRecordItf recorder;
SLAndroidSimpleBufferQueueItf recorderSimpleBufQueue;
SLObjectItf outputMixObject;
SLObjectItf playerObject;
SLPlayItf player;
SLAndroidSimpleBufferQueueItf playerSimpleBufQueue;
// Used to reference this instance of the class from within the static callback
CSound *pSound;
QMutex Mutex;

View File

@@ -255,6 +255,8 @@ typedef unsigned __int64 uint64_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int8 uint8_t;
#elif defined ( __ANDROID__ )
/* don't redefine the types for Android, as the ones below don't work there. */
#else
typedef long long int64_t;
typedef int int32_t;

View File

@@ -32,12 +32,15 @@
#include "settings.h"
#include "testbench.h"
#include "util.h"
#ifdef ANDROID
#include <QtAndroidExtras/QtAndroid>
#endif
// Implementation **************************************************************
int main ( int argc, char** argv )
{
QTextStream& tsConsole = *( ( new ConsoleWriterFactory() )->get() );
QString strArgument;
double rDbleArgument;
@@ -466,6 +469,14 @@ int main ( int argc, char** argv )
QCoreApplication* pApp = bUseGUI
? new QApplication ( argc, argv )
: new QCoreApplication ( argc, argv );
#ifdef ANDROID
// request the RECORD_AUDIO runtime permission, required for audio input on Android 6+
auto result = QtAndroid::checkPermission ( QString ( "android.permission.RECORD_AUDIO" ) );
if ( result == QtAndroid::PermissionResult::Denied )
{
    QtAndroid::PermissionResultMap resultHash =
        QtAndroid::requestPermissionsSync ( QStringList ( { "android.permission.RECORD_AUDIO" } ) );
    if ( resultHash["android.permission.RECORD_AUDIO"] == QtAndroid::PermissionResult::Denied )
    {
        return 0;
    }
}
#endif
#ifdef _WIN32
// set application priority class -> high priority