Mirror of https://github.com/hrydgard/ppsspp.git
Merge pull request #13575 from Florin9doi/android_audio_record2
[Android] Audio record using OpenSL
Commit f3c05cb81a
@@ -99,3 +99,5 @@ float System_GetPropertyFloat(SystemProperty prop);
 bool System_GetPropertyBool(SystemProperty prop);

 std::vector<std::string> __cameraGetDeviceList();
+bool audioRecording_Available();
+bool audioRecording_State();
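The two new host hooks let the platform-independent HLE code ask whether an audio-capture backend exists and whether a capture session is currently running. A hypothetical caller-side check, for illustration only (the real callers are Microphone::isHaveDevice() and Microphone::isMicStarted() in the sceUsbMic hunks further down):

    // Illustration only, not part of the diff:
    if (audioRecording_Available() && !audioRecording_State()) {
    	// A capture backend exists and nothing is recording yet,
    	// so it is safe to ask the host to start a session.
    }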
@@ -137,6 +137,11 @@ static int sceUsbCamReadMicBlocking(u32 bufAddr, u32 size) {
 	return __MicInputBlocking(size >> 1, config->micParam.frequency, bufAddr);
 }

+static int sceUsbCamReadMic(u32 bufAddr, u32 size) {
+	INFO_LOG(HLE, "UNIMPL sceUsbCamReadMic: size: %d", size);
+	return __MicInputBlocking(size >> 1, config->micParam.frequency, bufAddr);
+}
+
 static int sceUsbCamSetupVideo(u32 paramAddr, u32 workareaAddr, int wasize) {
 	if (Memory::IsValidRange(paramAddr, sizeof(PspUsbCamSetupVideoParam))) {
 		Memory::ReadStruct(paramAddr, &config->videoParam);
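Both camera-microphone entry points forward to __MicInputBlocking() with `size >> 1`. The shift is assumed to convert the byte count the game passes into a count of 16-bit samples, which __MicInputBlocking() appears to expect as its first argument:

    // Sketch of the assumed unit conversion (not part of the diff):
    u32 byteCount   = size;            // buffer size as passed by the game, in bytes
    u32 sampleCount = byteCount >> 1;  // 2 bytes per 16-bit PCM sample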
@@ -185,7 +190,7 @@ static int sceUsbCamReadVideoFrameBlocking(u32 bufAddr, u32 size) {
 	if (Memory::IsValidRange(bufAddr, size)) {
 		Memory::Memcpy(bufAddr, videoBuffer, transferSize);
 	}
-	return videoBufferLength;
+	return transferSize;
 }

 static int sceUsbCamReadVideoFrame(u32 bufAddr, u32 size) {
@@ -194,7 +199,7 @@ static int sceUsbCamReadVideoFrame(u32 bufAddr, u32 size) {
 	if (Memory::IsValidRange(bufAddr, size)) {
 		Memory::Memcpy(bufAddr, videoBuffer, transferSize);
 	}
-	nextVideoFrame = videoBufferLength;
+	nextVideoFrame = transferSize;
 	return 0;
 }

@@ -243,7 +248,7 @@ const HLEFunction sceUsbCam[] =
 	{ 0X82A64030, &WrapI_V<sceUsbCamStartMic>, "sceUsbCamStartMic", 'i', "" },
 	{ 0X5145868A, &WrapI_V<sceUsbCamStopMic>, "sceUsbCamStopMic", 'i', "" },
 	{ 0X36636925, &WrapI_UU<sceUsbCamReadMicBlocking>, "sceUsbCamReadMicBlocking", 'i', "xx" },
-	{ 0X3DC0088E, nullptr, "sceUsbCamReadMic", '?', "" },
+	{ 0X3DC0088E, &WrapI_UU<sceUsbCamReadMic>, "sceUsbCamReadMic", 'i', "xx" },
 	{ 0XB048A67D, nullptr, "sceUsbCamWaitReadMicEnd", '?', "" },
 	{ 0XF8847F60, nullptr, "sceUsbCamPollReadMicEnd", '?', "" },
 	{ 0X5778B452, nullptr, "sceUsbCamGetMicDataLength", '?', "" },
@@ -301,7 +306,7 @@ void Register_sceUsbCam()
 }

 std::vector<std::string> Camera::getDeviceList() {
-#ifdef HAVE_WIN32_CAMERA
+#ifdef HAVE_WIN32_CAMERA
 	if (winCamera) {
 		return winCamera->getDeviceList();
 	}
@@ -321,7 +326,7 @@ int Camera::startCapture() {
 	INFO_LOG(HLE, "%s resolution: %dx%d", __FUNCTION__, width, height);

 	config->mode = Camera::Mode::Video;
-#ifdef HAVE_WIN32_CAMERA
+#ifdef HAVE_WIN32_CAMERA
 	if (winCamera) {
 		if (winCamera->isShutDown()) {
 			delete winCamera;
@@ -345,7 +350,7 @@ int Camera::startCapture() {

 int Camera::stopCapture() {
 	INFO_LOG(HLE, "%s", __FUNCTION__);
-#ifdef HAVE_WIN32_CAMERA
+#ifdef HAVE_WIN32_CAMERA
 	if (winCamera) {
 		winCamera->sendMessage({ CAPTUREDEVIDE_COMMAND::STOP, nullptr });
 	}
@@ -92,14 +92,16 @@ const HLEFunction sceUsbGps[] =
 	{0X268F95CA, nullptr, "sceUsbGpsSetInitDataLocation", '?', "" },
 	{0X31F95CDE, nullptr, "sceUsbGpsGetPowerSaveMode", '?', "" },
 	{0X54D26AA4, &WrapI_U<sceUsbGpsGetInitDataLocation>, "sceUsbGpsGetInitDataLocation", 'i', "x" },
 	{0X5881C826, nullptr, "sceUsbGpsGetStaticNavMode", '?', "" },
 	{0X63D1F89D, nullptr, "sceUsbGpsResetInitialPosition", '?', "" },
 	{0X69E4AAA8, nullptr, "sceUsbGpsSaveInitData", '?', "" },
 	{0X6EED4811, &WrapI_V<sceUsbGpsClose>, "sceUsbGpsClose", 'i', "" },
-	{0X7C16AC3A, &WrapI_U<sceUsbGpsGetState>, "sceUsbGpsGetState", 'i', "x"},
+	{0X7C16AC3A, &WrapI_U<sceUsbGpsGetState>, "sceUsbGpsGetState", 'i', "x" },
 	{0X934EC2B2, &WrapI_UU<sceUsbGpsGetData>, "sceUsbGpsGetData", 'i', "xx" },
 	{0X9D8F99E8, nullptr, "sceUsbGpsSetPowerSaveMode", '?', "" },
 	{0X9F267D34, &WrapI_V<sceUsbGpsOpen>, "sceUsbGpsOpen", 'i', "" },
 	{0XA259CD67, nullptr, "sceUsbGpsReset", '?', "" },
 	{0XA8ED0BC2, nullptr, "sceUsbGpsSetStaticNavMode", '?', "" },
 };

 void Register_sceUsbGps()
@@ -43,7 +43,7 @@ typedef struct {
 	float garbage3;
 	float speed;
 	float bearing;
-} GpsData;
+} GpsData;

 typedef struct {
 	unsigned char id;
@@ -19,6 +19,7 @@

 #include "Common/Serialize/Serializer.h"
 #include "Common/Serialize/SerializeFuncs.h"
+#include "Common/System/System.h"
 #include "Core/HLE/HLE.h"
 #include "Core/HLE/FunctionWrappers.h"
 #include "Core/HLE/sceKernelThread.h"
@@ -309,6 +310,12 @@ int Microphone::startMic(void *param) {
 #ifdef HAVE_WIN32_MICROPHONE
 	if (winMic)
 		winMic->sendMessage({ CAPTUREDEVIDE_COMMAND::START, param });
+#elif PPSSPP_PLATFORM(ANDROID)
+	std::vector<u32> *micParam = static_cast<std::vector<u32>*>(param);
+	int sampleRate = micParam->at(0);
+	int channels = micParam->at(1);
+	INFO_LOG(HLE, "microphone_command : sr = %d", sampleRate);
+	System_SendMessage("microphone_command", ("startRecording:" + std::to_string(sampleRate)).c_str());
 #endif
 	micState = 1;
 	return 0;
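On Android, the start request is not handled natively at all: the parameters arrive as a vector of { sampleRate, channels } and only the sample rate is forwarded to the Java side as a string message (the channel count is read but not used). A hedged sketch of what a caller is assumed to pass and what goes over the message channel:

    // Hypothetical call site (the real ones live elsewhere in sceUsbMic.cpp):
    std::vector<u32> *micParam = new std::vector<u32>({ 44100 /* sampleRate */, 1 /* channels */ });
    Microphone::startMic(micParam);
    // On Android this ends up as:
    //   System_SendMessage("microphone_command", "startRecording:44100");
    // which NativeActivity parses later in this diff.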
@@ -318,6 +325,8 @@ int Microphone::stopMic() {
 #ifdef HAVE_WIN32_MICROPHONE
 	if (winMic)
 		winMic->sendMessage({ CAPTUREDEVIDE_COMMAND::STOP, nullptr });
+#elif PPSSPP_PLATFORM(ANDROID)
+	System_SendMessage("microphone_command", "stopRecording");
 #endif
 	micState = 0;
 	return 0;
@@ -326,6 +335,8 @@ int Microphone::stopMic() {
 bool Microphone::isHaveDevice() {
 #ifdef HAVE_WIN32_MICROPHONE
 	return winMic->getDeviceCounts() >= 1;
+#elif PPSSPP_PLATFORM(ANDROID)
+	return audioRecording_Available();
 #endif
 	return false;
 }
@@ -334,6 +345,8 @@ bool Microphone::isMicStarted() {
 #ifdef HAVE_WIN32_MICROPHONE
 	if(winMic)
 		return winMic->isStarted();
+#elif PPSSPP_PLATFORM(ANDROID)
+	return audioRecording_State();
 #endif
 	return false;
 }
@@ -428,7 +441,7 @@ const HLEFunction sceUsbMic[] =
 	{0x06128E42, &WrapI_V<sceUsbMicPollInputEnd>, "sceUsbMicPollInputEnd", 'i', "" },
 	{0x2E6DCDCD, &WrapI_UUU<sceUsbMicInputBlocking>, "sceUsbMicInputBlocking", 'i', "xxx" },
 	{0x45310F07, &WrapI_U<sceUsbMicInputInitEx>, "sceUsbMicInputInitEx", 'i', "x" },
-	{0x5F7F368D, &WrapI_V<sceUsbMicInput> , "sceUsbMicInput", 'i', "" },
+	{0x5F7F368D, &WrapI_V<sceUsbMicInput>, "sceUsbMicInput", 'i', "" },
 	{0x63400E20, &WrapI_V<sceUsbMicGetInputLength>, "sceUsbMicGetInputLength", 'i', "" },
 	{0xB8E536EB, &WrapI_III<sceUsbMicInputInit>, "sceUsbMicInputInit", 'i', "iii" },
 	{0xF899001C, &WrapI_V<sceUsbMicWaitInputEnd>, "sceUsbMicWaitInputEnd", 'i', "" },
@@ -26,6 +26,7 @@
 	<uses-permission-sdk-23 android:name="android.permission.ACCESS_COARSE_LOCATION" />
 	<uses-permission-sdk-23 android:name="android.permission.ACCESS_FINE_LOCATION" />
 	<uses-permission-sdk-23 android:name="android.permission.CAMERA" />
+	<uses-permission-sdk-23 android:name="android.permission.RECORD_AUDIO" />

 	<supports-screens
 		android:largeScreens="true"
@@ -18,8 +18,12 @@ AudioContext::AudioContext(AndroidAudioCallback cb, int _FramesPerBuffer, int _S
 struct AndroidAudioState {
 	AudioContext *ctx = nullptr;
 	AndroidAudioCallback callback = nullptr;
+	// output
 	int frames_per_buffer = 0;
 	int sample_rate = 0;
+	// input
+	int input_enable = 0;
+	int input_sample_rate = 0;
 };

 AndroidAudioState *AndroidAudio_Init(AndroidAudioCallback callback, int optimalFramesPerBuffer, int optimalSampleRate) {
@@ -30,6 +34,54 @@ AndroidAudioState *AndroidAudio_Init(AndroidAudioCallback callback, int optimalF
 	return state;
 }

+bool AndroidAudio_Recording_SetSampleRate(AndroidAudioState *state, int sampleRate) {
+	if (!state) {
+		ERROR_LOG(AUDIO, "AndroidAudioState not initialized, cannot set recording sample rate");
+		return false;
+	}
+	state->input_sample_rate = sampleRate;
+	INFO_LOG(AUDIO, "AndroidAudio_Recording_SetSampleRate=%d", sampleRate);
+	return true;
+}
+
+bool AndroidAudio_Recording_Start(AndroidAudioState *state) {
+	if (!state) {
+		ERROR_LOG(AUDIO, "AndroidAudioState not initialized, cannot start recording!");
+		return false;
+	}
+	state->input_enable = 1;
+	if (!state->ctx) {
+		ERROR_LOG(AUDIO, "OpenSLContext not initialized, cannot start recording!");
+		return false;
+	}
+	state->ctx->AudioRecord_Start(state->input_sample_rate);
+	INFO_LOG(AUDIO, "AndroidAudio_Recording_Start");
+	return true;
+}
+
+bool AndroidAudio_Recording_Stop(AndroidAudioState *state) {
+	if (!state) {
+		ERROR_LOG(AUDIO, "AndroidAudioState not initialized, cannot stop recording!");
+		return false;
+	}
+	if (!state->ctx) {
+		ERROR_LOG(AUDIO, "OpenSLContext not initialized, cannot stop recording!");
+		return false;
+	}
+	state->input_enable = 0;
+	state->input_sample_rate = 0;
+	state->ctx->AudioRecord_Stop();
+	INFO_LOG(AUDIO, "AndroidAudio_Recording_Stop");
+	return true;
+}
+
+bool AndroidAudio_Recording_State(AndroidAudioState *state) {
+	if (!state) {
+		return false;
+	}
+	return state->input_enable;
+}
+
 bool AndroidAudio_Resume(AndroidAudioState *state) {
 	if (!state) {
 		ERROR_LOG(AUDIO, "Audio was shutdown, cannot resume!");
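Together these four functions form the host-side recording lifecycle; the requested sample rate and the enable flag are cached in AndroidAudioState so recording can also be restarted after the OpenSL context is recreated (see the Resume hunk just below). A minimal usage sketch, assuming an already-created audio state (the real driver of these calls is the JNI bridge later in this diff):

    // Sketch only; 'audioCallback' stands in for the usual output callback.
    AndroidAudioState *state = AndroidAudio_Init(audioCallback, 0, 0);
    AndroidAudio_Recording_SetSampleRate(state, 44100);  // remember the requested rate
    AndroidAudio_Recording_Start(state);                 // creates and starts the OpenSL recorder
    // ...captured 16-bit PCM is pushed to the HLE mic from the recorder callback...
    if (AndroidAudio_Recording_State(state)) {
    	AndroidAudio_Recording_Stop(state);              // stops and tears the recorder down
    }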
@@ -44,6 +96,9 @@ bool AndroidAudio_Resume(AndroidAudioState *state) {
 			delete state->ctx;
 			state->ctx = nullptr;
 		}
+		if (state->input_enable) {
+			state->ctx->AudioRecord_Start(state->input_sample_rate);
+		}
 		return init_retval;
 	}
 	return false;
@@ -8,6 +8,8 @@ class AudioContext {
 public:
 	AudioContext(AndroidAudioCallback cb, int _FramesPerBuffer, int _SampleRate);
 	virtual bool Init() { return false; }
+	virtual int AudioRecord_Start(int sampleRate) { return false; };
+	virtual int AudioRecord_Stop() { return false; };
 	virtual ~AudioContext() {}

 protected:
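The base-class defaults make recording a no-op for any audio backend that never overrides these hooks; only OpenSLContext (further down) provides a real implementation. Note that the hooks are declared as returning int but are used as booleans. A hypothetical backend adding capture support would look roughly like this:

    // Illustration only, not a class from the diff:
    class NullRecordingContext : public AudioContext {
    public:
    	NullRecordingContext(AndroidAudioCallback cb, int framesPerBuffer, int sampleRate)
    		: AudioContext(cb, framesPerBuffer, sampleRate) {}
    	bool Init() override { return true; }
    	int AudioRecord_Start(int sampleRate) override { /* begin capture at sampleRate */ return true; }
    	int AudioRecord_Stop() override { /* end capture */ return true; }
    };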
@@ -21,6 +23,10 @@ struct AndroidAudioState;

 // It's okay for optimalFramesPerBuffer and optimalSampleRate to be 0. Defaults will be used.
 AndroidAudioState *AndroidAudio_Init(AndroidAudioCallback cb, int optimalFramesPerBuffer, int optimalSampleRate);
+bool AndroidAudio_Recording_SetSampleRate(AndroidAudioState *state, int sampleRate);
+bool AndroidAudio_Recording_Start(AndroidAudioState *state);
+bool AndroidAudio_Recording_Stop(AndroidAudioState *state);
+bool AndroidAudio_Recording_State(AndroidAudioState *state);
 bool AndroidAudio_Pause(AndroidAudioState *state);
 bool AndroidAudio_Resume(AndroidAudioState *state);
 bool AndroidAudio_Shutdown(AndroidAudioState *state);
@@ -11,6 +11,32 @@

 #include "Common/Log.h"
 #include "OpenSLContext.h"
+#include "Core/HLE/sceUsbMic.h"

+void OpenSLContext::bqRecorderCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
+	OpenSLContext *ctx = (OpenSLContext *)context;
+	SLresult result;
+
+	SLuint32 recordsState;
+	result = (*ctx->recorderRecord)->GetRecordState(ctx->recorderRecord, &recordsState);
+	if(result != SL_RESULT_SUCCESS) {
+		ERROR_LOG(AUDIO, "GetRecordState error: %d", result);
+		return;
+	}
+
+	Microphone::addAudioData((uint8_t*) ctx->recordBuffer[ctx->activeRecordBuffer], ctx->recordBufferSize);
+
+	if (recordsState == SL_RECORDSTATE_RECORDING) {
+		result = (*ctx->recorderBufferQueue)->Enqueue(ctx->recorderBufferQueue, ctx->recordBuffer[ctx->activeRecordBuffer], ctx->recordBufferSize);
+		if (result != SL_RESULT_SUCCESS) {
+			ERROR_LOG(AUDIO, "Enqueue error: %d", result);
+		}
+	}
+
+	ctx->activeRecordBuffer += 1; // Switch buffer
+	if (ctx->activeRecordBuffer == NUM_BUFFERS)
+		ctx->activeRecordBuffer = 0;
+}
+
 // This callback handler is called every time a buffer finishes playing.
 // The documentation available is very unclear about how to best manage buffers.
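The recorder callback runs each time OpenSL has filled one of the queue buffers: the finished buffer (recordBufferSize bytes of 16-bit mono PCM) is handed to Microphone::addAudioData() (declared in sceUsbMic.h, not shown in this diff, and presumably feeding the HLE microphone buffer), re-enqueued while recording is still active, and the active index then flips to the other buffer. The switch at the end is a plain ping-pong, equivalent to:

    // Equivalent formulation of the buffer switch above (sketch):
    ctx->activeRecordBuffer = (ctx->activeRecordBuffer + 1) % NUM_BUFFERS;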
@@ -101,7 +127,8 @@ bool OpenSLContext::Init() {
 	// create audio player
 	const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
 	const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
-	result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, 2, ids, req);
+	result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
+		sizeof(ids)/sizeof(ids[0]), ids, req);
 	if (result != SL_RESULT_SUCCESS) {
 		ERROR_LOG(AUDIO, "OpenSL: CreateAudioPlayer failed: %d", (int)result);
 		(*outputMixObject)->Destroy(outputMixObject);
@@ -145,6 +172,102 @@ bool OpenSLContext::Init() {
 	return true;
 }

+int OpenSLContext::AudioRecord_Start(int sampleRate) {
+	SLresult result;
+
+	// configure audio source
+	SLDataLocator_IODevice loc_dev = {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
+	SLDataSource audioSrc = {&loc_dev, NULL};
+
+	// configure audio sink
+	SLDataLocator_AndroidSimpleBufferQueue loc_bq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
+	SLDataFormat_PCM format_pcm = {
+		SL_DATAFORMAT_PCM,
+		1,
+		(SLuint32) sampleRate * 1000, // The constants such as SL_SAMPLINGRATE_44_1 are 44100000
+		SL_PCMSAMPLEFORMAT_FIXED_16,
+		SL_PCMSAMPLEFORMAT_FIXED_16,
+		SL_SPEAKER_FRONT_CENTER,
+		SL_BYTEORDER_LITTLEENDIAN
+	};
+	SLDataSink audioSnk = {&loc_bq, &format_pcm};
+
+	// create audio recorder
+	const SLInterfaceID id[1] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE};
+	const SLboolean req[1] = {SL_BOOLEAN_TRUE};
+	result = (*engineEngine)->CreateAudioRecorder(engineEngine, &recorderObject, &audioSrc, &audioSnk,
+		sizeof(id)/sizeof(id[0]), id, req);
+	if (SL_RESULT_SUCCESS != result) {
+		ERROR_LOG(AUDIO, "CreateAudioRecorder failed: %d", result);
+		return false;
+	}
+
+	// realize the audio recorder
+	result = (*recorderObject)->Realize(recorderObject, SL_BOOLEAN_FALSE);
+	if (SL_RESULT_SUCCESS != result) {
+		ERROR_LOG(AUDIO, "Realize failed: %d", result);
+		return false;
+	}
+
+	// get the record interface
+	result = (*recorderObject)->GetInterface(recorderObject, SL_IID_RECORD, &recorderRecord);
+	if (SL_RESULT_SUCCESS != result) {
+		ERROR_LOG(AUDIO, "GetInterface(record) failed: %d", result);
+		return false;
+	}
+
+	// get the buffer queue interface
+	result = (*recorderObject)->GetInterface(recorderObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, (void*) &recorderBufferQueue);
+	if (SL_RESULT_SUCCESS != result) {
+		ERROR_LOG(AUDIO, "GetInterface(queue interface) failed: %d", result);
+		return false;
+	}
+
+	// register callback on the buffer queue
+	result = (*recorderBufferQueue)->RegisterCallback(recorderBufferQueue, &bqRecorderCallback, this);
+	if (SL_RESULT_SUCCESS != result) {
+		ERROR_LOG(AUDIO, "RegisterCallback failed: %d", result);
+		return false;
+	}
+
+	recordBufferSize = (44100 * 20 / 1000 * 2);
+	for (int i = 0; i < NUM_BUFFERS; i++) {
+		recordBuffer[i] = new short[recordBufferSize];
+	}
+	for (int i = 0; i < NUM_BUFFERS; i++) {
+		result = (*recorderBufferQueue)->Enqueue(recorderBufferQueue, recordBuffer[i], recordBufferSize);
+		if (SL_RESULT_SUCCESS != result) {
+			ERROR_LOG(AUDIO, "Enqueue failed: %d", result);
+			return false;
+		}
+	}
+
+	result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_RECORDING);
+	assert(SL_RESULT_SUCCESS == result);
+
+	return true;
+}
+
+int OpenSLContext::AudioRecord_Stop() {
+	if (recorderRecord != nullptr) {
+		SLresult result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
+		assert(SL_RESULT_SUCCESS == result);
+	}
+	if (recorderObject != nullptr) {
+		(*recorderObject)->Destroy(recorderObject);
+		recorderObject = nullptr;
+		recorderRecord = nullptr;
+		recorderBufferQueue = nullptr;
+	}
+	if (recordBuffer[0] != nullptr) {
+		delete [] recordBuffer[0];
+		delete [] recordBuffer[1];
+		recordBuffer[0] = nullptr;
+		recordBuffer[1] = nullptr;
+	}
+	return true;
+}
+
 // shut down the native audio system
 OpenSLContext::~OpenSLContext() {
 	if (bqPlayerPlay) {
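Two details of the setup above are easy to miss: OpenSL expresses sample rates in milliHertz (hence `sampleRate * 1000`; SL_SAMPLINGRATE_44_1 is 44100000), and each queue buffer holds roughly 20 ms of 16-bit mono audio at 44.1 kHz. recordBufferSize is a byte count, passed as the size both to Enqueue() and to Microphone::addAudioData(), while the `new short[recordBufferSize]` allocation uses it as an element count, so each buffer is allocated about twice as large as the data actually queued. The sizing arithmetic, spelled out:

    // Sketch of the buffer sizing used in AudioRecord_Start() above:
    constexpr int kSampleRate     = 44100;  // Hz
    constexpr int kBufferMillis   = 20;     // ~20 ms of audio per queue buffer
    constexpr int kBytesPerSample = 2;      // 16-bit mono PCM
    constexpr int kRecordBufferBytes = kSampleRate * kBufferMillis / 1000 * kBytesPerSample;  // = 1764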
@@ -172,6 +295,7 @@ OpenSLContext::~OpenSLContext() {
 		(*outputMixObject)->Destroy(outputMixObject);
 		outputMixObject = nullptr;
 	}
+	AudioRecord_Stop();

 	INFO_LOG(AUDIO, "OpenSL: Shutdown - deleting engine object");

@@ -186,5 +310,4 @@ OpenSLContext::~OpenSLContext() {
 		buffer[i] = nullptr;
 	}
 	INFO_LOG(AUDIO, "OpenSL: Shutdown - finished");
-}
-
+}
@@ -10,25 +10,38 @@ public:
 	OpenSLContext(AndroidAudioCallback cb, int framesPerBuffer, int sampleRate);

 	bool Init() override;
+	int AudioRecord_Start(int sampleRate) override;
+	int AudioRecord_Stop() override;
 	~OpenSLContext();

 private:
+	// Should be no reason to need more than two buffers, but make it clear in the code.
+	enum {
+		NUM_BUFFERS = 2,
+	};
+
 	// engine interfaces
 	SLObjectItf engineObject = nullptr;
 	SLEngineItf engineEngine = nullptr;
 	SLObjectItf outputMixObject = nullptr;

+	// audio recorder interfaces
+	SLObjectItf recorderObject = nullptr;
+	SLRecordItf recorderRecord = nullptr;
+	SLAndroidSimpleBufferQueueItf recorderBufferQueue = nullptr;
+
+	int recordBufferSize = 0;
+	short *recordBuffer[NUM_BUFFERS]{};
+	int activeRecordBuffer = 0;
+
+	static void bqRecorderCallback(SLAndroidSimpleBufferQueueItf bq, void *context);
+
 	// buffer queue player interfaces
 	SLObjectItf bqPlayerObject = nullptr;
 	SLPlayItf bqPlayerPlay = nullptr;
 	SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue = nullptr;
 	SLVolumeItf bqPlayerVolume = nullptr;

-	// Should be no reason to need more than two buffers, but make it clear in the code.
-	enum {
-		NUM_BUFFERS = 2,
-	};
-
 	// Double buffering.
 	short *buffer[NUM_BUFFERS]{};
 	int curBuffer = 0;
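The NUM_BUFFERS enum shows up twice in this hunk because it moves: the new recorder members are declared with it (`short *recordBuffer[NUM_BUFFERS]`), so the constant now has to appear before them instead of next to the player's double-buffering fields lower in the class. In outline:

    // Why the constant moved up (sketch): it must precede its first use.
    enum { NUM_BUFFERS = 2 };
    short *recordBuffer[NUM_BUFFERS]{};  // new: recorder ping-pong buffers
    short *buffer[NUM_BUFFERS]{};        // existing: player double-buffering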
@@ -678,6 +678,26 @@ extern "C" void Java_org_ppsspp_ppsspp_NativeApp_audioShutdown(JNIEnv *, jclass)
 	}
 }

+extern "C" void Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1SetSampleRate(JNIEnv *, jclass, jint sampleRate) {
+	AndroidAudio_Recording_SetSampleRate(g_audioState, sampleRate);
+}
+
+extern "C" void Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1Start(JNIEnv *, jclass) {
+	AndroidAudio_Recording_Start(g_audioState);
+}
+
+extern "C" void Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1Stop(JNIEnv *, jclass) {
+	AndroidAudio_Recording_Stop(g_audioState);
+}
+
+bool audioRecording_Available() {
+	return true;
+}
+
+bool audioRecording_State() {
+	return AndroidAudio_Recording_State(g_audioState);
+}
+
 extern "C" void Java_org_ppsspp_ppsspp_NativeApp_resume(JNIEnv *, jclass) {
 	INFO_LOG(SYSTEM, "NativeApp.resume() - resuming audio");
 	AndroidAudio_Resume(g_audioState);
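The `_1` in these symbol names is JNI escaping rather than a typo: an underscore in a Java method name becomes `_1` in the corresponding native symbol. audioRecording_Available() and audioRecording_State(), by contrast, carry no Java_ prefix because they are ordinary C++ functions implementing the host hooks declared at the top of this diff. The mapping, for reference:

    // Java method (NativeApp.java)                 ->  native symbol (this file)
    // NativeApp.audioRecording_SetSampleRate(int)  ->  Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1SetSampleRate
    // NativeApp.audioRecording_Start()             ->  Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1Start
    // NativeApp.audioRecording_Stop()              ->  Java_org_ppsspp_ppsspp_NativeApp_audioRecording_1Stop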
@@ -122,10 +122,14 @@ public abstract class NativeActivity extends Activity {
 	private static final String[] permissionsForCamera = {
 		Manifest.permission.CAMERA
 	};
+	private static final String[] permissionsForMicrophone = {
+		Manifest.permission.RECORD_AUDIO
+	};

 	public static final int REQUEST_CODE_STORAGE_PERMISSION = 1;
 	public static final int REQUEST_CODE_LOCATION_PERMISSION = 2;
 	public static final int REQUEST_CODE_CAMERA_PERMISSION = 3;
+	public static final int REQUEST_CODE_MICROPHONE_PERMISSION = 4;

 	// Functions for the app activity to override to change behaviour.

@@ -224,6 +228,11 @@ public abstract class NativeActivity extends Activity {
 				mCameraHelper.startCamera();
 			}
 			break;
+		case REQUEST_CODE_MICROPHONE_PERMISSION:
+			if (permissionsGranted(permissions, grantResults)) {
+				NativeApp.audioRecording_Start();
+			}
+			break;
 		default:
 		}
 	}
@@ -1289,6 +1298,16 @@ public abstract class NativeActivity extends Activity {
 			} else if (mCameraHelper != null && params.equals("stopVideo")) {
 				mCameraHelper.stopCamera();
 			}
+		} else if (command.equals("microphone_command")) {
+			if (params.startsWith("startRecording:")) {
+				int sampleRate = Integer.parseInt(params.replace("startRecording:", ""));
+				NativeApp.audioRecording_SetSampleRate(sampleRate);
+				if (!askForPermissions(permissionsForMicrophone, REQUEST_CODE_MICROPHONE_PERMISSION)) {
+					NativeApp.audioRecording_Start();
+				}
+			} else if (params.equals("stopRecording")) {
+				NativeApp.audioRecording_Stop();
+			}
 		} else if (command.equals("uistate")) {
 			Window window = this.getWindow();
 			if (params.equals("ingame")) {
@@ -17,6 +17,10 @@ public class NativeApp {
 	public static native void audioShutdown();
 	public static native void audioConfig(int optimalFramesPerBuffer, int optimalSampleRate);

+	public static native void audioRecording_SetSampleRate(int sampleRate);
+	public static native void audioRecording_Start();
+	public static native void audioRecording_Stop();
+
 	public static native void computeDesiredBackbufferDimensions();
 	public static native int getDesiredBackbufferWidth();
 	public static native int getDesiredBackbufferHeight();
@@ -909,6 +909,8 @@ void NativeResized() {}

 #if PPSSPP_PLATFORM(ANDROID) || PPSSPP_PLATFORM(IOS)
 std::vector<std::string> __cameraGetDeviceList() { return std::vector<std::string>(); }
+bool audioRecording_Available() { return false; }
+bool audioRecording_State() { return false; }

 void System_InputBoxGetString(const std::string &title, const std::string &defaultValue, std::function<void(bool, const std::string &)> cb) { cb(false, ""); }
 #endif