libretro/ppsspp (mirror of https://github.com/libretro/ppsspp.git), commit 0e5cab2b0c, parent 9f510771bd

Android 4.2+: Ask for optimal audio buffer size + sample rate. Ignore sample rate for now.
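
For orientation before the diff: a minimal sketch, not part of this commit, of what the change wires up on Android 4.2+ (API 17). The Java side asks AudioManager for the device's preferred frames-per-buffer and sample rate, hands both to native code through the new NativeApp.audioConfig() before NativeApp.init(), and the native side clamps the buffer size and, for now, forces 44100 Hz when it brings up OpenSL ES. The class and method names in the sketch (other than the NativeApp entry points) and the null handling are assumptions; AudioManager.getProperty() can return null on devices that don't report a value.

import android.content.Context;
import android.media.AudioManager;
import android.os.Build;

// Sketch only: query the preferred output configuration (API 17+) and forward it
// to the native side. Zeros mean "let the native code pick defaults" (256 frames,
// 44100 Hz), per the comment added to the audio header in this commit.
public class OptimalAudioSketch {
	public static void configure(Context context) {
		int framesPerBuffer = 0;
		int sampleRate = 0;
		if (Build.VERSION.SDK_INT >= 17) {
			AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
			String fpb = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
			String sr = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
			// getProperty() may return null; keep 0 so the native defaults apply.
			if (fpb != null) framesPerBuffer = Integer.parseInt(fpb);
			if (sr != null) sampleRate = Integer.parseInt(sr);
		}
		NativeApp.audioConfig(framesPerBuffer, sampleRate);
		// NativeApp.init(...) follows, with useOpenSL deciding whether OpenSL audio is used.
	}
}
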
@@ -42,6 +42,9 @@ static float right_joystick_y_async;
 static uint32_t pad_buttons_down;
 
+int optimalFramesPerBuffer = 0;
+int optimalSampleRate = 0;
+
 // Android implementation of callbacks to the Java part of the app
 void SystemToast(const char *text) {
 	frameCommand = "toast";
@@ -93,7 +96,7 @@ InputState input_state;
 
 static bool renderer_inited = false;
 static bool first_lost = true;
-static bool use_native_audio = false;
+static bool use_opensl_audio = false;
 
 std::string GetJavaString(JNIEnv *env, jstring jstr)
 {
@@ -119,6 +122,12 @@ extern "C" jboolean Java_com_henrikrydgard_libnative_NativeApp_isAtTopLevel(JNIE
 	return NativeIsAtTopLevel();
 }
 
+extern "C" void Java_com_henrikrydgard_libnative_NativeApp_audioConfig
+	(JNIEnv *env, jclass, jint optimalFPB, jint optimalSR) {
+	optimalFramesPerBuffer = optimalFPB;
+	optimalSampleRate = optimalSR;
+}
+
 extern "C" void Java_com_henrikrydgard_libnative_NativeApp_init
 	(JNIEnv *env, jclass, jint xxres, jint yyres, jint dpi, jstring japkpath,
 	jstring jdataDir, jstring jexternalDir, jstring jlibraryDir, jstring jinstallID, jboolean juseNativeAudio) {
@@ -167,24 +176,26 @@ extern "C" void Java_com_henrikrydgard_libnative_NativeApp_init
 	const char *argv[2] = {app_name.c_str(), 0};
 	NativeInit(1, argv, user_data_path.c_str(), externalDir.c_str(), installID.c_str());
 
-	use_native_audio = juseNativeAudio;
-	if (use_native_audio) {
-		ILOG("Using OpenSL audio!");
-		AndroidAudio_Init(&NativeMix, library_path);
+	use_opensl_audio = juseNativeAudio;
+	if (use_opensl_audio) {
+		// TODO: PPSSPP doesn't support 48khz yet so let's not use that yet.
+		ILOG("Using OpenSL audio! frames/buffer: %i optimal sr: %i actual sr: 44100", optimalFramesPerBuffer, optimalSampleRate);
+		optimalSampleRate = 44100;
+		AndroidAudio_Init(&NativeMix, library_path, optimalFramesPerBuffer, optimalSampleRate);
 	}
 	ILOG("NativeApp.init() -- end");
 }
 
 extern "C" void Java_com_henrikrydgard_libnative_NativeApp_resume(JNIEnv *, jclass) {
 	ILOG("NativeApp.resume() - resuming audio");
-	if (use_native_audio) {
+	if (use_opensl_audio) {
 		AndroidAudio_Resume();
 	}
 }
 
 extern "C" void Java_com_henrikrydgard_libnative_NativeApp_pause(JNIEnv *, jclass) {
 	ILOG("NativeApp.pause() - pausing audio");
-	if (use_native_audio) {
+	if (use_opensl_audio) {
 		AndroidAudio_Pause();
 	}
 	ILOG("NativeApp.pause() - paused audio");
@@ -192,7 +203,7 @@ extern "C" void Java_com_henrikrydgard_libnative_NativeApp_pause(JNIEnv *, jclass) {
 
 extern "C" void Java_com_henrikrydgard_libnative_NativeApp_shutdown(JNIEnv *, jclass) {
 	ILOG("NativeApp.shutdown() -- begin");
-	if (use_native_audio) {
+	if (use_opensl_audio) {
 		AndroidAudio_Shutdown();
 	}
 	if (renderer_inited) {
@@ -30,12 +30,11 @@ static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
 static SLMuteSoloItf bqPlayerMuteSolo;
 static SLVolumeItf bqPlayerVolume;
 
-#define BUFFER_SIZE 512
-#define BUFFER_SIZE_IN_SAMPLES (BUFFER_SIZE / 2)
-
 // Double buffering.
-static short buffer[2][BUFFER_SIZE];
+static short *buffer[2];
 static int curBuffer = 0;
+static int framesPerBuffer;
+int sampleRate;
 
 static AndroidAudioCallback audioCallback;
@@ -44,15 +43,17 @@ static AndroidAudioCallback audioCallback;
 // I've chosen to this approach: Instantly enqueue a buffer that was rendered to the last time,
 // and then render the next. Hopefully it's okay to spend time in this callback after having enqueued.
 static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
-	assert(bq == bqPlayerBufferQueue);
-	assert(NULL == context);
+	if (bq != bqPlayerBufferQueue) {
+		ELOG("Wrong bq!");
+		return;
+	}
 
-	int nextSamples = audioCallback(buffer[curBuffer], BUFFER_SIZE_IN_SAMPLES);
+	int nextSamples = audioCallback(buffer[curBuffer], framesPerBuffer);
 	// We can't enqueue nothing, the callback will never be called again.
 	// Delay until we get some audio.
 	while (nextSamples == 0) {
 		usleep(40);
-		nextSamples = audioCallback(buffer[curBuffer], BUFFER_SIZE_IN_SAMPLES);
+		nextSamples = audioCallback(buffer[curBuffer], framesPerBuffer);
 	}
 
 	short *nextBuffer = buffer[curBuffer];
@@ -63,14 +64,29 @@ static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
 	// Comment from sample code:
 	// the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
 	// which for this code example would indicate a programming error
-	assert(SL_RESULT_SUCCESS == result);
+	if (result != SL_RESULT_SUCCESS) {
+		ELOG("OpenSL ES: Failed to enqueue! %i %i", nextBuffer, nextSize);
+	}
 
 	curBuffer ^= 1; // Switch buffer
 }
 
 // create the engine and output mix objects
-extern "C" bool OpenSLWrap_Init(AndroidAudioCallback cb) {
+extern "C" bool OpenSLWrap_Init(AndroidAudioCallback cb, int _FramesPerBuffer, int _SampleRate) {
 	audioCallback = cb;
+	framesPerBuffer = _FramesPerBuffer;
+	if (framesPerBuffer == 0)
+		framesPerBuffer = 256;
+	if (framesPerBuffer < 32)
+		framesPerBuffer = 32;
+	sampleRate = _SampleRate;
+	if (sampleRate != 44100 && sampleRate != 48000) {
+		ELOG("Invalid sample rate %s - choosing 44100", sampleRate);
+		sampleRate = 44100;
+	}
+
+	buffer[0] = new short[framesPerBuffer * 2];
+	buffer[1] = new short[framesPerBuffer * 2];
 
 	SLresult result;
 	// create engine
@@ -85,11 +101,16 @@ extern "C" bool OpenSLWrap_Init(AndroidAudioCallback cb) {
 	result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
 	assert(SL_RESULT_SUCCESS == result);
 
+	int sr = SL_SAMPLINGRATE_44_1;
+	if (sampleRate == 48000) {
+		sr = SL_SAMPLINGRATE_48;
+	}
+
 	SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
 	SLDataFormat_PCM format_pcm = {
 		SL_DATAFORMAT_PCM,
 		2,
-		SL_SAMPLINGRATE_44_1,
+		sr,
 		SL_PCMSAMPLEFORMAT_FIXED_16,
 		SL_PCMSAMPLEFORMAT_FIXED_16,
 		SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
@@ -124,7 +145,7 @@ extern "C" bool OpenSLWrap_Init(AndroidAudioCallback cb) {
 
 	// Render and enqueue a first buffer. (or should we just play the buffer empty?)
 	curBuffer = 0;
-	audioCallback(buffer[curBuffer], BUFFER_SIZE_IN_SAMPLES);
+	audioCallback(buffer[curBuffer], framesPerBuffer);
 
 	result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, buffer[curBuffer], sizeof(buffer[curBuffer]));
 	if (SL_RESULT_SUCCESS != result) {
@@ -166,6 +187,8 @@ extern "C" void OpenSLWrap_Shutdown() {
 		engineObject = NULL;
 		engineEngine = NULL;
 	}
+	delete [] buffer[0];
+	delete [] buffer[1];
 	ILOG("OpenSLWrap_Shutdown - finished");
 }
@@ -4,5 +4,5 @@
 
 typedef int (*AndroidAudioCallback)(short *buffer, int num_samples);
 
-typedef bool (*OpenSLWrap_Init_T)(AndroidAudioCallback cb);
+typedef bool (*OpenSLWrap_Init_T)(AndroidAudioCallback cb, int framesPerBuffer, int sampleRate);
 typedef void (*OpenSLWrap_Shutdown_T)();
@@ -11,11 +11,13 @@ struct AudioState {
 	OpenSLWrap_Init_T init_func;
 	OpenSLWrap_Shutdown_T shutdown_func;
 	bool playing;
+	int frames_per_buffer;
+	int sample_rate;
 };
 
 static AudioState *state = 0;
 
-bool AndroidAudio_Init(AndroidAudioCallback cb, std::string libraryDir) {
+bool AndroidAudio_Init(AndroidAudioCallback cb, std::string libraryDir, int optimalFramesPerBuffer, int optimalSampleRate) {
 	if (state != 0) {
 		ELOG("Audio state already exists");
 		return false;
@@ -35,6 +37,9 @@ bool AndroidAudio_Init(AndroidAudioCallback cb, std::string libraryDir) {
 	state->playing = false;
 	state->init_func = (OpenSLWrap_Init_T)dlsym(state->so, "OpenSLWrap_Init");
 	state->shutdown_func = (OpenSLWrap_Shutdown_T)dlsym(state->so, "OpenSLWrap_Shutdown");
+	state->frames_per_buffer = optimalFramesPerBuffer ? optimalFramesPerBuffer : 256;
+	state->sample_rate = optimalSampleRate ? optimalSampleRate : 44100;
+
 	ILOG("OpenSLWrap init_func: %p shutdown_func: %p", (void *)state->init_func, (void *)state->shutdown_func);
 
 	return true;
@@ -47,7 +52,7 @@ bool AndroidAudio_Resume() {
 	}
 	if (!state->playing) {
 		ILOG("Calling OpenSLWrap_Init_T...");
-		bool init_retval = state->init_func(state->s_cb);
+		bool init_retval = state->init_func(state->s_cb, state->frames_per_buffer, state->sample_rate);
 		ILOG("Returned from OpenSLWrap_Init_T");
 		state->playing = true;
 		return init_retval;
@@ -9,7 +9,8 @@
 // Do not call this if you have detected that the android version is below
 // 2.2, as it will fail miserably.
 
-bool AndroidAudio_Init(AndroidAudioCallback cb, std::string libraryDir);
+// It's okay for optimalFramesPerBuffer and optimalSampleRate to be 0. Defaults will be used.
+bool AndroidAudio_Init(AndroidAudioCallback cb, std::string libraryDir, int optimalFramesPerBuffer, int optimalSampleRate);
 bool AndroidAudio_Pause();
 bool AndroidAudio_Resume();
 void AndroidAudio_Shutdown();
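
The comment added above states that zeros are acceptable and defaults will be used. So a caller that never detects the optimal values (for example on Android below 4.2, where the AudioManager properties don't exist) can simply report zeros from the Java side; a hedged one-line sketch of that case:

	// Below API 17 nothing is detected, so both values stay 0 and the native layer
	// falls back to 256 frames per buffer at 44100 Hz.
	NativeApp.audioConfig(0, 0);
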
@@ -91,6 +91,17 @@ public class NativeActivity extends Activity {
 	public static String commandParameter;
 	public static String installID;
 
+	// Settings for best audio latency
+	private int optimalFramesPerBuffer;
+	private int optimalSampleRate;
+
+	@TargetApi(17)
+	private void detectOptimalAudioSettings() {
+		AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
+		optimalFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
+		optimalSampleRate = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
+	}
+
 	String getApplicationLibraryDir(ApplicationInfo application) {
 		String libdir = null;
 		try {
@@ -116,6 +127,10 @@ public class NativeActivity extends Activity {
 			// Native OpenSL is available. Let's use it!
 			useOpenSL = true;
 		}
+		if (Build.VERSION.SDK_INT >= 17) {
+			// Get the optimal buffer sz
+			detectOptimalAudioSettings();
+		}
 
 		if (NativeApp.isLandscape()) {
 			setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
@@ -154,8 +169,9 @@ public class NativeActivity extends Activity {
 		int dpi = metrics.densityDpi;
 
 		// INIT!
+		NativeApp.audioConfig(optimalFramesPerBuffer, optimalSampleRate);
 		NativeApp.init(scrWidth, scrHeight, dpi, apkFilePath, dataDir, externalStorageDir, libraryDir, installID, useOpenSL);
 
 		// Keep the screen bright - very annoying if it goes dark when tilting away
 		Window window = this.getWindow();
 		window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
@@ -2,7 +2,8 @@ package com.henrikrydgard.libnative;
 
 public class NativeApp {
 	public static native void init(int xxres, int yyres, int dpi, String apkPath, String dataDir, String externalDir, String libraryDir, String installID, boolean useOpenSL);
+	public static native void audioConfig(int optimalFramesPerBuffer, int optimalSampleRate);
 
 	public static native boolean isLandscape();
 
 	public static native boolean isAtTopLevel();
@@ -10,7 +10,10 @@ public class NativeAudioPlayer {
 	private String TAG = "NativeAudioPlayer";
 	private Thread thread;
 	private boolean playing_;
+
 
 	public NativeAudioPlayer() {
 	}
 
+	// Calling stop() is allowed at any time, whether stopped or not.
+	// If playing, blocks until not.
 	public synchronized void stop() {
@@ -3,17 +3,14 @@ package com.henrikrydgard.libnative;
 // Touch- and sensor-enabled GLSurfaceView.
 // Supports simple multitouch and pressure.
 
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-
 import android.app.Activity;
 import android.hardware.Sensor;
 import android.hardware.SensorEvent;
 import android.hardware.SensorEventListener;
 import android.hardware.SensorManager;
 import android.opengl.GLSurfaceView;
-import android.os.Build;
-import android.util.Log;
+// import android.os.Build;
+// import android.util.Log;
 import android.view.MotionEvent;
 
 public class NativeGLView extends GLSurfaceView implements SensorEventListener {
@@ -21,8 +18,6 @@ public class NativeGLView extends GLSurfaceView implements SensorEventListener {
 	private SensorManager mSensorManager;
 	private Sensor mAccelerometer;
 
-
-
 	public NativeGLView(NativeActivity activity) {
 		super(activity);
 		setEGLContextClientVersion(2);