Merge pull request from unknownbrackets/gles

Android: Allow using OpenGL 4.x with javaGL disabled
This commit is contained in:
Henrik Rydgård 2018-06-18 10:58:39 +02:00 committed by GitHub
commit f3ab56e15d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 121 additions and 53 deletions

@@ -202,14 +202,21 @@ bool cInterfaceEGL::ChooseAndCreate(void *window_handle, bool core, bool use565)
EGLint ctx_attribs[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE, 0
EGL_NONE, 0,
EGL_NONE, 0,
EGL_NONE, 0,
EGL_NONE, 0,
};
switch (s_opengl_mode) {
case MODE_OPENGL:
EGL_ILOG("Setting RENDERABLE_TYPE to EGL_OPENGL_BIT");
attribs[1] = EGL_OPENGL_BIT;
ctx_attribs[0] = EGL_NONE;
// 1 will be major version, and 3 the minor version.
ctx_attribs[2] = 0x30FB; /* EGL_CONTEXT_MINOR_VERSION_KHR */
// Let's always use a core profile here.
ctx_attribs[4] = 0x30FD; /* EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR */
ctx_attribs[5] = 1; /* EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR */
break;
case MODE_OPENGLES2:
EGL_ILOG("Setting RENDERABLE_TYPE to EGL_OPENGL_ES2_BIT");
@@ -230,7 +237,7 @@ bool cInterfaceEGL::ChooseAndCreate(void *window_handle, bool core, bool use565)
EGL_ILOG("Calling eglChooseConfig to get number of configs (use16bit=%d)...", (int)use565);
EGLConfig *configs;
EGLint num_configs;
EGLint num_configs = 0;
if (!eglChooseConfig(egl_dpy, attribs, NULL, 0, &num_configs) || num_configs == 0) {
EGL_ILOG("Error: couldn't get a number of configs. Trying with fallback config (no stencil, not specifying transparent:none)\n");
attribsFallback[1] = attribs[1];
@@ -298,7 +305,23 @@ bool cInterfaceEGL::ChooseAndCreate(void *window_handle, bool core, bool use565)
s = eglQueryString(egl_dpy, EGL_CLIENT_APIS);
EGL_ILOG("EGL_CLIENT_APIS = %s\n", s);
egl_ctx = eglCreateContext(egl_dpy, configs[chosenConfig], EGL_NO_CONTEXT, ctx_attribs);
if (s_opengl_mode == MODE_OPENGL) {
EGL_ILOG("Finding a good GL version");
egl_ctx = nullptr;
for (int minor = 6; minor >= 0 && !egl_ctx; --minor) {
ctx_attribs[1] = 4;
ctx_attribs[3] = minor;
egl_ctx = eglCreateContext(egl_dpy, configs[chosenConfig], EGL_NO_CONTEXT, ctx_attribs);
}
if (!egl_ctx) {
ctx_attribs[1] = 3;
ctx_attribs[3] = 3;
egl_ctx = eglCreateContext(egl_dpy, configs[chosenConfig], EGL_NO_CONTEXT, ctx_attribs);
}
} else {
egl_ctx = eglCreateContext(egl_dpy, configs[chosenConfig], EGL_NO_CONTEXT, ctx_attribs);
}
if (!egl_ctx) {
EGL_ILOG("Error: eglCreateContext failed: %s\n", EGLGetErrorString(eglGetError()));
delete[] configs;
@@ -339,7 +362,7 @@ bool cInterfaceEGL::Create(void *window_handle, bool core, bool use565) {
if (s_opengl_mode == MODE_DETECT || s_opengl_mode == MODE_DETECT_ES)
DetectMode();
if (!ChooseAndCreate(window_handle, core, use565) && s_opengl_mode == MODE_OPENGLES3) {
if (!ChooseAndCreate(window_handle, core, use565) && (s_opengl_mode == MODE_OPENGLES3 || s_opengl_mode == MODE_OPENGL)) {
// Fallback to ES 2.0 and try again.
s_opengl_mode = MODE_OPENGLES2;
if (!ChooseAndCreate(window_handle, core, use565)) {

@@ -25,15 +25,10 @@
#include "GPU/GLES/TextureCacheGLES.h"
#include "GPU/Common/DepalettizeShaderCommon.h"
#ifdef _WIN32
#define SHADERLOG
#endif
static const char *depalVShader100 =
#ifdef USING_GLES2
"#version 100\n"
"#ifdef GL_ES\n"
"precision highp float;\n"
#endif
"#endif\n"
"attribute vec4 a_position;\n"
"attribute vec2 a_texcoord0;\n"
"varying vec2 v_texcoord0;\n"
@@ -43,12 +38,9 @@ static const char *depalVShader100 =
"}\n";
static const char *depalVShader300 =
#ifdef USING_GLES2
"#version 300 es\n"
"#ifdef GL_ES\n"
"precision highp float;\n"
#else
"#version 330\n"
#endif
"#endif\n"
"in vec4 a_position;\n"
"in vec2 a_texcoord0;\n"
"out vec2 v_texcoord0;\n"
@@ -76,7 +68,13 @@ void DepalShaderCacheGLES::DeviceRestore(Draw::DrawContext *draw) {
bool DepalShaderCacheGLES::CreateVertexShader() {
std::string src(useGL3_ ? depalVShader300 : depalVShader100);
vertexShader_ = render_->CreateShader(GL_VERTEX_SHADER, src, "depal");
std::string prelude;
if (gl_extensions.IsGLES) {
prelude = useGL3_ ? "#version 300 es\n" : "#version 100\n";
} else if (useGL3_) {
prelude = "#version 330\n";
}
vertexShader_ = render_->CreateShader(GL_VERTEX_SHADER, prelude + src, "depal");
return true;
}

@@ -54,9 +54,9 @@ static const char tex_fs[] =
"#define gl_FragColor fragColor0\n"
"out vec4 fragColor0;\n"
"#endif\n"
#ifdef USING_GLES2
"#ifdef GL_ES\n"
"precision mediump float;\n"
#endif
"#endif\n"
"uniform sampler2D sampler0;\n"
"varying vec2 v_texcoord0;\n"
"void main() {\n"

@@ -55,9 +55,7 @@ static const char tex_fs[] =
"}\n";
static const char basic_vs[] =
#ifndef USING_GLES2
"#version 120\n"
#endif
"attribute vec4 a_position;\n"
"attribute vec2 a_texcoord0;\n"
"uniform mat4 u_viewproj;\n"

@@ -36,9 +36,7 @@ static const char preview_fs[] =
"}\n";
static const char preview_vs[] =
#ifndef USING_GLES2
"#version 120\n"
#endif
"attribute vec4 a_position;\n"
"uniform mat4 u_viewproj;\n"
"void main() {\n"

@@ -22,7 +22,7 @@ bool AndroidEGLGraphicsContext::InitFromRenderThread(ANativeWindow *wnd, int des
// Apparently we still have to set this through Java through setFixedSize on the bufferHolder for it to take effect...
gl->SetBackBufferDimensions(backbufferWidth, backbufferHeight);
gl->SetMode(MODE_DETECT_ES);
gl->SetMode(MODE_DETECT);
bool use565 = false;
@@ -44,18 +44,26 @@ bool AndroidEGLGraphicsContext::InitFromRenderThread(ANativeWindow *wnd, int des
return false;
}
gl->MakeCurrent();
if (gl->GetMode() == GLInterfaceMode::MODE_OPENGL)
SetGLCoreContext(true);
CheckGLExtensions();
draw_ = Draw::T3DCreateGLContext();
SetGPUBackend(GPUBackend::OPENGL);
renderManager_ = (GLRenderManager *)draw_->GetNativeObject(Draw::NativeObject::RENDER_MANAGER);
bool success = draw_->CreatePresets(); // There will always be a GLSL compiler capable of compiling these.
assert(success);
return true;
}
void AndroidEGLGraphicsContext::Shutdown() {
void AndroidEGLGraphicsContext::ShutdownFromRenderThread() {
ILOG("AndroidEGLGraphicsContext::Shutdown");
renderManager_->WaitUntilQueueIdle();
renderManager_ = nullptr; // owned by draw_.
delete draw_;
draw_ = nullptr;
NativeShutdownGraphics();
}
void AndroidEGLGraphicsContext::Shutdown() {
gl->ClearCurrent();
gl->Shutdown();
delete gl;

@@ -1,5 +1,6 @@
#pragma once
#include "thin3d/GLRenderManager.h"
#include "AndroidGraphicsContext.h"
#include "Common/GL/GLInterfaceBase.h"
@@ -7,6 +8,7 @@ class AndroidEGLGraphicsContext : public AndroidGraphicsContext {
public:
AndroidEGLGraphicsContext() : draw_(nullptr), wnd_(nullptr), gl(nullptr) {}
bool InitFromRenderThread(ANativeWindow *wnd, int desiredBackbufferSizeX, int desiredBackbufferSizeY, int backbufferFormat, int androidVersion) override;
void ShutdownFromRenderThread() override;
void Shutdown() override;
void SwapBuffers() override;
void SwapInterval(int interval) override {}
@@ -18,8 +20,26 @@ public:
return draw_ != nullptr;
}
void ThreadStart() override {
renderManager_->ThreadStart();
}
bool ThreadFrame() override {
return renderManager_->ThreadFrame();
}
void ThreadEnd() override {
renderManager_->ThreadEnd();
}
void StopThread() override {
renderManager_->WaitUntilQueueIdle();
renderManager_->StopThread();
}
private:
Draw::DrawContext *draw_;
ANativeWindow *wnd_;
cInterfaceBase *gl;
GLRenderManager *renderManager_ = nullptr;
};

@@ -442,9 +442,12 @@ retry:
switch (g_Config.iGPUBackend) {
case (int)GPUBackend::OPENGL:
useCPUThread = true;
_assert_(javaGL);
ILOG("NativeApp.init() -- creating OpenGL context (JavaGL)");
graphicsContext = new AndroidJavaEGLGraphicsContext();
if (javaGL) {
ILOG("NativeApp.init() -- creating OpenGL context (JavaGL)");
graphicsContext = new AndroidJavaEGLGraphicsContext();
} else {
graphicsContext = new AndroidEGLGraphicsContext();
}
break;
case (int)GPUBackend::VULKAN:
{
@@ -521,7 +524,8 @@ extern "C" void Java_org_ppsspp_ppsspp_NativeApp_pause(JNIEnv *, jclass) {
}
extern "C" void Java_org_ppsspp_ppsspp_NativeApp_shutdown(JNIEnv *, jclass) {
if (useCPUThread && graphicsContext) {
if (renderer_inited && useCPUThread && graphicsContext) {
// Only used in Java EGL path.
EmuThreadStop();
while (graphicsContext->ThreadFrame()) {
continue;
@@ -963,31 +967,54 @@ retry:
}
if (!exitRenderLoop) {
NativeInitGraphics(graphicsContext);
if (!useCPUThread) {
NativeInitGraphics(graphicsContext);
}
graphicsContext->ThreadStart();
renderer_inited = true;
}
while (!exitRenderLoop) {
if (!exitRenderLoop) {
static bool hasSetThreadName = false;
if (!hasSetThreadName) {
hasSetThreadName = true;
setCurrentThreadName("AndroidRender");
}
}
NativeUpdate();
if (useCPUThread) {
ELOG("Running graphics loop");
while (!exitRenderLoop) {
// This is the "GPU thread".
graphicsContext->ThreadFrame();
graphicsContext->SwapBuffers();
}
} else {
while (!exitRenderLoop) {
NativeUpdate();
NativeRender(graphicsContext);
time_update();
NativeRender(graphicsContext);
time_update();
graphicsContext->SwapBuffers();
graphicsContext->SwapBuffers();
ProcessFrameCommands(env);
ProcessFrameCommands(env);
}
}
ILOG("Leaving EGL/Vulkan render loop.");
NativeShutdownGraphics();
if (useCPUThread) {
EmuThreadStop();
while (graphicsContext->ThreadFrame()) {
continue;
}
EmuThreadJoin();
} else {
NativeShutdownGraphics();
}
renderer_inited = false;
graphicsContext->ThreadEnd();
// Shut the graphics context down to the same state it was in when we entered the render thread.
ILOG("Shutting down graphics context from render thread...");

@@ -116,7 +116,7 @@ void CheckGLExtensions() {
gl_extensions.IsCoreContext = useCoreContext;
#ifdef USING_GLES2
gl_extensions.IsGLES = true;
gl_extensions.IsGLES = !useCoreContext;
#endif
const char *renderer = (const char *)glGetString(GL_RENDERER);
@@ -201,6 +201,10 @@ void CheckGLExtensions() {
// Most of it could be enabled on lower GPUs as well, but let's start this way.
if (gl_extensions.VersionGEThan(4, 3, 0)) {
gl_extensions.GLES3 = true;
#ifdef USING_GLES2
// Try to load up the other funcs if we're not using glew.
gl3stubInit();
#endif
}
} else {
// Start by assuming we're at 2.0.

@@ -347,11 +347,10 @@ public:
return caps_;
}
uint32_t GetSupportedShaderLanguages() const override {
#if defined(USING_GLES2)
return (uint32_t)ShaderLanguage::GLSL_ES_200 | (uint32_t)ShaderLanguage::GLSL_ES_300;
#else
return (uint32_t)ShaderLanguage::GLSL_ES_200 | (uint32_t)ShaderLanguage::GLSL_410;
#endif
if (gl_extensions.IsGLES)
return (uint32_t)ShaderLanguage::GLSL_ES_200 | (uint32_t)ShaderLanguage::GLSL_ES_300;
else
return (uint32_t)ShaderLanguage::GLSL_ES_200 | (uint32_t)ShaderLanguage::GLSL_410;
}
uint32_t GetDataFormatSupport(DataFormat fmt) const override;
@@ -1163,11 +1162,8 @@ uint32_t OpenGLContext::GetDataFormatSupport(DataFormat fmt) const {
case DataFormat::B4G4R4A4_UNORM_PACK16:
return FMT_RENDERTARGET | FMT_TEXTURE | FMT_AUTOGEN_MIPS; // native support
case DataFormat::A4R4G4B4_UNORM_PACK16:
#ifndef USING_GLES2
// Can support this if _REV formats are supported.
return FMT_TEXTURE;
#endif
return 0;
return gl_extensions.IsGLES ? 0 : FMT_TEXTURE;
case DataFormat::R8G8B8A8_UNORM:
return FMT_RENDERTARGET | FMT_TEXTURE | FMT_INPUTLAYOUT | FMT_AUTOGEN_MIPS;

@@ -350,11 +350,7 @@ int main(int argc, const char* argv[])
g_Config.bAutoSaveSymbolMap = false;
g_Config.iRenderingMode = 1;
g_Config.bHardwareTransform = true;
#ifdef USING_GLES2
g_Config.iAnisotropyLevel = 0;
#else
g_Config.iAnisotropyLevel = 0; // When testing mipmapping we really don't want this.
#endif
g_Config.bVertexCache = true;
g_Config.bTrueColor = true;
g_Config.iLanguage = PSP_SYSTEMPARAM_LANGUAGE_ENGLISH;