Fix internal formats used with texture creation

Logan McNaughton 2016-12-14 09:54:55 -08:00
parent 5908b540de
commit fc4c7529a1
3 changed files with 19 additions and 9 deletions

@@ -65,11 +65,23 @@ void gl_load_texture_image(GLenum target,
 {
 #ifndef HAVE_PSGL
 #ifdef HAVE_OPENGLES2
-   if (gl_check_capability(GL_CAPS_TEX_STORAGE_EXT))
+   if (gl_check_capability(GL_CAPS_TEX_STORAGE_EXT)) {
+      if (internalFormat == GL_RGB)
+         internalFormat = GL_RGB565;
+      else if (internalFormat == GL_RGBA)
+         internalFormat = GL_RGBA8_OES;
+      else if (internalFormat == GL_BGRA_EXT)
+         internalFormat = GL_BGRA8_EXT;
       glTexStorage2DEXT(target, 1, internalFormat, width, height);
+   }
 #else
-   if (gl_check_capability(GL_CAPS_TEX_STORAGE))
+   if (gl_check_capability(GL_CAPS_TEX_STORAGE)) {
+      if (internalFormat == GL_RGB)
+         internalFormat = GL_RGB565;
+      else if (internalFormat == GL_RGBA)
+         internalFormat = GL_RGBA8;
       glTexStorage2D(target, 1, internalFormat, width, height);
+   }
 #endif
    else
 #endif
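
For reference, the change above boils down to translating the unsized internal formats used by the rest of the GL driver into the sized formats that glTexStorage2DEXT/glTexStorage2D require. A minimal, self-contained sketch of that translation (not RetroArch code; the enum values are copied from the GL headers and map_sized_internal_format() is a made-up name for illustration):

/* Sketch of the remapping above: glTexStorage2D/glTexStorage2DEXT only
 * accept *sized* internal formats, so the unsized enums used elsewhere
 * are translated first. */
#include <stdio.h>

#define GL_RGB        0x1907
#define GL_RGBA       0x1908
#define GL_BGRA_EXT   0x80E1
#define GL_RGB565     0x8D62
#define GL_RGBA8      0x8058  /* same value as GL_RGBA8_OES */
#define GL_BGRA8_EXT  0x93A1

static unsigned map_sized_internal_format(unsigned fmt)
{
   switch (fmt)
   {
      case GL_RGB:      return GL_RGB565;    /* 16-bit storage */
      case GL_RGBA:     return GL_RGBA8;     /* 32-bit RGBA storage */
      case GL_BGRA_EXT: return GL_BGRA8_EXT; /* needs a BGRA8888 extension */
      default:          return fmt;          /* already a sized format */
   }
}

int main(void)
{
   printf("GL_RGBA -> 0x%04X\n", map_sized_internal_format(GL_RGBA));
   return 0;
}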

@@ -642,7 +642,7 @@ static void gl_init_textures(gl_t *gl, const video_info_t *video)
    texture_fmt = gl->texture_fmt;
 #endif
-#ifdef HAVE_OPENGLES
+#ifdef HAVE_OPENGLES2
    /* GLES is picky about which format we use here.
     * Without extensions, we can *only* render to 16-bit FBOs. */
@@ -650,12 +650,7 @@ static void gl_init_textures(gl_t *gl, const video_info_t *video)
    {
       if (gl_check_capability(GL_CAPS_ARGB8))
       {
-#if !defined(HAVE_PSGL)
-         if (gl_check_capability(GL_CAPS_GLES3_SUPPORTED))
-            internal_fmt = GL_RGBA8_OES;
-         else
-#endif
-            internal_fmt = GL_RGBA;
+         internal_fmt = GL_RGBA;
          texture_type = GL_RGBA;
          texture_fmt = GL_UNSIGNED_BYTE;
       }
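
The hunks above drop the sized GL_RGBA8_OES selection from gl_init_textures: when texture storage is unavailable the formats end up in a plain glTexImage2D call, and OpenGL ES 2.0 requires glTexImage2D's internalformat to be the same unsized enum as format, so the 32-bit path keeps GL_RGBA/GL_RGBA/GL_UNSIGNED_BYTE while the sized-format choice lives on the glTexStorage path in the previous file. A minimal sketch of that GLES2 call shape (assumes a current GLES2 context; upload_rgba8888() is an illustrative name, not code from this commit):

/* On GLES2, glTexImage2D's internalformat (3rd argument) must be the same
 * unsized enum as format (7th argument). */
#include <GLES2/gl2.h>

void upload_rgba8888(GLuint tex, GLsizei width, GLsizei height,
      const void *pixels)
{
   glBindTexture(GL_TEXTURE_2D, tex);
   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
         GL_RGBA, GL_UNSIGNED_BYTE, pixels);
}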

@@ -118,6 +118,9 @@
 #ifndef GL_BGRA_EXT
 #define GL_BGRA_EXT 0x80E1
 #endif
+#ifndef GL_BGRA8_EXT
+#define GL_BGRA8_EXT 0x93A1
+#endif
 #ifdef IOS
 /* Stupid Apple. */
 #define RARCH_GL_INTERNAL_FORMAT32 GL_RGBA
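
GL_BGRA8_EXT (0x93A1) is the sized counterpart of GL_BGRA_EXT that EXT_texture_storage accepts on drivers that also expose a BGRA8888 texture-format extension. A small sketch of such an extension probe (not RetroArch code, where this kind of probing goes through gl_check_capability; driver_supports_bgra8888() is a made-up helper and it assumes a current GLES2 context):

/* Query whether the driver advertises a BGRA8888 texture-format extension
 * before requesting GL_BGRA8_EXT storage via glTexStorage2DEXT. */
#include <string.h>
#include <GLES2/gl2.h>

#ifndef GL_BGRA8_EXT
#define GL_BGRA8_EXT 0x93A1
#endif

int driver_supports_bgra8888(void)
{
   const char *ext = (const char*)glGetString(GL_EXTENSIONS);
   if (!ext)
      return 0;
   /* Either extension makes BGRA texture data legal; with EXT_texture_storage
    * present, GL_BGRA8_EXT is the sized format to pass to glTexStorage2DEXT. */
   return strstr(ext, "GL_EXT_texture_format_BGRA8888")   != NULL ||
          strstr(ext, "GL_APPLE_texture_format_BGRA8888") != NULL;
}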