OPENGL: Move max texture size information to Context.
commit 9844d89231
parent b081fe63e8
@@ -25,6 +25,7 @@
 #include "backends/graphics/opengl/shader.h"
 
 #include "common/tokenizer.h"
+#include "common/debug.h"
 
 namespace OpenGL {
 
@@ -39,6 +40,8 @@ namespace OpenGL {
 #endif
 
 void Context::reset() {
+	_maxTextureSize = 0;
+
 	NPOTSupported = false;
 #if !USE_FORCED_GLES && !USE_FORCED_GLES2
 	shadersSupported = false;
@@ -185,6 +188,10 @@ void OpenGLGraphicsManager::initializeGLContext() {
 #undef GL_EXT_FUNC_DEF
 #undef LOAD_FUNC
 
+	// Obtain maximum texture size.
+	GL_CALL(glGetIntegerv(GL_MAX_TEXTURE_SIZE, &g_context._maxTextureSize));
+	debug(5, "OpenGL maximum texture size: %d", g_context._maxTextureSize);
+
 	const char *extString = (const char *)g_context.glGetString(GL_EXTENSIONS);
 
 #if !USE_FORCED_GLES && !USE_FORCED_GLES2
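
Side note: the query added here is a one-call capability lookup against the current context. A minimal standalone sketch of the same query, assuming only that a GL context is current on the calling thread (the helper name below is hypothetical, not ScummVM code):

#include <GL/gl.h>

// Minimal sketch: ask the driver for the largest texture dimension it
// accepts. Must be called with a GL context current; returns 0 on a
// fresh value if no context is bound.
static GLint queryMaxTextureSize() {
	GLint maxSize = 0;
	glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxSize);
	return maxSize; // e.g. 4096 or 16384 on typical desktop hardware
}
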
@@ -221,8 +221,8 @@ OSystem::TransactionError OpenGLGraphicsManager::endGFXTransaction() {
 	    // a context existing before, which means we don't know the maximum
 	    // supported texture size before this. Thus, we check whether the
 	    // requested game resolution is supported over here.
-	    || (   _currentState.gameWidth  > (uint)Texture::getMaximumTextureSize()
-	        || _currentState.gameHeight > (uint)Texture::getMaximumTextureSize())) {
+	    || (   _currentState.gameWidth  > (uint)g_context._maxTextureSize
+	        || _currentState.gameHeight > (uint)g_context._maxTextureSize)) {
 		if (_transactionMode == kTransactionActive) {
 			// Try to setup the old state in case its valid and is
 			// actually different from the new one.
@@ -806,15 +806,15 @@ void OpenGLGraphicsManager::setActualScreenSize(uint width, uint height) {
 	// possible and then scale it to the physical display size. This sounds
 	// bad but actually all recent chips should support full HD resolution
 	// anyway. Thus, it should not be a real issue for modern hardware.
-	if (   overlayWidth  > (uint)Texture::getMaximumTextureSize()
-	    || overlayHeight > (uint)Texture::getMaximumTextureSize()) {
+	if (   overlayWidth  > (uint)g_context._maxTextureSize
+	    || overlayHeight > (uint)g_context._maxTextureSize) {
 		const frac_t outputAspect = intToFrac(_outputScreenWidth) / _outputScreenHeight;
 
 		if (outputAspect > (frac_t)FRAC_ONE) {
-			overlayWidth  = Texture::getMaximumTextureSize();
+			overlayWidth  = g_context._maxTextureSize;
 			overlayHeight = intToFrac(overlayWidth) / outputAspect;
 		} else {
-			overlayHeight = Texture::getMaximumTextureSize();
+			overlayHeight = g_context._maxTextureSize;
 			overlayWidth  = fracToInt(overlayHeight * outputAspect);
 		}
 	}
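
The hunk above shrinks an oversized overlay to the maximum texture dimension while preserving the physical output's aspect ratio, using ScummVM's 16.16 fixed-point frac_t helpers (intToFrac/fracToInt). A simplified standalone model of that computation, with hypothetical names and plain 64-bit integer math in place of frac_t:

#include <cstdio>

// Simplified model of the clamp above (names are hypothetical): fit the
// overlay into maxTex x maxTex while keeping the output aspect ratio.
static void clampOverlay(unsigned &overlayW, unsigned &overlayH,
                         unsigned maxTex, unsigned outputW, unsigned outputH) {
	if (overlayW <= maxTex && overlayH <= maxTex)
		return; // already fits the texture size limit

	if (outputW > outputH) { // wider than tall: width is the long edge
		overlayW = maxTex;
		overlayH = (unsigned)((unsigned long long)maxTex * outputH / outputW);
	} else {                 // at least as tall as wide: clamp the height
		overlayH = maxTex;
		overlayW = (unsigned)((unsigned long long)maxTex * outputW / outputH);
	}
}

int main() {
	unsigned w = 7680, h = 4320; // an overlay request beyond a 4096 limit
	clampOverlay(w, h, 4096, 1920, 1080);
	std::printf("%ux%u\n", w, h); // prints 4096x2304
}
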
@@ -899,9 +899,6 @@ void OpenGLGraphicsManager::notifyContextCreate(const Graphics::PixelFormat &def
 	// code and that requires the same alignment too.
 	GL_CALL(glPixelStorei(GL_PACK_ALIGNMENT, 4));
 
-	// Query information needed by textures.
-	Texture::queryTextureInformation();
-
 #if !USE_FORCED_GLES
 	if (!_shader) {
 #if !USE_FORCED_GL && !USE_FORCED_GLES2
@@ -102,6 +102,9 @@ struct Context {
 	 */
 	void reset();
 
+	/** The maximum texture size supported by the context. */
+	GLint _maxTextureSize;
+
 	/** Whether GL_ARB_texture_non_power_of_two is available or not. */
 	bool NPOTSupported;
 
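
This is the usual per-context capability-caching pattern: defaults are restored by reset() whenever a context goes away, and real values are re-queried once a new context exists. A condensed sketch of the idea (simplified, not the actual ScummVM declarations):

// One global capabilities struct per process, reset to safe defaults on
// context creation/destruction, then filled in by queries like the
// glGetIntegerv call added in this commit.
struct Context {
	int  maxTextureSize;  // from glGetIntegerv(GL_MAX_TEXTURE_SIZE, ...)
	bool NPOTSupported;   // from parsing the GL_EXTENSIONS string

	void reset() {
		maxTextureSize = 0;
		NPOTSupported  = false;
	}
};

Context g_context; // plays the role of ScummVM's g_context
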
@@ -161,13 +161,6 @@ void GLTexture::updateArea(const Common::Rect &area, const Graphics::Surface &sr
 	                    _glFormat, _glType, src.getBasePtr(0, area.top)));
 }
 
-GLint Texture::_maxTextureSize = 0;
-
-void Texture::queryTextureInformation() {
-	GL_CALL(glGetIntegerv(GL_MAX_TEXTURE_SIZE, &_maxTextureSize));
-	debug(5, "OpenGL maximum texture size: %d", _maxTextureSize);
-}
-
 Texture::Texture(GLenum glIntFormat, GLenum glFormat, GLenum glType, const Graphics::PixelFormat &format)
 	: _format(format), _glTexture(glIntFormat, glFormat, glType),
 	  _textureData(), _userPixelData(), _allDirty(false) {
@@ -194,17 +194,6 @@ public:
 
 	virtual void *getPalette() { return 0; }
 	virtual const void *getPalette() const { return 0; }
-
-	/**
-	 * Query texture related OpenGL information from the context. This only
-	 * queries the maximum texture size for now.
-	 */
-	static void queryTextureInformation();
-
-	/**
-	 * @return Return the maximum texture dimensions supported.
-	 */
-	static GLint getMaximumTextureSize() { return _maxTextureSize; }
 protected:
 	virtual void updateTexture();
 
@@ -220,8 +209,6 @@ private:
 	bool _allDirty;
 	Common::Rect _dirtyArea;
 	void clearDirty() { _allDirty = false; _dirtyArea = Common::Rect(); }
-
-	static GLint _maxTextureSize;
 };
 
 class TextureCLUT8 : public Texture {