Merge pull request #3956 from unknownbrackets/debugger

Add methods to grab the depth/stencil buffers too
Committed by Henrik Rydgård, 2013-09-28 03:19:19 -07:00
commit 8dac5726f4
9 changed files with 136 additions and 6 deletions

View File

@@ -30,11 +30,28 @@ struct GPUDebugOp {
std::string desc;
};
enum GPUDebugBufferFormat {
// These match GEBufferFormat.
GPU_DBG_FORMAT_565 = 0,
GPU_DBG_FORMAT_5551 = 1,
GPU_DBG_FORMAT_4444 = 2,
GPU_DBG_FORMAT_8888 = 3,
GPU_DBG_FORMAT_INVALID = 0xFF,
// These don't, they're for depth buffers.
GPU_DBG_FORMAT_FLOAT = 0x10,
GPU_DBG_FORMAT_16BIT = 0x11,
};
struct GPUDebugBuffer {
GPUDebugBuffer() : alloc_(false), data_(NULL) {
}
GPUDebugBuffer(void *data, u32 stride, u32 height, GEBufferFormat fmt)
: alloc_(false), data_((u8 *)data), stride_(stride), height_(height), fmt_(GPUDebugBufferFormat(fmt)), flipped_(false) {
}
GPUDebugBuffer(void *data, u32 stride, u32 height, GPUDebugBufferFormat fmt)
: alloc_(false), data_((u8 *)data), stride_(stride), height_(height), fmt_(fmt), flipped_(false) {
}
@@ -70,6 +87,10 @@ struct GPUDebugBuffer {
}
void Allocate(u32 stride, u32 height, GEBufferFormat fmt, bool flipped = false) {
Allocate(stride, height, GPUDebugBufferFormat(fmt), flipped);
}
void Allocate(u32 stride, u32 height, GPUDebugBufferFormat fmt, bool flipped = false) {
if (alloc_ && stride_ == stride && height_ == height && fmt_ == fmt) {
// Already allocated the right size.
flipped_ = flipped;
@@ -84,7 +105,7 @@ struct GPUDebugBuffer {
flipped_ = flipped;
u32 pixelSize = 2;
if (fmt == GE_FORMAT_8888) {
if (fmt == GPU_DBG_FORMAT_8888 || fmt == GPU_DBG_FORMAT_FLOAT) {
pixelSize = 4;
};
@@ -114,7 +135,7 @@ struct GPUDebugBuffer {
return flipped_;
}
GEBufferFormat GetFormat() const {
GPUDebugBufferFormat GetFormat() const {
return fmt_;
}
@@ -124,7 +145,7 @@ private:
u32 height_;
u32 stride_;
bool flipped_;
GEBufferFormat fmt_;
GPUDebugBufferFormat fmt_;
};
class GPUDebugInterface {
@@ -153,6 +174,16 @@ public:
return false;
}
// Similar to GetCurrentFramebuffer().
virtual bool GetCurrentDepthbuffer(GPUDebugBuffer &buffer) {
return false;
}
// Similar to GetCurrentFramebuffer().
virtual bool GetCurrentStencilbuffer(GPUDebugBuffer &buffer) {
return false;
}
// Similar to GetCurrentFramebuffer().
virtual bool GetCurrentTexture(GPUDebugBuffer &buffer) {
return false;
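
For context, the two new hooks follow the GetCurrentFramebuffer() pattern exactly: the caller passes in a GPUDebugBuffer, checks the return value, and then looks at the format before interpreting the data. A minimal sketch of a caller (the gpuDebug pointer is hypothetical; the method and format names come from this change):

GPUDebugBuffer depth;
if (gpuDebug->GetCurrentDepthbuffer(depth)) {
	switch (depth.GetFormat()) {
	case GPU_DBG_FORMAT_FLOAT:
		// Read back from a GL FBO: one 32-bit float per pixel, normalized 0.0 - 1.0.
		break;
	case GPU_DBG_FORMAT_16BIT:
		// Read straight from PSP memory: one 16-bit depth value per pixel.
		break;
	default:
		break;
	}
}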

View File

@@ -1342,8 +1342,7 @@ void FramebufferManager::Resized() {
resized_ = true;
}
bool FramebufferManager::GetCurrentFramebuffer(GPUDebugBuffer &buffer)
{
bool FramebufferManager::GetCurrentFramebuffer(GPUDebugBuffer &buffer) {
u32 fb_address = (gstate.fbptr & 0xFFFFFF) | ((gstate.fbwidth & 0xFF0000) << 8);
int fb_stride = gstate.fbwidth & 0x3C0;
@@ -1366,3 +1365,63 @@ bool FramebufferManager::GetCurrentFramebuffer(GPUDebugBuffer &buffer)
return true;
}
bool FramebufferManager::GetCurrentDepthbuffer(GPUDebugBuffer &buffer) {
u32 fb_address = (gstate.fbptr & 0xFFFFFF) | ((gstate.fbwidth & 0xFF0000) << 8);
int fb_stride = gstate.fbwidth & 0x3C0;
u32 z_address = (gstate.zbptr & 0xFFFFFF) | ((gstate.zbwidth & 0xFF0000) << 8);
int z_stride = gstate.zbwidth & 0x3C0;
VirtualFramebuffer *vfb = currentRenderVfb_;
if (!vfb) {
vfb = GetVFBAt(fb_address);
}
if (!vfb) {
// If there's no vfb and we're drawing there, must be memory?
// TODO: Is the value 16-bit? It seems to be.
buffer = GPUDebugBuffer(Memory::GetPointer(z_address), z_stride, 512, GPU_DBG_FORMAT_16BIT);
return true;
}
#ifndef USING_GLES2
buffer.Allocate(vfb->renderWidth, vfb->renderHeight, GPU_DBG_FORMAT_FLOAT, true);
fbo_bind_for_read(vfb->fbo);
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(0, 0, vfb->renderWidth, vfb->renderHeight, GL_DEPTH_COMPONENT, GL_FLOAT, buffer.GetData());
return true;
#else
return false;
#endif
}
bool FramebufferManager::GetCurrentStencilbuffer(GPUDebugBuffer &buffer) {
u32 fb_address = (gstate.fbptr & 0xFFFFFF) | ((gstate.fbwidth & 0xFF0000) << 8);
int fb_stride = gstate.fbwidth & 0x3C0;
VirtualFramebuffer *vfb = currentRenderVfb_;
if (!vfb) {
vfb = GetVFBAt(fb_address);
}
if (!vfb) {
// If there's no vfb and we're drawing there, must be memory?
buffer = GPUDebugBuffer(Memory::GetPointer(fb_address), fb_stride, 512, GPU_DBG_FORMAT_8888);
return true;
}
#ifndef USING_GLES2
buffer.Allocate(vfb->renderWidth, vfb->renderHeight, GPU_DBG_FORMAT_16BIT, true);
fbo_bind_for_read(vfb->fbo);
glPixelStorei(GL_PACK_ALIGNMENT, 2);
glReadPixels(0, 0, vfb->renderWidth, vfb->renderHeight, GL_STENCIL_INDEX, GL_UNSIGNED_SHORT, buffer.GetData());
return true;
#else
return false;
#endif
}
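
Note that the two code paths above return different layouts: the FBO path reads back vfb->renderWidth x vfb->renderHeight, flipped, with GL_FLOAT depth values or GL_UNSIGNED_SHORT stencil indices, while the no-vfb fallback hands back the raw PSP memory at its native stride and format. If a consumer wanted the FBO depth expressed in the PSP's 16-bit depth range, a small conversion would be enough (illustrative only, not part of this commit):

// Sketch: scale a normalized GL depth value (0.0 - 1.0) to the PSP's 16-bit range.
static u16 DepthFloatTo16(float z) {
	if (z <= 0.0f)
		return 0;
	if (z >= 1.0f)
		return 0xFFFF;
	return (u16)(z * 65535.0f + 0.5f);
}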

View File

@@ -171,6 +171,8 @@ public:
void DestroyFramebuf(VirtualFramebuffer *vfb);
bool GetCurrentFramebuffer(GPUDebugBuffer &buffer);
bool GetCurrentDepthbuffer(GPUDebugBuffer &buffer);
bool GetCurrentStencilbuffer(GPUDebugBuffer &buffer);
private:
void CompileDraw2DProgram();

View File

@@ -1519,6 +1519,14 @@ bool GLES_GPU::GetCurrentFramebuffer(GPUDebugBuffer &buffer) {
return framebufferManager_.GetCurrentFramebuffer(buffer);
}
bool GLES_GPU::GetCurrentDepthbuffer(GPUDebugBuffer &buffer) {
return framebufferManager_.GetCurrentDepthbuffer(buffer);
}
bool GLES_GPU::GetCurrentStencilbuffer(GPUDebugBuffer &buffer) {
return framebufferManager_.GetCurrentStencilbuffer(buffer);
}
bool GLES_GPU::GetCurrentTexture(GPUDebugBuffer &buffer) {
if (!gstate.isTextureMapEnabled()) {
return false;

View File

@@ -67,6 +67,8 @@ public:
std::vector<FramebufferInfo> GetFramebufferList();
bool GetCurrentFramebuffer(GPUDebugBuffer &buffer);
bool GetCurrentDepthbuffer(GPUDebugBuffer &buffer);
bool GetCurrentStencilbuffer(GPUDebugBuffer &buffer);
bool GetCurrentTexture(GPUDebugBuffer &buffer);
protected:

View File

@@ -752,3 +752,18 @@ bool SoftGPU::GetCurrentFramebuffer(GPUDebugBuffer &buffer)
buffer = GPUDebugBuffer(fb.data, gstate.FrameBufStride(), 512, gstate.FrameBufFormat());
return true;
}
bool SoftGPU::GetCurrentDepthbuffer(GPUDebugBuffer &buffer)
{
// We don't know the height, so just use 512, which should be the max (hopefully?)
// TODO: Could check clipping and such, though...?
// TODO: Is the value 16-bit? It seems to be.
buffer = GPUDebugBuffer(depthbuf.data, gstate.DepthBufStride(), 512, GPU_DBG_FORMAT_16BIT);
return true;
}
bool SoftGPU::GetCurrentStencilbuffer(GPUDebugBuffer &buffer)
{
// TODO: Just need the alpha value from the framebuffer...
return false;
}
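
The stencil TODO hints at the eventual approach: on the PSP, the stencil bits are stored in the alpha channel of the color buffer, so for a GE_FORMAT_8888 framebuffer the stencil value is simply the top byte of each pixel. A hypothetical sketch of that extraction (not part of this commit, and only covering the 8888 case):

// Illustration of the TODO: pull the stencil (alpha) byte out of an 8888 framebuffer.
const u32 *src = (const u32 *)fb.data;
const int stride = gstate.FrameBufStride();
for (int y = 0; y < 512; ++y) {
	for (int x = 0; x < stride; ++x) {
		u8 stencil = src[y * stride + x] >> 24;
		// ... pass 'stencil' to whatever the debugger expects ...
	}
}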

View File

@@ -74,7 +74,9 @@ public:
}
virtual bool GetCurrentFramebuffer(GPUDebugBuffer &buffer);
bool GetCurrentTexture(GPUDebugBuffer &buffer) {
virtual bool GetCurrentDepthbuffer(GPUDebugBuffer &buffer);
virtual bool GetCurrentStencilbuffer(GPUDebugBuffer &buffer);
virtual bool GetCurrentTexture(GPUDebugBuffer &buffer) {
// TODO
return false;
}

View File

@@ -219,6 +219,9 @@ void SimpleGLWindow::Draw(u8 *data, int w, int h, bool flipped, Format fmt) {
if (fmt == FORMAT_8888) {
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
glfmt = GL_UNSIGNED_BYTE;
} else if (fmt == FORMAT_FLOAT) {
glfmt = GL_FLOAT;
components = GL_RED;
} else {
glPixelStorei(GL_UNPACK_ALIGNMENT, 2);
if (fmt == FORMAT_4444) {
@@ -228,6 +231,11 @@ } else if (fmt == FORMAT_565) {
} else if (fmt == FORMAT_565) {
glfmt = GL_UNSIGNED_SHORT_5_6_5;
components = GL_RGB;
} else if (fmt == FORMAT_16BIT) {
glfmt = GL_UNSIGNED_SHORT;
components = GL_RED;
} else {
_dbg_assert_msg_(COMMON, false, "Invalid SimpleGLWindow format.");
}
}
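
Since the new FORMAT_FLOAT / FORMAT_16BIT values deliberately mirror GPU_DBG_FORMAT_FLOAT / GPU_DBG_FORMAT_16BIT (0x10 / 0x11), a debugger window can forward the format from a GPUDebugBuffer with a plain cast. An illustrative caller (window and gpuDebug are hypothetical, and the getter names are assumed from the struct's fields):

GPUDebugBuffer depth;
if (gpuDebug->GetCurrentDepthbuffer(depth)) {
	// Stride is passed as the width here, matching how the buffer was captured.
	window->Draw(depth.GetData(), depth.GetStride(), depth.GetHeight(),
		depth.GetFlipped(), SimpleGLWindow::Format(depth.GetFormat()));
}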

View File

@@ -29,6 +29,9 @@ struct SimpleGLWindow {
FORMAT_5551 = 1,
FORMAT_4444 = 2,
FORMAT_8888 = 3,
FORMAT_FLOAT = 0x10,
FORMAT_16BIT = 0x11,
};
enum Flags {