Get rid of stale PERF_START/PERF_STOP macros in d3d files

twinaphex 2020-03-07 23:16:25 +01:00
parent 935393b391
commit f0bf2df92f
7 changed files with 3 additions and 47 deletions

View File

@@ -153,10 +153,8 @@ void d3d10_update_texture(
&mapped_texture);
#if 0
PERF_START();
conv_rgb565_argb8888(mapped_texture.pData, data, width, height,
mapped_texture.RowPitch, pitch);
PERF_STOP();
#else
dxgi_copy(
width, height, format, pitch, data, texture->desc.Format,

View File

@@ -177,10 +177,8 @@ void d3d11_update_texture(
0, D3D11_MAP_WRITE, 0, &mapped_texture);
#if 0
PERF_START();
conv_rgb565_argb8888(mapped_texture.pData, data, width, height,
mapped_texture.RowPitch, pitch);
PERF_STOP();
#else
dxgi_copy(
width, height, format, pitch, data,

View File

@@ -282,23 +282,17 @@ void dxgi_copy(
void* dst_data)
{
int i, j;
#if defined(PERF_START) && defined(PERF_STOP)
PERF_START();
#endif
switch ((unsigned)src_format)
{
FORMAT_SRC_LIST();
default:
assert(0);
break;
}
#if defined(PERF_START) && defined(PERF_STOP)
PERF_STOP();
#endif
}
#ifdef _MSC_VER
#pragma warning(default : 4293)
#endif
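
With the guarded PERF_START()/PERF_STOP() calls removed, dxgi_copy reduces to the format dispatch alone. A rough sketch of the resulting shape, assuming FORMAT_SRC_LIST() is the RetroArch macro that expands to the per-format case labels (stubbed out empty here so the fragment compiles on its own):

#include <assert.h>

/* Stub for illustration only: in RetroArch this macro expands to the
   case labels that perform the actual per-format pixel conversion. */
#define FORMAT_SRC_LIST()

void dxgi_copy_sketch(unsigned src_format)
{
   switch (src_format)
   {
      FORMAT_SRC_LIST();

      default:
         assert(0); /* unhandled source format */
         break;
   }
}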

View File

@@ -835,25 +835,3 @@ void dxgi_update_title(void);
DXGI_FORMAT glslang_format_to_dxgi(glslang_format fmt);
RETRO_END_DECLS
#if 1
#include "../../performance_counters.h"
#ifndef PERF_START
#define PERF_START() \
{ \
static struct retro_perf_counter perfcounter = { __FUNCTION__ }; \
LARGE_INTEGER start, stop; \
rarch_perf_register(&perfcounter); \
perfcounter.call_cnt++; \
QueryPerformanceCounter(&start)
#define PERF_STOP() \
QueryPerformanceCounter(&stop); \
perfcounter.total += stop.QuadPart - start.QuadPart; \
}
#endif
#else
#define PERF_START()
#define PERF_STOP()
#endif
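
The macro pair deleted above was a thin wrapper around QueryPerformanceCounter: PERF_START() opened a block (note the unbalanced brace), registered a static retro_perf_counter, and sampled the counter; PERF_STOP() sampled it again, accumulated the delta, and closed the block, so the two had to appear paired in the same scope. A minimal self-contained sketch of the same timing pattern, with a hypothetical do_work() standing in for the code being measured (nothing below is part of the commit):

#include <stdio.h>
#include <windows.h>

/* Hypothetical workload standing in for the code that used to sit
   between PERF_START() and PERF_STOP(). */
static void do_work(void)
{
   unsigned i;
   volatile unsigned acc = 0;
   for (i = 0; i < 1000000; i++)
      acc += i;
}

int main(void)
{
   LARGE_INTEGER freq, start, stop;

   QueryPerformanceFrequency(&freq);

   QueryPerformanceCounter(&start);  /* what PERF_START() did */
   do_work();
   QueryPerformanceCounter(&stop);   /* what PERF_STOP() did */

   printf("do_work: %.3f ms\n",
         (double)(stop.QuadPart - start.QuadPart) * 1000.0
         / (double)freq.QuadPart);
   return 0;
}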

View File

@@ -1230,8 +1230,6 @@ static bool d3d10_gfx_frame(
video_driver_set_size(video_info->width, video_info->height);
}
PERF_START();
#if 0 /* custom viewport doesn't call apply_state_changes, so we can't rely on this for now */
if (d3d10->resize_viewport)
#endif
@@ -1520,7 +1518,6 @@ static bool d3d10_gfx_frame(
}
d3d10->sprites.enabled = false;
PERF_STOP();
DXGIPresent(d3d10->swapChain, !!d3d10->vsync, 0);
return true;

View File

@@ -1316,10 +1316,6 @@ static bool d3d11_gfx_frame(
D3D11SetRenderTargets(context, 1, &d3d11->renderTargetView, NULL);
#endif
#if 0
PERF_START();
#endif
#if 0 /* custom viewport doesn't call apply_state_changes, so we can't rely on this for now */
if (d3d11->resize_viewport)
#endif
@@ -1596,9 +1592,6 @@ static bool d3d11_gfx_frame(
}
d3d11->sprites.enabled = false;
#if 0
PERF_STOP();
#endif
DXGIPresent(d3d11->swapChain, !!d3d11->vsync, 0);
return true;

View File

@@ -1170,7 +1170,6 @@ static bool d3d12_gfx_frame(
d3d12_video_t* d3d12 = (d3d12_video_t*)data;
d3d12_gfx_sync(d3d12);
PERF_START();
if (d3d12->resize_chain)
{
@@ -1586,7 +1585,6 @@ static bool d3d12_gfx_frame(
D3D12ExecuteGraphicsCommandLists(d3d12->queue.handle, 1, &d3d12->queue.cmd);
D3D12SignalCommandQueue(d3d12->queue.handle, d3d12->queue.fence, ++d3d12->queue.fenceValue);
PERF_STOP();
#if 1
DXGIPresent(d3d12->chain.handle, !!d3d12->chain.vsync, 0);
#else