Fix a bad memory leak in sceMpeg

This commit is contained in:
Henrik Rydgard 2017-02-20 11:12:39 +01:00
parent 4c6ca76681
commit 19d321bd8c
2 changed files with 3 additions and 2 deletions

View File

@ -19,6 +19,7 @@
// This code is part shamelessly "inspired" from JPSCP.
#include <map>
#include <algorithm>
#include <memory>
#include "Core/HLE/sceMpeg.h"
#include "Core/HLE/sceKernelModule.h"
@ -1426,7 +1427,7 @@ void PostPutAction::run(MipsCall &call) {
// It seems validation is done only by older mpeg libs.
if (mpegLibVersion < 0x0105 && packetsAdded > 0) {
// TODO: Faster / less wasteful validation.
-MpegDemux *demuxer = new MpegDemux(packetsAdded * 2048, 0);
+std::unique_ptr<MpegDemux> demuxer(new MpegDemux(packetsAdded * 2048, 0));
int readOffset = ringbuffer->packetsRead % (s32)ringbuffer->packets;
const u8 *buf = Memory::GetPointer(ringbuffer->data + readOffset * 2048);
bool invalid = false;

View File

@ -69,7 +69,7 @@ private:
int m_index;
int m_len;
-u8* m_buf;
+u8 *m_buf;
BufferQueue m_audioStream;
u8 m_audioFrame[0x2000];
int m_audioChannel;