Mirror of https://github.com/stenzek/duckstation.git
CDImageCHD: Show precaching in MB
parent be271e3bfb
commit bb740499a5
@@ -89,6 +89,11 @@
 
 #define CHD_V1_SECTOR_SIZE 512 /* size of a "sector" in the V1 header */
 
+#define CHD_MAX_HUNK_SIZE (128 * 1024 * 1024) /* hunk size probably shouldn't be more than 128MB */
+
+/* we're currently only using this for CD/DVDs, if we end up with more than 10GB data, it's probably invalid */
+#define CHD_MAX_FILE_SIZE (10ULL * 1024 * 1024 * 1024)
+
 #define COOKIE_VALUE 0xbaadf00d
 #define MAX_ZLIB_ALLOCS 64
 
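A note on the new constants: the ULL suffix on the 10GB limit is what keeps the value 64-bit; written with plain int literals the product would overflow before it was ever stored. A minimal standalone sketch (the kMax* constants are local mirrors for illustration, not the library's macros):

#include <cstdint>

// Local mirrors of the new macros, for illustration only.
constexpr uint64_t kMaxHunkSize = 128 * 1024 * 1024;          // 134217728, fits in 32 bits
constexpr uint64_t kMaxFileSize = 10ULL * 1024 * 1024 * 1024; // 10737418240, needs 64 bits

static_assert(kMaxHunkSize == (1u << 27), "128 MiB");
static_assert(kMaxFileSize == (10ull << 30), "10 GiB");
// Without the ULL suffix the right-hand side would be computed in int and overflow.
static_assert(kMaxFileSize > UINT32_MAX, "cannot be represented in 32 bits");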
@@ -2587,12 +2592,8 @@ static chd_error header_validate(const chd_header *header)
 		return CHDERR_INVALID_PARAMETER;
 	}
 
-	/* some basic size checks to prevent huge mallocs: hunk size probably shouldn't be more than 128MB */
-	if (header->hunkbytes >= (128 * 1024 * 1024))
-		return CHDERR_INVALID_PARAMETER;
-
-	/* - we're currently only using this for CD/DVDs, if we end up with more than 10GB data, it's probably invalid */
-	if (((uint64_t)header->hunkbytes * (uint64_t)header->totalhunks) >= (10ULL * 1024 * 1024 * 1024))
+	/* some basic size checks to prevent huge mallocs */
+	if (header->hunkbytes >= CHD_MAX_HUNK_SIZE || ((uint64_t)header->hunkbytes * (uint64_t)header->totalhunks) >= CHD_MAX_FILE_SIZE)
 		return CHDERR_INVALID_PARAMETER;
 
 	return CHDERR_NONE;
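The merged check multiplies two 32-bit header fields, so the casts to uint64_t have to happen before the multiply, not after. A standalone sketch of the same logic (sizes_look_sane is a hypothetical helper; the field widths are assumed to match chd_header's 32-bit fields):

#include <cassert>
#include <cstdint>

constexpr uint64_t kMaxHunkSize = 128ULL * 1024 * 1024;       // CHD_MAX_HUNK_SIZE
constexpr uint64_t kMaxFileSize = 10ULL * 1024 * 1024 * 1024; // CHD_MAX_FILE_SIZE

// Same shape as the new header_validate() check: cast each operand up front
// so the product is computed in 64 bits.
bool sizes_look_sane(uint32_t hunkbytes, uint32_t totalhunks)
{
  return hunkbytes < kMaxHunkSize &&
         (static_cast<uint64_t>(hunkbytes) * static_cast<uint64_t>(totalhunks)) < kMaxFileSize;
}

int main()
{
  // 16 MiB hunks pass the per-hunk limit, but 4096 of them is 64 GiB.
  // In 32-bit arithmetic 2^24 * 2^12 would wrap to 0 and slip through;
  // the 64-bit product correctly trips the file-size limit.
  assert(!sizes_look_sane(16 * 1024 * 1024, 4096));
  assert(sizes_look_sane(19584, 36000)); // roughly a 700MB CD image at 8 frames per hunk
  return 0;
}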
@@ -453,8 +453,9 @@ CDImage::PrecacheResult CDImageCHD::Precache(ProgressCallback* progress)
   progress->SetProgressRange(100);
 
   auto callback = [](size_t pos, size_t total, void* param) {
-    const u32 percent = static_cast<u32>((pos * 100) / total);
-    static_cast<ProgressCallback*>(param)->SetProgressValue(std::min<u32>(percent, 100));
+    constexpr size_t one_mb = 1048576;
+    static_cast<ProgressCallback*>(param)->SetProgressRange(static_cast<u32>((total + (one_mb - 1)) / one_mb));
+    static_cast<ProgressCallback*>(param)->SetProgressValue(static_cast<u32>((pos + (one_mb - 1)) / one_mb));
   };
 
   if (chd_precache_progress(m_chd, callback, progress) != CHDERR_NONE)
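The callback now reports progress in whole megabytes instead of a percentage: both range and value are byte counts divided by 1 MiB with ceiling division, so a partially read megabyte still counts as one unit and the bar can reach its end. A small self-checking sketch of that rounding (to_mb_rounded_up is a hypothetical name; one_mb matches the commit's constant):

#include <cassert>
#include <cstddef>

constexpr size_t one_mb = 1048576; // 1 MiB, same constant as the callback

// Ceiling division by 1 MiB, the same expression as in the new callback.
constexpr size_t to_mb_rounded_up(size_t bytes)
{
  return (bytes + (one_mb - 1)) / one_mb;
}

int main()
{
  assert(to_mb_rounded_up(0) == 0);              // nothing read yet
  assert(to_mb_rounded_up(1) == 1);              // any partial MiB counts as one
  assert(to_mb_rounded_up(one_mb) == 1);         // exactly 1 MiB
  assert(to_mb_rounded_up(one_mb + 1) == 2);     // just past a boundary rounds up
  assert(to_mb_rounded_up(700 * one_mb) == 700); // a 700 MiB image shows "N/700 MB"
  return 0;
}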