TINSEL: Implement LZSS decompression for Noir

This commit is contained in:
Einar Johan Trøan Sømåen 2021-02-06 01:02:28 +01:00
parent e387e34209
commit ea21659084
No known key found for this signature in database
GPG Key ID: E78D26458077C9C5
3 changed files with 100 additions and 5 deletions

View File

@ -26,10 +26,101 @@
namespace Tinsel {
int decompressLZSS(Common::ReadStream &input, byte *output) {
error("TODO: Implement decompression");
return 0;
static byte HIGH_BITS(byte byteValue, int numBits) {
unsigned int mask = ((1 << numBits) - 1) << (8 - numBits);
return (byteValue & mask) >> (8 - numBits);
}
static byte LOW_BITS(byte byteValue, int numBits) {
unsigned int mask = ((1 << numBits) - 1);
return byteValue & mask;
}
/**
 * Decompress an LZSS-compressed stream (Noir variant) into a caller-provided
 * buffer.
 *
 * Bitstream layout: each unit is prefixed by one flag bit.
 *   1 -> the next 8 bits are a literal byte.
 *   0 -> the next 16 bits are a dictionary reference:
 *        12 bits of offset into a 4 KiB ring buffer and 4 bits of
 *        (run length - 2). A dictionary offset of 0 terminates the stream.
 *
 * @param input  Stream holding the compressed data; it is read in full.
 * @param output Destination buffer. Assumed large enough by the caller —
 *               TODO(review): confirm callers pre-size this correctly.
 * @return Number of bytes written to output.
 */
int decompressLZSS(Common::SeekableReadStream &input, byte *output) {
	static const int kDictionarySize = 4096;
	byte dictionary[kDictionarySize] = {};
	// Offset 0 in the dictionary is reserved as the end-of-stream marker,
	// so writing starts at slot 1.
	int dictionaryOffset = 1;
	int outputOffset = 0;

	const unsigned int inputSize = input.size();
	// Zero-pad the buffer by three bytes: the 16-bit dictionary lookup below
	// can dereference up to data[offset + 2] while the bit cursor still sits
	// in the last real byte. The padding keeps those reads in bounds (the
	// original code read past the allocation and only checked afterwards),
	// and, being zero, it also decodes as the terminating sentinel.
	byte *data = new byte[inputSize + 3]();
	input.read(data, inputSize);

	unsigned int offset = 0;
	int bitShift = 0;
	while (true) {
		// A well-formed stream always breaks on the sentinel before the
		// cursor leaves the real data; anything else is corrupt input.
		if (offset >= inputSize) {
			delete[] data;
			error("Read too far during decompression");
		}
		byte flags = data[offset];
		byte bitMask = 0x80 >> bitShift++;
		// Flag bit: 1 -> literal byte follows, 0 -> dictionary reference.
		bool useRawByte = flags & bitMask;
		if (bitShift == 8) {
			bitShift = 0;
			offset++;
		}
		if (!useRawByte) {
			unsigned int bitsFromFirst = 8 - bitShift;
			unsigned int bitsFromLast = 16 - 8 - bitsFromFirst;
			// Reassemble the 16-bit lookup value (12-bit offset, 4-bit run
			// length) from up to three partially consumed bytes. When
			// bitShift is 0, bitsFromLast is 0 and the third byte
			// contributes nothing, but it is still dereferenced — safe
			// thanks to the padding above.
			unsigned int byte1 = LOW_BITS(data[offset], bitsFromFirst);
			unsigned int byte2 = data[offset + 1];
			unsigned int byte3 = HIGH_BITS(data[offset + 2], bitsFromLast);
			unsigned int lookup = (byte1 << (8 + bitsFromLast)) | (byte2 << bitsFromLast) | byte3;

			int lookupOffset = (lookup >> 4) & 0xFFF;
			if (lookupOffset == 0) {
				// Dictionary offset 0 is the end-of-stream marker.
				break;
			}
			int lookupRunLength = (lookup & 0xF) + 2;
			// Copy the run to the output and back into the ring buffer.
			// The run may wrap around the dictionary and may overlap the
			// current write position (classic LZSS self-referencing copy),
			// so it must be copied byte by byte.
			for (int j = 0; j < lookupRunLength; j++) {
				byte value = dictionary[(lookupOffset + j) % kDictionarySize];
				output[outputOffset++] = value;
				dictionary[dictionaryOffset++] = value;
				dictionaryOffset %= kDictionarySize;
			}
			offset += 2;
		} else {
			// Literal byte: the flag bit shifted the stream by one bit, so
			// the byte straddles up to two input bytes.
			unsigned int bitsFromFirst = 8 - bitShift;
			unsigned int bitsFromLast = 8 - bitsFromFirst;
			byte byteValue = LOW_BITS(data[offset], bitsFromFirst) << bitsFromLast;
			byteValue |= HIGH_BITS(data[offset + 1], bitsFromLast);
			offset++;
			output[outputOffset++] = byteValue;
			dictionary[dictionaryOffset++] = byteValue;
			dictionaryOffset %= kDictionarySize;
		}
	}
	delete[] data;
	return outputOffset;
}
}

View File

@ -28,7 +28,7 @@
namespace Tinsel {
int decompressLZSS(Common::ReadStream &input, byte *output);
int decompressLZSS(Common::SeekableReadStream &input, byte *output);
}

View File

@ -725,6 +725,10 @@ void LoadBasicChunks() {
byte *cptr;
int numObjects;
if (TinselV3) {
error("TODO: Implement LoadBasicChunks for Noir");
}
// Allocate RAM for savescene data
InitializeSaveScenes();