GFX support WIP

Luciano Ciccariello 2024-07-17 00:14:26 +01:00
parent ed7e5c0b70
commit f33b38c448
8 changed files with 267 additions and 63 deletions

View File

@ -377,7 +377,7 @@ func buildEntityLayouts(fileName string, outputDir string) error {
writeLayoutEntries := func(sb *strings.Builder, banks [][]layoutEntry, align4 bool) error {
nWritten := 0
for i, entries := range banks {
// do a sanity check on the entries as we do not want to build something that will cause the game to crash
if entries[0].X != -2 || entries[0].Y != -2 {
return fmt.Errorf("layout entity bank %d needs to have a X:-2 and Y:-2 entry at the beginning", i)
}
@ -422,7 +422,7 @@ func buildEntityLayouts(fileName string, outputDir string) error {
for i, entries := range banks {
sorting[i] = make([]layoutEntry, len(entries)-2)
if len(sorting[i]) > 0 { // do not sort if the list is empty
copy(sorting[i], entries[1:len(entries)-1]) // do not sort the -2 and -1 entries
toSort = sorting[i]
sort.SliceStable(toSort, less)
}
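The build step above deliberately sorts only the interior of each bank: the copy skips the leading -2 and trailing -1 sentinel entries so they keep their positions. A toy illustration of that copy-then-sort-the-middle pattern, with plain ints standing in for layout entries and made-up values, could look like this:

package main

import (
    "fmt"
    "sort"
)

func main() {
    // hypothetical bank: sentinel markers sit at both ends and must stay put,
    // only the interior gets copied and sorted
    entries := []int{-2, 30, 10, 20, -1}
    middle := make([]int, len(entries)-2)
    copy(middle, entries[1:len(entries)-1]) // leave the -2 and -1 sentinels alone
    sort.SliceStable(middle, func(i, j int) bool { return middle[i] < middle[j] })
    fmt.Println(middle) // [10 20 30]; the original slice keeps its sentinels untouched
}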

View File

@ -30,6 +30,9 @@ func mergeDataRanges(ranges []dataRange) dataRange {
// performs a sanity check before merging everything
for i := 0; i < len(ranges)-1; i++ {
if ranges[i] == ranges[i+1] {
continue
}
if ranges[i].end != ranges[i+1].begin {
var err error
if ranges[i].end < ranges[i+1].begin {
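The added guard skips exact duplicates before checking contiguity, so repeated ranges no longer trip the gap/overlap error. A minimal, self-contained sketch of that merging rule, using a simplified int-based range instead of the real PsxOffset-based dataRange, might read:

package main

import "fmt"

// simplified stand-in for the tool's dataRange
type span struct {
    begin, end int
}

// mergeSpans merges sorted spans into one, tolerating exact duplicates but
// rejecting gaps or overlaps between consecutive entries
func mergeSpans(spans []span) (span, error) {
    if len(spans) == 0 {
        return span{}, fmt.Errorf("no spans to merge")
    }
    for i := 0; i < len(spans)-1; i++ {
        if spans[i] == spans[i+1] {
            continue // exact duplicates are harmless, skip them
        }
        if spans[i].end != spans[i+1].begin {
            return span{}, fmt.Errorf("gap or overlap between span %d and span %d", i, i+1)
        }
    }
    return span{begin: spans[0].begin, end: spans[len(spans)-1].end}, nil
}

func main() {
    merged, err := mergeSpans([]span{{0, 4}, {4, 8}, {4, 8}, {8, 16}})
    fmt.Println(merged, err) // {0 16} <nil>
}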

View File

@ -0,0 +1,84 @@
package main
import "io"
func decompressData(r io.Reader) (dst []byte) {
writeNibbleFlag := 0
writeNibble := func(ch byte) {
if writeNibbleFlag == 0 {
writeNibbleFlag = 1
dst = append(dst, ch)
} else {
writeNibbleFlag = 0
dst[len(dst)-1] += ch * 16
}
}
srcCh := make([]byte, 1)
readNibbleFlag := 0
readNibble := func() (ch byte) {
if readNibbleFlag != 0 {
readNibbleFlag = 0
ch = srcCh[0]
return ch & 0xF
} else {
readNibbleFlag = 1
_, _ = r.Read(srcCh)
ch = srcCh[0]
return (ch >> 4) & 0xF
}
}
buf := make([]byte, 8)
_, _ = r.Read(buf)
for {
op := int(readNibble())
switch op {
case 0:
length := int(readNibble())
op = int(readNibble())
for i := 0; i < length*0x10+op+0x13; i++ {
writeNibble(0)
}
case 2:
ch := readNibble()
writeNibble(ch)
writeNibble(ch)
case 4:
writeNibble(readNibble())
fallthrough
case 3:
writeNibble(readNibble())
fallthrough
case 1:
writeNibble(readNibble())
case 5:
ch := readNibble()
length := int(readNibble())
for i := 0; i < length+3; i++ {
writeNibble(ch)
}
case 6:
length := int(readNibble())
for i := 0; i < length+3; i++ {
writeNibble(0)
}
case 7, 8, 9, 10, 11, 12, 13, 14:
length := int(buf[op-7])
switch length & 0xF0 {
case 0x20:
writeNibble(byte(length & 0xF))
fallthrough
case 0x10:
writeNibble(byte(length & 0xF))
case 0x60:
for i := 0; i < (length&0xF)+3; i++ {
writeNibble(0)
}
}
case 15:
return dst
}
}
return []byte{}
}
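decompressData walks the stream one nibble at a time: it first reads an 8-byte dictionary, then interprets 4-bit opcodes until it meets the terminator (op 15). A hypothetical invocation placed alongside the function above could look like this; the input bytes are made up purely for illustration and are not real game data:

package main

import (
    "bytes"
    "fmt"
)

func main() {
    // 8 dictionary bytes (all zero here), then 0x2F -> op 2 repeats nibble 0xF twice,
    // then 0xF0 -> op 15 terminates the stream
    src := append(make([]byte, 8), 0x2F, 0xF0)
    out := decompressData(bytes.NewReader(src))
    fmt.Printf("% X\n", out) // should print: FF (the two 0xF nibbles pack into one byte)
}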

View File

@ -2,6 +2,8 @@ package main
import (
"encoding/binary"
"fmt"
"io"
"os"
)
@ -15,36 +17,99 @@ const (
gfxBankCompressed
)
type gfxEntry struct {
type gfxEntryPrivate struct {
X uint16
Y uint16
Width uint16
Height uint16
Data PsxOffset
}
type gfxBlock struct {
type gfxBlockPrivate struct {
kind gfxKind
flags uint16
entries []gfxEntry
entries []gfxEntryPrivate
}
type gfxEntry struct {
dataRange
X uint16 `json:"x"`
Y uint16 `json:"y"`
Width uint16 `json:"width"`
Height uint16 `json:"height"`
Filename string `json:"filename,omitempty"`
data []byte
}
type gfxBlock struct {
Kind gfxKind `json:"kind"`
Flags uint16 `json:"flags"`
Entries []gfxEntry `json:"entries"`
}
type gfx struct {
blocks []gfxBlock
indices []int
Blocks []gfxBlock `json:"blocks"`
Indices []int `json:"indices"`
}
func readGraphics(file *os.File, off PsxOffset) (gfx, dataRange, error) {
func readGraphicsEntry(f *os.File, kind gfxKind, entry gfxEntryPrivate) (gfxEntry, error) {
fileName := fmt.Sprintf("gfx_%s", entry.Data.String()[2:])
switch kind {
case gfxBankNone:
if entry.Data != RamNull {
return gfxEntry{}, fmt.Errorf("a None Kind should contain a NULL ptr, but I found %v", entry.Data)
}
return gfxEntry{
X: entry.X,
Y: entry.Y,
Width: entry.Width,
Height: entry.Height,
data: nil,
}, nil
case gfxBank4bpp:
return gfxEntry{}, fmt.Errorf("graphics of kind 4bpp is not yet supported (%v)", entry.Data)
case gfxBank8bpp:
return gfxEntry{}, fmt.Errorf("graphics of kind 8bpp is not yet supported (%v)", entry.Data)
case gfxBank16bpp:
return gfxEntry{}, fmt.Errorf("graphics of kind 16bpp is not yet supported (%v)", entry.Data)
case gfxBankCompressed:
if err := entry.Data.moveFile(f); err != nil {
return gfxEntry{}, fmt.Errorf("unable to read graphics entry: %v", err)
}
// the only way to know the size of the compressed data is to decompress it and see how many bytes were read
start, _ := f.Seek(0, io.SeekCurrent)
_ = decompressData(f)
now, _ := f.Seek(0, io.SeekCurrent)
cmpData := make([]byte, int(now-start))
if err := entry.Data.moveFile(f); err != nil {
return gfxEntry{}, fmt.Errorf("unable to read graphics entry: %v", err)
}
f.Read(cmpData)
return gfxEntry{
dataRange: dataRange{
begin: entry.Data,
end: entry.Data.sum(len(cmpData)).align4(),
},
X: entry.X,
Y: entry.Y,
Width: entry.Width,
Height: entry.Height,
Filename: fmt.Sprintf("%s.cmp.bin", fileName),
data: cmpData,
}, nil
default:
return gfxEntry{}, fmt.Errorf("unrecognized graphics Kind %v (%v)", kind, entry.Data)
}
}
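The compressed branch above measures the payload length indirectly: it notes the current file position, runs the decompressor only for its side effect of advancing the reader, then seeks back and re-reads the raw bytes it just walked over. A generic sketch of that measure-by-consuming pattern over any io.ReadSeeker (the helper name is made up for illustration) might be:

package main

import (
    "fmt"
    "io"
    "strings"
)

// readConsumedBytes runs consume against r, then returns the exact bytes that
// consume advanced over, by seeking back to the starting position
func readConsumedBytes(r io.ReadSeeker, consume func(io.Reader)) ([]byte, error) {
    start, err := r.Seek(0, io.SeekCurrent)
    if err != nil {
        return nil, err
    }
    consume(r) // e.g. decompressData; we only care how far it reads
    end, err := r.Seek(0, io.SeekCurrent)
    if err != nil {
        return nil, err
    }
    if _, err := r.Seek(start, io.SeekStart); err != nil {
        return nil, err
    }
    raw := make([]byte, end-start)
    if _, err := io.ReadFull(r, raw); err != nil {
        return nil, err
    }
    return raw, nil
}

func main() {
    r := strings.NewReader("compressed-blob|trailing data")
    raw, err := readConsumedBytes(r, func(rd io.Reader) {
        _, _ = io.CopyN(io.Discard, rd, 15) // stand-in consumer that reads 15 bytes
    })
    fmt.Printf("%q %v\n", raw, err) // "compressed-blob" <nil>
}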
func readGraphics(file *os.File, off PsxOffset) (gfx, []dataRange, error) {
if err := off.moveFile(file); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
// all the offsets point to data located before the array itself, so it is easy to find where the offsets array ends
blockOffsets := []PsxOffset{}
for {
var offBank PsxOffset
if err := binary.Read(file, binary.LittleEndian, &offBank); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
if offBank >= off {
break
@ -52,27 +117,27 @@ func readGraphics(file *os.File, off PsxOffset) (gfx, dataRange, error) {
blockOffsets = append(blockOffsets, offBank)
}
// the order of each gfxBlock must be preserved
// the order of each gfxBlockPrivate must be preserved
pool := map[PsxOffset]int{}
pool[RamNull] = -1
blocks := []gfxBlock{}
blocks := []gfxBlockPrivate{}
ranges := []dataRange{}
for _, blockOffset := range sortUniqueOffsets(blockOffsets) {
if blockOffset == RamNull { // exception for ST0
if blockOffset == RamNull { // exception for ST0 as pointers can be NULL
continue
}
if err := blockOffset.moveFile(file); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
var block gfxBlock
var block gfxBlockPrivate
if err := binary.Read(file, binary.LittleEndian, &block.kind); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
if err := binary.Read(file, binary.LittleEndian, &block.flags); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
if block.kind == gfxKind(0xFFFF) && block.flags == 0xFFFF { // exception for ST0
if block.kind == gfxKind(0xFFFF) && block.flags == 0xFFFF { // exception for ST0 as blocks can be empty
pool[blockOffset] = len(blocks)
blocks = append(blocks, block)
ranges = append(ranges, dataRange{
@ -83,9 +148,9 @@ func readGraphics(file *os.File, off PsxOffset) (gfx, dataRange, error) {
}
for {
var entry gfxEntry
var entry gfxEntryPrivate
if err := binary.Read(file, binary.LittleEndian, &entry); err != nil {
return gfx{}, dataRange{}, err
return gfx{}, nil, err
}
if entry.X == 0xFFFF && entry.Y == 0xFFFF {
break
@ -101,12 +166,37 @@ func readGraphics(file *os.File, off PsxOffset) (gfx, dataRange, error) {
}
var g gfx
g.Blocks = make([]gfxBlock, len(blocks))
for i, block := range blocks {
g.Blocks[i] = gfxBlock{
Kind: block.kind,
Flags: block.flags,
Entries: make([]gfxEntry, len(block.entries)),
}
for j, e := range block.entries {
newEntry, err := readGraphicsEntry(file, block.kind, e)
if err != nil {
return gfx{}, nil, err
}
g.Blocks[i].Entries[j] = newEntry
}
}
for _, blockOffset := range blockOffsets {
g.indices = append(g.indices, pool[blockOffset])
g.Indices = append(g.Indices, pool[blockOffset])
}
return g, mergeDataRanges(append(ranges, dataRange{
headerRange := mergeDataRanges(append(ranges, dataRange{
begin: off,
end: off.sum(len(blockOffsets) * 4),
})), nil
}))
dataRanges := []dataRange{}
for _, block := range g.Blocks {
for _, entry := range block.Entries {
if !entry.dataRange.empty() {
dataRanges = append(dataRanges, entry.dataRange)
}
}
}
return g, []dataRange{headerRange, mergeDataRanges(dataRanges)}, nil
}
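readGraphics finds the end of the pointer table by relying on the layout noted in the comment above: every referenced block sits before the table, so the first value that is not smaller than the table's own offset marks the end. A stripped-down sketch of that termination heuristic, with plain uint32 offsets and hypothetical values, could be:

package main

import (
    "bytes"
    "encoding/binary"
    "fmt"
)

// readPointerTable reads little-endian uint32 pointers starting at tableOff
// and stops at the first value that is not below the table itself
func readPointerTable(r *bytes.Reader, tableOff uint32) ([]uint32, error) {
    var ptrs []uint32
    for {
        var p uint32
        if err := binary.Read(r, binary.LittleEndian, &p); err != nil {
            return nil, err
        }
        if p >= tableOff { // we ran past the table: stop
            break
        }
        ptrs = append(ptrs, p)
    }
    return ptrs, nil
}

func main() {
    // hypothetical stream: three pointers (0x10, 0x40, 0x80) followed by
    // unrelated data that happens to be >= the table offset (0x100)
    buf := new(bytes.Buffer)
    for _, v := range []uint32{0x10, 0x40, 0x80, 0xFFFF0100} {
        _ = binary.Write(buf, binary.LittleEndian, v)
    }
    ptrs, err := readPointerTable(bytes.NewReader(buf.Bytes()), 0x100)
    fmt.Println(ptrs, err) // [16 64 128] <nil>
}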

View File

@ -11,7 +11,7 @@ type layoutEntry struct {
X int16 `json:"x"`
Y int16 `json:"y"`
ID uint8 `json:"id"`
Flags uint8 `json:"flags"` // TODO properly de-serialize this
Slot uint8 `json:"slot"`
SpawnID uint8 `json:"spawnId"`
Params uint16 `json:"params"`
@ -20,7 +20,7 @@ type layoutEntry struct {
type layouts struct {
Entities [][]layoutEntry `json:"entities"`
Indices []int `json:"indices"`
}
func readEntityLayoutEntry(file *os.File) (layoutEntry, error) {
@ -39,15 +39,15 @@ func readEntityLayoutEntry(file *os.File) (layoutEntry, error) {
}, nil
}
// the Y-ordered entries list has a different order than the X-ordered one. The order cannot be consistently
// restored by just sorting entries by Y, as entries sharing the same Y value usually end up swapped.
// This algorithm will fill the optional field YOrder, only useful to restore the original order.
func hydrateYOrderFields(x layouts, y layouts) error {
if len(x.Indices) != len(y.Indices) {
return fmt.Errorf("number of X and Y layout indices do not match")
return fmt.Errorf("number of X and Y layout Indices do not match")
}
if len(x.Entities) != len(y.Entities) {
return fmt.Errorf("number of X and Y layout entries do not match")
return fmt.Errorf("number of X and Y layout Entries do not match")
}
populateYOrderField := func(xEntries []layoutEntry, yEntries []layoutEntry) {
@ -66,7 +66,7 @@ func hydrateYOrderFields(x layouts, y layouts) error {
xList := x.Entities[i]
yList := y.Entities[i]
if len(xList) != len(yList) {
return fmt.Errorf("number of X and Y entries do not match")
return fmt.Errorf("number of X and Y Entries do not match")
}
populateYOrderField(xList, yList)
}
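The body of populateYOrderField is not shown in this hunk; a minimal sketch of the idea, assuming entries can be matched between the two lists (here by a hypothetical ID field), could look like the following. The real layoutEntry carries more fields and an optional YOrder member used only when rebuilding the Y-sorted list:

package main

import "fmt"

// simplified stand-in for layoutEntry
type entry struct {
    ID     int
    Y      int
    YOrder int // position of this entry inside the Y-ordered list
}

// fillYOrder records, for every entry of the X-ordered slice, where the same
// entry (matched here by ID) sits inside the Y-ordered slice
func fillYOrder(xEntries []entry, yEntries []entry) {
    pos := make(map[int]int, len(yEntries))
    for i, e := range yEntries {
        pos[e.ID] = i
    }
    for i := range xEntries {
        xEntries[i].YOrder = pos[xEntries[i].ID]
    }
}

func main() {
    x := []entry{{ID: 1, Y: 32}, {ID: 2, Y: 32}, {ID: 3, Y: 16}}
    y := []entry{{ID: 3, Y: 16}, {ID: 2, Y: 32}, {ID: 1, Y: 32}} // same Y values, swapped order
    fillYOrder(x, y)
    fmt.Println(x) // [{1 32 2} {2 32 1} {3 16 0}]
}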

View File

@ -15,15 +15,16 @@ type dataContainer[T any] struct {
}
type ovl struct {
ranges []dataRange
rooms dataContainer[[]room]
layers dataContainer[[]roomLayers]
sprites dataContainer[spriteDefs]
graphics dataContainer[gfx]
layouts dataContainer[layouts]
layoutsExtraRange dataRange
tileMaps dataContainer[map[PsxOffset][]byte]
tileDefs dataContainer[map[PsxOffset]tileDef]
ranges []dataRange
rooms dataContainer[[]room]
layers dataContainer[[]roomLayers]
sprites dataContainer[spriteDefs]
graphics dataContainer[gfx]
graphicsHeaderRange dataRange
layouts dataContainer[layouts]
layoutsHeaderRange dataRange
tileMaps dataContainer[map[PsxOffset][]byte]
tileDefs dataContainer[map[PsxOffset]tileDef]
}
func getOvlAssets(fileName string) (ovl, error) {
@ -37,7 +38,7 @@ func getOvlAssets(fileName string) (ovl, error) {
Cluts PsxOffset // 🫥
Layouts PsxOffset // ✅
Layers PsxOffset // ✅
Graphics PsxOffset // 🫥 WIP
Graphics PsxOffset //
FnUpdateStageEntities PsxOffset
}
@ -88,7 +89,7 @@ func getOvlAssets(fileName string) (ovl, error) {
return ovl{}, fmt.Errorf("unable to gather all sprites: %w", err)
}
graphics, graphicsRange, err := readGraphics(file, header.Graphics)
graphics, graphicsRanges, err := readGraphics(file, header.Graphics)
if err != nil {
return ovl{}, fmt.Errorf("unable to gather all graphics: %w", err)
}
@ -97,7 +98,7 @@ func getOvlAssets(fileName string) (ovl, error) {
if layoutOff == RamNull {
// some overlays have this field nulled, we have to find the offset ourselves
// it should usually be right after header.Graphics
layoutOff = graphicsRange.end // ⚠️ assumption
layoutOff = graphicsRanges[0].end // ⚠️ assumption
}
nLayouts := maxBy(rooms, func(r room) int { // ⚠️ assumption
return int(r.EntityLayoutID)
@ -113,20 +114,22 @@ func getOvlAssets(fileName string) (ovl, error) {
roomsRange,
layersRange,
spritesRange,
graphicsRange,
graphicsRanges[0],
graphicsRanges[1],
layoutsRange[0],
layoutsRange[1],
tileMapsRange,
tileDefsRange,
}),
rooms: dataContainer[[]room]{dataRange: roomsRange, content: rooms},
layers: dataContainer[[]roomLayers]{dataRange: layersRange, content: layers},
sprites: dataContainer[spriteDefs]{dataRange: spritesRange, content: sprites},
graphics: dataContainer[gfx]{dataRange: graphicsRange, content: graphics},
layouts: dataContainer[layouts]{dataRange: layoutsRange[1], content: entityLayouts},
layoutsExtraRange: layoutsRange[0],
tileMaps: dataContainer[map[PsxOffset][]byte]{dataRange: tileMapsRange, content: tileMaps},
tileDefs: dataContainer[map[PsxOffset]tileDef]{dataRange: tileDefsRange, content: tileDefs},
rooms: dataContainer[[]room]{dataRange: roomsRange, content: rooms},
layers: dataContainer[[]roomLayers]{dataRange: layersRange, content: layers},
sprites: dataContainer[spriteDefs]{dataRange: spritesRange, content: sprites},
graphics: dataContainer[gfx]{dataRange: graphicsRanges[1], content: graphics},
graphicsHeaderRange: graphicsRanges[0],
layouts: dataContainer[layouts]{dataRange: layoutsRange[1], content: entityLayouts},
layoutsHeaderRange: layoutsRange[0],
tileMaps: dataContainer[map[PsxOffset][]byte]{dataRange: tileMapsRange, content: tileMaps},
tileDefs: dataContainer[map[PsxOffset]tileDef]{dataRange: tileDefsRange, content: tileDefs},
}, nil
}
@ -159,6 +162,24 @@ func extractOvlAssets(o ovl, outputDir string) error {
return fmt.Errorf("unable to create entity layouts file: %w", err)
}
for _, block := range o.graphics.content.Blocks {
for _, entry := range block.Entries {
if len(entry.data) == 0 {
continue
}
if err := os.WriteFile(path.Join(outputDir, entry.Filename), entry.data, 0644); err != nil {
return fmt.Errorf("unable to create graphics file: %w", err)
}
}
}
content, err = json.MarshalIndent(o.graphics.content, "", " ")
if err != nil {
return err
}
if err := os.WriteFile(path.Join(outputDir, "gfx.json"), content, 0644); err != nil {
return fmt.Errorf("unable to create gfx file descriptor: %w", err)
}
for offset, bytes := range o.tileMaps.content {
fileName := path.Join(outputDir, getTilemapFileName(offset))
if err := os.WriteFile(fileName, bytes, 0644); err != nil {
@ -233,9 +254,11 @@ func info(fileName string) error {
comment string
}{
{o.layers.dataRange, "header", "layers"},
{o.layoutsExtraRange, "e_laydef", "layout entries header"},
{o.graphicsHeaderRange, "header", "entity gfx"},
{o.layoutsHeaderRange, "e_laydef", "layout entries header"},
{o.rooms.dataRange, "rooms", ""},
{o.layouts.dataRange, "e_layout", "layout entries data"},
{o.graphics.dataRange, "gfx", ""},
{o.tileMaps.dataRange, "tile_data", "tile data"},
{o.tileDefs.dataRange, "tile_data", "tile definitions"},
{o.sprites.dataRange, "sprites", ""},
@ -327,13 +350,13 @@ func main() {
var kind string
var outputDir string
buildCmd.StringVar(&file, "file", "", "File to process")
buildCmd.StringVar(&kind, "kind", "", "Kind of the file to process")
buildCmd.StringVar(&kind, "Kind", "", "Kind of the file to process")
buildCmd.StringVar(&outputDir, "o", "", "Where to store the processed source files")
buildCmd.Parse(os.Args[2:])
if file == "" || kind == "" || outputDir == "" {
fmt.Println("file, kind, and output_dir are required for build")
fmt.Println("file, Kind, and output_dir are required for build")
buildCmd.PrintDefaults()
os.Exit(1)
}
@ -347,7 +370,7 @@ func main() {
case "sprites":
err = buildSprites(file, outputDir)
default:
fmt.Println("unknown kind, valid values are 'room', 'layer', 'sprites'")
fmt.Println("unknown Kind, valid values are 'room', 'layer', 'sprites'")
}
if err != nil {
panic(err)

View File

@ -15,7 +15,11 @@ const (
)
func (off PsxOffset) Format(f fmt.State, c rune) {
f.Write([]byte(fmt.Sprintf("0x%08X", uint32(off))))
f.Write([]byte(off.String()))
}
func (off PsxOffset) String() string {
return fmt.Sprintf("0x%08X", uint32(off))
}
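Factoring the hex formatting into String lets call sites such as the new gfx code build file names via entry.Data.String()[2:], while Format keeps the fmt verbs working unchanged. A small usage sketch, assuming it sits in the same package as PsxOffset and using an arbitrary value:

package main

import "fmt"

func main() {
    off := PsxOffset(0x1000) // arbitrary value, just to show the formatting
    fmt.Println(off.String())         // 0x00001000
    fmt.Printf("%v / %s\n", off, off) // 0x00001000 / 0x00001000, both go through Format
}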
func (off PsxOffset) real() int {

View File

@ -8,7 +8,7 @@ import (
)
type sprite struct {
Flags uint16 `json:"flags"`
X int16 `json:"x"`
Y int16 `json:"y"`
Width uint16 `json:"width"`
@ -23,7 +23,7 @@ type sprite struct {
type spriteDefs struct {
Banks [][]*[]sprite `json:"banks"`
Indices []int `json:"indices"`
}
func readSprites(file *os.File, off PsxOffset) ([]sprite, dataRange, error) {
@ -126,7 +126,7 @@ func readSpritesBanks(file *os.File, off PsxOffset) (spriteDefs, dataRange, erro
spriteRanges = append(spriteRanges, bankRange)
}
// the indices do not guarantee sprites to be stored in a linear order
// we must sort the offsets to preserve the order in which sprites are stored
sortedOffsets := make([]PsxOffset, 0, len(pool))
for offset := range pool {
@ -134,7 +134,7 @@ func readSpritesBanks(file *os.File, off PsxOffset) (spriteDefs, dataRange, erro
}
sort.Slice(sortedOffsets, func(i, j int) bool { return sortedOffsets[i] < sortedOffsets[j] })
// create a list of indices to replace the original pointers
indices := make([]int, len(offBanks))
for i, offset := range offBanks {
if offset == RamNull {
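The sprite banks are written out in the order they are stored in the file, while the pointer table may reference them in any order, repeat them, or hold NULL slots. A stripped-down sketch of the pointer-to-index conversion described by the comments above, with plain uint32 offsets, 0 standing in for RamNull, and made-up values:

package main

import (
    "fmt"
    "sort"
)

// offsetsToIndices maps every pointer in table to the position its target
// occupies once all distinct, non-NULL pointers are sorted by address;
// NULL pointers (0 here) become -1
func offsetsToIndices(table []uint32) []int {
    seen := map[uint32]bool{}
    for _, off := range table {
        if off != 0 {
            seen[off] = true
        }
    }
    sorted := make([]uint32, 0, len(seen))
    for off := range seen {
        sorted = append(sorted, off)
    }
    sort.Slice(sorted, func(i, j int) bool { return sorted[i] < sorted[j] })
    pos := make(map[uint32]int, len(sorted))
    for i, off := range sorted {
        pos[off] = i
    }
    indices := make([]int, len(table))
    for i, off := range table {
        if off == 0 {
            indices[i] = -1
            continue
        }
        indices[i] = pos[off]
    }
    return indices
}

func main() {
    // hypothetical pointer table: two banks stored at 0x80 and 0x40,
    // referenced out of order, with a repeated and a NULL slot
    fmt.Println(offsetsToIndices([]uint32{0x80, 0, 0x40, 0x80})) // [1 -1 0 1]
}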