Asset tool refactor (#1844)
Some checks are pending
Build C code / extract-assets (push) Waiting to run
Build C code / build-linux (i686, Debug, clang, custom) (push) Blocked by required conditions
Build C code / build-linux (i686, Debug, clang, lle) (push) Blocked by required conditions
Build C code / build-linux (i686, Debug, gcc, custom) (push) Blocked by required conditions
Build C code / build-linux (i686, Debug, gcc, lle) (push) Blocked by required conditions
Build C code / build-linux (i686, RelWithDebInfo, clang, custom) (push) Blocked by required conditions
Build C code / build-linux (i686, RelWithDebInfo, clang, lle) (push) Blocked by required conditions
Build C code / build-linux (i686, RelWithDebInfo, gcc, custom) (push) Blocked by required conditions
Build C code / build-linux (i686, RelWithDebInfo, gcc, lle) (push) Blocked by required conditions
Build C code / build-linux (x86_64, Debug, clang, custom) (push) Blocked by required conditions
Build C code / build-linux (x86_64, Debug, clang, lle) (push) Blocked by required conditions
Build C code / build-linux (x86_64, Debug, gcc, custom) (push) Blocked by required conditions
Build C code / build-linux (x86_64, Debug, gcc, lle) (push) Blocked by required conditions
Build C code / build-linux (x86_64, RelWithDebInfo, clang, custom) (push) Blocked by required conditions
Build C code / build-linux (x86_64, RelWithDebInfo, clang, lle) (push) Blocked by required conditions
Build C code / build-linux (x86_64, RelWithDebInfo, gcc, custom) (push) Blocked by required conditions
Build C code / build-linux (x86_64, RelWithDebInfo, gcc, lle) (push) Blocked by required conditions
Build C code / build-macos (Debug, custom) (push) Blocked by required conditions
Build C code / build-macos (Debug, lle) (push) Blocked by required conditions
Build C code / build-macos (RelWithDebInfo, custom) (push) Blocked by required conditions
Build C code / build-macos (RelWithDebInfo, lle) (push) Blocked by required conditions
Build C code / build-windows (Debug, custom) (push) Blocked by required conditions
Build C code / build-windows (Debug, lle) (push) Blocked by required conditions
Build C code / build-windows (RelWithDebInfo, custom) (push) Blocked by required conditions
Build C code / build-windows (RelWithDebInfo, lle) (push) Blocked by required conditions
Build Saturn version / build-and-test-saturn (push) Waiting to run
Build Saturn version / function-finder-saturn (push) Waiting to run
Build PSX and PSP version / build-and-test (pspeu, hd) (push) Waiting to run
Build PSX and PSP version / build-and-test (pspeu, pspeu) (push) Waiting to run
Build PSX and PSP version / build-and-test (us, us) (push) Waiting to run
Build PSX and PSP version / generate-progress-report (pspeu, hd) (push) Blocked by required conditions
Build PSX and PSP version / generate-progress-report (pspeu, pspeu) (push) Blocked by required conditions
Build PSX and PSP version / generate-progress-report (us, us) (push) Blocked by required conditions
Build PSX and PSP version / generate-duplicates-report (us, us) (push) Blocked by required conditions
Build PSX and PSP version / generate-duplicates-report-psp (pspeu, pspeu) (push) Blocked by required conditions

`sotn-assets stage extract` and `sotn-assets stage build` are gone in
favour of `config/assets.$(VERSION).yaml`. `sotn-assets stage info` is
now replaced with `sotn-stage info` to retrieve expanded metadata on how
to use the tool for new overlays and reduce the burden of hunting for
data.

There are tons of changes, too many to describe. In short the tool is
much simpler than before and data is decoupled from each other. Each
data type is defined as a handler. A handler has `Extract` to create the
files in `assets/`, a `Build` to convert `assets/` files into
embeddable code in `src/`, and `Info` to get stage metadata.

Please refer to the commit list for a breakdown of the changes done.
This commit is contained in:
Luciano Ciccariello 2024-10-29 08:38:05 +00:00 committed by GitHub
parent 09f48e6f0c
commit 1b10fd7806
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 1559 additions and 1264 deletions

View File

@ -21,8 +21,8 @@ extract_us: $(addprefix $(BUILD_DIR)/,$(addsuffix .ld,$(PSX_US_TARGETS)))
make extract_assets make extract_assets
make build_assets make build_assets
extract_hd: $(addprefix $(BUILD_DIR)/,$(addsuffix .ld,$(PSX_HD_TARGETS))) extract_hd: $(addprefix $(BUILD_DIR)/,$(addsuffix .ld,$(PSX_HD_TARGETS)))
make extract_assets_hd make extract_assets
make build_assets_hd make build_assets
extract_disk_us: extract_disk_psxus extract_disk_us: extract_disk_psxus
extract_disk_hd: extract_disk_pspeu extract_disk_hd: extract_disk_pspeu
@ -86,36 +86,9 @@ $(BUILD_DIR)/$(SRC_DIR)/main/psxsdk/libgpu/sys.c.o: $(SRC_DIR)/main/psxsdk/libgp
extract_assets: $(SOTNASSETS) extract_assets: $(SOTNASSETS)
cd tools/sotn-assets; $(GO) install cd tools/sotn-assets; $(GO) install
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/CEN/CEN.BIN -o assets/st/cen $(SOTNASSETS) extract config/assets.$(VERSION).yaml
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/DRE/DRE.BIN -o assets/st/dre
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/NO3/NO3.BIN -o assets/st/no3
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/NP3/NP3.BIN -o assets/st/np3
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/NZ0/NZ0.BIN -o assets/st/nz0
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/ST0/ST0.BIN -o assets/st/st0
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/WRP/WRP.BIN -o assets/st/wrp
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/RWRP/RWRP.BIN -o assets/st/rwrp
$(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/BOSS/MAR/MAR.BIN -o assets/boss/mar
$(SOTNASSETS) config extract config/assets.$(VERSION).yaml
extract_assets_hd: $(SOTNASSETS)
cd tools/sotn-assets; $(GO) install
$(SOTNASSETS) stage extract -stage_ovl disks/pspeu/PSP_GAME/USRDIR/res/ps/hdbin/cen.bin -o assets/st/cen
$(SOTNASSETS) stage extract -stage_ovl disks/pspeu/PSP_GAME/USRDIR/res/ps/hdbin/wrp.bin -o assets/st/wrp
$(SOTNASSETS) config extract config/assets.$(VERSION).yaml
build_assets: $(SOTNASSETS) build_assets: $(SOTNASSETS)
$(SOTNASSETS) stage build_all -i assets/st/cen -o src/st/cen/ $(SOTNASSETS) build config/assets.$(VERSION).yaml
$(SOTNASSETS) stage build_all -i assets/st/dre -o src/st/dre/
$(SOTNASSETS) stage build_all -i assets/st/no3 -o src/st/no3/
$(SOTNASSETS) stage build_all -i assets/st/np3 -o src/st/np3/
$(SOTNASSETS) stage build_all -i assets/st/nz0 -o src/st/nz0/
$(SOTNASSETS) stage build_all -i assets/st/st0 -o src/st/st0/
$(SOTNASSETS) stage build_all -i assets/st/wrp -o src/st/wrp/
$(SOTNASSETS) stage build_all -i assets/st/rwrp -o src/st/rwrp/
$(SOTNASSETS) stage build_all -i assets/boss/mar -o src/boss/mar/
$(SOTNASSETS) config build config/assets.$(VERSION).yaml
build_assets_hd: $(SOTNASSETS)
$(SOTNASSETS) stage build_all -i assets/st/cen -o src/st/cen/
$(SOTNASSETS) stage build_all -i assets/st/wrp -o src/st/wrp/
$(SOTNASSETS) config build config/assets.$(VERSION).yaml
$(BUILD_DIR)/assets/dra/memcard_%.png.o: assets/dra/memcard_%.png $(BUILD_DIR)/assets/dra/memcard_%.png.o: assets/dra/memcard_%.png
mkdir -p $(dir $@) mkdir -p $(dir $@)

View File

@ -8,6 +8,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xDC, layers, layers]
- [0x134, skip]
- [0x1EC, layout, entity_layouts]
- [0x394, skip]
- [0x1308, rooms, rooms] - [0x1308, rooms, rooms]
- [0x1334, skip] - [0x1334, skip]
- target: disks/pspeu/PSP_GAME/USRDIR/res/ps/hdbin/wrp.bin - target: disks/pspeu/PSP_GAME/USRDIR/res/ps/hdbin/wrp.bin
@ -19,5 +23,9 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xB8, layers, layers]
- [0x1B8, skip]
- [0x23C, layout, entity_layouts]
- [0x3E4, skip]
- [0x11B0, rooms, rooms] - [0x11B0, rooms, rooms]
- [0x122C, skip] - [0x122C, skip]

View File

@ -8,6 +8,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xDC, layers, layers]
- [0x134, skip]
- [0x1EC, layout, entity_layouts]
- [0x394, skip]
- [0x12D4, rooms, rooms] - [0x12D4, rooms, rooms]
- [0x1300, skip] - [0x1300, skip]
- [0x13F0, cutscene, cutscene_data] - [0x13F0, cutscene, cutscene_data]
@ -21,6 +25,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xE8, layers, layers]
- [0x128, skip]
- [0x220, layout, entity_layouts]
- [0x3C8, skip]
- [0x1498, rooms, rooms] - [0x1498, rooms, rooms]
- [0x14AC, skip] - [0x14AC, skip]
- [0x16C8, cutscene, cutscene_data] - [0x16C8, cutscene, cutscene_data]
@ -34,6 +42,7 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
# TODO broken
- [0x1130, rooms, rooms] - [0x1130, rooms, rooms]
- [0x11D4, skip] - [0x11D4, skip]
- target: disks/us/ST/NO3/NO3.BIN - target: disks/us/ST/NO3/NO3.BIN
@ -45,6 +54,10 @@ files:
assets: assets:
- [0x2C, sprite_banks, sprite_banks] - [0x2C, sprite_banks, sprite_banks]
- [0x8C, skip] - [0x8C, skip]
- [0x1C4, layers, layers]
- [0x5A4, skip]
- [0x77C, layout, entity_layouts]
- [0x924, skip]
- [0x3CC4, rooms, rooms] - [0x3CC4, rooms, rooms]
- [0x3DC8, skip] - [0x3DC8, skip]
- [0x4CE0, cutscene, cutscene_data] - [0x4CE0, cutscene, cutscene_data]
@ -58,6 +71,10 @@ files:
assets: assets:
- [0x2C, sprite_banks, sprite_banks] - [0x2C, sprite_banks, sprite_banks]
- [0x8C, skip] - [0x8C, skip]
- [0x1D0, layers, layers]
- [0x558, skip]
- [0x728, layout, entity_layouts]
- [0x8D0, skip]
- [0x3A7C, rooms, rooms] - [0x3A7C, rooms, rooms]
- [0x3B68, skip] - [0x3B68, skip]
- target: disks/us/ST/NZ0/NZ0.BIN - target: disks/us/ST/NZ0/NZ0.BIN
@ -69,6 +86,10 @@ files:
assets: assets:
- [0x2C, sprite_banks, sprite_banks] - [0x2C, sprite_banks, sprite_banks]
- [0x8C, skip] - [0x8C, skip]
- [0x164, layers, layers]
- [0x47C, skip]
- [0x8EC, layout, entity_layouts]
- [0xA94, skip]
- [0x272C, rooms, rooms] - [0x272C, rooms, rooms]
- [0x2830, skip] - [0x2830, skip]
- [0x3B0C, cutscene, cutscene_data] - [0x3B0C, cutscene, cutscene_data]
@ -82,6 +103,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0x124, layers, layers]
- [0x1A4, skip]
- [0x314, layout, entity_layouts]
- [0x4BC, skip]
- [0x2060, rooms, rooms] - [0x2060, rooms, rooms]
- [0x2084, skip] - [0x2084, skip]
- [0x29D8, cutscene, cutscene_data] - [0x29D8, cutscene, cutscene_data]
@ -95,6 +120,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xB8, layers, layers]
- [0x1B8, skip]
- [0x23C, layout, entity_layouts]
- [0x3E4, skip]
- [0x11AC, rooms, rooms] - [0x11AC, rooms, rooms]
- [0x1228, skip] - [0x1228, skip]
- target: disks/us/ST/RWRP/RWRP.BIN - target: disks/us/ST/RWRP/RWRP.BIN
@ -106,6 +135,10 @@ files:
assets: assets:
- [0x40, sprite_banks, sprite_banks] - [0x40, sprite_banks, sprite_banks]
- [0xA0, skip] - [0xA0, skip]
- [0xB8, layers, layers]
- [0x1B8, skip]
- [0x23C, layout, entity_layouts]
- [0x3E4, skip]
- [0x11AC, rooms, rooms] - [0x11AC, rooms, rooms]
- [0x1228, skip] - [0x1228, skip]
- target: disks/us/BOSS/MAR/MAR.BIN - target: disks/us/BOSS/MAR/MAR.BIN
@ -117,6 +150,10 @@ files:
assets: assets:
- [0x2C, sprite_banks, sprite_banks] - [0x2C, sprite_banks, sprite_banks]
- [0x8C, skip] - [0x8C, skip]
- [0xCC, layers, layers]
- [0xF4, skip]
- [0x168, layout, entity_layouts]
- [0x310, skip]
- [0x12EC, rooms, rooms] - [0x12EC, rooms, rooms]
- [0x1308, skip] - [0x1308, skip]
- [0x1424, cutscene, cutscene_data] - [0x1424, cutscene, cutscene_data]
@ -130,6 +167,12 @@ files:
assets: assets:
- [0x2C, sprite_banks, sprite_banks] - [0x2C, sprite_banks, sprite_banks]
- [0x8C, skip] - [0x8C, skip]
- [0xE0, layers, layers]
- [0x108, skip]
- [0x1EC, layout, entity_layouts]
- [0x394, skip]
- [0x126C, rooms, rooms]
- [0x1288, skip]
- target: disks/us/BIN/WEAPON0.BIN - target: disks/us/BIN/WEAPON0.BIN
asset_path: assets/weapon asset_path: assets/weapon
src_path: src/weapon src_path: src/weapon

View File

@ -1 +1,2 @@
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=

View File

@ -4,7 +4,10 @@ import (
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/cutscene" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/cutscene"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/layer"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/layout"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/rooms" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/rooms"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/skip"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/spritebanks" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/spritebanks"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/spriteset" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/spriteset"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
@ -31,19 +34,21 @@ type assetConfig struct {
Files []assetFileEntry `yaml:"files"` Files []assetFileEntry `yaml:"files"`
} }
var extractHandlers = map[string]func(assets.ExtractEntry) error{ var handlers = func() map[string]assets.Handler {
"cutscene": cutscene.Handler.Extract, m := make(map[string]assets.Handler)
"rooms": rooms.Handler.Extract, for _, handler := range []assets.Handler{
"sprite_banks": spritebanks.Handler.Extract, cutscene.Handler,
"spriteset": spriteset.Handler.Extract, layer.Handler,
} layout.Handler,
rooms.Handler,
var buildHandlers = map[string]func(assets.BuildEntry) error{ skip.Handler,
"cutscene": cutscene.Handler.Build, spritebanks.Handler,
"rooms": rooms.Handler.Build, spriteset.Handler,
"sprite_banks": spritebanks.Handler.Build, } {
"spriteset": spriteset.Handler.Build, m[handler.Name()] = handler
} }
return m
}()
func parseArgs(entry []string) (offset int64, kind string, args []string, err error) { func parseArgs(entry []string) (offset int64, kind string, args []string, err error) {
if len(entry) < 2 { if len(entry) < 2 {
@ -79,7 +84,7 @@ func readConfig(path string) (*assetConfig, error) {
func enqueueExtractAssetEntry( func enqueueExtractAssetEntry(
eg *errgroup.Group, eg *errgroup.Group,
handler func(assets.ExtractEntry) error, handler assets.Extractor,
assetDir string, assetDir string,
name string, name string,
data []byte, data []byte,
@ -93,7 +98,7 @@ func enqueueExtractAssetEntry(
fmt.Printf("unable to extract asset %q: %v", name, err) fmt.Printf("unable to extract asset %q: %v", name, err)
} }
}() }()
if err := handler(assets.ExtractEntry{ if err := handler.Extract(assets.ExtractArgs{
Data: data, Data: data,
Start: start, Start: start,
End: end, End: end,
@ -132,7 +137,7 @@ func extractAssetFile(file assetFileEntry) error {
return fmt.Errorf("offset 0x%X should be smaller than 0x%X, asset %v", off, off2, segment.Assets[i-1]) return fmt.Errorf("offset 0x%X should be smaller than 0x%X, asset %v", off, off2, segment.Assets[i-1])
} }
if kind != "skip" { if kind != "skip" {
if handler, found := extractHandlers[kind]; found { if handler, found := handlers[kind]; found {
name := strconv.FormatUint(uint64(off), 16) name := strconv.FormatUint(uint64(off), 16)
if len(args) > 0 { if len(args) > 0 {
name = args[0] name = args[0]
@ -155,12 +160,12 @@ func extractAssetFile(file assetFileEntry) error {
func enqueueBuildAssetEntry( func enqueueBuildAssetEntry(
eg *errgroup.Group, eg *errgroup.Group,
handler func(assets.BuildEntry) error, handler assets.Builder,
assetDir, assetDir,
sourceDir, sourceDir,
name string) { name string) {
eg.Go(func() error { eg.Go(func() error {
err := handler(assets.BuildEntry{ err := handler.Build(assets.BuildArgs{
AssetDir: assetDir, AssetDir: assetDir,
SrcDir: sourceDir, SrcDir: sourceDir,
Name: name, Name: name,
@ -199,7 +204,7 @@ func buildAssetFile(file assetFileEntry) error {
if kind == "skip" { if kind == "skip" {
continue continue
} }
if handler, found := buildHandlers[kind]; found { if handler, found := handlers[kind]; found {
name := strconv.FormatUint(uint64(off), 16) name := strconv.FormatUint(uint64(off), 16)
if len(args) > 0 { if len(args) > 0 {
name = args[0] name = args[0]

View File

@ -1,8 +1,11 @@
package assets package assets
import "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" import (
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
)
type ExtractEntry struct { type ExtractArgs struct {
Data []byte Data []byte
Start int Start int
End int End int
@ -11,23 +14,44 @@ type ExtractEntry struct {
Args []string Args []string
RamBase psx.Addr RamBase psx.Addr
} }
type Extractor interface {
Extract(a ExtractArgs) error
}
type BuildEntry struct { type BuildArgs struct {
AssetDir string AssetDir string
SrcDir string SrcDir string
Name string Name string
} }
type Builder interface {
type Extracter interface { Build(a BuildArgs) error
Extract(e ExtractEntry) error
} }
type Builder interface { type InfoArgs struct {
Build(e BuildEntry) error StageFilePath string
StageData []byte
}
type InfoAssetEntry struct {
DataRange datarange.DataRange
Kind string
Name string
}
type InfoSplatEntry struct {
DataRange datarange.DataRange
Name string
Comment string
}
type InfoResult struct {
AssetEntries []InfoAssetEntry
SplatEntries []InfoSplatEntry
}
type InfoGatherer interface {
Info(a InfoArgs) (InfoResult, error)
} }
type Handler interface { type Handler interface {
Name() string Name() string
Extracter Extractor
Builder Builder
InfoGatherer
} }

View File

@ -20,7 +20,7 @@ var Handler = &handler{}
func (h *handler) Name() string { return "cutscene" } func (h *handler) Name() string { return "cutscene" }
func (h *handler) Extract(e assets.ExtractEntry) error { func (h *handler) Extract(e assets.ExtractArgs) error {
if e.Start == e.End { if e.Start == e.End {
return fmt.Errorf("a cutscene script cannot be 0 bytes") return fmt.Errorf("a cutscene script cannot be 0 bytes")
} }
@ -56,7 +56,7 @@ type scriptSrc struct {
Script [][]string `yaml:"script"` Script [][]string `yaml:"script"`
} }
func (h *handler) Build(e assets.BuildEntry) error { func (h *handler) Build(e assets.BuildArgs) error {
inFileName := assetPath(e.AssetDir, e.Name) inFileName := assetPath(e.AssetDir, e.Name)
data, err := os.ReadFile(inFileName) data, err := os.ReadFile(inFileName)
if err != nil { if err != nil {
@ -110,6 +110,10 @@ func (h *handler) Build(e assets.BuildEntry) error {
return os.WriteFile(sourcePath(e.SrcDir, e.Name), []byte(sb.String()), 0644) return os.WriteFile(sourcePath(e.SrcDir, e.Name), []byte(sb.String()), 0644)
} }
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
return assets.InfoResult{}, nil
}
func assetPath(dir, name string) string { func assetPath(dir, name string) string {
if name == "" { if name == "" {
name = "cutscene_script" name = "cutscene_script"

View File

@ -0,0 +1,106 @@
package graphics
import (
"encoding/binary"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/util"
"io"
)
// GfxKind discriminates the pixel format of a graphics block as stored in
// the stage overlay. The numeric values mirror the on-disk encoding.
type GfxKind uint16

const (
	GfxBankNone = GfxKind(iota)
	GfxBank4bpp
	GfxBank8bpp
	GfxBank16bpp
	GfxBankCompressed
)

// GfxEntry is one upload record inside a GfxBlock: a destination rectangle
// (X, Y, Width, Height) and a pointer to the pixel data in the overlay.
// Field order and sizes must match the binary layout read via binary.Read.
type GfxEntry struct {
	X      uint16
	Y      uint16
	Width  uint16
	Height uint16
	Data   psx.Addr
}

// GfxBlock is a parsed graphics block: its kind/flags header followed by a
// variable-length list of entries (terminated on disk by X==Y==0xFFFF).
type GfxBlock struct {
	kind    GfxKind
	flags   uint16
	entries []GfxEntry
}

// Gfx is the full graphics table of a stage: the unique parsed blocks plus,
// for each slot of the original pointer array, the index of the block it
// referenced (-1 for null slots), preserving order and duplicates.
type Gfx struct {
	blocks  []GfxBlock
	indices []int
}
// ReadGraphics parses the stage graphics table located at off inside r.
// The table is an array of pointers to graphics blocks; since every block is
// stored before the array itself, the array's end is detected by the first
// value that is >= off. It returns the parsed blocks (with per-slot indices
// preserving the original order, including duplicates) and the merged file
// range occupied by the blocks plus the pointer array.
func ReadGraphics(r io.ReadSeeker, off psx.Addr) (Gfx, datarange.DataRange, error) {
	if err := off.MoveFile(r, psx.RamStageBegin); err != nil {
		return Gfx{}, datarange.DataRange{}, err
	}
	// all the offsets are before the array, so it is easy to find where the offsets array ends
	var blockOffsets []psx.Addr
	for {
		var offBank psx.Addr
		if err := binary.Read(r, binary.LittleEndian, &offBank); err != nil {
			return Gfx{}, datarange.DataRange{}, err
		}
		if offBank >= off {
			break
		}
		blockOffsets = append(blockOffsets, offBank)
	}
	// the order of each GfxBlock must be preserved; pool maps each unique
	// block offset to its index in blocks so duplicate pointers share one entry
	pool := map[psx.Addr]int{}
	pool[psx.RamNull] = -1
	var blocks []GfxBlock
	var ranges []datarange.DataRange
	for _, blockOffset := range util.SortUniqueOffsets(blockOffsets) {
		if blockOffset == psx.RamNull { // exception for ST0
			continue
		}
		if err := blockOffset.MoveFile(r, psx.RamStageBegin); err != nil {
			return Gfx{}, datarange.DataRange{}, err
		}
		var block GfxBlock
		if err := binary.Read(r, binary.LittleEndian, &block.kind); err != nil {
			return Gfx{}, datarange.DataRange{}, err
		}
		if err := binary.Read(r, binary.LittleEndian, &block.flags); err != nil {
			return Gfx{}, datarange.DataRange{}, err
		}
		// a 0xFFFF/0xFFFF header is a header-only block with no entries;
		// record just its 4-byte range and move on
		if block.kind == GfxKind(0xFFFF) && block.flags == 0xFFFF { // exception for ST0
			pool[blockOffset] = len(blocks)
			blocks = append(blocks, block)
			ranges = append(ranges, datarange.FromAddr(blockOffset, 4))
			continue
		}
		// entries follow the header until the X==Y==0xFFFF terminator
		for {
			var entry GfxEntry
			if err := binary.Read(r, binary.LittleEndian, &entry); err != nil {
				return Gfx{}, datarange.DataRange{}, err
			}
			if entry.X == 0xFFFF && entry.Y == 0xFFFF {
				break
			}
			block.entries = append(block.entries, entry)
		}
		pool[blockOffset] = len(blocks)
		blocks = append(blocks, block)
		// 4-byte header + 12 bytes per entry + 4-byte terminator
		ranges = append(ranges, datarange.FromAddr(blockOffset, 4+len(block.entries)*12+4))
	}
	// rebuild the slot list in original order, resolving duplicates via pool
	var g Gfx
	for _, blockOffset := range blockOffsets {
		g.indices = append(g.indices, pool[blockOffset])
	}
	return g, datarange.MergeDataRanges(append(ranges, datarange.FromAddr(off, len(blockOffsets)*4))), nil
}

View File

@ -0,0 +1,190 @@
package layer
import (
"bytes"
"encoding/json"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/sotn"
"os"
"path"
"path/filepath"
)
// handler implements assets.Handler for stage layer data.
type handler struct{}

// Handler is the shared instance registered with the asset tool.
var Handler = &handler{}

// Name returns the asset kind used in config/assets.*.yaml entries.
func (h *handler) Name() string { return "layers" }
// Extract parses the layer table of a stage overlay and writes the result
// into e.AssetDir: a layers.json describing every room's fg/bg layers, one
// .bin file per tile map, and a .json plus four .bin files per tile
// definition. It is the counterpart of Build, which turns these files back
// into C source.
func (h *handler) Extract(e assets.ExtractArgs) error {
	r := bytes.NewReader(e.Data)
	header, err := sotn.ReadStageHeader(r)
	if err != nil {
		return err
	}
	l, _, err := readLayers(r, header.Layers)
	if err != nil {
		return fmt.Errorf("unable to read layers: %w", err)
	}
	tileMaps, tileMapsRange, err := readAllTileMaps(r, l)
	if err != nil {
		return fmt.Errorf("unable to gather all the tile maps: %w", err)
	}
	tileDefs, tileDefsRange, err := readAllTiledefs(r, l)
	if err != nil {
		return fmt.Errorf("unable to gather all the tile defs: %w", err)
	}
	// check for unused tile defs (CEN has one): any gap between the tile maps
	// and the referenced tile defs is assumed to hold 0x10-byte tiledef records
	for tileMapsRange.End() < tileDefsRange.Begin() {
		offset := tileDefsRange.Begin().Sum(-0x10)
		unusedTileDef, unusedTileDefRange, err := readTiledef(r, offset)
		if err != nil {
			return fmt.Errorf("there is a gap between tileMaps and tileDefs: %w", err)
		}
		tileDefs[offset] = unusedTileDef
		tileDefsRange = datarange.MergeDataRanges([]datarange.DataRange{tileDefsRange, unusedTileDefRange})
	}
	outFileName := path.Join(e.AssetDir, "layers.json")
	dir := filepath.Dir(outFileName)
	if err := os.MkdirAll(dir, 0755); err != nil {
		// handle the error in exactly one place: wrap and return instead of
		// logging and returning the same error twice
		return fmt.Errorf("failed to create directory %s: %w", dir, err)
	}
	content, err := json.MarshalIndent(l, "", " ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(outFileName, content, 0644); err != nil {
		return fmt.Errorf("unable to create layers file: %w", err)
	}
	// one raw .bin per tile map, named after its overlay offset
	for offset, data := range tileMaps {
		fileName := path.Join(e.AssetDir, tilemapFileName(offset))
		if err := os.WriteFile(fileName, data, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", fileName, err)
		}
	}
	// each tile def becomes four component .bin files plus a .json manifest
	// that records their names
	for offset, tileDefsData := range tileDefs {
		defs := tileDefPaths{
			Tiles:      tiledefIndicesFileName(offset),
			Pages:      tiledefPagesFileName(offset),
			Cluts:      tiledefClutsFileName(offset),
			Collisions: tiledefCollisionsFileName(offset),
		}
		if err := os.WriteFile(path.Join(e.AssetDir, defs.Tiles), tileDefsData.Tiles, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", defs.Tiles, err)
		}
		if err := os.WriteFile(path.Join(e.AssetDir, defs.Pages), tileDefsData.Pages, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", defs.Pages, err)
		}
		if err := os.WriteFile(path.Join(e.AssetDir, defs.Cluts), tileDefsData.Cluts, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", defs.Cluts, err)
		}
		if err := os.WriteFile(path.Join(e.AssetDir, defs.Collisions), tileDefsData.Cols, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", defs.Collisions, err)
		}
		content, err = json.MarshalIndent(defs, "", " ")
		if err != nil {
			return err
		}
		fileName := path.Join(e.AssetDir, tiledefFileName(offset))
		if err := os.WriteFile(fileName, content, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", fileName, err)
		}
	}
	return nil
}
// Build converts the previously extracted layers.json (and the tile map /
// tile def files it references) in e.AssetDir back into C source in e.SrcDir.
func (h *handler) Build(e assets.BuildArgs) error {
	layersFile := path.Join(e.AssetDir, "layers.json")
	return buildLayers(e.AssetDir, layersFile, e.SrcDir)
}
// Info re-parses the stage's layer data (the same walk Extract performs, but
// without writing any files) and reports where the layer header, tile maps
// and tile defs live in the overlay, for asset/splat metadata generation.
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
	r := bytes.NewReader(a.StageData)
	header, err := sotn.ReadStageHeader(r)
	if err != nil {
		return assets.InfoResult{}, err
	}
	l, layersRange, err := readLayers(r, header.Layers)
	if err != nil {
		return assets.InfoResult{}, fmt.Errorf("unable to read layers: %w", err)
	}
	_, tileMapsRange, err := readAllTileMaps(r, l)
	if err != nil {
		return assets.InfoResult{}, fmt.Errorf("unable to gather all the tile maps: %w", err)
	}
	tileDefs, tileDefsRange, err := readAllTiledefs(r, l)
	if err != nil {
		return assets.InfoResult{}, fmt.Errorf("unable to gather all the tile defs: %w", err)
	}
	// check for unused tile defs (CEN has one); keep in sync with Extract,
	// which performs the identical gap scan
	for tileMapsRange.End() < tileDefsRange.Begin() {
		offset := tileDefsRange.Begin().Sum(-0x10)
		unusedTileDef, unusedTileDefRange, err := readTiledef(r, offset)
		if err != nil {
			return assets.InfoResult{}, fmt.Errorf("there is a gap between tileMaps and tileDefs: %w", err)
		}
		tileDefs[offset] = unusedTileDef
		tileDefsRange = datarange.MergeDataRanges([]datarange.DataRange{tileDefsRange, unusedTileDefRange})
	}
	return assets.InfoResult{
		AssetEntries: []assets.InfoAssetEntry{
			{
				DataRange: layersRange,
				Kind:      "layers",
				Name:      "layers",
			},
		},
		// NOTE(review): both the tile-map and tile-def splat entries use the
		// Name "tile_data" — presumably intentional so they land in the same
		// splat segment; confirm this is not a copy-paste slip.
		SplatEntries: []assets.InfoSplatEntry{
			{
				DataRange: layersRange,
				Name:      "header",
				Comment:   "layers",
			},
			{
				DataRange: tileMapsRange,
				Name:      "tile_data",
				Comment:   "tile data",
			},
			{
				DataRange: tileDefsRange,
				Name:      "tile_data",
				Comment:   "tile definitions",
			},
		},
	}, nil
}
// tilemapFileName names the raw tile map extracted from the given overlay
// offset (offset is converted from RAM address to file offset).
func tilemapFileName(off psx.Addr) string {
	return fmt.Sprintf("tilemap_%05X.bin", off.Real(psx.RamStageBegin))
}

// tiledefFileName names the JSON manifest of a tile definition.
func tiledefFileName(off psx.Addr) string {
	return fmt.Sprintf("tiledef_%05X.json", off.Real(psx.RamStageBegin))
}

// tiledefPartFileName names one binary component of a tile definition;
// the four components share the "tiledef_<off>_<part>.bin" pattern.
func tiledefPartFileName(off psx.Addr, part string) string {
	return fmt.Sprintf("tiledef_%05X_%s.bin", off.Real(psx.RamStageBegin), part)
}

func tiledefIndicesFileName(off psx.Addr) string {
	return tiledefPartFileName(off, "tiles")
}

func tiledefPagesFileName(off psx.Addr) string {
	return tiledefPartFileName(off, "pages")
}

func tiledefClutsFileName(off psx.Addr) string {
	return tiledefPartFileName(off, "cluts")
}

func tiledefCollisionsFileName(off psx.Addr) string {
	return tiledefPartFileName(off, "cols")
}

View File

@ -1,20 +1,287 @@
package main package layer
import ( import (
"encoding/binary"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/util"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
"hash/fnv"
"io" "io"
"io/fs"
"os" "os"
"path" "path"
"sort" "slices"
"strconv"
"strings" "strings"
) )
// layerDef mirrors the on-disk layer record of a stage overlay: pointers to
// the tile map and tile definition data plus a packed bounds/flags word.
// Field order and sizes must match the binary layout exactly, as the struct
// is filled via binary.Read.
type layerDef struct {
	Data       psx.Addr
	Tiledef    psx.Addr
	PackedInfo uint32 // bits 0-23: left/top/right/bottom (6 bits each); bits 24-31: scroll mode and flags
	ZPriority  uint16
	UnkE       uint8
	UnkF       uint8
}

// layerUnpacked is the JSON-friendly form of layerDef written to
// layers.json: pointers become file names and PackedInfo is split into its
// individual fields. See layerDef.unpack for the exact bit assignments.
type layerUnpacked struct {
	Data          string `json:"data"`
	Tiledef       string `json:"tiledef"`
	Left          int    `json:"left"`
	Top           int    `json:"top"`
	Right         int    `json:"right"`
	Bottom        int    `json:"bottom"`
	ScrollMode    int    `json:"scrollMode"`
	IsSaveRoom    bool   `json:"isSaveRoom"`
	IsLoadingRoom bool   `json:"isLoadingRoom"`
	UnusedFlag    bool   `json:"unusedFlag"`
	ZPriority     int    `json:"zPriority"`
	UnkE          int    `json:"unkE"`
	UnkF          int    `json:"unkF"`
}

// roomLayers pairs the foreground and background layer of one room; either
// pointer may be nil when the room does not define that layer.
type roomLayers struct {
	fg *layerDef
	bg *layerDef
}
// tilemapFileSize computes the expected size in bytes of this layer's tile
// map from the room rectangle packed into PackedInfo: 512 bytes for every
// cell of the (right-left+1) x (bottom-top+1) grid.
func (l *layerDef) tilemapFileSize() int {
	left := int(l.PackedInfo & 0x3F)
	top := int((l.PackedInfo >> 6) & 0x3F)
	right := int((l.PackedInfo >> 12) & 0x3F)
	bottom := int((l.PackedInfo >> 18) & 0x3F)
	return (right - left + 1) * (bottom - top + 1) * 512
}
// unpack expands the packed binary representation into the JSON-friendly
// layerUnpacked form, resolving the data pointers to asset file names.
func (l *layerDef) unpack() layerUnpacked {
	// bits extracts a masked field of PackedInfo at the given shift
	bits := func(shift uint, mask uint32) int {
		return int((l.PackedInfo >> shift) & mask)
	}
	flags := bits(24, 0xFF)
	return layerUnpacked{
		Data:          tilemapFileName(l.Data),
		Tiledef:       tiledefFileName(l.Tiledef),
		Left:          bits(0, 0x3F),
		Top:           bits(6, 0x3F),
		Right:         bits(12, 0x3F),
		Bottom:        bits(18, 0x3F),
		ScrollMode:    flags & 0x1F,
		IsSaveRoom:    flags&0x20 != 0,
		IsLoadingRoom: flags&0x40 != 0,
		UnusedFlag:    flags&0x80 != 0,
		ZPriority:     int(l.ZPriority),
		UnkE:          int(l.UnkE),
		UnkF:          int(l.UnkF),
	}
}
// MarshalJSON serializes a room as an object holding only the layers the
// room actually has ("fg" and/or "bg"), each in unpacked form.
func (r roomLayers) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if fg := r.fg; fg != nil {
		out["fg"] = fg.unpack()
	}
	if bg := r.bg; bg != nil {
		out["bg"] = bg.unpack()
	}
	return json.Marshal(out)
}
// readLayers parses the per-room layer table at off. The table is an array
// of (fg, bg) pointer pairs whose length is not stored; the end is detected
// heuristically when a pair stops looking like a valid in-overlay pointer.
// It returns one roomLayers per room and the file range spanning the layer
// definitions through the end of the pointer array. A zero off (stage with
// no layer table) yields nil with no error.
func readLayers(r io.ReadSeeker, off psx.Addr) ([]roomLayers, datarange.DataRange, error) {
	if off == 0 {
		return nil, datarange.DataRange{}, nil
	}
	if err := off.MoveFile(r, psx.RamStageBegin); err != nil {
		return nil, datarange.DataRange{}, err
	}
	// when the data no longer makes sense as a pointer pair, we assume we
	// reached the end of the array; valid pointers lie strictly between the
	// overlay base and the array itself
	var layerOffsets []psx.Addr
	layersOff := make([]psx.Addr, 2)
	for {
		if err := binary.Read(r, binary.LittleEndian, layersOff); err != nil {
			return nil, datarange.DataRange{}, err
		}
		if layersOff[0] <= psx.RamStageBegin || layersOff[0] >= off ||
			layersOff[1] <= psx.RamStageBegin || layersOff[1] >= off {
			break
		}
		layerOffsets = append(layerOffsets, layersOff...)
	}
	// Creates a map of layers, so we can re-use them when a layer is used by
	// multiple rooms; an all-zero record maps to nil (no layer)
	pool := map[psx.Addr]*layerDef{}
	pool[psx.Addr(0)] = nil
	for _, layerOffset := range layerOffsets {
		if _, exists := pool[layerOffset]; exists {
			continue
		}
		if err := layerOffset.MoveFile(r, psx.RamStageBegin); err != nil {
			return nil, datarange.DataRange{}, err
		}
		var l layerDef
		if err := binary.Read(r, binary.LittleEndian, &l); err != nil {
			return nil, datarange.DataRange{}, err
		}
		if l.Data != psx.RamNull || l.Tiledef != psx.RamNull || l.PackedInfo != 0 {
			pool[layerOffset] = &l
		} else {
			pool[layerOffset] = nil
		}
	}
	// creates the real array with all the layers mapped; entries are stored
	// as consecutive (fg, bg) pairs
	count := len(layerOffsets) >> 1
	roomsLayers := make([]roomLayers, count)
	for i := 0; i < count; i++ {
		roomsLayers[i].fg = pool[layerOffsets[i*2+0]]
		roomsLayers[i].bg = pool[layerOffsets[i*2+1]]
	}
	// range runs from the lowest layer definition to the end of the pointer
	// array (count pairs of two 4-byte pointers = count*8 bytes).
	// NOTE(review): slices.Min panics on an empty slice — this assumes every
	// non-zero table contains at least one valid pair; confirm for all stages.
	return roomsLayers, datarange.New(slices.Min(layerOffsets), off.Sum(count*8)), nil
}
// buildLayers generates "layers.h" from the layers JSON description and
// schedules the build of every tilemap and tiledef asset it references.
// Identical layers shared by multiple rooms are pooled so each is emitted
// only once; index 0 of the emitted array is the reserved "no layer" entry.
func buildLayers(inputDir string, fileName string, outputDir string) error {
	// getHash produces a stable identity for a layer so duplicates can be pooled.
	getHash := func(l layerUnpacked) string {
		return fmt.Sprintf("%s-%s-%d-%d-%d-%d", l.Data, l.Tiledef, l.Left, l.Top, l.Right, l.Bottom)
	}
	// pack converts an unpacked layer into the fields emitted into the C array.
	pack := func(l layerUnpacked) map[string]interface{} {
		return map[string]interface{}{
			"data":    makeSymbolFromFileName(l.Data),
			"tiledef": makeSymbolFromFileName(l.Tiledef),
			"params": l.Left | (l.Top << 6) | (l.Right << 12) | (l.Bottom << 18) | (l.ScrollMode << 24) |
				(util.Btoi(l.IsSaveRoom) << 29) | (util.Btoi(l.IsLoadingRoom) << 30) | (util.Btoi(l.UnusedFlag) << 31),
			"zPriority": l.ZPriority,
			"unkE":      l.UnkE,
			"unkF":      l.UnkF,
		}
	}
	data, err := os.ReadFile(fileName)
	if err != nil {
		return err
	}
	var roomsLayers []map[string]*layerUnpacked
	if err := json.Unmarshal(data, &roomsLayers); err != nil {
		return err
	}
	// collect the set of unique tilemap and tiledef assets referenced by any room
	tilemaps := map[string]struct{}{}
	tiledefs := map[string]struct{}{}
	for _, room := range roomsLayers {
		if layer, found := room["fg"]; found {
			tilemaps[layer.Data] = struct{}{}
			tiledefs[layer.Tiledef] = struct{}{}
		}
		if layer, found := room["bg"]; found {
			tilemaps[layer.Data] = struct{}{}
			tiledefs[layer.Tiledef] = struct{}{}
		}
	}
	// also build tiledefs present on disk even if unreferenced by any layer
	files, err := os.ReadDir(inputDir)
	if err != nil {
		return err
	}
	for _, file := range files {
		if !file.IsDir() && strings.HasPrefix(file.Name(), "tiledef_") && strings.HasSuffix(file.Name(), ".json") {
			tiledefs[file.Name()] = struct{}{}
		}
	}
	// build all referenced assets concurrently
	eg := errgroup.Group{}
	for name := range tilemaps {
		fullPath := path.Join(path.Dir(fileName), name)
		symbol := makeSymbolFromFileName(name)
		eg.Go(func() error {
			return buildGenericU16(fullPath, symbol, outputDir)
		})
	}
	for name := range tiledefs {
		fullPath := path.Join(path.Dir(fileName), name)
		symbol := makeSymbolFromFileName(name)
		eg.Go(func() error {
			return buildTiledefs(fullPath, symbol, outputDir)
		})
	}
	if err := eg.Wait(); err != nil {
		return err
	}
	var layers []map[string]interface{} // first layer is always empty
	layers = append(layers, map[string]interface{}{})
	// pool maps a layer hash to its index inside `layers`; "" is the empty layer.
	// NOTE: the previous revision also built a `roomLayersId` slice here, but it
	// was never read afterwards (dead code), so it has been removed.
	pool := map[string]int{}
	pool[""] = 0
	for _, rl := range roomsLayers {
		if l, found := rl["fg"]; found {
			hash := getHash(*l)
			if _, exists := pool[hash]; !exists {
				pool[hash] = len(layers)
				layers = append(layers, pack(*l))
			}
		}
		if l, found := rl["bg"]; found {
			hash := getHash(*l)
			if _, exists := pool[hash]; !exists {
				pool[hash] = len(layers)
				layers = append(layers, pack(*l))
			}
		}
	}
	sb := strings.Builder{}
	sb.WriteString("// clang-format off\n")
	for name := range tilemaps {
		symbol := makeSymbolFromFileName(name)
		sb.WriteString(fmt.Sprintf("extern u16 %s[];\n", symbol))
	}
	for name := range tiledefs {
		symbol := makeSymbolFromFileName(name)
		sb.WriteString(fmt.Sprintf("extern TileDefinition %s[];\n", symbol))
	}
	sb.WriteString("static MyLayer layers[] = {\n")
	sb.WriteString(" { NULL, NULL, 0, 0, 0, 0 },\n")
	for _, l := range layers[1:] {
		sb.WriteString(fmt.Sprintf(" { %s, %s, 0x%08X, 0x%02X, %d, %d },\n",
			makeSymbolFromFileName(l["data"].(string)),
			makeSymbolFromFileName(l["tiledef"].(string)),
			l["params"],
			l["zPriority"],
			l["unkE"],
			l["unkF"],
		))
	}
	sb.WriteString("};\n")
	sb.WriteString("MyRoomDef OVL_EXPORT(rooms_layers)[] = {\n")
	for _, rl := range roomsLayers {
		if l, found := rl["fg"]; found {
			sb.WriteString(fmt.Sprintf(" { &layers[%d], ", pool[getHash(*l)]))
		} else {
			sb.WriteString(" { &layers[0], ") // fmt.Sprintf with no args removed (S1039)
		}
		if l, found := rl["bg"]; found {
			sb.WriteString(fmt.Sprintf("&layers[%d] },\n", pool[getHash(*l)]))
		} else {
			sb.WriteString("&layers[0] },\n")
		}
	}
	sb.WriteString("};\n")
	return os.WriteFile(path.Join(outputDir, "layers.h"), []byte(sb.String()), 0644)
}
// makeSymbolFromFileName derives a C symbol name from a file name by taking
// the base name and stripping everything from the first dot onwards
// (e.g. "assets/tiledef_1.json" -> "tiledef_1").
func makeSymbolFromFileName(fileName string) string {
	return strings.Split(path.Base(fileName), ".")[0]
}
@ -111,302 +378,3 @@ func buildTiledefs(fileName string, symbol string, outputDir string) error {
return nil return nil
} }
// buildLayers generates "layers.h" from the layers JSON description and
// schedules the build of every tilemap and tiledef asset it references.
// Identical layers shared by multiple rooms are pooled so each is emitted
// only once; index 0 of the emitted array is the reserved "no layer" entry.
func buildLayers(inputDir string, fileName string, outputDir string) error {
	// getHash produces a stable identity for a layer so duplicates can be pooled.
	getHash := func(l layerUnpacked) string {
		return fmt.Sprintf("%s-%s-%d-%d-%d-%d", l.Data, l.Tiledef, l.Left, l.Top, l.Right, l.Bottom)
	}
	// pack converts an unpacked layer into the fields emitted into the C array.
	pack := func(l layerUnpacked) map[string]interface{} {
		return map[string]interface{}{
			"data":    makeSymbolFromFileName(l.Data),
			"tiledef": makeSymbolFromFileName(l.Tiledef),
			"params": l.Left | (l.Top << 6) | (l.Right << 12) | (l.Bottom << 18) | (l.ScrollMode << 24) |
				(btoi(l.IsSaveRoom) << 29) | (btoi(l.IsLoadingRoom) << 30) | (btoi(l.UnusedFlag) << 31),
			"zPriority": l.ZPriority,
			"unkE":      l.UnkE,
			"unkF":      l.UnkF,
		}
	}
	data, err := os.ReadFile(fileName)
	if err != nil {
		return err
	}
	var roomsLayers []map[string]*layerUnpacked
	if err := json.Unmarshal(data, &roomsLayers); err != nil {
		return err
	}
	// collect the set of unique tilemap and tiledef assets referenced by any room
	tilemaps := map[string]struct{}{}
	tiledefs := map[string]struct{}{}
	for _, room := range roomsLayers {
		if layer, found := room["fg"]; found {
			tilemaps[layer.Data] = struct{}{}
			tiledefs[layer.Tiledef] = struct{}{}
		}
		if layer, found := room["bg"]; found {
			tilemaps[layer.Data] = struct{}{}
			tiledefs[layer.Tiledef] = struct{}{}
		}
	}
	// also build tiledefs present on disk even if unreferenced by any layer
	files, err := os.ReadDir(inputDir)
	if err != nil {
		return err
	}
	for _, file := range files {
		if !file.IsDir() && strings.HasPrefix(file.Name(), "tiledef_") && strings.HasSuffix(file.Name(), ".json") {
			tiledefs[file.Name()] = struct{}{}
		}
	}
	// build all referenced assets concurrently
	eg := errgroup.Group{}
	for name := range tilemaps {
		fullPath := path.Join(path.Dir(fileName), name)
		symbol := makeSymbolFromFileName(name)
		eg.Go(func() error {
			return buildGenericU16(fullPath, symbol, outputDir)
		})
	}
	for name := range tiledefs {
		fullPath := path.Join(path.Dir(fileName), name)
		symbol := makeSymbolFromFileName(name)
		eg.Go(func() error {
			return buildTiledefs(fullPath, symbol, outputDir)
		})
	}
	if err := eg.Wait(); err != nil {
		return err
	}
	var layers []map[string]interface{} // first layer is always empty
	layers = append(layers, map[string]interface{}{})
	// pool maps a layer hash to its index inside `layers`; "" is the empty layer.
	// NOTE: the previous revision also built a `roomLayersId` slice here, but it
	// was never read afterwards (dead code), so it has been removed.
	pool := map[string]int{}
	pool[""] = 0
	for _, rl := range roomsLayers {
		if l, found := rl["fg"]; found {
			hash := getHash(*l)
			if _, exists := pool[hash]; !exists {
				pool[hash] = len(layers)
				layers = append(layers, pack(*l))
			}
		}
		if l, found := rl["bg"]; found {
			hash := getHash(*l)
			if _, exists := pool[hash]; !exists {
				pool[hash] = len(layers)
				layers = append(layers, pack(*l))
			}
		}
	}
	sb := strings.Builder{}
	sb.WriteString("// clang-format off\n")
	for name := range tilemaps {
		symbol := makeSymbolFromFileName(name)
		sb.WriteString(fmt.Sprintf("extern u16 %s[];\n", symbol))
	}
	for name := range tiledefs {
		symbol := makeSymbolFromFileName(name)
		sb.WriteString(fmt.Sprintf("extern TileDefinition %s[];\n", symbol))
	}
	sb.WriteString("static MyLayer layers[] = {\n")
	sb.WriteString(" { NULL, NULL, 0, 0, 0, 0 },\n")
	for _, l := range layers[1:] {
		sb.WriteString(fmt.Sprintf(" { %s, %s, 0x%08X, 0x%02X, %d, %d },\n",
			makeSymbolFromFileName(l["data"].(string)),
			makeSymbolFromFileName(l["tiledef"].(string)),
			l["params"],
			l["zPriority"],
			l["unkE"],
			l["unkF"],
		))
	}
	sb.WriteString("};\n")
	sb.WriteString("MyRoomDef OVL_EXPORT(rooms_layers)[] = {\n")
	for _, rl := range roomsLayers {
		if l, found := rl["fg"]; found {
			sb.WriteString(fmt.Sprintf(" { &layers[%d], ", pool[getHash(*l)]))
		} else {
			sb.WriteString(" { &layers[0], ") // fmt.Sprintf with no args removed (S1039)
		}
		if l, found := rl["bg"]; found {
			sb.WriteString(fmt.Sprintf("&layers[%d] },\n", pool[getHash(*l)]))
		} else {
			sb.WriteString("&layers[0] },\n")
		}
	}
	sb.WriteString("};\n")
	return os.WriteFile(path.Join(outputDir, "layers.h"), []byte(sb.String()), 0644)
}
// buildEntityLayouts converts the entity layout JSON asset back into two C
// source files: e_laydef.c (the per-room pointer tables) and e_layout.c
// (the raw entry data), reconstructing both the X-ordered and the Y-ordered
// copies of the layout.
func buildEntityLayouts(fileName string, outputDir string) error {
	ovlName := path.Base(outputDir)
	// writeLayoutEntries emits every bank as raw u16 words, validating the
	// -2/-2 opening and -1/-1 terminating sentinel entries of each bank.
	writeLayoutEntries := func(sb *strings.Builder, banks [][]layoutEntry, align4 bool) error {
		nWritten := 0
		for i, entries := range banks {
			// do a sanity check on the entries as we do not want to build something that will cause the game to crash
			if entries[0].X != -2 || entries[0].Y != -2 {
				return fmt.Errorf("layout entity bank %d needs to have a X:-2 and Y:-2 entry at the beginning", i)
			}
			lastEntry := entries[len(entries)-1]
			if lastEntry.X != -1 || lastEntry.Y != -1 {
				return fmt.Errorf("layout entity bank %d needs to have a X:-1 and Y:-1 entry at the end", i)
			}
			sb.WriteString(fmt.Sprintf("//%d\n", nWritten)) //label each block with offsets
			for _, e := range entries {
				var entityIDStr string
				if int(e.Flags) != 0 {
					// This will only ever be 0xA001.
					id, _ := strconv.ParseInt(strings.Replace(e.ID, "0x", "", -1), 16, 16)
					entityIDStr = fmt.Sprintf("0x%04X", (int(e.Flags)<<8)|int(id))
				} else {
					entityIDStr = e.ID
				}
				sb.WriteString(fmt.Sprintf(" 0x%04X, 0x%04X, %s, 0x%04X, 0x%04X,\n",
					uint16(e.X), uint16(e.Y), entityIDStr, int(e.Slot)|(int(e.SpawnID)<<8), e.Params))
			}
			nWritten += len(entries)
		}
		// each entry is 5 u16 words, so an odd total leaves the array only
		// 2-byte aligned; pad to keep the next symbol 4-byte aligned
		if align4 && nWritten%2 != 0 {
			sb.WriteString(" 0, // padding\n")
		}
		return nil
	}
	// makeSortedBanks returns a copy of the banks with their interior entries
	// sorted by X, or by Y using YOrder to break ties, while keeping the
	// sentinel first/last entries in place.
	makeSortedBanks := func(banks [][]layoutEntry, sortByX bool) [][]layoutEntry {
		var toSort []layoutEntry
		var less func(i, j int) bool
		if sortByX {
			less = func(i, j int) bool {
				return toSort[i].X < toSort[j].X
			}
		} else {
			less = func(i, j int) bool {
				if toSort[i].Y < toSort[j].Y {
					return true
				}
				if toSort[i].Y > toSort[j].Y {
					return false
				}
				if toSort[i].YOrder != nil && toSort[j].YOrder != nil {
					return *toSort[i].YOrder < *toSort[j].YOrder
				}
				return i < j
			}
		}
		sorting := make([][]layoutEntry, len(banks))
		for i, entries := range banks {
			sorting[i] = make([]layoutEntry, len(entries)-2)
			if len(sorting[i]) > 0 { // do not sort if the list is empty
				copy(sorting[i], entries[1:len(entries)-1]) // do not sort the -2 and -1 entries
				toSort = sorting[i]
				sort.SliceStable(toSort, less)
			}
			// put back the -2 and -1
			sorting[i] = append([]layoutEntry{entries[0]}, sorting[i]...)
			sorting[i] = append(sorting[i], entries[len(entries)-1])
		}
		return sorting
	}
	data, err := os.ReadFile(fileName)
	if err != nil {
		return err
	}
	var el layouts
	if err := json.Unmarshal(data, &el); err != nil {
		return err
	}
	// derive a per-overlay symbol suffix so multiple overlays can link together
	h := fnv.New32()
	_, _ = h.Write([]byte(outputDir)) // fnv.Write never returns an error
	symbolVariant := strconv.FormatUint(uint64(h.Sum32()), 16)
	symbolName := fmt.Sprintf("entity_layout_%s", symbolVariant)
	// offsets[i] is the u16-word offset of bank i; each entry is 5 words
	offsets := make([]int, len(el.Entities))
	offsetCur := 0
	for i := 0; i < len(el.Entities); i++ {
		offsets[i] = offsetCur
		offsetCur += len(el.Entities[i]) * 5
	}
	sbHeader := strings.Builder{}
	sbHeader.WriteString("#include <stage.h>\n\n")
	sbHeader.WriteString("#include \"common.h\"\n\n")
	sbHeader.WriteString("// clang-format off\n")
	sbHeader.WriteString(fmt.Sprintf("extern LayoutEntity %s_x[];\n", symbolName))
	sbHeader.WriteString(fmt.Sprintf("LayoutEntity* %s_pStObjLayoutHorizontal[] = {\n", strings.ToUpper(ovlName)))
	for _, i := range el.Indices {
		sbHeader.WriteString(fmt.Sprintf(" &%s_x[%d],\n", symbolName, offsets[i]/5))
	}
	sbHeader.WriteString("};\n") // plain WriteString: no-arg fmt.Sprintf removed (S1039)
	sbHeader.WriteString(fmt.Sprintf("extern LayoutEntity %s_y[];\n", symbolName))
	sbHeader.WriteString(fmt.Sprintf("LayoutEntity* %s_pStObjLayoutVertical[] = {\n", strings.ToUpper(ovlName)))
	for _, i := range el.Indices {
		sbHeader.WriteString(fmt.Sprintf(" &%s_y[%d],\n", symbolName, offsets[i]/5))
	}
	sbHeader.WriteString("};\n")
	sbData := strings.Builder{}
	sbData.WriteString(fmt.Sprintf("#include \"%s.h\"\n\n", ovlName))
	sbData.WriteString("// clang-format off\n")
	sbData.WriteString(fmt.Sprintf("u16 %s_x[] = {\n", symbolName))
	if err := writeLayoutEntries(&sbData, makeSortedBanks(el.Entities, true), false); err != nil {
		return fmt.Errorf("unable to build X entity layout: %w", err)
	}
	sbData.WriteString("};\n")
	sbData.WriteString(fmt.Sprintf("u16 %s_y[] = {\n", symbolName))
	if err := writeLayoutEntries(&sbData, makeSortedBanks(el.Entities, false), true); err != nil {
		return fmt.Errorf("unable to build Y entity layout: %w", err)
	}
	sbData.WriteString("};\n")
	if err := os.WriteFile(path.Join(outputDir, "e_layout.c"), []byte(sbData.String()), 0644); err != nil {
		return err
	}
	return os.WriteFile(path.Join(outputDir, "e_laydef.c"), []byte(sbHeader.String()), 0644)
}
// buildAll builds every known asset description found in inputDir into
// outputDir, running the builders concurrently. A missing asset description
// file is not treated as an error.
func buildAll(inputDir string, outputDir string) error {
	if err := os.MkdirAll(outputDir, 0755); err != nil {
		return err
	}
	// ignoreMissing swallows fs.ErrNotExist so optional assets can be absent.
	ignoreMissing := func(err error) error {
		if err != nil && !errors.Is(err, fs.ErrNotExist) {
			return err
		}
		return nil
	}
	var eg errgroup.Group
	eg.Go(func() error {
		return ignoreMissing(buildLayers(inputDir, path.Join(inputDir, "layers.json"), outputDir))
	})
	eg.Go(func() error {
		return ignoreMissing(buildEntityLayouts(path.Join(inputDir, "entity_layouts.json"), outputDir))
	})
	return eg.Wait()
}

View File

@ -1,18 +1,18 @@
package main package layer
import ( import (
"encoding/binary" "encoding/binary"
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"os" "io"
) )
type tileDef struct { type tileDef struct {
tiles []byte Tiles []byte
pages []byte Pages []byte
cluts []byte Cluts []byte
cols []byte Cols []byte
} }
type tileDefPaths struct { type tileDefPaths struct {
@ -22,61 +22,61 @@ type tileDefPaths struct {
Collisions string `json:"collisions"` Collisions string `json:"collisions"`
} }
func readTiledef(file *os.File, off psx.Addr) (tileDef, datarange.DataRange, error) { func readTiledef(r io.ReadSeeker, off psx.Addr) (tileDef, datarange.DataRange, error) {
if err := off.MoveFile(file, psx.RamStageBegin); err != nil { if err := off.MoveFile(r, psx.RamStageBegin); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
offsets := make([]psx.Addr, 4) offsets := make([]psx.Addr, 4)
if err := binary.Read(file, binary.LittleEndian, offsets); err != nil { if err := binary.Read(r, binary.LittleEndian, offsets); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
td := tileDef{ td := tileDef{
tiles: make([]byte, offsets[1]-offsets[0]), Tiles: make([]byte, offsets[1]-offsets[0]),
pages: make([]byte, offsets[2]-offsets[1]), Pages: make([]byte, offsets[2]-offsets[1]),
cluts: make([]byte, offsets[3]-offsets[2]), Cluts: make([]byte, offsets[3]-offsets[2]),
cols: make([]byte, off-offsets[3]), Cols: make([]byte, off-offsets[3]),
} }
if err := offsets[0].MoveFile(file, psx.RamStageBegin); err != nil { if err := offsets[0].MoveFile(r, psx.RamStageBegin); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if _, err := file.Read(td.tiles); err != nil { if _, err := r.Read(td.Tiles); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if err := offsets[1].MoveFile(file, psx.RamStageBegin); err != nil { if err := offsets[1].MoveFile(r, psx.RamStageBegin); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if _, err := file.Read(td.pages); err != nil { if _, err := r.Read(td.Pages); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if err := offsets[2].MoveFile(file, psx.RamStageBegin); err != nil { if err := offsets[2].MoveFile(r, psx.RamStageBegin); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if _, err := file.Read(td.cluts); err != nil { if _, err := r.Read(td.Cluts); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if err := offsets[3].MoveFile(file, psx.RamStageBegin); err != nil { if err := offsets[3].MoveFile(r, psx.RamStageBegin); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
if _, err := file.Read(td.cols); err != nil { if _, err := r.Read(td.Cols); err != nil {
return tileDef{}, datarange.DataRange{}, err return tileDef{}, datarange.DataRange{}, err
} }
return td, datarange.New(offsets[0], off.Sum(0x10)), nil return td, datarange.New(offsets[0], off.Sum(0x10)), nil
} }
func readAllTiledefs(file *os.File, roomLayers []roomLayers) (map[psx.Addr]tileDef, datarange.DataRange, error) { func readAllTiledefs(r io.ReadSeeker, roomLayers []roomLayers) (map[psx.Addr]tileDef, datarange.DataRange, error) {
ranges := []datarange.DataRange{} var ranges []datarange.DataRange
processed := map[psx.Addr]tileDef{} processed := map[psx.Addr]tileDef{}
for _, rl := range roomLayers { for _, rl := range roomLayers {
if rl.fg != nil { if rl.fg != nil {
if _, found := processed[rl.fg.Tiledef]; !found { if _, found := processed[rl.fg.Tiledef]; !found {
td, r, err := readTiledef(file, rl.fg.Tiledef) td, r, err := readTiledef(r, rl.fg.Tiledef)
if err != nil { if err != nil {
return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err) return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
} }
@ -86,7 +86,7 @@ func readAllTiledefs(file *os.File, roomLayers []roomLayers) (map[psx.Addr]tileD
} }
if rl.bg != nil { if rl.bg != nil {
if _, found := processed[rl.bg.Tiledef]; !found { if _, found := processed[rl.bg.Tiledef]; !found {
td, r, err := readTiledef(file, rl.bg.Tiledef) td, r, err := readTiledef(r, rl.bg.Tiledef)
if err != nil { if err != nil {
return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err) return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
} }

View File

@ -1,30 +1,30 @@
package main package layer
import ( import (
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"os" "io"
) )
func readTilemap(file *os.File, layer *layer) ([]byte, datarange.DataRange, error) { func readTilemap(r io.ReadSeeker, layer *layerDef) ([]byte, datarange.DataRange, error) {
if err := layer.Data.MoveFile(file, psx.RamStageBegin); err != nil { if err := layer.Data.MoveFile(r, psx.RamStageBegin); err != nil {
return nil, datarange.DataRange{}, err return nil, datarange.DataRange{}, err
} }
data := make([]byte, layer.tilemapFileSize()) data := make([]byte, layer.tilemapFileSize())
if _, err := file.Read(data); err != nil { if _, err := r.Read(data); err != nil {
return nil, datarange.DataRange{}, err return nil, datarange.DataRange{}, err
} }
return data, datarange.FromAddr(layer.Data, len(data)), nil return data, datarange.FromAddr(layer.Data, len(data)), nil
} }
func readAllTileMaps(file *os.File, roomLayers []roomLayers) (map[psx.Addr][]byte, datarange.DataRange, error) { func readAllTileMaps(r io.ReadSeeker, roomLayers []roomLayers) (map[psx.Addr][]byte, datarange.DataRange, error) {
ranges := []datarange.DataRange{} var ranges []datarange.DataRange
processed := map[psx.Addr][]byte{} processed := map[psx.Addr][]byte{}
for _, rl := range roomLayers { for _, rl := range roomLayers {
if rl.fg != nil { if rl.fg != nil {
if _, found := processed[rl.fg.Data]; !found { if _, found := processed[rl.fg.Data]; !found {
td, r, err := readTilemap(file, rl.fg) td, r, err := readTilemap(r, rl.fg)
if err != nil { if err != nil {
return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err) return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
} }
@ -34,7 +34,7 @@ func readAllTileMaps(file *os.File, roomLayers []roomLayers) (map[psx.Addr][]byt
} }
if rl.bg != nil { if rl.bg != nil {
if _, found := processed[rl.bg.Data]; !found { if _, found := processed[rl.bg.Data]; !found {
td, r, err := readTilemap(file, rl.bg) td, r, err := readTilemap(r, rl.bg)
if err != nil { if err != nil {
return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err) return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
} }

View File

@ -0,0 +1,101 @@
package layout
import (
"bytes"
"encoding/json"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/graphics"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/sotn"
"io"
"os"
"path"
)
// entryCount is the number of entity-layout table slots per stage; it
// appears to be fixed across overlays (assumption inherited from the
// extraction code — confirm against the game data if it ever varies).
const entryCount = 53 // the number seems to be fixed

// handler implements the asset handler for entity layouts.
type handler struct{}

// Handler is the shared instance used by the asset pipeline.
var Handler = &handler{}

// Name returns the asset kind identifier for this handler.
func (h *handler) Name() string { return "layout" }
// Extract reads the entity layouts out of the overlay binary and writes
// them to the asset directory as an indented JSON file.
func (h *handler) Extract(e assets.ExtractArgs) error {
	reader := bytes.NewReader(e.Data)
	off, err := layoutOffset(reader)
	if err != nil {
		return err
	}
	overlay := path.Base(e.AssetDir)
	extracted, _, err := readEntityLayout(reader, overlay, off, entryCount, true)
	if err != nil {
		return err
	}
	content, err := json.MarshalIndent(extracted, "", " ")
	if err != nil {
		return err
	}
	return os.WriteFile(assetPath(e.AssetDir, e.Name), content, 0644)
}
// Build converts the previously extracted layout JSON asset back into the
// generated C sources inside the overlay source directory.
func (h *handler) Build(e assets.BuildArgs) error {
	return buildEntityLayouts(assetPath(e.AssetDir, e.Name), e.SrcDir)
}
// assetPath returns the location of the serialized JSON asset for name
// inside dir.
func assetPath(dir, name string) string {
	return path.Join(dir, name+".json")
}
// Info locates the entity layout data inside the stage binary and reports
// the asset entry plus the two splat entries (pointer tables and raw data)
// that describe it.
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
	r := bytes.NewReader(a.StageData)
	layoutOff, err := layoutOffset(r)
	if err != nil {
		return assets.InfoResult{}, err
	}
	// use the shared entryCount constant instead of hard-coding 53 a second time
	_, layoutsRange, err := readEntityLayout(r, "dummy", layoutOff, entryCount, true)
	if err != nil {
		return assets.InfoResult{}, fmt.Errorf("unable to gather all entity layouts: %w", err)
	}
	return assets.InfoResult{
		AssetEntries: []assets.InfoAssetEntry{
			{
				DataRange: layoutsRange[0],
				Kind:      "layout",
				Name:      "entity_layouts",
			},
		},
		SplatEntries: []assets.InfoSplatEntry{
			{
				DataRange: layoutsRange[0],
				Name:      "e_laydef",
				Comment:   "layout entries header",
			},
			{
				DataRange: layoutsRange[1],
				Name:      "e_layout",
				Comment:   "layout entries data",
			},
		},
	}, nil
}
// layoutOffset returns the RAM address of the entity layout table within
// the stage. When the stage header does not provide it, the offset is
// inferred from where the graphics data ends.
func layoutOffset(r io.ReadSeeker) (psx.Addr, error) {
	header, err := sotn.ReadStageHeader(r)
	if err != nil {
		return psx.RamNull, err
	}
	if header.Layouts != psx.RamNull {
		return header.Layouts, nil
	}
	// ⚠️ assumption: some overlays have this field nulled, so the offset must
	// be found manually; it usually sits right after header.Graphics —
	// TODO confirm this holds for every overlay.
	_, graphicsRange, err := graphics.ReadGraphics(r, header.Graphics)
	if err != nil {
		return psx.RamNull, fmt.Errorf("unable to gather all graphics: %w", err)
	}
	return graphicsRange.End(), nil
}

View File

@ -0,0 +1,305 @@
package layout
import (
"encoding/binary"
"encoding/json"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/sotn"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/util"
"hash/fnv"
"io"
"os"
"path"
"sort"
"strconv"
"strings"
)
// layoutEntry is one entity placement inside a layout bank, mirroring the
// 10-byte on-disk record decoded by readEntityLayoutEntry.
type layoutEntry struct {
	X  int16  `json:"x"`
	Y  int16  `json:"y"`
	ID string `json:"id"` // EntityIDs enum name when known, otherwise a hex literal
	Flags uint8 `json:"flags"` // TODO properly de-serialize this
	Slot    uint8  `json:"slot"`
	SpawnID uint8  `json:"spawnId"`
	Params  uint16 `json:"params"`
	// YOrder records this entry's position within the Y-ordered copy of the
	// layout; it exists only to restore the original Y order on rebuild
	// (see hydrateYOrderFields) and is omitted when not needed.
	YOrder *int `json:"yOrder,omitempty"`
}

// layouts groups the de-duplicated entity banks together with the indices
// mapping each layout table slot onto a bank in Entities.
type layouts struct {
	Entities [][]layoutEntry `json:"entities"`
	Indices  []int           `json:"indices"`
}
// fetchEntityIDsFromHeaderFile parses the overlay's C header and returns
// its EntityIDs enum as a value-to-name map.
func fetchEntityIDsFromHeaderFile(overlay string) (map[int]string, error) {
	headerPath := fmt.Sprintf("src/st/%s/%s.h", overlay, overlay)
	f, err := os.Open(headerPath)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return sotn.ParseCEnum(f, "EntityIDs")
}
// readEntityLayoutEntry decodes a single 10-byte, little-endian entity
// layout record from the stream: X, Y (i16), entity ID, flags, slot,
// spawn ID (u8) and params (u16). The numeric ID is replaced by the
// matching EntityIDs enum name when one is known and no flags are set.
func readEntityLayoutEntry(file io.ReadSeeker, ovlName string) (layoutEntry, error) {
	// NOTE(review): this re-opens and re-parses the overlay header for every
	// single entry — consider hoisting to the caller. The error is ignored on
	// purpose: on failure entityIDs is nil, so lookups below return "" and the
	// ID falls back to its hex form.
	entityIDs, _ := fetchEntityIDsFromHeaderFile(ovlName)
	bs := make([]byte, 10)
	if _, err := io.ReadFull(file, bs); err != nil {
		return layoutEntry{}, err
	}
	var entityIDStr string
	id := int(bs[4])
	// Try to load the proper enum
	entityIDStr = entityIDs[id]
	// If enum unknown or flags are set, override, don't use enums
	if entityIDStr == "" || bs[5] != 0 {
		entityIDStr = fmt.Sprintf("0x%02X", id)
	}
	return layoutEntry{
		X:       int16(binary.LittleEndian.Uint16(bs[0:2])),
		Y:       int16(binary.LittleEndian.Uint16(bs[2:4])),
		ID:      entityIDStr,
		Flags:   bs[5],
		Slot:    bs[6],
		SpawnID: bs[7],
		Params:  binary.LittleEndian.Uint16(bs[8:10]),
	}, nil
}
// hydrateYOrderFields fills the optional YOrder field of the X-sorted
// layout entries. The Y-ordered copy of the layout does not follow a plain
// stable sort on Y (entries sharing the same Y can appear swapped), so the
// original position inside the Y list must be recorded to reproduce it.
func hydrateYOrderFields(x layouts, y layouts) error {
	if len(x.Indices) != len(y.Indices) {
		return fmt.Errorf("number of X and Y layout indices do not match")
	}
	if len(x.Entities) != len(y.Entities) {
		return fmt.Errorf("number of X and Y layout entries do not match")
	}
	for bank := range x.Entities {
		xEntries := x.Entities[bank]
		yEntries := y.Entities[bank]
		if len(xEntries) != len(yEntries) {
			return fmt.Errorf("number of X and Y entries do not match")
		}
		// map every entry to its position within the Y-ordered bank
		positions := make(map[layoutEntry]int, len(yEntries))
		for pos, e := range yEntries {
			positions[e] = pos
		}
		for i := range xEntries {
			if pos, ok := positions[xEntries[i]]; ok {
				xEntries[i].YOrder = &pos
			}
		}
	}
	return nil
}
// readEntityLayout reads the table of `count` layout bank offsets at off and
// then each distinct bank it points at, de-duplicating banks shared between
// table slots. When isX is set it also reads the Y-ordered copy that follows
// the table, uses it to populate the YOrder fields, and returns two merged
// ranges: [both offset tables, all entry data].
func readEntityLayout(r io.ReadSeeker, ovlName string, off psx.Addr, count int, isX bool) (layouts, []datarange.DataRange, error) {
	if err := off.MoveFile(r, psx.RamStageBegin); err != nil {
		return layouts{}, nil, err
	}
	// there are two copies of the layout data, one ordered by X and one
	// ordered by Y; this pass reads the copy located at off (the Y copy is
	// handled by the recursive call below when isX is set)
	blockOffsets := make([]psx.Addr, count)
	if err := binary.Read(r, binary.LittleEndian, blockOffsets); err != nil {
		return layouts{}, nil, err
	}
	// the order of each layout entry must be preserved
	pool := map[psx.Addr]int{}
	var blocks [][]layoutEntry
	var xRanges []datarange.DataRange
	for _, blockOffset := range util.SortUniqueOffsets(blockOffsets) {
		if err := blockOffset.MoveFile(r, psx.RamStageBegin); err != nil {
			return layouts{}, nil, err
		}
		// entries run up to and including the X:-1/Y:-1 terminator
		var entries []layoutEntry
		for {
			entry, err := readEntityLayoutEntry(r, ovlName)
			if err != nil {
				return layouts{}, nil, err
			}
			if entry.X == -1 && entry.Y == -1 {
				entries = append(entries, entry)
				break
			}
			entries = append(entries, entry)
		}
		// sanity check on the first entry
		if entries[0].X != -2 || entries[0].Y != -2 {
			err := fmt.Errorf("first layout entry does not mark the beginning of the array: %v", entries[0])
			return layouts{}, nil, err
		}
		pool[blockOffset] = len(blocks)
		blocks = append(blocks, entries)
		// each serialized entry is 10 bytes
		xRanges = append(xRanges, datarange.FromAddr(blockOffset, len(entries)*10))
	}
	// the very last entry needs to be aligned by 4
	xRanges[len(xRanges)-1] = xRanges[len(xRanges)-1].Align4()
	l := layouts{Entities: blocks}
	for _, blockOffset := range blockOffsets {
		l.Indices = append(l.Indices, pool[blockOffset])
	}
	endOfArray := off.Sum(count * 4)
	if isX { // we want to do the same thing with the vertically aligned layout
		yLayouts, yRanges, err := readEntityLayout(r, ovlName, endOfArray, count, false)
		if err != nil {
			return layouts{}, nil, fmt.Errorf("readEntityLayout failed on Y: %w", err)
		}
		if err := hydrateYOrderFields(l, yLayouts); err != nil {
			return layouts{}, nil, fmt.Errorf("unable to populate YOrder field: %w", err)
		}
		xMerged := datarange.MergeDataRanges(xRanges)
		yMerged := yRanges[1]
		// merge the X and Y offset tables and the X and Y entry data
		return l, []datarange.DataRange{
			datarange.MergeDataRanges([]datarange.DataRange{datarange.New(off, endOfArray), yRanges[0]}),
			datarange.MergeDataRanges([]datarange.DataRange{xMerged, yMerged}),
		}, nil
	} else {
		return l, []datarange.DataRange{datarange.New(off, endOfArray), datarange.MergeDataRanges(xRanges)}, nil
	}
}
// buildEntityLayouts converts the entity layout JSON asset back into two C
// source files: e_laydef.c (the per-room pointer tables) and e_layout.c
// (the raw entry data), reconstructing both the X-ordered and the Y-ordered
// copies of the layout.
func buildEntityLayouts(fileName string, outputDir string) error {
	ovlName := path.Base(outputDir)
	// writeLayoutEntries emits every bank as raw u16 words, validating the
	// -2/-2 opening and -1/-1 terminating sentinel entries of each bank.
	writeLayoutEntries := func(sb *strings.Builder, banks [][]layoutEntry, align4 bool) error {
		nWritten := 0
		for i, entries := range banks {
			// do a sanity check on the entries as we do not want to build something that will cause the game to crash
			if entries[0].X != -2 || entries[0].Y != -2 {
				return fmt.Errorf("layout entity bank %d needs to have a X:-2 and Y:-2 entry at the beginning", i)
			}
			lastEntry := entries[len(entries)-1]
			if lastEntry.X != -1 || lastEntry.Y != -1 {
				return fmt.Errorf("layout entity bank %d needs to have a X:-1 and Y:-1 entry at the end", i)
			}
			sb.WriteString(fmt.Sprintf("//%d\n", nWritten)) //label each block with offsets
			for _, e := range entries {
				var entityIDStr string
				if int(e.Flags) != 0 {
					// This will only ever be 0xA001.
					id, _ := strconv.ParseInt(strings.Replace(e.ID, "0x", "", -1), 16, 16)
					entityIDStr = fmt.Sprintf("0x%04X", (int(e.Flags)<<8)|int(id))
				} else {
					entityIDStr = e.ID
				}
				sb.WriteString(fmt.Sprintf(" 0x%04X, 0x%04X, %s, 0x%04X, 0x%04X,\n",
					uint16(e.X), uint16(e.Y), entityIDStr, int(e.Slot)|(int(e.SpawnID)<<8), e.Params))
			}
			nWritten += len(entries)
		}
		// each entry is 5 u16 words, so an odd total leaves the array only
		// 2-byte aligned; pad to keep the next symbol 4-byte aligned
		if align4 && nWritten%2 != 0 {
			sb.WriteString(" 0, // padding\n")
		}
		return nil
	}
	// makeSortedBanks returns a copy of the banks with their interior entries
	// sorted by X, or by Y using YOrder to break ties, while keeping the
	// sentinel first/last entries in place.
	makeSortedBanks := func(banks [][]layoutEntry, sortByX bool) [][]layoutEntry {
		var toSort []layoutEntry
		var less func(i, j int) bool
		if sortByX {
			less = func(i, j int) bool {
				return toSort[i].X < toSort[j].X
			}
		} else {
			less = func(i, j int) bool {
				if toSort[i].Y < toSort[j].Y {
					return true
				}
				if toSort[i].Y > toSort[j].Y {
					return false
				}
				if toSort[i].YOrder != nil && toSort[j].YOrder != nil {
					return *toSort[i].YOrder < *toSort[j].YOrder
				}
				return i < j
			}
		}
		sorting := make([][]layoutEntry, len(banks))
		for i, entries := range banks {
			sorting[i] = make([]layoutEntry, len(entries)-2)
			if len(sorting[i]) > 0 { // do not sort if the list is empty
				copy(sorting[i], entries[1:len(entries)-1]) // do not sort the -2 and -1 entries
				toSort = sorting[i]
				sort.SliceStable(toSort, less)
			}
			// put back the -2 and -1
			sorting[i] = append([]layoutEntry{entries[0]}, sorting[i]...)
			sorting[i] = append(sorting[i], entries[len(entries)-1])
		}
		return sorting
	}
	data, err := os.ReadFile(fileName)
	if err != nil {
		return err
	}
	var el layouts
	if err := json.Unmarshal(data, &el); err != nil {
		return err
	}
	// derive a per-overlay symbol suffix so multiple overlays can link together
	h := fnv.New32()
	_, _ = h.Write([]byte(outputDir)) // fnv.Write never returns an error
	symbolVariant := strconv.FormatUint(uint64(h.Sum32()), 16)
	symbolName := fmt.Sprintf("entity_layout_%s", symbolVariant)
	// offsets[i] is the u16-word offset of bank i; each entry is 5 words
	offsets := make([]int, len(el.Entities))
	offsetCur := 0
	for i := 0; i < len(el.Entities); i++ {
		offsets[i] = offsetCur
		offsetCur += len(el.Entities[i]) * 5
	}
	sbHeader := strings.Builder{}
	sbHeader.WriteString("#include <stage.h>\n\n")
	sbHeader.WriteString("#include \"common.h\"\n\n")
	sbHeader.WriteString("// clang-format off\n")
	sbHeader.WriteString(fmt.Sprintf("extern LayoutEntity %s_x[];\n", symbolName))
	sbHeader.WriteString(fmt.Sprintf("LayoutEntity* %s_pStObjLayoutHorizontal[] = {\n", strings.ToUpper(ovlName)))
	for _, i := range el.Indices {
		sbHeader.WriteString(fmt.Sprintf(" &%s_x[%d],\n", symbolName, offsets[i]/5))
	}
	sbHeader.WriteString("};\n") // plain WriteString: no-arg fmt.Sprintf removed (S1039)
	sbHeader.WriteString(fmt.Sprintf("extern LayoutEntity %s_y[];\n", symbolName))
	sbHeader.WriteString(fmt.Sprintf("LayoutEntity* %s_pStObjLayoutVertical[] = {\n", strings.ToUpper(ovlName)))
	for _, i := range el.Indices {
		sbHeader.WriteString(fmt.Sprintf(" &%s_y[%d],\n", symbolName, offsets[i]/5))
	}
	sbHeader.WriteString("};\n")
	sbData := strings.Builder{}
	sbData.WriteString(fmt.Sprintf("#include \"%s.h\"\n\n", ovlName))
	sbData.WriteString("// clang-format off\n")
	sbData.WriteString(fmt.Sprintf("u16 %s_x[] = {\n", symbolName))
	if err := writeLayoutEntries(&sbData, makeSortedBanks(el.Entities, true), false); err != nil {
		return fmt.Errorf("unable to build X entity layout: %w", err)
	}
	sbData.WriteString("};\n")
	sbData.WriteString(fmt.Sprintf("u16 %s_y[] = {\n", symbolName))
	if err := writeLayoutEntries(&sbData, makeSortedBanks(el.Entities, false), true); err != nil {
		return fmt.Errorf("unable to build Y entity layout: %w", err)
	}
	sbData.WriteString("};\n")
	if err := os.WriteFile(path.Join(outputDir, "e_layout.c"), []byte(sbData.String()), 0644); err != nil {
		return err
	}
	return os.WriteFile(path.Join(outputDir, "e_laydef.c"), []byte(sbHeader.String()), 0644)
}

View File

@ -8,6 +8,7 @@ import (
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/sotn"
"io" "io"
"os" "os"
"path" "path"
@ -32,9 +33,9 @@ var Handler = &handler{}
func (h *handler) Name() string { return "rooms" } func (h *handler) Name() string { return "rooms" }
func (h *handler) Extract(e assets.ExtractEntry) error { func (h *handler) Extract(e assets.ExtractArgs) error {
r := bytes.NewReader(e.Data) r := bytes.NewReader(e.Data)
rooms, _, err := ReadRooms(r, e.RamBase.Sum(e.Start)) rooms, _, err := readRooms(r, e.RamBase.Sum(e.Start))
if err != nil { if err != nil {
return fmt.Errorf("failed to read rooms: %w", err) return fmt.Errorf("failed to read rooms: %w", err)
} }
@ -51,7 +52,7 @@ func (h *handler) Extract(e assets.ExtractEntry) error {
return os.WriteFile(outFileName, content, 0644) return os.WriteFile(outFileName, content, 0644)
} }
func (h *handler) Build(e assets.BuildEntry) error { func (h *handler) Build(e assets.BuildArgs) error {
inPath := assetPath(e.AssetDir, e.Name) inPath := assetPath(e.AssetDir, e.Name)
outPath := sourcePath(e.SrcDir, e.Name) outPath := sourcePath(e.SrcDir, e.Name)
ovlName := path.Base(path.Dir(outPath)) ovlName := path.Base(path.Dir(outPath))
@ -78,6 +79,33 @@ func (h *handler) Build(e assets.BuildEntry) error {
return os.WriteFile(outPath, []byte(content.String()), 0644) return os.WriteFile(outPath, []byte(content.String()), 0644)
} }
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
r := bytes.NewReader(a.StageData)
header, err := sotn.ReadStageHeader(r)
if err != nil {
return assets.InfoResult{}, err
}
_, dataRange, err := readRooms(r, header.Rooms)
if err != nil {
return assets.InfoResult{}, err
}
return assets.InfoResult{
AssetEntries: []assets.InfoAssetEntry{
{
DataRange: dataRange,
Kind: "rooms",
Name: "rooms",
},
},
SplatEntries: []assets.InfoSplatEntry{
{
DataRange: dataRange,
Name: "rooms",
},
},
}, nil
}
func assetPath(dir, name string) string { func assetPath(dir, name string) string {
return path.Join(dir, fmt.Sprintf("%s.json", name)) return path.Join(dir, fmt.Sprintf("%s.json", name))
} }
@ -90,7 +118,7 @@ func (r Room) isTerminator() bool {
return r.Left == 0x40 return r.Left == 0x40
} }
func ReadRooms(r io.ReadSeeker, off psx.Addr) ([]Room, datarange.DataRange, error) { func readRooms(r io.ReadSeeker, off psx.Addr) ([]Room, datarange.DataRange, error) {
if off == 0 { if off == 0 {
return nil, datarange.DataRange{}, nil return nil, datarange.DataRange{}, nil
} }

View File

@ -0,0 +1,21 @@
package skip
import "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
// handler is a no-op asset handler: a "skip" entry claims a byte range in
// the asset configuration without extracting or building anything for it.
type handler struct{}

// Handler is the singleton instance registered with the asset tool.
var Handler = &handler{}

// Name identifies this handler in asset configuration entries.
func (h *handler) Name() string { return "skip" }

// Extract intentionally does nothing; skipped ranges are just ignored.
func (h *handler) Extract(e assets.ExtractArgs) error {
	return nil
}

// Build intentionally does nothing; there is no source to generate.
func (h *handler) Build(e assets.BuildArgs) error {
	return nil
}

// Info reports no entries; skipped ranges are declared manually in the config.
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
	return assets.InfoResult{}, nil
}

View File

@ -5,18 +5,23 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/sotn"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
) )
const banksCount = 24 // the number seems to be fixed
type handler struct{} type handler struct{}
var Handler = &handler{} var Handler = &handler{}
func (h *handler) Name() string { return "sprites" } func (h *handler) Name() string { return "sprite_banks" }
func (h *handler) Extract(e assets.ExtractEntry) error { func (h *handler) Extract(e assets.ExtractArgs) error {
if e.Start == e.End { if e.Start == e.End {
return fmt.Errorf("a group of sprites cannot be 0 bytes long") return fmt.Errorf("a group of sprites cannot be 0 bytes long")
} }
@ -38,10 +43,37 @@ func (h *handler) Extract(e assets.ExtractEntry) error {
return os.WriteFile(outFileName, content, 0644) return os.WriteFile(outFileName, content, 0644)
} }
func (h *handler) Build(e assets.BuildEntry) error { func (h *handler) Build(e assets.BuildArgs) error {
return buildSprites(assetPath(e.AssetDir, e.Name), e.SrcDir) return buildSprites(assetPath(e.AssetDir, e.Name), e.SrcDir)
} }
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
r := bytes.NewReader(a.StageData)
header, err := sotn.ReadStageHeader(r)
if err != nil {
return assets.InfoResult{}, err
}
_, dataRange, err := ReadSpritesBanks(r, psx.RamStageBegin, header.Sprites)
if err != nil {
return assets.InfoResult{}, err
}
return assets.InfoResult{
AssetEntries: []assets.InfoAssetEntry{
{
DataRange: datarange.FromAddr(header.Sprites, banksCount*4),
Kind: "sprite_banks",
Name: "sprite_banks",
},
},
SplatEntries: []assets.InfoSplatEntry{
{
DataRange: dataRange,
Name: "sprites",
},
},
}, nil
}
func assetPath(dir, name string) string { func assetPath(dir, name string) string {
if name == "" { if name == "" {
name = "sprite_banks" name = "sprite_banks"

View File

@ -23,8 +23,7 @@ func ReadSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (SpriteBanks, da
return SpriteBanks{}, datarange.DataRange{}, err return SpriteBanks{}, datarange.DataRange{}, err
} }
// start with a capacity of 24 as that'sprites the length for all the stage overlays offBanks := make([]psx.Addr, 0, banksCount)
offBanks := make([]psx.Addr, 0, 24)
for { for {
addr := psx.ReadAddr(r) addr := psx.ReadAddr(r)
if addr != psx.RamNull && !addr.InRange(baseAddr, psx.RamGameEnd) { if addr != psx.RamNull && !addr.InRange(baseAddr, psx.RamGameEnd) {
@ -95,6 +94,9 @@ func buildSprites(fileName string, outputDir string) error {
if err := json.Unmarshal(data, &spritesBanks); err != nil { if err := json.Unmarshal(data, &spritesBanks); err != nil {
return err return err
} }
if len(spritesBanks.Indices) != banksCount {
return fmt.Errorf("the number of banks must be exactly %d, got %d", banksCount, len(spritesBanks.Banks))
}
sbHeader := strings.Builder{} sbHeader := strings.Builder{}
sbHeader.WriteString("// clang-format off\n") sbHeader.WriteString("// clang-format off\n")

View File

@ -15,9 +15,9 @@ type handler struct{}
var Handler = &handler{} var Handler = &handler{}
func (h *handler) Name() string { return "sprites" } func (h *handler) Name() string { return "spriteset" }
func (h *handler) Extract(e assets.ExtractEntry) error { func (h *handler) Extract(e assets.ExtractArgs) error {
if e.Name == "" { if e.Name == "" {
return fmt.Errorf("data at 0x%X must have a name", e.Start) return fmt.Errorf("data at 0x%X must have a name", e.Start)
} }
@ -46,7 +46,7 @@ func (h *handler) Extract(e assets.ExtractEntry) error {
return os.WriteFile(outFileName, content, 0644) return os.WriteFile(outFileName, content, 0644)
} }
func (h *handler) Build(e assets.BuildEntry) error { func (h *handler) Build(e assets.BuildArgs) error {
in := assetPath(e.AssetDir, e.Name) in := assetPath(e.AssetDir, e.Name)
out := sourcePath(e.SrcDir, e.Name) out := sourcePath(e.SrcDir, e.Name)
data, err := os.ReadFile(in) data, err := os.ReadFile(in)
@ -66,6 +66,10 @@ func (h *handler) Build(e assets.BuildEntry) error {
return os.WriteFile(out, []byte(sb.String()), 0644) return os.WriteFile(out, []byte(sb.String()), 0644)
} }
func (h *handler) Info(a assets.InfoArgs) (assets.InfoResult, error) {
return assets.InfoResult{}, nil
}
func assetPath(dir, name string) string { func assetPath(dir, name string) string {
return path.Join(dir, fmt.Sprintf("%s.animset.json", name)) return path.Join(dir, fmt.Sprintf("%s.animset.json", name))
} }

View File

@ -3,6 +3,13 @@ module github.com/xeeynamo/sotn-decomp/tools/sotn-assets
go 1.22 go 1.22
require ( require (
golang.org/x/sync v0.7.0 // indirect github.com/stretchr/testify v1.9.0
gopkg.in/yaml.v2 v2.4.0 // indirect golang.org/x/sync v0.7.0
gopkg.in/yaml.v2 v2.4.0
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
) )

View File

@ -1,5 +1,14 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@ -1,105 +0,0 @@
package main
import (
"encoding/binary"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"os"
)
// gfxKind encodes the format of the pixel data referenced by a graphics block.
type gfxKind uint16

const (
	gfxBankNone = gfxKind(iota) // empty/unused bank
	gfxBank4bpp
	gfxBank8bpp
	gfxBank16bpp
	gfxBankCompressed
)

// gfxEntry is a single graphics record (12 bytes on disk): a destination
// rectangle plus a pointer to the pixel data. The coordinates presumably
// address VRAM — TODO confirm against the game code.
type gfxEntry struct {
	X      uint16
	Y      uint16
	Width  uint16
	Height uint16
	Data   psx.Addr
}

// gfxBlock is a kind/flags pair followed by a list of gfxEntry records; on
// disk the list is terminated by an entry with X == Y == 0xFFFF.
type gfxBlock struct {
	kind    gfxKind
	flags   uint16
	entries []gfxEntry
}

// gfx is an overlay's whole graphics table: the unique blocks plus the
// per-slot indices into them (-1 marks a null slot).
type gfx struct {
	blocks  []gfxBlock
	indices []int
}
// readGraphics reads the overlay's graphics table located at off: first the
// array of block pointers, then every unique gfxBlock they reference. It
// returns the parsed table together with the merged file range covering both
// the pointer array and the blocks.
func readGraphics(file *os.File, off psx.Addr) (gfx, datarange.DataRange, error) {
	if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
		return gfx{}, datarange.DataRange{}, err
	}
	// all the offsets are before the array, so it is easy to find where the offsets array ends
	blockOffsets := []psx.Addr{}
	for {
		var offBank psx.Addr
		if err := binary.Read(file, binary.LittleEndian, &offBank); err != nil {
			return gfx{}, datarange.DataRange{}, err
		}
		if offBank >= off {
			// reached the array itself: stop collecting pointers
			break
		}
		blockOffsets = append(blockOffsets, offBank)
	}
	// the order of each gfxBlock must be preserved; pool maps a block's file
	// offset to its index in blocks so duplicate pointers share one entry
	pool := map[psx.Addr]int{}
	pool[psx.RamNull] = -1
	blocks := []gfxBlock{}
	ranges := []datarange.DataRange{}
	for _, blockOffset := range sortUniqueOffsets(blockOffsets) {
		if blockOffset == psx.RamNull { // exception for ST0
			continue
		}
		if err := blockOffset.MoveFile(file, psx.RamStageBegin); err != nil {
			return gfx{}, datarange.DataRange{}, err
		}
		var block gfxBlock
		if err := binary.Read(file, binary.LittleEndian, &block.kind); err != nil {
			return gfx{}, datarange.DataRange{}, err
		}
		if err := binary.Read(file, binary.LittleEndian, &block.flags); err != nil {
			return gfx{}, datarange.DataRange{}, err
		}
		if block.kind == gfxKind(0xFFFF) && block.flags == 0xFFFF { // exception for ST0
			// degenerate 4-byte block with no entries
			pool[blockOffset] = len(blocks)
			blocks = append(blocks, block)
			ranges = append(ranges, datarange.FromAddr(blockOffset, 4))
			continue
		}
		for {
			var entry gfxEntry
			if err := binary.Read(file, binary.LittleEndian, &entry); err != nil {
				return gfx{}, datarange.DataRange{}, err
			}
			if entry.X == 0xFFFF && entry.Y == 0xFFFF {
				// terminator entry: not stored, but accounted for in the range below
				break
			}
			block.entries = append(block.entries, entry)
		}
		pool[blockOffset] = len(blocks)
		blocks = append(blocks, block)
		// 4 bytes header + 12 bytes per entry + 4 bytes terminator
		ranges = append(ranges, datarange.FromAddr(blockOffset, 4+len(block.entries)*12+4))
	}
	var g gfx
	for _, blockOffset := range blockOffsets {
		g.indices = append(g.indices, pool[blockOffset])
	}
	return g, datarange.MergeDataRanges(append(ranges, datarange.FromAddr(off, len(blockOffsets)*4))), nil
}

74
tools/sotn-assets/info.go Normal file
View File

@ -0,0 +1,74 @@
package main
import (
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"io"
"os"
"sort"
)
// info prints asset and splat configuration hints for the given stage file,
// aggregating the contributions of every registered asset handler.
func info(w io.Writer, filePath string) error {
	data, err := os.ReadFile(filePath)
	if err != nil {
		return fmt.Errorf("unable to read file %q: %s", filePath, err)
	}
	args := assets.InfoArgs{
		StageFilePath: filePath,
		StageData:     data,
	}
	var assetEntries []assets.InfoAssetEntry
	var splatEntries []assets.InfoSplatEntry
	for _, h := range handlers {
		result, err := h.Info(args)
		if err != nil {
			return fmt.Errorf("unable to gather info for file %q: %s", filePath, err)
		}
		assetEntries = append(assetEntries, result.AssetEntries...)
		splatEntries = append(splatEntries, result.SplatEntries...)
	}
	_, _ = fmt.Fprintln(w, "asset config hints:")
	infoAssetEntries(w, assetEntries)
	_, _ = fmt.Fprintln(w, "splat config hints:")
	infoSplatEntries(w, splatEntries)
	return nil
}
// infoAssetEntries prints asset configuration hints sorted by file offset.
// A "skip" marker follows the last entry and any entry that is not
// immediately adjacent to the next one, so unrecognized data is visible.
func infoAssetEntries(w io.Writer, entries []assets.InfoAssetEntry) {
	if len(entries) == 0 {
		return
	}
	byOffset := func(i, j int) bool {
		return entries[i].DataRange.Begin() < entries[j].DataRange.Begin()
	}
	sort.Slice(entries, byOffset)
	_, _ = fmt.Fprintln(w, " - [0x0, .data, header]")
	last := len(entries) - 1
	for i, e := range entries {
		begin := e.DataRange.Begin().Real(psx.RamStageBegin)
		_, _ = fmt.Fprintf(w, " - [0x%X, %s, %s]\n", begin, e.Kind, e.Name)
		// mark the gap after this entry (or the tail) as data to skip
		if i == last || e.DataRange.End() != entries[i+1].DataRange.Begin() {
			_, _ = fmt.Fprintf(w, " - [0x%X, skip]\n", e.DataRange.End().Real(psx.RamStageBegin))
		}
	}
}
// infoSplatEntries prints splat configuration hints sorted by file offset,
// appending the optional comment of each entry. Gaps between consecutive
// entries (and the tail) are emitted as plain "data" segments.
func infoSplatEntries(w io.Writer, entries []assets.InfoSplatEntry) {
	if len(entries) == 0 {
		return
	}
	byOffset := func(i, j int) bool {
		return entries[i].DataRange.Begin() < entries[j].DataRange.Begin()
	}
	sort.Slice(entries, byOffset)
	last := len(entries) - 1
	for i, e := range entries {
		line := fmt.Sprintf(" - [0x%X, .data, %s]", e.DataRange.Begin().Real(psx.RamStageBegin), e.Name)
		if e.Comment != "" {
			line += " # " + e.Comment
		}
		_, _ = fmt.Fprintln(w, line)
		// mark the gap after this entry (or the tail) as unrecognized data
		if i == last || e.DataRange.End() != entries[i+1].DataRange.Begin() {
			_, _ = fmt.Fprintf(w, " - [0x%X, data]\n", e.DataRange.End().Real(psx.RamStageBegin))
		}
	}
}

View File

@ -0,0 +1,78 @@
package main
import (
"bytes"
"fmt"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"os"
"path"
"sync"
"testing"
)
// chdirMutex serializes working-directory changes: os.Chdir affects the
// whole process, so concurrent tests must not race on it.
var chdirMutex sync.Mutex
// TestGatherAssetInfo runs the info command against a real stage overlay
// (NZ0) and checks that both the asset and the splat configuration hints
// contain manually verified entries. It requires the extracted game files
// under disks/us, hence the chdir to the repository root first.
func TestGatherAssetInfo(t *testing.T) {
	changeDirToRepoRoot()
	t.Run("for NZ0", func(t *testing.T) {
		buf := new(bytes.Buffer)
		require.NoError(t, info(buf, "disks/us/ST/NZ0/NZ0.BIN"))
		stdout := buf.String()
		t.Run("asset config hints", func(t *testing.T) {
			// assert.Contains gathers all mismatches; the full output is
			// dumped once at the end if anything failed
			assert.Contains(t, stdout, "asset config hints:\n")
			assert.Contains(t, stdout, " - [0x2C, sprite_banks, sprite_banks]")
			assert.Contains(t, stdout, " - [0x8C, skip]")
			assert.Contains(t, stdout, " - [0x164, layers, layers]\n")
			assert.Contains(t, stdout, " - [0x8EC, layout, entity_layouts]\n")
			assert.Contains(t, stdout, " - [0x272C, rooms, rooms]")
			assert.Contains(t, stdout, " - [0x2830, skip]")
			if t.Failed() {
				require.FailNow(t, "unexpected output", stdout)
			}
		})
		t.Run("splat config hints", func(t *testing.T) {
			assert.Contains(t, stdout, "splat config hints:\n")
			assert.Contains(t, stdout, " - [0x0, .data, header]\n")
			assert.Contains(t, stdout, " - [0x164, .data, header] # layers\n")
			assert.Contains(t, stdout, " - [0x8EC, .data, e_laydef] # layout entries header\n")
			assert.Contains(t, stdout, " - [0xA94, data]\n")
			assert.Contains(t, stdout, " - [0x272C, .data, rooms]\n")
			assert.Contains(t, stdout, " - [0x2830, data]\n")
			assert.Contains(t, stdout, " - [0x2884, .data, e_layout] # layout entries data\n")
			assert.Contains(t, stdout, " - [0x3B0C, data]\n")
			assert.Contains(t, stdout, " - [0x16A5C, .data, tile_data] # tile data\n")
			assert.Contains(t, stdout, " - [0x20A5C, .data, tile_data] # tile definitions\n")
			assert.Contains(t, stdout, " - [0x26E8C, .data, sprites]\n")
			assert.Contains(t, stdout, " - [0x3058C, data]\n")
			if t.Failed() {
				require.FailNow(t, "unexpected output", stdout)
			}
		})
	})
}
// changeDirToRepoRoot walks up from the current working directory until it
// finds the repository root, identified by the presence of disks/us/DRA.BIN,
// and makes it the working directory. It panics when the root cannot be
// located, since the integration tests cannot run anywhere else.
func changeDirToRepoRoot() {
	chdirMutex.Lock()
	defer chdirMutex.Unlock()
	for {
		stat, err := os.Stat("disks/us/DRA.BIN")
		if err == nil && !stat.IsDir() {
			return // found the repo root
		}
		if !os.IsNotExist(err) {
			panic(err)
		}
		cwd, err := os.Getwd()
		if err != nil {
			panic(err)
		}
		// path.Dir only understands slash-separated paths, so on platforms
		// with other separators this check alone cannot detect the
		// filesystem root; the progress check after os.Chdir below covers
		// that case and prevents an infinite loop.
		if parent := path.Dir(cwd); cwd == parent {
			panic(fmt.Errorf("unable to find repo root"))
		}
		if err := os.Chdir(".."); err != nil {
			panic(err)
		}
		newCwd, err := os.Getwd()
		if err != nil {
			panic(err)
		}
		if newCwd == cwd {
			// chdir("..") made no progress: we were already at the root
			panic(fmt.Errorf("unable to find repo root"))
		}
	}
}

View File

@ -1,133 +0,0 @@
package main
import (
"encoding/binary"
"encoding/json"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"os"
"slices"
)
// layer is the raw on-disk layer descriptor as stored in a stage overlay.
type layer struct {
	Data       psx.Addr // pointer to the tilemap data
	Tiledef    psx.Addr // pointer to the tile definitions
	PackedInfo uint32   // bit-packed bounds and flags; see unpack
	ZPriority  uint16
	UnkE       uint8
	UnkF       uint8
}

// layerUnpacked is the JSON-friendly form of layer, with PackedInfo expanded
// into its individual fields.
type layerUnpacked struct {
	Data          string `json:"data"`
	Tiledef       string `json:"tiledef"`
	Left          int    `json:"left"`
	Top           int    `json:"top"`
	Right         int    `json:"right"`
	Bottom        int    `json:"bottom"`
	ScrollMode    int    `json:"scrollMode"`
	IsSaveRoom    bool   `json:"isSaveRoom"`
	IsLoadingRoom bool   `json:"isLoadingRoom"`
	UnusedFlag    bool   `json:"unusedFlag"`
	ZPriority     int    `json:"zPriority"`
	UnkE          int    `json:"unkE"`
	UnkF          int    `json:"unkF"`
}

// roomLayers pairs the foreground and background layer of a room; either
// pointer may be nil when the room does not define that layer.
type roomLayers struct {
	fg *layer
	bg *layer
}
// tilemapFileSize returns the size in bytes of the layer's tilemap, computed
// from the bounds packed into PackedInfo (512 bytes per map cell).
func (l *layer) tilemapFileSize() int {
	left := int(l.PackedInfo & 0x3F)
	top := int((l.PackedInfo >> 6) & 0x3F)
	right := int((l.PackedInfo >> 12) & 0x3F)
	bottom := int((l.PackedInfo >> 18) & 0x3F)
	return (right - left + 1) * (bottom - top + 1) * 512
}
// unpack expands the bit-packed descriptor into its JSON-friendly form,
// resolving the tilemap and tiledef addresses to their extracted file names.
func (l *layer) unpack() layerUnpacked {
	// the top byte of PackedInfo holds the scroll mode and three flag bits
	flags := int((l.PackedInfo >> 24) & 0xFF)
	return layerUnpacked{
		Data:          getTilemapFileName(l.Data),
		Tiledef:       getTiledefFileName(l.Tiledef),
		Left:          int(l.PackedInfo & 0x3F),
		Top:           int((l.PackedInfo >> 6) & 0x3F),
		Right:         int((l.PackedInfo >> 12) & 0x3F),
		Bottom:        int((l.PackedInfo >> 18) & 0x3F),
		ScrollMode:    flags & 0x1F,
		IsSaveRoom:    flags&0x20 != 0,
		IsLoadingRoom: flags&0x40 != 0,
		UnusedFlag:    flags&0x80 != 0,
		ZPriority:     int(l.ZPriority),
		UnkE:          int(l.UnkE),
		UnkF:          int(l.UnkF),
	}
}
// MarshalJSON serializes the room's layers as an object with optional "fg"
// and "bg" keys; a missing layer is omitted rather than emitted as null.
func (r roomLayers) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if r.fg != nil {
		out["fg"] = r.fg.unpack()
	}
	if r.bg != nil {
		out["bg"] = r.bg.unpack()
	}
	return json.Marshal(out)
}
// readLayers reads the per-room layer pairs (foreground, background) whose
// pointer array starts at off. Duplicate descriptors are shared through a
// pool so multiple rooms reuse the same *layer. Returns one roomLayers per
// room and the file range spanning from the lowest layer descriptor up to
// the end of the pointer array.
//
// NOTE(review): slices.Min(layerOffsets) panics when the very first pair is
// already out of range (no layers collected) — assumes every stage defines
// at least one valid layer pair; confirm before reusing on unusual overlays.
func readLayers(file *os.File, off psx.Addr) ([]roomLayers, datarange.DataRange, error) {
	if off == 0 {
		return nil, datarange.DataRange{}, nil
	}
	if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
		return nil, datarange.DataRange{}, err
	}
	// when the data starts to no longer makes sense, we can assume we reached the end of the array
	layerOffsets := []psx.Addr{}
	layersOff := make([]psx.Addr, 2)
	for {
		if err := binary.Read(file, binary.LittleEndian, layersOff); err != nil {
			return nil, datarange.DataRange{}, err
		}
		// a valid pair points strictly between the overlay start and the array itself
		if layersOff[0] <= psx.RamStageBegin || layersOff[0] >= off ||
			layersOff[1] <= psx.RamStageBegin || layersOff[1] >= off {
			break
		}
		layerOffsets = append(layerOffsets, layersOff...)
	}
	// Creates a map of layers, so we can re-use them when a layer is used by multiple rooms
	pool := map[psx.Addr]*layer{}
	pool[psx.Addr(0)] = nil
	for _, layerOffset := range layerOffsets {
		if _, exists := pool[layerOffset]; exists {
			continue
		}
		if err := layerOffset.MoveFile(file, psx.RamStageBegin); err != nil {
			return nil, datarange.DataRange{}, err
		}
		var l layer
		if err := binary.Read(file, binary.LittleEndian, &l); err != nil {
			return nil, datarange.DataRange{}, err
		}
		// an all-zero descriptor means "no layer" and maps to nil
		if l.Data != psx.RamNull || l.Tiledef != psx.RamNull || l.PackedInfo != 0 {
			pool[layerOffset] = &l
		} else {
			pool[layerOffset] = nil
		}
	}
	// creates the real array with all the layers mapped
	count := len(layerOffsets) >> 1
	roomsLayers := make([]roomLayers, count)
	for i := 0; i < count; i++ {
		roomsLayers[i].fg = pool[layerOffsets[i*2+0]]
		roomsLayers[i].bg = pool[layerOffsets[i*2+1]]
	}
	return roomsLayers, datarange.New(slices.Min(layerOffsets), off.Sum(count*8)), nil
}

View File

@ -1,213 +0,0 @@
package main
import (
"encoding/binary"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"io"
"os"
"path"
"strconv"
"strings"
)
// layoutEntry is the JSON form of one 10-byte entity layout record. An entry
// with X == -2 marks the start sentinel of a list and X == -1 the end
// sentinel (see readEntityLayout).
type layoutEntry struct {
	X       int16  `json:"x"`
	Y       int16  `json:"y"`
	ID      string `json:"id"` // EntityIDs enum name, or a "0xNN" hex fallback
	Flags   uint8  `json:"flags"` // TODO properly de-serialize this
	Slot    uint8  `json:"slot"`
	SpawnID uint8  `json:"spawnId"`
	Params  uint16 `json:"params"`
	YOrder  *int   `json:"yOrder,omitempty"` // position in the Y-ordered copy, to restore the original order
}

// layouts holds the unique entity lists plus the per-slot indices that
// reference them.
type layouts struct {
	Entities [][]layoutEntry `json:"entities"`
	Indices  []int           `json:"indices"`
}
// fetchEntityIDsFromHFile parses the EntityIDs enum out of
// src/st/<overlay>/<overlay>.h and returns a lookup table from the numeric
// entity ID to the enumerator name.
func fetchEntityIDsFromHFile(overlay string) (map[int]string, error) {
	hFile, err := os.ReadFile("src/st/" + overlay + "/" + overlay + ".h")
	if err != nil {
		return nil, err
	}
	lines := strings.Split(string(hFile), "\n")
	// Extract the lines inside the enum body. They are delimited by the
	// first line containing "EntityIDs" (typedef enum EntityIDs {) and the
	// second one (} EntityIDs;).
	enumData := []string{}
	inEnum := false
	for _, line := range lines {
		if strings.Contains(line, "EntityIDs") {
			if inEnum {
				break
			}
			inEnum = true
		} else if inEnum {
			enumData = append(enumData, line)
		}
	}
	entityNames := make(map[int]string, 255)
	// index advances by one per enumerator and is overridden whenever the
	// enumerator has a direct assignment (NAME = 0x123). It starts at -1 so
	// the first increment yields 0.
	index := -1
	for _, line := range enumData {
		line = strings.Split(line, ",")[0] // ignore everything after the comma
		parts := strings.Split(line, " = ")
		if len(parts) > 1 {
			hexVal := strings.ReplaceAll(parts[1], "0x", "")
			// strip Windows line endings
			hexVal = strings.ReplaceAll(hexVal, "\r", "")
			parsed, err := strconv.ParseInt(hexVal, 16, 16)
			if err != nil {
				return nil, err
			}
			index = int(parsed)
		} else {
			index++
		}
		parts = strings.Split(parts[0], " ")
		name := parts[len(parts)-1]
		entityNames[index] = name
	}
	return entityNames, nil
}
// readEntityLayoutEntry reads one 10-byte entity layout record from the
// current file position, resolving the numeric entity ID to its EntityIDs
// enum name when possible. The overlay name is derived from the directory
// containing the file.
//
// NOTE(review): the overlay's .h file is re-read and re-parsed for every
// single entry; caching the map per overlay would avoid redundant disk I/O.
func readEntityLayoutEntry(file *os.File) (layoutEntry, error) {
	ovlName := strings.ToLower(path.Base(path.Dir(file.Name())))
	// the error is ignored: on failure entityIDs is nil, lookups return ""
	// and the code below falls back to the raw hexadecimal ID
	entityIDs, _ := fetchEntityIDsFromHFile(ovlName)
	bs := make([]byte, 10)
	if _, err := io.ReadFull(file, bs); err != nil {
		return layoutEntry{}, err
	}
	var entityIDStr string
	id := int(bs[4])
	// Try to load the proper enum
	entityIDStr = entityIDs[id]
	// If enum unknown or flags are set, override, don't use enums
	if entityIDStr == "" || bs[5] != 0 {
		entityIDStr = fmt.Sprintf("0x%02X", id)
	}
	return layoutEntry{
		X:       int16(binary.LittleEndian.Uint16(bs[0:2])),
		Y:       int16(binary.LittleEndian.Uint16(bs[2:4])),
		ID:      entityIDStr,
		Flags:   bs[5],
		Slot:    bs[6],
		SpawnID: bs[7],
		Params:  binary.LittleEndian.Uint16(bs[8:10]),
	}, nil
}
// hydrateYOrderFields fills the optional YOrder field of every X-ordered
// entry with its position inside the matching Y-ordered list. The Y-ordered
// copy cannot be reconstructed by simply sorting on Y, because entries
// sharing the same Y value would end up swapped; recording the original
// position is the only way to restore it.
func hydrateYOrderFields(x layouts, y layouts) error {
	if len(x.Indices) != len(y.Indices) {
		return fmt.Errorf("number of X and Y layout indices do not match")
	}
	if len(x.Entities) != len(y.Entities) {
		return fmt.Errorf("number of X and Y layout entries do not match")
	}
	for i := range x.Entities {
		xEntries := x.Entities[i]
		yEntries := y.Entities[i]
		if len(xEntries) != len(yEntries) {
			return fmt.Errorf("number of X and Y entries do not match")
		}
		// index the Y-ordered list once, then annotate each X entry found in it
		positionByEntry := make(map[layoutEntry]int, len(yEntries))
		for pos, e := range yEntries {
			positionByEntry[e] = pos
		}
		for j := range xEntries {
			if pos, found := positionByEntry[xEntries[j]]; found {
				pos := pos
				xEntries[j].YOrder = &pos
			}
		}
	}
	return nil
}
// readEntityLayout reads the entity layout table at off: an array of count
// pointers followed by the unique entry lists they reference. The table is
// stored twice back to back — one copy ordered by X, one by Y. When isX is
// true this call reads the first copy, then recurses once (isX=false) to
// read the Y-ordered copy and uses it to populate the YOrder fields.
// It returns the layouts plus two ranges: the pointer array range(s) and the
// entry data range(s).
//
// NOTE(review): assumes at least one layout block exists; the Align4 fix-up
// on xRanges would panic on an empty table.
func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []datarange.DataRange, error) {
	if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
		return layouts{}, nil, err
	}
	blockOffsets := make([]psx.Addr, count)
	if err := binary.Read(file, binary.LittleEndian, blockOffsets); err != nil {
		return layouts{}, nil, err
	}
	// the order of each layout entry must be preserved; pool maps a block's
	// file offset to its index in blocks so duplicates are shared
	pool := map[psx.Addr]int{}
	blocks := [][]layoutEntry{}
	xRanges := []datarange.DataRange{}
	for _, blockOffset := range sortUniqueOffsets(blockOffsets) {
		if err := blockOffset.MoveFile(file, psx.RamStageBegin); err != nil {
			return layouts{}, nil, err
		}
		entries := []layoutEntry{}
		for {
			entry, err := readEntityLayoutEntry(file)
			if err != nil {
				return layouts{}, nil, err
			}
			// (-1, -1) is the end sentinel; it is kept in the list
			if entry.X == -1 && entry.Y == -1 {
				entries = append(entries, entry)
				break
			}
			entries = append(entries, entry)
		}
		// sanity check on the first entry: (-2, -2) marks the list start
		if entries[0].X != -2 || entries[0].Y != -2 {
			err := fmt.Errorf("first layout entry does not mark the beginning of the array: %v", entries[0])
			return layouts{}, nil, err
		}
		pool[blockOffset] = len(blocks)
		blocks = append(blocks, entries)
		xRanges = append(xRanges, datarange.FromAddr(blockOffset, len(entries)*10))
	}
	// the very last entry needs to be aligned by 4
	xRanges[len(xRanges)-1] = xRanges[len(xRanges)-1].Align4()
	l := layouts{Entities: blocks}
	for _, blockOffset := range blockOffsets {
		l.Indices = append(l.Indices, pool[blockOffset])
	}
	endOfArray := off.Sum(count * 4)
	if isX { // we want to do the same thing with the vertically aligned layout
		yLayouts, yRanges, err := readEntityLayout(file, endOfArray, count, false)
		if err != nil {
			return layouts{}, nil, fmt.Errorf("readEntityLayout failed on Y: %w", err)
		}
		if err := hydrateYOrderFields(l, yLayouts); err != nil {
			return layouts{}, nil, fmt.Errorf("unable to populate YOrder field: %w", err)
		}
		// merge the X and Y pointer-array ranges and the X and Y data ranges
		xMerged := datarange.MergeDataRanges(xRanges)
		yMerged := yRanges[1]
		return l, []datarange.DataRange{
			datarange.MergeDataRanges([]datarange.DataRange{datarange.New(off, endOfArray), yRanges[0]}),
			datarange.MergeDataRanges([]datarange.DataRange{xMerged, yMerged}),
		}, nil
	} else {
		return l, []datarange.DataRange{datarange.New(off, endOfArray), datarange.MergeDataRanges(xRanges)}, nil
	}
}

View File

@ -1,348 +1,15 @@
package main package main
import ( import (
"encoding/binary"
"encoding/json"
"flag"
"fmt" "fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/rooms" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/util"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/spritebanks"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"os" "os"
"path"
) )
// dataContainer couples a parsed piece of stage data with the file range it
// was read from.
type dataContainer[T any] struct {
	dataRange datarange.DataRange
	content   T
}
// ovl aggregates everything extracted from a single stage overlay, together
// with the file ranges each asset occupies.
type ovl struct {
	ranges            []datarange.DataRange // consolidated ranges of all recognized assets
	layers            dataContainer[[]roomLayers]
	graphics          dataContainer[gfx]
	layouts           dataContainer[layouts]
	layoutsExtraRange datarange.DataRange // range of the layout pointer arrays (see readEntityLayout)
	tileMaps          dataContainer[map[psx.Addr][]byte]
	tileDefs          dataContainer[map[psx.Addr]tileDef]
}
// stageHeader mirrors the structure found at the very beginning of every
// stage overlay file. Marks: ✅ = extracted by this tool, 🫥 = not handled.
type stageHeader struct {
	FnUpdate              psx.Addr
	FnHitDetection        psx.Addr
	FnUpdateRoomPos       psx.Addr
	FnInitRoomEntities    psx.Addr
	Rooms                 psx.Addr // ✅
	Sprites               psx.Addr // ✅
	Cluts                 psx.Addr // 🫥
	Layouts               psx.Addr // ✅ may be null in some overlays (see getOvlAssets)
	Layers                psx.Addr // ✅
	Graphics              psx.Addr // 🫥 WIP
	FnUpdateStageEntities psx.Addr
}
// getStageHeader opens the given overlay file and decodes the stageHeader
// structure located at its very beginning.
func getStageHeader(fileName string) (stageHeader, error) {
	f, err := os.Open(fileName)
	if err != nil {
		return stageHeader{}, fmt.Errorf("failed to read stage header: %w", err)
	}
	defer f.Close()
	var h stageHeader
	if err := binary.Read(f, binary.LittleEndian, &h); err != nil {
		return stageHeader{}, fmt.Errorf("failed to read stage header: %w", err)
	}
	return h, nil
}
// getOvlAssets parses a stage overlay file and gathers every asset this tool
// understands (rooms, layers, tile maps, tile definitions, sprites, graphics
// and entity layouts), recording the file range each one occupies.
func getOvlAssets(fileName string) (ovl, error) {
	header, err := getStageHeader(fileName)
	if err != nil {
		return ovl{}, fmt.Errorf("failed to get ovl assets: %w", err)
	}
	file, err := os.Open(fileName)
	if err != nil {
		return ovl{}, err
	}
	defer file.Close()
	_, roomsRange, err := rooms.ReadRooms(file, header.Rooms)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to read rooms: %w", err)
	}
	layers, layersRange, err := readLayers(file, header.Layers)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to read layers: %w", err)
	}
	tileMaps, tileMapsRange, err := readAllTileMaps(file, layers)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to gather all the tile maps: %w", err)
	}
	tileDefs, tileDefsRange, err := readAllTiledefs(file, layers)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to gather all the tile defs: %w", err)
	}
	// check for unused tile defs (CEN has one): walk backwards from the
	// start of the known tile defs, one 0x10 record at a time, until the gap
	// after the tile maps is filled
	for tileMapsRange.End() < tileDefsRange.Begin() {
		offset := tileDefsRange.Begin().Sum(-0x10)
		unusedTileDef, unusedTileDefRange, err := readTiledef(file, offset)
		if err != nil {
			return ovl{}, fmt.Errorf("there is a gap between tileMaps and tileDefs: %w", err)
		}
		tileDefs[offset] = unusedTileDef
		tileDefsRange = datarange.MergeDataRanges([]datarange.DataRange{tileDefsRange, unusedTileDefRange})
	}
	_, spritesRange, err := spritebanks.ReadSpritesBanks(file, psx.RamStageBegin, header.Sprites)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to gather all sprites: %w", err)
	}
	graphics, graphicsRange, err := readGraphics(file, header.Graphics)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to gather all graphics: %w", err)
	}
	layoutOff := header.Layouts
	if layoutOff == psx.RamNull {
		// some overlays have this field nulled, we have to find the offset ourselves
		// it should be usually be right after header.Graphics
		layoutOff = graphicsRange.End() // ⚠️ assumption
	}
	nLayouts := 53 // it seems there are always 53 elements?!
	entityLayouts, layoutsRange, err := readEntityLayout(file, layoutOff, nLayouts, true)
	if err != nil {
		return ovl{}, fmt.Errorf("unable to gather all entity layouts: %w", err)
	}
	return ovl{
		ranges: datarange.ConsolidateDataRanges([]datarange.DataRange{
			roomsRange,
			layersRange,
			spritesRange,
			graphicsRange,
			layoutsRange[0],
			layoutsRange[1],
			tileMapsRange,
			tileDefsRange,
		}),
		layers:            dataContainer[[]roomLayers]{dataRange: layersRange, content: layers},
		graphics:          dataContainer[gfx]{dataRange: graphicsRange, content: graphics},
		layouts:           dataContainer[layouts]{dataRange: layoutsRange[1], content: entityLayouts},
		layoutsExtraRange: layoutsRange[0],
		tileMaps:          dataContainer[map[psx.Addr][]byte]{dataRange: tileMapsRange, content: tileMaps},
		tileDefs:          dataContainer[map[psx.Addr]tileDef]{dataRange: tileDefsRange, content: tileDefs},
	}, nil
}
// extractOvlAssets writes every asset carried by o into outputDir:
// layers and entity layouts as pretty-printed JSON, tile maps and the four
// tile-definition parts as raw binaries, plus one JSON manifest per tile
// definition referencing those parts.
func extractOvlAssets(o ovl, outputDir string) error {
	// Make sure the destination directory exists before writing anything.
	if err := os.MkdirAll(outputDir, 0755); err != nil {
		return err
	}
	layersJSON, err := json.MarshalIndent(o.layers.content, "", " ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(path.Join(outputDir, "layers.json"), layersJSON, 0644); err != nil {
		return fmt.Errorf("unable to create layers file: %w", err)
	}
	layoutsJSON, err := json.MarshalIndent(o.layouts.content, "", " ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(path.Join(outputDir, "entity_layouts.json"), layoutsJSON, 0644); err != nil {
		return fmt.Errorf("unable to create entity layouts file: %w", err)
	}
	// Each tile map is dumped verbatim, named after its original RAM offset.
	for offset, data := range o.tileMaps.content {
		name := path.Join(outputDir, getTilemapFileName(offset))
		if err := os.WriteFile(name, data, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", name, err)
		}
	}
	for offset, def := range o.tileDefs.content {
		paths := tileDefPaths{
			Tiles:      getTiledefIndicesFileName(offset),
			Pages:      getTiledefPagesFileName(offset),
			Cluts:      getTiledefClutsFileName(offset),
			Collisions: getTiledefCollisionsFileName(offset),
		}
		// The four binary parts share identical write/error handling.
		parts := []struct {
			name string
			data []byte
		}{
			{paths.Tiles, def.tiles},
			{paths.Pages, def.pages},
			{paths.Cluts, def.cluts},
			{paths.Collisions, def.cols},
		}
		for _, p := range parts {
			if err := os.WriteFile(path.Join(outputDir, p.name), p.data, 0644); err != nil {
				return fmt.Errorf("unable to create %q: %w", p.name, err)
			}
		}
		// The manifest ties the four part files together for the builder.
		manifest, err := json.MarshalIndent(paths, "", " ")
		if err != nil {
			return err
		}
		manifestPath := path.Join(outputDir, getTiledefFileName(offset))
		if err := os.WriteFile(manifestPath, manifest, 0644); err != nil {
			return fmt.Errorf("unable to create %q: %w", manifestPath, err)
		}
	}
	return nil
}
// extract reads the stage overlay at fileName and dumps all of its assets
// into outputDir.
func extract(fileName string, outputDir string) error {
	assets, err := getOvlAssets(fileName)
	if err != nil {
		return fmt.Errorf("unable to retrieve OVL assets: %w", err)
	}
	if err := extractOvlAssets(assets, outputDir); err != nil {
		return fmt.Errorf("unable to extract OVL assets: %w", err)
	}
	return nil
}
// info prints splat-style configuration hints for the stage overlay at
// fileName: sprite bank offsets, overall data coverage, and one subsegment
// line per recognized asset region (with gap markers for unrecognized data).
func info(fileName string) error {
	stHeader, err := getStageHeader(fileName)
	if err != nil {
		return fmt.Errorf("unable to retrieve stage info: %w", err)
	}
	fmt.Println("asset config hints:")
	fmt.Printf(" - [0x%X, sprite_banks]\n", stHeader.Sprites.Real(psx.RamStageBegin))
	fmt.Printf(" - [0x%X, skip]\n", stHeader.Sprites.Sum(24*4).Real(psx.RamStageBegin))
	o, err := getOvlAssets(fileName)
	if err != nil {
		return fmt.Errorf("unable to retrieve OVL assets: %w", err)
	}
	type segment struct {
		dataRange datarange.DataRange
		name      string
		comment   string
	}
	segments := []segment{
		{o.layers.dataRange, "header", "layers"},
		{o.layoutsExtraRange, "e_laydef", "layout entries header"},
		{o.layouts.dataRange, "e_layout", "layout entries data"},
		{o.tileMaps.dataRange, "tile_data", "tile data"},
		{o.tileDefs.dataRange, "tile_data", "tile definitions"},
	}
	fmt.Printf("data coverage: %+v\n", o.ranges)
	fmt.Println("subsegment hints:")
	fmt.Println(" - [0x0, .data, header]")
	for i, seg := range segments {
		line := fmt.Sprintf(" - [0x%X, .data, %s]", seg.dataRange.Begin().Real(psx.RamStageBegin), seg.name)
		if seg.comment != "" {
			line = fmt.Sprintf("%s # %s", line, seg.comment)
		}
		fmt.Println(line)
		// A hole between this segment and the next (or anything past the
		// last segment) is flagged as plain, unrecognized data.
		if i == len(segments)-1 || seg.dataRange.End() != segments[i+1].dataRange.Begin() {
			fmt.Printf(" - [0x%X, data]\n", seg.dataRange.End().Real(psx.RamStageBegin))
		}
	}
	return nil
}
// testStuff is a developer scratch harness: it extracts one or more stage
// overlays from a local disc dump and then rebuilds one of them, panicking on
// any error. It does not appear to be wired to a CLI subcommand; edit the
// ovls slice below to change which overlays get processed.
func testStuff() {
	// Full list of known stage overlay names, kept here for quick reference
	// when toggling entries in the ovls slice below. Deliberately unused.
	_ = []string{
		"ARE", "CAT", "CEN", "CHI", "DAI", "DRE", "LIB", "MAD",
		"NO0", "NO1", "NO2", "NO3", "NO4", "NP3", "NZ0", "NZ1",
		"ST0", "TE1", "TE2", "TE3", "TE4", "TE5", "TOP", "WRP",
		"RARE", "RCAT", "RCEN", "RCHI", "RDAI", "RLIB", "RNO0", "RNO1",
		"RNO2", "RNO3", "RNO4", "RNZ0", "RNZ1", "RTOP", "RWRP"}
	// Previous working set; overlays in /* */ were failing or untested.
	//ovls := []string{
	//	/*"ARE",*/ "CAT", "CEN", "CHI" /*"DAI",*/, "DRE", "LIB", /*"MAD",*/
	//	/*"NO0",*/ "NO1", "NO2", "NO3" /*"NO4",*/, "NP3", "NZ0", "NZ1",
	//	"ST0", "TE1", "TE2", "TE3", "TE4", "TE5" /*"TOP",*/, "WRP",
	//	"RARE", "RCAT" /*"RCEN",*/, "RCHI" /*"RDAI",*/ /*"RLIB",*/ /*"RNO0",*/, "RNO1",
	//	/*"RNO2",*/ "RNO3" /*"RNO4",*/ /*"RNZ0",*/ /*"RNZ1",*/ /*"RTOP",*/, "RWRP"}
	ovls := []string{"NZ0"}
	for _, ovl := range ovls {
		fmt.Printf("processing %s...\n", ovl)
		// Path assumes a US disc rip checked out at the repository root.
		fileName := fmt.Sprintf("../../disks/us/ST/%s/%s.BIN", ovl, ovl)
		if err := extract(fileName, "sample/"+ovl); err != nil {
			panic(err)
		}
	}
	// Round-trip check: rebuild the freshly extracted NZ0 assets.
	if err := buildAll("sample/NZ0", "buildAll/nz0"); err != nil {
		panic(err)
	}
}
// handlerStage dispatches the "stage" subcommands (info, extract, build_all).
// On an unknown or missing subcommand, or missing required flags, it prints
// usage to stderr and exits with status 1.
func handlerStage(args []string) error {
	commands := map[string]func(args []string) error{}
	commands["info"] = func(args []string) error {
		var stageOvl string
		infoCmd := flag.NewFlagSet("info", flag.ExitOnError)
		infoCmd.StringVar(&stageOvl, "stage_ovl", "", "The overlay file to process")
		infoCmd.Parse(args) // ExitOnError: Parse never returns on failure
		// Validate the required flag, consistent with the other subcommands.
		if stageOvl == "" {
			fmt.Fprintln(os.Stderr, "stage_ovl is required for info")
			infoCmd.PrintDefaults()
			os.Exit(1)
		}
		return info(stageOvl)
	}
	commands["extract"] = func(args []string) error {
		var stageOvl string
		var assetDir string
		extractCmd := flag.NewFlagSet("extract", flag.ExitOnError)
		extractCmd.StringVar(&stageOvl, "stage_ovl", "", "The overlay file to process")
		extractCmd.StringVar(&assetDir, "o", "", "Where to extract the asset files")
		extractCmd.Parse(args) // ExitOnError: Parse never returns on failure
		if stageOvl == "" || assetDir == "" {
			fmt.Fprintln(os.Stderr, "stage_ovl and asset_dir are required for extract")
			extractCmd.PrintDefaults()
			os.Exit(1)
		}
		return extract(stageOvl, assetDir)
	}
	commands["build_all"] = func(args []string) error {
		buildCmd := flag.NewFlagSet("build_all", flag.ExitOnError)
		var inputDir string
		var outputDir string
		buildCmd.StringVar(&inputDir, "i", "", "Folder where all the assets are located")
		buildCmd.StringVar(&outputDir, "o", "", "Where to store the processed source files")
		buildCmd.Parse(args) // ExitOnError: Parse never returns on failure
		if inputDir == "" || outputDir == "" {
			fmt.Fprintln(os.Stderr, "input_dir and output_dir are required for build")
			buildCmd.PrintDefaults()
			os.Exit(1)
		}
		return buildAll(inputDir, outputDir)
	}
	if len(args) > 0 {
		command := args[0]
		if f, found := commands[command]; found {
			return f(args[1:])
		}
		fmt.Fprintf(os.Stderr, "unknown subcommand %q. Valid subcommands are %s\n", command, joinMapKeys(commands, ", "))
	} else {
		fmt.Fprintf(os.Stderr, "Need a subcommand. Valid subcommands are %s\n", joinMapKeys(commands, ", "))
	}
	os.Exit(1)
	return nil
}
func handlerConfigExtract(args []string) error { func handlerConfigExtract(args []string) error {
if len(args) != 1 {
return fmt.Errorf("usage: sotn-assets extract <asset_config_path>")
}
c, err := readConfig(args[0]) c, err := readConfig(args[0])
if err != nil { if err != nil {
return err return err
@ -351,6 +18,9 @@ func handlerConfigExtract(args []string) error {
} }
func handlerConfigBuild(args []string) error { func handlerConfigBuild(args []string) error {
if len(args) != 1 {
return fmt.Errorf("usage: sotn-assets build <asset_config_path>")
}
c, err := readConfig(args[0]) c, err := readConfig(args[0])
if err != nil { if err != nil {
return err return err
@ -358,33 +28,18 @@ func handlerConfigBuild(args []string) error {
return buildFromConfig(c) return buildFromConfig(c)
} }
func handlerConfig(args []string) error { func handlerInfo(args []string) error {
commands := map[string]func(args []string) error{ if len(args) != 1 {
"extract": handlerConfigExtract, return fmt.Errorf("usage: sotn-assets info <stage_file_path>")
"build": handlerConfigBuild,
} }
return info(os.Stdout, args[0])
if len(args) > 0 {
command := args[0]
if f, found := commands[command]; found {
if err := f(args[1:]); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
return nil
}
fmt.Fprintf(os.Stderr, "unknown subcommand %q. Valid subcommand are %s\n", command, joinMapKeys(commands, ", "))
} else {
fmt.Fprintf(os.Stderr, "Need a subcommand. Valid subcommand are %s\n", joinMapKeys(commands, ", "))
}
os.Exit(1)
return nil
} }
func main() { func main() {
commands := map[string]func(args []string) error{ commands := map[string]func(args []string) error{
"stage": handlerStage, "extract": handlerConfigExtract,
"config": handlerConfig, "build": handlerConfigBuild,
"info": handlerInfo,
} }
args := os.Args[1:] args := os.Args[1:]
@ -392,14 +47,14 @@ func main() {
command := args[0] command := args[0]
if f, found := commands[command]; found { if f, found := commands[command]; found {
if err := f(args[1:]); err != nil { if err := f(args[1:]); err != nil {
fmt.Fprintln(os.Stderr, err) _, _ = fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
} }
return return
} }
fmt.Fprintf(os.Stderr, "unknown command %q. Valid commands are %s\n", command, joinMapKeys(commands, ", ")) fmt.Fprintf(os.Stderr, "unknown command %q. Valid commands are %s\n", command, util.JoinMapKeys(commands, ", "))
} else { } else {
fmt.Fprintf(os.Stderr, "Need a command. Valid commands are %s\n", joinMapKeys(commands, ", ")) fmt.Fprintf(os.Stderr, "Need a command. Valid commands are %s\n", util.JoinMapKeys(commands, ", "))
} }
os.Exit(1) os.Exit(1)
} }

View File

@ -1,30 +0,0 @@
package main
import (
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
)
// getTilemapFileName returns the dump file name for the tile map located at
// the given RAM address, keyed by its file offset within the overlay.
func getTilemapFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tilemap_%05X.bin", fileOffset)
}
// getTiledefFileName returns the JSON manifest file name for the tile
// definition located at the given RAM address.
func getTiledefFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tiledef_%05X.json", fileOffset)
}
// getTiledefIndicesFileName returns the file name for a tile definition's
// tile index data.
func getTiledefIndicesFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tiledef_%05X_tiles.bin", fileOffset)
}
// getTiledefPagesFileName returns the file name for a tile definition's
// page data.
func getTiledefPagesFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tiledef_%05X_pages.bin", fileOffset)
}
// getTiledefClutsFileName returns the file name for a tile definition's
// CLUT (color look-up table) data.
func getTiledefClutsFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tiledef_%05X_cluts.bin", fileOffset)
}
// getTiledefCollisionsFileName returns the file name for a tile definition's
// collision data.
func getTiledefCollisionsFileName(off psx.Addr) string {
	fileOffset := off.Real(psx.RamStageBegin)
	return fmt.Sprintf("tiledef_%05X_cols.bin", fileOffset)
}

View File

@ -0,0 +1,72 @@
package sotn
import (
"bufio"
"fmt"
"io"
"regexp"
"strconv"
"strings"
)
// removeComments strips C-style comments from a single line of source text:
// every /* ... */ pair that opens and closes on the line is removed, then
// anything after a // marker is dropped, and the result is trimmed of
// surrounding whitespace. Block comments spanning multiple lines are left
// untouched.
func removeComments(line string) string {
	for {
		open := strings.Index(line, "/*")
		closing := strings.Index(line, "*/")
		// Stop when no complete /* ... */ pair remains on this line.
		if open == -1 || closing == -1 || closing < open {
			break
		}
		line = line[:open] + line[closing+2:]
	}
	if idx := strings.Index(line, "//"); idx != -1 {
		line = line[:idx]
	}
	return strings.TrimSpace(line)
}
// ParseCEnum scans r for a C enum declaration called name and returns a map
// from numeric value to enumerator name. The map is pre-populated for values
// 0x00-0xFF with hex placeholders ("0x2A") so lookups of values the enum does
// not define still yield a printable string. Explicit assignments reset the
// running counter, exactly as in C; enumerators without an assignment take
// the previous value plus one. An error is returned if a value fails to
// parse or if reading from r fails.
func ParseCEnum(r io.Reader, name string) (map[int]string, error) {
	enumMap := make(map[int]string, 0x100)
	// Placeholder names for the full byte range; real entries overwrite them.
	for i := 0; i < 0x100; i++ {
		enumMap[i] = fmt.Sprintf("0x%02X", i)
	}
	scanner := bufio.NewScanner(r)
	// QuoteMeta guards against enum names containing regexp metacharacters.
	startRegex := regexp.MustCompile(fmt.Sprintf(`enum\s+%s\s*{`, regexp.QuoteMeta(name)))
	currentValue := 0
	for scanner.Scan() {
		line := removeComments(scanner.Text())
		if !startRegex.MatchString(line) {
			continue
		}
		// Inside the enum body: one enumerator per line until the closing brace.
		for scanner.Scan() {
			line := removeComments(scanner.Text())
			line = strings.TrimRight(line, ",")
			if strings.Contains(line, "}") {
				break
			}
			parts := strings.Split(line, "=")
			entryName := strings.TrimSpace(parts[0])
			if entryName == "" {
				continue
			}
			if len(parts) > 1 {
				valueStr := strings.TrimSpace(parts[1])
				// Base 0 follows C literal rules: 0x/0X prefixes are hex and
				// a leading 0 is octal, so "0x10", "0X10" and "010" all parse
				// as a C compiler would read them.
				value, err := strconv.ParseInt(valueStr, 0, 32)
				if err != nil {
					return nil, err
				}
				currentValue = int(value)
			}
			enumMap[currentValue] = entryName
			currentValue++
		}
	}
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return enumMap, nil
}

View File

@ -0,0 +1,33 @@
package sotn
import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"strings"
"testing"
)
// TestParseCEnum exercises ParseCEnum against a snippet containing a decoy
// enum, comments in several positions, implicit and explicit (decimal and
// hex) values, and a missing trailing semicolon.
func TestParseCEnum(t *testing.T) {
	src := `typedef enum IgnoreMe{ ONE, TWO, THREE };
// typedef struct MyEnum { this is a comment
enum MyEnum { // this is also a comment
First,
Second,
SomeId = 10
SomeHexId = 0x10
/* 0x18nope */ E_MARIA = 0x18
E_COMMENT = 123 // ignore
E_COMMENT_2 /* ignore this as well */
} // malformed, it misses a semicolon
`
	m, err := ParseCEnum(strings.NewReader(src), "MyEnum")
	require.NoError(t, err)
	// Implicit values count up from zero.
	assert.Equal(t, "First", m[0])
	assert.Equal(t, "Second", m[1])
	// Unassigned slots keep their hex placeholder.
	assert.Equal(t, "0x02", m[2])
	// Explicit assignments reset the running counter.
	assert.Equal(t, "SomeId", m[10])
	assert.Equal(t, "SomeHexId", m[0x10])
	assert.Equal(t, "E_MARIA", m[0x18])
	assert.Equal(t, "E_COMMENT", m[123])
	// An entry after an assignment continues from that value.
	assert.Equal(t, "E_COMMENT_2", m[124])
}

View File

@ -0,0 +1,33 @@
package sotn
import (
"encoding/binary"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"io"
)
// StageHeader mirrors the header found at the start of a stage overlay file.
// Field order and types must match the on-disk layout exactly, since the
// whole struct is filled by a single binary.Read in ReadStageHeader.
// The emoji markers appear to be the author's extraction-progress notes:
// ✅ handled, 🫥 not (fully) handled — confirm against the extractor.
type StageHeader struct {
	FnUpdate              psx.Addr // Fn*-prefixed fields are presumably function pointers (name-based; verify)
	FnHitDetection        psx.Addr
	FnUpdateRoomPos       psx.Addr
	FnInitRoomEntities    psx.Addr
	Rooms                 psx.Addr // ✅
	Sprites               psx.Addr // ✅
	Cluts                 psx.Addr // 🫥
	Layouts               psx.Addr // ✅
	Layers                psx.Addr // ✅
	Graphics              psx.Addr // 🫥 WIP
	FnUpdateStageEntities psx.Addr
}
// ReadStageHeader seeks to the start of r and decodes a little-endian
// StageHeader from it, returning an error if either step fails.
func ReadStageHeader(r io.ReadSeeker) (StageHeader, error) {
	var hdr StageHeader
	if _, err := r.Seek(0, io.SeekStart); err != nil {
		return hdr, fmt.Errorf("failed to seek to stage header: %w", err)
	}
	if err := binary.Read(r, binary.LittleEndian, &hdr); err != nil {
		return hdr, fmt.Errorf("failed to read stage header: %w", err)
	}
	return hdr, nil
}

View File

@ -1,4 +1,4 @@
package main package util
import ( import (
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx" "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
@ -6,7 +6,7 @@ import (
"strings" "strings"
) )
func joinMapKeys[T any](m map[string]T, sep string) string { func JoinMapKeys[T any](m map[string]T, sep string) string {
keys := make([]string, 0, len(m)) keys := make([]string, 0, len(m))
for k := range m { for k := range m {
keys = append(keys, k) keys = append(keys, k)
@ -14,7 +14,7 @@ func joinMapKeys[T any](m map[string]T, sep string) string {
return strings.Join(keys, sep) return strings.Join(keys, sep)
} }
func minBy[T any](slice []T, getter func(T) int) (max int) { func MinBy[T any](slice []T, getter func(T) int) (max int) {
if len(slice) == 0 { if len(slice) == 0 {
return max return max
} }
@ -28,7 +28,7 @@ func minBy[T any](slice []T, getter func(T) int) (max int) {
return max return max
} }
func maxBy[T any](slice []T, getter func(T) int) (max int) { func MaxBy[T any](slice []T, getter func(T) int) (max int) {
if len(slice) == 0 { if len(slice) == 0 {
return max return max
} }
@ -42,14 +42,14 @@ func maxBy[T any](slice []T, getter func(T) int) (max int) {
return max return max
} }
func btoi(b bool) int { func Btoi(b bool) int {
if b { if b {
return 1 return 1
} }
return 0 return 0
} }
func sortUniqueOffsets(slice []psx.Addr) []psx.Addr { func SortUniqueOffsets(slice []psx.Addr) []psx.Addr {
unique := map[psx.Addr]struct{}{} unique := map[psx.Addr]struct{}{}
for _, v := range slice { for _, v := range slice {
unique[v] = struct{}{} unique[v] = struct{}{}