Asset cutscene two stages (#1738)

I completely rewrote the cutscene asset handler. Instead of parsing the data
from the original overlay straight into a C-like header file, it now follows a
two-stage process: the data is first extracted into `assets/` with
`make extract_assets`, which lets modders modify the file before building it
into a C-like header with `make build_assets`. This also aims to fix #1701, as
the build process now takes the two stages into account.

I created a framework where each asset type only needs to expose the two
methods `Extract` and `Build`. The entire transformation process should stay
isolated, to avoid the kind of cognitive overload found in `build.go`. The
existing asset types still need to be migrated to this framework; the old code
served well enough to figure out how to build the infrastructure, but it should
now be ported to the new pattern.
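To give an idea of the shape of the framework, here is a minimal sketch of what a migrated asset type could look like. The `example` asset kind and its file layout are hypothetical; `assets.ExtractEntry`, `assets.BuildEntry` and the `Handler` pattern are the ones introduced in this PR:

```go
package example

import (
	"fmt"
	"os"
	"path"

	"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
)

type handler struct{}

// Handler is the single entry point the tool registers in its
// extractHandlers/buildHandlers maps.
var Handler = &handler{}

func (h *handler) Name() string { return "example" }

// Extract turns the raw overlay bytes into an editable file under assets/.
func (h *handler) Extract(e assets.ExtractEntry) error {
	data := e.Data[e.Start:e.End]
	out := path.Join(e.AssetDir, fmt.Sprintf("%s.example.bin", e.Name))
	return os.WriteFile(out, data, 0644)
}

// Build turns the (possibly modded) asset file into a C-like header under src/.
func (h *handler) Build(e assets.BuildEntry) error {
	in := path.Join(e.AssetDir, fmt.Sprintf("%s.example.bin", e.Name))
	data, err := os.ReadFile(in)
	if err != nil {
		return err
	}
	src := "// clang-format off\n"
	for _, b := range data {
		src += fmt.Sprintf("0x%02X,", b)
	}
	return os.WriteFile(path.Join(e.SrcDir, fmt.Sprintf("%s.h", e.Name)), []byte(src+"\n"), 0644)
}
```

The new `cutscene` package in this PR follows exactly this pattern, and the main tool now registers `cutscene.Handler.Extract` and `cutscene.Handler.Build` in its handler maps.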

Last but not least, I renamed `config/assets.us.weapon.yaml` to
`config/assets.us.yaml`, as it is now used by all the overlays.
Luciano Ciccariello 2024-10-06 13:10:06 +01:00 committed by GitHub
parent 02e4c62f6a
commit 30652db2dd
17 changed files with 423 additions and 312 deletions

.gitignore

@@ -12,7 +12,7 @@ generated.symbols.*.txt
 __pycache__
 asm/
-assets/
+/assets/
 build/
 expected/
 disks/


@@ -95,7 +95,7 @@ extract_assets: $(SOTNASSETS)
 $(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/WRP/WRP.BIN -o assets/st/wrp
 $(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/ST/RWRP/RWRP.BIN -o assets/st/rwrp
 $(SOTNASSETS) stage extract -stage_ovl disks/$(VERSION)/BOSS/MAR/MAR.BIN -o assets/boss/mar
-$(SOTNASSETS) config extract config/assets.us.weapon.yaml
+$(SOTNASSETS) config extract config/assets.us.yaml
 extract_assets_hd: $(SOTNASSETS)
 cd tools/sotn-assets; $(GO) install
 $(SOTNASSETS) stage extract -stage_ovl disks/pspeu/PSP_GAME/USRDIR/res/ps/hdbin/cen.bin -o assets/st/cen
@@ -110,7 +110,7 @@ build_assets: $(SOTNASSETS)
 $(SOTNASSETS) stage build_all -i assets/st/wrp -o src/st/wrp/
 $(SOTNASSETS) stage build_all -i assets/st/rwrp -o src/st/rwrp/
 $(SOTNASSETS) stage build_all -i assets/boss/mar -o src/boss/mar/
-$(SOTNASSETS) config build config/assets.$(VERSION).weapon.yaml
+$(SOTNASSETS) config build config/assets.$(VERSION).yaml
 build_assets_hd: $(SOTNASSETS)
 $(SOTNASSETS) stage build_all -i assets/st/cen -o src/st/cen/
 $(SOTNASSETS) stage build_all -i assets/st/wrp -o src/st/wrp/


@@ -50,3 +50,4 @@ typedef enum {
 #define SET_FLAG(x) CSOP_SET_FLAG, x
 #define LOAD_PORTRAIT(addr, id) CSOP_LOAD_PORTRAIT, script_word(addr), id
 #define SCRIPT_UNKNOWN_20(x) CSOP_SCRIPT_UNKNOWN_20, script_half(x)
+#define SCRIPT_UNKNOWN_23() CSOP_SCRIPT_UNKNOWN_23


@@ -4,6 +4,8 @@ import (
 "bytes"
 "encoding/json"
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets/cutscene"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "golang.org/x/sync/errgroup"
 "gopkg.in/yaml.v2"
@@ -30,30 +32,13 @@ type assetConfig struct {
 Files []assetFileEntry `yaml:"files"`
 }
-type assetEntry struct {
-data []byte
-start int
-end int
-assetDir string
-srcDir string
-name string
-args []string
-ramBase psx.Addr
-}
-type assetBuildEntry struct {
-assetDir string
-srcDir string
-name string
-}
-var extractHandlers = map[string]func(assetEntry) error{
-"frameset": func(e assetEntry) error {
+var extractHandlers = map[string]func(assets.ExtractEntry) error{
+"frameset": func(e assets.ExtractEntry) error {
 var set []*[]sprite
 var err error
-if e.start != e.end {
-r := bytes.NewReader(e.data)
-set, _, err = readFrameSet(r, e.ramBase, e.ramBase.Sum(e.start))
+if e.Start != e.End {
+r := bytes.NewReader(e.Data)
+set, _, err = readFrameSet(r, e.RamBase, e.RamBase.Sum(e.Start))
 if err != nil {
 return err
 }
@@ -65,7 +50,7 @@ var extractHandlers = map[string]func(assetEntry) error{
 return err
 }
-outPath := path.Join(e.assetDir, fmt.Sprintf("%s.frameset.json", e.name))
+outPath := path.Join(e.AssetDir, fmt.Sprintf("%s.frameset.json", e.Name))
 dir := filepath.Dir(outPath)
 if err := os.MkdirAll(dir, 0755); err != nil {
 fmt.Printf("failed to create directory %s: %v\n", dir, err)
@@ -73,31 +58,16 @@ var extractHandlers = map[string]func(assetEntry) error{
 }
 return os.WriteFile(outPath, content, 0644)
 },
-"cutscene": func(e assetEntry) error {
-if e.start == e.end {
-return fmt.Errorf("cutscene cannot be 0 bytes")
-}
-r := bytes.NewReader(e.data)
-script, err := parseCutsceneAsC(r, e.ramBase, e.ramBase.Sum(e.start), e.end - e.start)
-if err != nil {
-return err
-}
-outPath := path.Join(e.srcDir, fmt.Sprintf("%s.h", e.name))
-dir := filepath.Dir(outPath)
-if err := os.MkdirAll(dir, 0755); err != nil {
-fmt.Printf("failed to create directory %s: %v\n", dir, err)
-return err
-}
-return os.WriteFile(outPath, []byte(script), 0644)
-},
+"cutscene": cutscene.Handler.Extract,
 }
-var buildHandlers = map[string]func(assetBuildEntry) error{
-"frameset": func(e assetBuildEntry) error {
-inFileName := path.Join(e.assetDir, fmt.Sprintf("%s.frameset.json", e.name))
-outFileName := path.Join(e.srcDir, fmt.Sprintf("%s.h", e.name))
-return buildFrameSet(inFileName, outFileName, e.name)
+var buildHandlers = map[string]func(assets.BuildEntry) error{
+"frameset": func(e assets.BuildEntry) error {
+inFileName := path.Join(e.AssetDir, fmt.Sprintf("%s.frameset.json", e.Name))
+outFileName := path.Join(e.SrcDir, fmt.Sprintf("%s.h", e.Name))
+return buildFrameSet(inFileName, outFileName, e.Name)
 },
+"cutscene": cutscene.Handler.Build,
 }
 func parseArgs(entry []string) (offset int64, kind string, args []string, err error) {
@@ -134,7 +104,7 @@ func readConfig(path string) (*assetConfig, error) {
 func enqueueExtractAssetEntry(
 eg *errgroup.Group,
-handler func(assetEntry) error,
+handler func(assets.ExtractEntry) error,
 assetDir string,
 srcDir string,
 name string,
@@ -144,15 +114,14 @@ func enqueueExtractAssetEntry(
 args []string,
 ramBase psx.Addr) {
 eg.Go(func() error {
-if err := handler(assetEntry{
-data: data,
-start: start,
-end: end,
-assetDir: assetDir,
-srcDir: srcDir,
-ramBase: ramBase,
-name: name,
-args: args,
+if err := handler(assets.ExtractEntry{
+Data: data,
+Start: start,
+End: end,
+AssetDir: assetDir,
+RamBase: ramBase,
+Name: name,
+Args: args,
 }); err != nil {
 return fmt.Errorf("unable to extract asset %q: %v", name, err)
 }
@@ -206,18 +175,18 @@ func extractAssetFile(file assetFileEntry) error {
 func enqueueBuildAssetEntry(
 eg *errgroup.Group,
-handler func(assetBuildEntry) error,
+handler func(assets.BuildEntry) error,
 assetDir,
 sourceDir,
 name string) {
 eg.Go(func() error {
-err := handler(assetBuildEntry{
-assetDir: assetDir,
-srcDir: sourceDir,
-name: name,
+err := handler(assets.BuildEntry{
+AssetDir: assetDir,
+SrcDir: sourceDir,
+Name: name,
 })
 if err != nil {
-return fmt.Errorf("unable to build asset %q: %v", name, err)
+return fmt.Errorf("unable to build asset %q at %q: %v", name, assetDir, err)
 }
 return nil
 })


@ -0,0 +1,33 @@
package assets
import "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
type ExtractEntry struct {
Data []byte
Start int
End int
AssetDir string
Name string
Args []string
RamBase psx.Addr
}
type BuildEntry struct {
AssetDir string
SrcDir string
Name string
}
type Extracter interface {
Extract(e ExtractEntry) error
}
type Builder interface {
Build(e BuildEntry) error
}
type Handler interface {
Name() string
Extracter
Builder
}


@ -0,0 +1,224 @@
package cutscene
import (
"bytes"
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/assets"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"gopkg.in/yaml.v2"
"io"
"os"
"path"
"path/filepath"
"strconv"
"strings"
)
type handler struct{}
var Handler = &handler{}
func (h *handler) Name() string { return "cutscene" }
func (h *handler) Extract(e assets.ExtractEntry) error {
if e.Start == e.End {
return fmt.Errorf("a cutscene script cannot be 0 bytes")
}
r := bytes.NewReader(e.Data)
script, err := parseScript(r, e.RamBase, e.RamBase.Sum(e.Start), e.End-e.Start)
if err != nil {
return err
}
outFileName := assetPath(e.AssetDir, e.Name)
dir := filepath.Dir(outFileName)
if err := os.MkdirAll(dir, 0755); err != nil {
fmt.Printf("failed to create directory %s: %v\n", dir, err)
return err
}
yaml := "script:\n"
for _, command := range script {
if len(command) == 0 {
continue
}
switch command[0] {
case "TEXT":
yaml += fmt.Sprintf(" - [TEXT, \"%s\"]\n", command[1])
case "BYTE":
yaml += fmt.Sprintf(" - [BYTE, %s]\n", command[1])
default:
yaml += fmt.Sprintf(" - [%s]\n", strings.Join(command, ", "))
}
}
return os.WriteFile(outFileName, []byte(yaml), 0644)
}
type scriptSrc struct {
Script [][]string `yaml:"script"`
}
func (h *handler) Build(e assets.BuildEntry) error {
inFileName := assetPath(e.AssetDir, e.Name)
data, err := os.ReadFile(inFileName)
if err != nil {
return fmt.Errorf("failed to read cutscene file: %w", err)
}
var script scriptSrc
if err := yaml.Unmarshal(data, &script); err != nil {
return fmt.Errorf("failed to parse cutscene file: %w", err)
}
pool := getCommandPool()
sb := strings.Builder{}
sb.WriteString("// clang-format off\n")
for i, args := range script.Script {
if len(args) == 0 {
return fmt.Errorf("")
}
op := args[0]
if op == "TEXT" {
text := args[1]
for i, _ := range text {
if text[i] == '\'' {
sb.WriteString("'\\'',")
} else {
sb.WriteString(fmt.Sprintf("'%c',", text[i]))
}
}
sb.WriteString("\n")
continue
}
if op == "BYTE" {
if len(args) != 2 {
return fmt.Errorf("BYTE must have exactly one argument")
}
sb.WriteString(args[1])
sb.WriteString(",\n")
continue
}
cmd, found := pool[op]
if !found {
return fmt.Errorf("script %q does not have a command", args[0])
}
sb.WriteString(args[0])
sb.WriteString("(")
if len(cmd.params) != len(args)-1 {
return fmt.Errorf("command %q at line %d expects %d arguments but got %d",
op, i+1, len(cmd.params), len(args)-1)
}
sb.WriteString(strings.Join(args[1:], ","))
sb.WriteString("),\n")
}
return os.WriteFile(sourcePath(e.SrcDir, e.Name), []byte(sb.String()), 0644)
}
func assetPath(dir, name string) string {
if name == "" {
name = "cutscene_script"
}
return path.Join(dir, fmt.Sprintf("%s.yaml", name))
}
func sourcePath(dir, name string) string {
if name == "" {
name = "cutscene_script"
}
return path.Join(dir, fmt.Sprintf("%s.h", name))
}
type cmdDef struct {
name string
params []int
}
var commandDefinitions = []cmdDef{
{name: "END_CUTSCENE", params: []int{}},
{name: "LINE_BREAK", params: []int{}},
{name: "SET_SPEED", params: []int{1}},
{name: "SET_WAIT", params: []int{1}},
{name: "HIDE_DIALOG", params: []int{}},
{name: "SET_PORTRAIT", params: []int{1, 1}},
{name: "NEXT_DIALOG", params: []int{}},
{name: "SET_POS", params: []int{1, 1}},
{name: "CLOSE_DIALOG", params: []int{}},
{name: "PLAY_SOUND", params: []int{2}},
{name: "WAIT_FOR_SOUND", params: []int{}},
{name: "SCRIPT_UNKNOWN_11", params: []int{}},
{name: "SET_END", params: []int{4}},
{name: "SCRIPT_UNKNOWN_13", params: []int{}},
{name: "SCRIPT_UNKNOWN_14", params: []int{4, 4, 4}},
{name: "SCRIPT_UNKNOWN_15", params: []int{4}},
{name: "WAIT_FOR_FLAG", params: []int{1}},
{name: "SET_FLAG", params: []int{1}},
{name: "SCRIPT_UNKNOWN_18", params: []int{}},
{name: "LOAD_PORTRAIT", params: []int{4, 1}},
{name: "SCRIPT_UNKNOWN_20", params: []int{2}},
{name: "SCRIPT_UNKNOWN_21", params: []int{}},
{name: "RESET_FLAG", params: []int{1}},
{name: "SCRIPT_UNKNOWN_23", params: []int{}},
{name: "WAIT_FOR_FLAG_RESET", params: []int{1}},
}
func getCommandPool() map[string]cmdDef {
cmdPool := map[string]cmdDef{}
for _, command := range commandDefinitions {
cmdPool[command.name] = command
}
return cmdPool
}
func parseScript(r io.ReadSeeker, baseAddr, addr psx.Addr, length int) ([][]string, error) {
if err := addr.MoveFile(r, baseAddr); err != nil {
return nil, fmt.Errorf("unable to read cutscene script: %w", err)
}
script := make([][]string, 0)
text := ""
flushText := func() {
if len(text) > 0 {
script = append(script, []string{"TEXT", text})
text = ""
}
}
read1 := func(r io.ReadSeeker) byte {
b := make([]byte, 1)
_, _ = r.Read(b)
length -= 1
return b[0]
}
read2 := func(r io.ReadSeeker) int {
b := make([]byte, 2)
_, _ = r.Read(b)
length -= 2
return int(b[1]) | (int(b[0]) << 4)
}
read4 := func(r io.ReadSeeker) int {
b := make([]byte, 4)
_, _ = r.Read(b)
length -= 4
return int(b[3]) | (int(b[2]) << 4) | (int(b[1]) << 8) | (int(b[0]) << 12) | 0x80100000
}
for length > 0 {
op := int(read1(r))
if op < 0x20 {
flushText()
command := []string{commandDefinitions[op].name}
for _, param := range commandDefinitions[op].params {
switch param {
case 1:
command = append(command, strconv.FormatInt(int64(read1(r)), 10))
case 2:
command = append(command, "0x"+strconv.FormatInt(int64(read2(r)), 16))
case 4:
command = append(command, "0x"+strconv.FormatInt(int64(read4(r)), 16))
}
}
script = append(script, command)
} else if op < 0x7F {
text += string([]byte{byte(op)})
} else {
strByte := "0x" + strconv.FormatInt(int64(op), 16)
script = append(script, []string{"BYTE", strByte})
}
}
flushText()
return script, nil
}


@ -1,104 +0,0 @@
package main
import (
"fmt"
"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
"io"
"strings"
)
func readCutscene(r io.ReadSeeker, baseAddr, addr psx.Addr, length int) ([]string, error) {
if err := addr.MoveFile(r, baseAddr); err != nil {
return []string{}, fmt.Errorf("unable to read cutscene: %w", err)
}
read1 := func(r io.ReadSeeker) byte {
b := make([]byte, 1)
_, _ = r.Read(b)
length -= 1
return b[0]
}
read2 := func(r io.ReadSeeker) int {
b := make([]byte, 2)
_, _ = r.Read(b)
length -= 2
return int(b[1]) | (int(b[0]) << 4)
}
read4 := func(r io.ReadSeeker) int {
b := make([]byte, 4)
_, _ = r.Read(b)
length -= 4
return int(b[3]) | (int(b[2]) << 4) | (int(b[1]) << 8) | (int(b[0]) << 12) | 0x80100000
}
script := make([]string, 0)
for length > 0 {
op := read1(r)
switch op {
case 0:
script = append(script, "END_CUTSCENE()")
case 1:
script = append(script, "LINE_BREAK()")
case 2:
script = append(script, fmt.Sprintf("SET_SPEED(%d)", read1(r)))
case 3:
script = append(script, fmt.Sprintf("SET_WAIT(%d)", read1(r)))
case 4:
script = append(script, "HIDE_DIALOG()")
case 5:
script = append(script, fmt.Sprintf("SET_PORTRAIT(%d, %d)", read1(r), read1(r)))
case 6:
script = append(script, "NEXT_DIALOG()")
case 7:
script = append(script, fmt.Sprintf("SET_POS(%d, %d)", read1(r), read1(r)))
case 8:
script = append(script, "CLOSE_DIALOG()")
case 9:
script = append(script, fmt.Sprintf("PLAY_SOUND(0x%X)", read2(r)))
case 10:
script = append(script, "WAIT_FOR_SOUND()")
case 11:
script = append(script, "SCRIPT_UNKNOWN_11()")
case 12:
script = append(script, fmt.Sprintf(
"SET_END(0x%08X)", read4(r)))
case 13:
script = append(script, "SCRIPT_UNKNOWN_13()")
case 14:
script = append(script, fmt.Sprintf(
"SCRIPT_UNKNOWN_14(0x%08X, 0x%08X, 0x%08X)", read4(r), read4(r), read4(r)))
case 15:
script = append(script, fmt.Sprintf(
"SCRIPT_UNKNOWN_15(0x%08X)", read4(r)))
case 16:
script = append(script, fmt.Sprintf("WAIT_FOR_FLAG(%d)", read1(r)))
case 17:
script = append(script, fmt.Sprintf("SET_FLAG(%d)", read1(r)))
case 18:
script = append(script, "SCRIPT_UNKOWN_18()")
case 19:
script = append(script, fmt.Sprintf(
"LOAD_PORTRAIT(0x%08X, %d)", read4(r), read1(r)))
case 20:
script = append(script, fmt.Sprintf("SCRIPT_UNKNOWN_20(0x%X)", read2(r)))
case 0x27:
script = append(script, "'\\''")
case 0xFF:
script = append(script, "0xFF")
default:
if op >= 0x20 && op <= 0x7E {
script = append(script, fmt.Sprintf("'%s'", string([]byte{op})))
} else {
script = append(script, fmt.Sprintf("0x%02X", op))
}
}
}
return script, nil
}
func parseCutsceneAsC(r io.ReadSeeker, baseAddr, addr psx.Addr, length int) (string, error) {
script, err := readCutscene(r, baseAddr, addr, length)
if err != nil {
return "", err
}
return "// clang-format off\n" + strings.ReplaceAll(strings.Join(script, ",\n"), "',\n'", "','"), nil
}


@@ -1,4 +1,4 @@
-package main
+package datarange
 import (
 "fmt"
@@ -6,20 +6,43 @@ import (
 "sort"
 )
-type dataRange struct {
+type DataRange struct {
 begin psx.Addr
 end psx.Addr
 }
-func (r dataRange) Format(f fmt.State, c rune) {
-f.Write([]byte(fmt.Sprintf("(%s, %s)", r.begin, r.end)))
+func New(begin, end psx.Addr) DataRange {
+return DataRange{
+begin: begin,
+end: end,
+}
 }
-func (r dataRange) empty() bool {
+func FromAddr(addr psx.Addr, len int) DataRange {
+return New(addr, addr.Sum(len))
+}
+func FromAlignedAddr(addr psx.Addr, len int) DataRange {
+return New(addr, addr.Sum(len).Align4())
+}
+func (r DataRange) Align4() DataRange {
+return New(r.begin, r.end.Align4())
+}
+func (r DataRange) Format(f fmt.State, c rune) {
+_, _ = f.Write([]byte(fmt.Sprintf("(%s, %s)", r.begin, r.end)))
+}
+func (r DataRange) Begin() psx.Addr { return r.begin }
+func (r DataRange) End() psx.Addr { return r.end }
+func (r DataRange) Empty() bool {
 return r.begin == psx.RamNull && r.end == psx.RamNull
 }
-func mergeDataRanges(ranges []dataRange) dataRange {
+func MergeDataRanges(ranges []DataRange) DataRange {
 if len(ranges) == 0 {
 err := fmt.Errorf("no datarange, bug?!")
 panic(err)
@@ -42,30 +65,30 @@ func mergeDataRanges(ranges []dataRange) dataRange {
 }
 }
-return dataRange{
+return DataRange{
 begin: ranges[0].begin,
 end: ranges[len(ranges)-1].end,
 }
 }
-func consolidateDataRanges(ranges []dataRange) []dataRange {
+func ConsolidateDataRanges(ranges []DataRange) []DataRange {
 if len(ranges) == 0 {
-return []dataRange{}
+return []DataRange{}
 }
 sort.Slice(ranges, func(i, j int) bool {
 return ranges[i].begin < ranges[j].begin
 })
-for ranges[0].empty() {
+for ranges[0].Empty() {
 ranges = ranges[1:]
 }
-consolidated := []dataRange{}
+consolidated := []DataRange{}
 first := 0
 for i := 0; i < len(ranges)-1; i++ {
 if ranges[i].end != ranges[i+1].begin {
-consolidated = append(consolidated, dataRange{
+consolidated = append(consolidated, DataRange{
 begin: ranges[first].begin,
 end: ranges[i].end,
 })
@@ -73,7 +96,7 @@ func consolidateDataRanges(ranges []dataRange) []dataRange {
 }
 }
-return append(consolidated, dataRange{
+return append(consolidated, DataRange{
 begin: ranges[first].begin,
 end: ranges[len(ranges)-1].end,
 })


@@ -2,6 +2,7 @@ package main
 import (
 "encoding/binary"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 )
@@ -35,9 +36,9 @@ type gfx struct {
 indices []int
 }
-func readGraphics(file *os.File, off psx.Addr) (gfx, dataRange, error) {
+func readGraphics(file *os.File, off psx.Addr) (gfx, datarange.DataRange, error) {
 if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 // all the offsets are before the array, so it is easy to find where the offsets array ends
@@ -45,7 +46,7 @@ func readGraphics(file *os.File, off psx.Addr) (gfx, dataRange, error) {
 for {
 var offBank psx.Addr
 if err := binary.Read(file, binary.LittleEndian, &offBank); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 if offBank >= off {
 break
@@ -57,36 +58,33 @@ func readGraphics(file *os.File, off psx.Addr) (gfx, dataRange, error) {
 pool := map[psx.Addr]int{}
 pool[psx.RamNull] = -1
 blocks := []gfxBlock{}
-ranges := []dataRange{}
+ranges := []datarange.DataRange{}
 for _, blockOffset := range sortUniqueOffsets(blockOffsets) {
 if blockOffset == psx.RamNull { // exception for ST0
 continue
 }
 if err := blockOffset.MoveFile(file, psx.RamStageBegin); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 var block gfxBlock
 if err := binary.Read(file, binary.LittleEndian, &block.kind); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 if err := binary.Read(file, binary.LittleEndian, &block.flags); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 if block.kind == gfxKind(0xFFFF) && block.flags == 0xFFFF { // exception for ST0
 pool[blockOffset] = len(blocks)
 blocks = append(blocks, block)
-ranges = append(ranges, dataRange{
-begin: blockOffset,
-end: blockOffset.Sum(4),
-})
+ranges = append(ranges, datarange.FromAddr(blockOffset, 4))
 continue
 }
 for {
 var entry gfxEntry
 if err := binary.Read(file, binary.LittleEndian, &entry); err != nil {
-return gfx{}, dataRange{}, err
+return gfx{}, datarange.DataRange{}, err
 }
 if entry.X == 0xFFFF && entry.Y == 0xFFFF {
 break
@@ -95,10 +93,7 @@ func readGraphics(file *os.File, off psx.Addr) (gfx, dataRange, error) {
 }
 pool[blockOffset] = len(blocks)
 blocks = append(blocks, block)
-ranges = append(ranges, dataRange{
-begin: blockOffset,
-end: blockOffset.Sum(4 + len(block.entries)*12 + 4),
-})
+ranges = append(ranges, datarange.FromAddr(blockOffset, 4+len(block.entries)*12+4))
 }
 var g gfx
@@ -106,8 +101,5 @@ func readGraphics(file *os.File, off psx.Addr) (gfx, dataRange, error) {
 g.indices = append(g.indices, pool[blockOffset])
 }
-return g, mergeDataRanges(append(ranges, dataRange{
-begin: off,
-end: off.Sum(len(blockOffsets) * 4),
-})), nil
+return g, datarange.MergeDataRanges(append(ranges, datarange.FromAddr(off, len(blockOffsets)*4))), nil
 }


@@ -3,6 +3,7 @@ package main
 import (
 "encoding/binary"
 "encoding/json"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 "slices"
@@ -77,12 +78,12 @@ func (r roomLayers) MarshalJSON() ([]byte, error) {
 return json.Marshal(m)
 }
-func readLayers(file *os.File, off psx.Addr) ([]roomLayers, dataRange, error) {
+func readLayers(file *os.File, off psx.Addr) ([]roomLayers, datarange.DataRange, error) {
 if off == 0 {
-return nil, dataRange{}, nil
+return nil, datarange.DataRange{}, nil
 }
 if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 // when the data starts to no longer makes sense, we can assume we reached the end of the array
@@ -90,7 +91,7 @@ func readLayers(file *os.File, off psx.Addr) ([]roomLayers, dataRange, error) {
 layersOff := make([]psx.Addr, 2)
 for {
 if err := binary.Read(file, binary.LittleEndian, layersOff); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 if layersOff[0] <= psx.RamStageBegin || layersOff[0] >= off ||
 layersOff[1] <= psx.RamStageBegin || layersOff[1] >= off {
@@ -108,11 +109,11 @@ func readLayers(file *os.File, off psx.Addr) ([]roomLayers, dataRange, error) {
 }
 if err := layerOffset.MoveFile(file, psx.RamStageBegin); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 var l layer
 if err := binary.Read(file, binary.LittleEndian, &l); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 if l.Data != psx.RamNull || l.Tiledef != psx.RamNull || l.PackedInfo != 0 {
 pool[layerOffset] = &l
@@ -128,8 +129,5 @@ func readLayers(file *os.File, off psx.Addr) ([]roomLayers, dataRange, error) {
 roomsLayers[i].fg = pool[layerOffsets[i*2+0]]
 roomsLayers[i].bg = pool[layerOffsets[i*2+1]]
 }
-return roomsLayers, dataRange{
-begin: slices.Min(layerOffsets),
-end: off.Sum(count * 8),
-}, nil
+return roomsLayers, datarange.New(slices.Min(layerOffsets), off.Sum(count*8)), nil
 }


@@ -3,6 +3,7 @@ package main
 import (
 "encoding/binary"
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "io"
 "os"
@@ -74,7 +75,7 @@ func hydrateYOrderFields(x layouts, y layouts) error {
 return nil
 }
-func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []dataRange, error) {
+func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []datarange.DataRange, error) {
 if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
 return layouts{}, nil, err
 }
@@ -89,7 +90,7 @@ func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []dataRange, error) {
 // the order of each layout entry must be preserved
 pool := map[psx.Addr]int{}
 blocks := [][]layoutEntry{}
-xRanges := []dataRange{}
+xRanges := []datarange.DataRange{}
 for _, blockOffset := range sortUniqueOffsets(blockOffsets) {
 if err := blockOffset.MoveFile(file, psx.RamStageBegin); err != nil {
 return layouts{}, nil, err
@@ -115,13 +116,10 @@ func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []dataRange, error) {
 pool[blockOffset] = len(blocks)
 blocks = append(blocks, entries)
-xRanges = append(xRanges, dataRange{
-begin: blockOffset,
-end: blockOffset.Sum(len(entries) * 10),
-})
+xRanges = append(xRanges, datarange.FromAddr(blockOffset, len(entries)*10))
 }
 // the very last entry needs to be aligned by 4
-xRanges[len(xRanges)-1].end = xRanges[len(xRanges)-1].end.Align4()
+xRanges[len(xRanges)-1] = xRanges[len(xRanges)-1].Align4()
 l := layouts{Entities: blocks}
 for _, blockOffset := range blockOffsets {
@@ -137,25 +135,13 @@ func readEntityLayout(file *os.File, off psx.Addr, count int, isX bool) (layouts, []dataRange, error) {
 if err := hydrateYOrderFields(l, yLayouts); err != nil {
 return layouts{}, nil, fmt.Errorf("unable to populate YOrder field: %w", err)
 }
-xMerged := mergeDataRanges(xRanges)
+xMerged := datarange.MergeDataRanges(xRanges)
 yMerged := yRanges[1]
-return l, []dataRange{
-mergeDataRanges([]dataRange{
-{
-begin: off,
-end: endOfArray,
-},
-yRanges[0],
-}),
-mergeDataRanges([]dataRange{xMerged, yMerged}),
+return l, []datarange.DataRange{
+datarange.MergeDataRanges([]datarange.DataRange{datarange.New(off, endOfArray), yRanges[0]}),
+datarange.MergeDataRanges([]datarange.DataRange{xMerged, yMerged}),
 }, nil
 } else {
-return l, []dataRange{
-{
-begin: off,
-end: endOfArray,
-},
-mergeDataRanges(xRanges),
-}, nil
+return l, []datarange.DataRange{datarange.New(off, endOfArray), datarange.MergeDataRanges(xRanges)}, nil
 }
 }


@@ -5,24 +5,25 @@ import (
 "encoding/json"
 "flag"
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 "path"
 )
 type dataContainer[T any] struct {
-dataRange dataRange
+dataRange datarange.DataRange
 content T
 }
 type ovl struct {
-ranges []dataRange
+ranges []datarange.DataRange
 rooms dataContainer[[]room]
 layers dataContainer[[]roomLayers]
 sprites dataContainer[spriteDefs]
 graphics dataContainer[gfx]
 layouts dataContainer[layouts]
-layoutsExtraRange dataRange
+layoutsExtraRange datarange.DataRange
 tileMaps dataContainer[map[psx.Addr][]byte]
 tileDefs dataContainer[map[psx.Addr]tileDef]
 }
@@ -74,14 +75,14 @@ func getOvlAssets(fileName string) (ovl, error) {
 }
 // check for unused tile defs (CEN has one)
-for tileMapsRange.end < tileDefsRange.begin {
-offset := tileDefsRange.begin.Sum(-0x10)
+for tileMapsRange.End() < tileDefsRange.Begin() {
+offset := tileDefsRange.Begin().Sum(-0x10)
 unusedTileDef, unusedTileDefRange, err := readTiledef(file, offset)
 if err != nil {
 return ovl{}, fmt.Errorf("there is a gap between tileMaps and tileDefs: %w", err)
 }
 tileDefs[offset] = unusedTileDef
-tileDefsRange = mergeDataRanges([]dataRange{tileDefsRange, unusedTileDefRange})
+tileDefsRange = datarange.MergeDataRanges([]datarange.DataRange{tileDefsRange, unusedTileDefRange})
 }
 sprites, spritesRange, err := readSpritesBanks(file, psx.RamStageBegin, header.Sprites)
@@ -98,7 +99,7 @@ func getOvlAssets(fileName string) (ovl, error) {
 if layoutOff == psx.RamNull {
 // some overlays have this field nulled, we have to find the offset ourselves
 // it should be usually be right after header.Graphics
-layoutOff = graphicsRange.end // ⚠️ assumption
+layoutOff = graphicsRange.End() // ⚠️ assumption
 }
 nLayouts := maxBy(rooms, func(r room) int { // ⚠️ assumption
 return int(r.EntityLayoutID)
@@ -110,7 +111,7 @@ func getOvlAssets(fileName string) (ovl, error) {
 }
 return ovl{
-ranges: consolidateDataRanges([]dataRange{
+ranges: datarange.ConsolidateDataRanges([]datarange.DataRange{
 roomsRange,
 layersRange,
 spritesRange,
@@ -229,7 +230,7 @@ func info(fileName string) error {
 }
 entries := []struct {
-dataRange dataRange
+dataRange datarange.DataRange
 name string
 comment string
 }{
@@ -247,15 +248,15 @@ func info(fileName string) error {
 fmt.Println(" - [0x0, .data, header]")
 for i := 0; i < len(entries); i++ {
 e := entries[i]
-s := fmt.Sprintf(" - [0x%X, .data, %s]", e.dataRange.begin.Real(psx.RamStageBegin), e.name)
+s := fmt.Sprintf(" - [0x%X, .data, %s]", e.dataRange.Begin().Real(psx.RamStageBegin), e.name)
 if e.comment != "" {
 s = fmt.Sprintf("%s # %s", s, e.comment)
 }
 fmt.Println(s)
 // if there is a gap between the current entry and the next one, mark it as unrecognized data
-if i == len(entries)-1 || e.dataRange.end != entries[i+1].dataRange.begin {
-fmt.Printf(" - [0x%X, data]\n", e.dataRange.end.Real(psx.RamStageBegin))
+if i == len(entries)-1 || e.dataRange.End() != entries[i+1].dataRange.Begin() {
+fmt.Printf(" - [0x%X, data]\n", e.dataRange.End().Real(psx.RamStageBegin))
 }
 }
 return nil


@@ -2,6 +2,7 @@ package main
 import (
 "encoding/binary"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 )
@@ -21,27 +22,24 @@ func (r room) isTerminator() bool {
 return r.Left == 0x40
 }
-func readRooms(file *os.File, off psx.Addr) ([]room, dataRange, error) {
+func readRooms(file *os.File, off psx.Addr) ([]room, datarange.DataRange, error) {
 if off == 0 {
-return nil, dataRange{}, nil
+return nil, datarange.DataRange{}, nil
 }
 if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 rooms := []room{}
 for {
 var room room
 if err := binary.Read(file, binary.LittleEndian, &room); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 if room.isTerminator() {
 break
 }
 rooms = append(rooms, room)
 }
-return rooms, dataRange{
-begin: off,
-end: off.Sum(len(rooms)*8 + 4),
-}, nil
+return rooms, datarange.FromAddr(off, len(rooms)*8+4), nil
 }


@@ -3,6 +3,7 @@ package main
 import (
 "encoding/binary"
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "io"
 "sort"
@@ -27,30 +28,27 @@ type spriteDefs struct {
 Indices []int `json:"indices"`
 }
-func readSprites(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]sprite, dataRange, error) {
+func readSprites(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]sprite, datarange.DataRange, error) {
 if err := addr.MoveFile(r, baseAddr); err != nil {
-return nil, dataRange{}, fmt.Errorf("invalid sprites: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("invalid sprites: %w", err)
 }
 var count uint16
 if err := binary.Read(r, binary.LittleEndian, &count); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 sprites := make([]sprite, count)
 if err := binary.Read(r, binary.LittleEndian, sprites); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
-return sprites, dataRange{
-begin: addr,
-end: addr.Sum(4 + 0x16*int(count)).Align4(),
-}, nil
+return sprites, datarange.FromAlignedAddr(addr, 4+0x16*int(count)), nil
 }
-func readFrameSet(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]*[]sprite, dataRange, error) {
+func readFrameSet(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]*[]sprite, datarange.DataRange, error) {
 if err := addr.MoveFile(r, baseAddr); err != nil {
-return nil, dataRange{}, fmt.Errorf("invalid sprite Indices: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("invalid sprite Indices: %w", err)
 }
 // the end of the sprite array is the beginning of the earliest sprite offset
@@ -65,24 +63,20 @@ func readFrameSet(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]*[]sprite, dataRange, error) {
 var spriteOffset psx.Addr
 if err := binary.Read(r, binary.LittleEndian, &spriteOffset); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 spriteOffsets = append(spriteOffsets, spriteOffset)
 if spriteOffset != psx.RamNull {
 if !spriteOffset.InRange(baseAddr, psx.RamGameEnd) {
 err := fmt.Errorf("sprite offset %s is not valid", spriteOffset)
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 earliestSpriteOff = min(earliestSpriteOff, spriteOffset)
 }
 }
-headerRange := dataRange{
-begin: addr,
-end: earliestSpriteOff,
-}
+headerRange := datarange.New(addr, earliestSpriteOff)
 spriteBank := make([]*[]sprite, len(spriteOffsets))
-spriteRanges := []dataRange{}
+spriteRanges := []datarange.DataRange{}
 for i, offset := range spriteOffsets {
 if offset == psx.RamNull {
 spriteBank[i] = nil
@@ -90,18 +84,18 @@ func readFrameSet(r io.ReadSeeker, baseAddr, addr psx.Addr) ([]*[]sprite, dataRange, error) {
 }
 sprites, ranges, err := readSprites(r, baseAddr, offset)
 if err != nil {
-return nil, dataRange{}, fmt.Errorf("unable to read sprites: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("unable to read sprites: %w", err)
 }
 spriteBank[i] = &sprites
 spriteRanges = append(spriteRanges, ranges)
 }
-return spriteBank, mergeDataRanges(append(spriteRanges, headerRange)), nil
+return spriteBank, datarange.MergeDataRanges(append(spriteRanges, headerRange)), nil
 }
-func readSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (spriteDefs, dataRange, error) {
+func readSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (spriteDefs, datarange.DataRange, error) {
 if err := addr.MoveFile(r, baseAddr); err != nil {
-return spriteDefs{}, dataRange{}, err
+return spriteDefs{}, datarange.DataRange{}, err
 }
 // start with a capacity of 24 as that's the length for all the stage overlays
@@ -117,7 +111,7 @@ func readSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (spriteDefs, dataRange, error) {
 // the order sprites are stored must be preserved
 pool := map[psx.Addr][]*[]sprite{}
-spriteRanges := []dataRange{}
+spriteRanges := []datarange.DataRange{}
 for _, spriteAddr := range offBanks {
 if spriteAddr == psx.RamNull {
 continue
@@ -127,7 +121,7 @@ func readSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (spriteDefs, dataRange, error) {
 }
 bank, bankRange, err := readFrameSet(r, baseAddr, spriteAddr)
 if err != nil {
-return spriteDefs{}, dataRange{}, fmt.Errorf("unable to read sprite Indices: %w", err)
+return spriteDefs{}, datarange.DataRange{}, fmt.Errorf("unable to read sprite Indices: %w", err)
 }
 pool[spriteAddr] = bank
 spriteRanges = append(spriteRanges, bankRange)
@@ -162,5 +156,5 @@ func readSpritesBanks(r io.ReadSeeker, baseAddr, addr psx.Addr) (spriteDefs, dataRange, error) {
 return spriteDefs{
 Banks: banks,
 Indices: indices,
-}, mergeDataRanges(spriteRanges), nil
+}, datarange.MergeDataRanges(spriteRanges), nil
 }


@@ -3,6 +3,7 @@ package main
 import (
 "encoding/binary"
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 )
@@ -21,14 +22,14 @@ type tileDefPaths struct {
 Collisions string `json:"collisions"`
 }
-func readTiledef(file *os.File, off psx.Addr) (tileDef, dataRange, error) {
+func readTiledef(file *os.File, off psx.Addr) (tileDef, datarange.DataRange, error) {
 if err := off.MoveFile(file, psx.RamStageBegin); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 offsets := make([]psx.Addr, 4)
 if err := binary.Read(file, binary.LittleEndian, offsets); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 td := tileDef{
@@ -39,48 +40,45 @@ func readTiledef(file *os.File, off psx.Addr) (tileDef, dataRange, error) {
 }
 if err := offsets[0].MoveFile(file, psx.RamStageBegin); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if _, err := file.Read(td.tiles); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if err := offsets[1].MoveFile(file, psx.RamStageBegin); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if _, err := file.Read(td.pages); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if err := offsets[2].MoveFile(file, psx.RamStageBegin); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if _, err := file.Read(td.cluts); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if err := offsets[3].MoveFile(file, psx.RamStageBegin); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
 if _, err := file.Read(td.cols); err != nil {
-return tileDef{}, dataRange{}, err
+return tileDef{}, datarange.DataRange{}, err
 }
-return td, dataRange{
-begin: offsets[0],
-end: off.Sum(0x10),
-}, nil
+return td, datarange.New(offsets[0], off.Sum(0x10)), nil
 }
-func readAllTiledefs(file *os.File, roomLayers []roomLayers) (map[psx.Addr]tileDef, dataRange, error) {
-ranges := []dataRange{}
+func readAllTiledefs(file *os.File, roomLayers []roomLayers) (map[psx.Addr]tileDef, datarange.DataRange, error) {
+ranges := []datarange.DataRange{}
 processed := map[psx.Addr]tileDef{}
 for _, rl := range roomLayers {
 if rl.fg != nil {
 if _, found := processed[rl.fg.Tiledef]; !found {
 td, r, err := readTiledef(file, rl.fg.Tiledef)
 if err != nil {
-return nil, dataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
 }
 processed[rl.fg.Tiledef] = td
 ranges = append(ranges, r)
@@ -90,12 +88,12 @@ func readAllTiledefs(file *os.File, roomLayers []roomLayers) (map[psx.Addr]tileDef, dataRange, error) {
 if _, found := processed[rl.bg.Tiledef]; !found {
 td, r, err := readTiledef(file, rl.bg.Tiledef)
 if err != nil {
-return nil, dataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tiledef: %w", err)
 }
 processed[rl.bg.Tiledef] = td
 ranges = append(ranges, r)
 }
 }
 }
-return processed, mergeDataRanges(ranges), nil
+return processed, datarange.MergeDataRanges(ranges), nil
 }


@@ -2,33 +2,31 @@ package main
 import (
 "fmt"
+"github.com/xeeynamo/sotn-decomp/tools/sotn-assets/datarange"
 "github.com/xeeynamo/sotn-decomp/tools/sotn-assets/psx"
 "os"
 )
-func readTilemap(file *os.File, layer *layer) ([]byte, dataRange, error) {
+func readTilemap(file *os.File, layer *layer) ([]byte, datarange.DataRange, error) {
 if err := layer.Data.MoveFile(file, psx.RamStageBegin); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
 data := make([]byte, layer.tilemapFileSize())
 if _, err := file.Read(data); err != nil {
-return nil, dataRange{}, err
+return nil, datarange.DataRange{}, err
 }
-return data, dataRange{
-begin: layer.Data,
-end: layer.Data.Sum(len(data)),
-}, nil
+return data, datarange.FromAddr(layer.Data, len(data)), nil
 }
-func readAllTileMaps(file *os.File, roomLayers []roomLayers) (map[psx.Addr][]byte, dataRange, error) {
-ranges := []dataRange{}
+func readAllTileMaps(file *os.File, roomLayers []roomLayers) (map[psx.Addr][]byte, datarange.DataRange, error) {
+ranges := []datarange.DataRange{}
 processed := map[psx.Addr][]byte{}
 for _, rl := range roomLayers {
 if rl.fg != nil {
 if _, found := processed[rl.fg.Data]; !found {
 td, r, err := readTilemap(file, rl.fg)
 if err != nil {
-return nil, dataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
 }
 processed[rl.fg.Data] = td
 ranges = append(ranges, r)
@@ -38,12 +36,12 @@ func readAllTileMaps(file *os.File, roomLayers []roomLayers) (map[psx.Addr][]byte, dataRange, error) {
 if _, found := processed[rl.bg.Data]; !found {
 td, r, err := readTilemap(file, rl.bg)
 if err != nil {
-return nil, dataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
+return nil, datarange.DataRange{}, fmt.Errorf("unable to read fg tilemap: %w", err)
 }
 processed[rl.bg.Data] = td
 ranges = append(ranges, r)
 }
 }
 }
-return processed, mergeDataRanges(ranges), nil
+return processed, datarange.MergeDataRanges(ranges), nil
 }