mirror of https://github.com/pmret/papermario.git
synced 2024-11-23 21:59:46 +00:00

commit d353ab87a3

@@ -8,3 +8,6 @@ insert_final_newline = true
[*.yaml]
indent_size = 2

[Makefile]
indent_type = tab

3  .gitignore  vendored
@@ -8,6 +8,9 @@ venv/
 ctx.c
 expected/

+# Assets
+assets
+
 # Build artifacts
 *.ld
 *.z64

20  Makefile
@@ -10,6 +10,7 @@ ASM_DIRS := asm asm/os
 INCLUDE_DIRS := include include/PR src
 DATA_DIRS := bin
 YAY0_DIRS := bin/Yay0
+ASSETS_FS_DIRS := assets/fs

 # Source code files
 C_FILES := $(foreach dir,$(SRC_DIRS),$(wildcard $(dir)/*.c))
@@ -19,11 +20,13 @@ ifdef PM_HEADER_REBUILD
 endif
 DATA_FILES := $(foreach dir,$(DATA_DIRS),$(wildcard $(dir)/*.bin))
 YAY0_FILES := $(foreach dir,$(YAY0_DIRS),$(wildcard $(dir)/*.bin))
+ASSETS_FS_FILES := $(foreach dir,$(ASSETS_FS_DIRS),$(wildcard $(dir)/*.*))

 # Object files
 O_FILES := $(foreach file,$(C_FILES),$(BUILD_DIR)/$(file:.c=.o)) \
-$(foreach file,$(S_FILES),$(BUILD_DIR)/$(file:.s=.o)) \
-$(foreach file,$(DATA_FILES),$(BUILD_DIR)/$(file:.bin=.o)) \
+$(foreach file,$(S_FILES),$(BUILD_DIR)/$(file:.s=.o)) \
+$(foreach file,$(DATA_FILES),$(BUILD_DIR)/$(file:.bin=.o)) \
+$(foreach dir,$(ASSETS_FS_DIRS),$(BUILD_DIR)/$(dir).o) \

 YAY0_FILES := $(foreach file,$(YAY0_FILES),$(BUILD_DIR)/$(file:.bin=.bin.Yay0))

@@ -52,7 +55,7 @@ LDFLAGS = -T undefined_syms.txt -T undefined_funcs.txt -T $(LD_SCRIPT) -Map $

 ######################## Targets #############################

-$(foreach dir,$(SRC_DIRS) $(ASM_DIRS) $(DATA_DIRS) ,$(shell mkdir -p build/$(dir)))
+$(foreach dir,$(SRC_DIRS) $(ASM_DIRS) $(DATA_DIRS) $(ASSETS_FS_DIRS) ,$(shell mkdir -p build/$(dir)))

 default: all

@@ -67,7 +70,7 @@ submodules:
 	git submodule update --init --recursive

 split:
-	rm -rf $(DATA_DIRS) && ./tools/n64splat/split.py baserom.z64 tools/splat.yaml . --modes ld bin Yay0
+	rm -rf $(DATA_DIRS) && ./tools/n64splat/split.py baserom.z64 tools/splat.yaml . --modes ld bin Yay0 PaperMarioMapFS

 split-all:
 	rm -rf $(DATA_DIRS) && ./tools/n64splat/split.py baserom.z64 tools/splat.yaml . --modes all
@@ -100,6 +103,15 @@ $(BUILD_DIR)/%.bin.Yay0: %.bin
 	tools/Yay0compress $< $<.Yay0
 	$(LD) -r -b binary -o $@ $<.Yay0

+$(BUILD_DIR)/assets/fs/%: $(ASSETS_FS_FILES)
+	tools/build_assets_fs.py $*
+
+$(BUILD_DIR)/assets/fs.bin: assets/fs.json tools/build_assets_fs.py $(foreach file,$(ASSETS_FS_FILES),build/$(file))
+	tools/build_assets_fs.py
+
+$(BUILD_DIR)/assets/fs.o: $(BUILD_DIR)/assets/fs.bin
+	$(LD) -r -b binary -o $@ $<
+
 $(BUILD_DIR)/$(TARGET).bin: $(BUILD_DIR)/$(TARGET).elf
 	$(OBJCOPY) $< $@ -O binary

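Aside: the new $(BUILD_DIR)/assets/fs.bin rule takes assets/fs.json as its manifest. Going only by the keys build_assets_fs.py (below) reads (a top-level "title" string plus "name", "path" and "compress" per asset), a minimal manifest could be produced like this. This is an illustrative sketch, not part of the commit: the asset name and path are invented, and the real title string does not appear in this diff.

# illustrative sketch only, not part of the commit
import json

example_manifest = {
    "title": "...",  # written verbatim at offset 0x0 of fs.bin; real value not shown in this diff
    "assets": [
        # hypothetical entry; "compress" selects Yay0compress vs. a plain copy in build_file()
        {"name": "example_map", "path": "example_map.bin", "compress": True},
    ],
}

with open("assets/fs.json", "w") as f:
    json.dump(example_manifest, f, indent=2)
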
101  tools/build_assets_fs.py  Executable file
@@ -0,0 +1,101 @@
#! /usr/bin/python3

import os
import json
import sys
from subprocess import call
from pathlib import Path
import shutil

tools_dir = Path(__file__).parent.absolute()

def next_multiple(pos, multiple):
    # pad to the next 2-byte boundary; note this formula only rounds up correctly for
    # multiple=2, which is the only value it is called with
    return pos + pos % multiple

def build_mapfs(src_dir, build_dir, out_bin):
    with open(src_dir + ".json", "r") as f:
        config = json.loads(f.read())

    # every TOC entry's name field has data after the null terminator made up from all the previous name fields.
    # we probably don't have to do this for the game to read the data properly (it doesn't read past the null terminator
    # of `string`), but the original devs' equivalent to build_assets_fs.py had this bug so we need to replicate it to match.
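    # (illustration added for clarity, not in the original comment: if the previous entry's name was
    # b"longname\0" and the current name is b"abc\0", this entry's 0x10-byte name field ends up
    # holding b"abc\0name\0" on disk.)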
    written_names = []

    with open(out_bin, "wb") as f:
        f.write(config["title"].encode("ascii"))

        next_data_pos = (len(config["assets"]) + 1) * 0x1C

        asset_idx = 0
        for asset in config["assets"]:
            toc_entry_pos = 0x20 + asset_idx * 0x1C

            src_path = Path(src_dir, asset["path"])
            build_path = Path(build_dir, asset["path"])

            # data for TOC entry
            name = asset["name"] + "\0"
            offset = next_data_pos
            size = next_multiple(build_path.stat().st_size, 2)
            decompressed_size = src_path.stat().st_size

            print(f"{name} {offset:08X} {size:08X} {decompressed_size:08X}")

            written_names.append(name)
            # write all previously-written names; required to match
            for prev_name in written_names:
                f.seek(toc_entry_pos)
                f.write(prev_name.encode('ascii'))

            # write TOC entry.
            f.seek(toc_entry_pos + 0x10)
            f.write(offset.to_bytes(4, byteorder="big"))
            f.write(size.to_bytes(4, byteorder="big"))
            f.write(decompressed_size.to_bytes(4, byteorder="big"))

            # write data.
            f.seek(0x20 + next_data_pos)
            f.write(build_path.read_bytes())
            next_data_pos += size

            asset_idx += 1

        # end_data
        toc_entry_pos = 0x20 + asset_idx * 0x1C

        written_names.append("end_data\0")
        for prev_name in written_names:
            f.seek(toc_entry_pos)
            f.write(prev_name.encode('ascii'))

        f.seek(toc_entry_pos + 0x18)
        f.write((0x903F0000).to_bytes(4, byteorder="big"))  # TODO: figure out purpose

def build_file(src_dir, out_dir, filename):
    with open(src_dir + ".json", "r") as f:
        config = json.loads(f.read())

    asset = None
    for a in config["assets"]:
        if (a["path"] == filename):
            asset = a

    if not asset:
        print("asset not configured in {}.json".format(src_dir))
        exit(1)

    src_path = Path(src_dir, filename)
    out_path = Path(out_dir, filename)

    if asset["compress"]:
        call([f"{tools_dir}/Yay0compress", src_path, out_path])
    else:
        shutil.copy(src_path, out_path)

if __name__ == "__main__":
    if len(sys.argv) > 1:
        # copy (and compress if required) the given file
        build_file("assets/fs", "build/assets/fs", sys.argv[1])
    else:
        # build the aggregate file
        build_mapfs("assets/fs", "build/assets/fs", "build/assets/fs.bin")
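
Aside: a minimal sketch, not part of the commit, of how a fs.bin produced by build_mapfs() could be read back. It assumes only the layout the writer above uses: a 0x20-byte title header, then 0x1C-byte TOC entries (a 0x10-byte name field followed by big-endian offset, size and decompressed size), data offsets relative to 0x20, and a terminating "end_data" entry (whose 0x903F0000 value is left unexplained, matching the TODO). The helper name read_mapfs_toc is invented.

# illustrative sketch only, not part of the commit
import struct

def read_mapfs_toc(path="build/assets/fs.bin"):
    entries = []
    with open(path, "rb") as f:
        title = f.read(0x20)
        idx = 0
        while True:
            f.seek(0x20 + idx * 0x1C)
            # name fields may carry tails of earlier names past the null terminator
            # (the replicated matching quirk), so stop at the first null
            name = f.read(0x10).split(b"\0")[0].decode("ascii")
            offset, size, decompressed_size = struct.unpack(">III", f.read(0xC))
            if name == "end_data":
                break
            entries.append((name, 0x20 + offset, size, decompressed_size))
            idx += 1
    return title, entries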

@@ -1 +1 @@
-Subproject commit d96382cbd5921e19260a0fb2ebd16921f83c1682
+Subproject commit df0c077d223dc45856da8b0b551512c063f8fcca

@@ -1259,6 +1259,8 @@ segments:
 - [0x1B81E88, "Yay0"]
 - [0x1B82058, "Yay0"]
 - [0x1B82202, "bin"]
-- [0x1E40000, "bin", "map_assets.fs"] # todo add fs support
+- name: assets/fs
+  type: PaperMarioMapFS
+  start: 0x1E40000
 - [0x27FEE22, "bin"]
 - [0x2800000]