Update dtk to 0.9.2

LagoLunatic 2024-06-26 13:50:31 -04:00
parent c3fac15b9d
commit abe7439e00
15 changed files with 896 additions and 492 deletions


@@ -14,7 +14,7 @@ jobs:
version: [GZLE01, GZLP01, GZLJ01]
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Git config
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: Prepare

.gitignore

@@ -2,12 +2,12 @@ __pycache__
.idea
.vscode
.ninja_*
.mypy_cache
*.exe
build
build.ninja
objdiff.json
orig/*/*
!orig/*/.gitkeep
tools/mwcc_compiler
/*.txt
ctx.c


@@ -1,17 +1,17 @@
Sections:
.init type:code
extab type:rodata
extabindex type:rodata
.text type:code
.ctors type:rodata
.dtors type:rodata
.rodata type:rodata
.data type:data
.bss type:bss
.sdata type:data
.sbss type:bss
.sdata2 type:rodata
.sbss2 type:bss
.init type:code align:4
extab type:rodata align:32
extabindex type:rodata align:32
.text type:code align:8
.ctors type:rodata align:32
.dtors type:rodata align:16
.rodata type:rodata align:32
.data type:data align:32
.bss type:bss align:32
.sdata type:data align:32
.sbss type:bss align:32
.sdata2 type:rodata align:16
.sbss2 type:bss align:4
m_Do/m_Do_main.cpp:
.text start:0x800056E0 end:0x800065DC

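The splits changes above (and the matching changes for the other two versions below) add explicit per-section align attributes to the section table. For orientation, a minimal sketch of the splits syntax these entries follow; the translation unit at the end is a hypothetical example, not part of this commit:

Sections:
	.text       type:code   align:8
	.data       type:data   align:32

some_lib/some_file.cpp:
	.text       start:0x80003100 end:0x80003200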

@@ -1,17 +1,17 @@
Sections:
.init type:code
extab type:rodata
extabindex type:rodata
.text type:code
.ctors type:rodata
.dtors type:rodata
.rodata type:rodata
.data type:data
.bss type:bss
.sdata type:data
.sbss type:bss
.sdata2 type:rodata
.sbss2 type:bss
.init type:code align:4
extab type:rodata align:32
extabindex type:rodata align:32
.text type:code align:8
.ctors type:rodata align:32
.dtors type:rodata align:32
.rodata type:rodata align:32
.data type:data align:32
.bss type:bss align:32
.sdata type:data align:32
.sbss type:bss align:32
.sdata2 type:rodata align:16
.sbss2 type:bss align:32
m_Do/m_Do_main.cpp:
.text start:0x800056E0 end:0x80006578


@@ -1,17 +1,17 @@
Sections:
.init type:code
extab type:rodata
extabindex type:rodata
.text type:code
.ctors type:rodata
.dtors type:rodata
.rodata type:rodata
.data type:data
.bss type:bss
.sdata type:data
.sbss type:bss
.sdata2 type:rodata
.sbss2 type:bss
.init type:code align:4
extab type:rodata align:32
extabindex type:rodata align:32
.text type:code align:8
.ctors type:rodata align:16
.dtors type:rodata align:32
.rodata type:rodata align:32
.data type:data align:32
.bss type:bss align:32
.sdata type:data align:32
.sbss type:bss align:32
.sdata2 type:rodata align:16
.sbss2 type:bss align:32
m_Do/m_Do_main.cpp:
.text start:0x800056E0 end:0x800065DC


@@ -12,10 +12,11 @@
# Append --help to see available options.
###
import sys
import argparse
import sys
from pathlib import Path
from typing import Any, Dict, List
from tools.project import (
Object,
ProjectConfig,
@@ -32,74 +33,86 @@ VERSIONS = [
"GZLP01", # 2
]
if len(VERSIONS) > 1:
versions_str = ", ".join(VERSIONS[:-1]) + f" or {VERSIONS[-1]}"
else:
versions_str = VERSIONS[0]
parser = argparse.ArgumentParser()
parser.add_argument(
"mode",
choices=["configure", "progress"],
default="configure",
help="configure or progress (default: configure)",
help="script mode (default: configure)",
nargs="?",
)
parser.add_argument(
"-v",
"--version",
dest="version",
choices=VERSIONS,
type=str.upper,
default=VERSIONS[DEFAULT_VERSION],
help=f"version to build ({versions_str})",
help="version to build",
)
parser.add_argument(
"--build-dir",
dest="build_dir",
metavar="DIR",
type=Path,
default=Path("build"),
help="base build directory (default: build)",
)
parser.add_argument(
"--binutils",
metavar="BINARY",
type=Path,
help="path to binutils (optional)",
)
parser.add_argument(
"--compilers",
dest="compilers",
metavar="DIR",
type=Path,
help="path to compilers (optional)",
)
parser.add_argument(
"--map",
dest="map",
action="store_true",
help="generate map file(s)",
)
parser.add_argument(
"--no-asm",
action="store_true",
help="don't incorporate .s files from asm directory",
)
parser.add_argument(
"--debug",
dest="debug",
action="store_true",
help="build with debug info (non-matching)",
)
if not is_windows():
parser.add_argument(
"--wrapper",
dest="wrapper",
metavar="BINARY",
type=Path,
help="path to wibo or wine (optional)",
)
parser.add_argument(
"--build-dtk",
dest="build_dtk",
"--dtk",
metavar="BINARY | DIR",
type=Path,
help="path to decomp-toolkit source (optional)",
help="path to decomp-toolkit binary or source (optional)",
)
parser.add_argument(
"--sjiswrap",
dest="sjiswrap",
metavar="EXE",
type=Path,
help="path to sjiswrap.exe (optional)",
)
parser.add_argument(
"--verbose",
dest="verbose",
action="store_true",
help="print verbose output",
)
parser.add_argument(
"--non-matching",
dest="non_matching",
action="store_true",
help="builds equivalent (but non-matching) or modded objects",
)
parser.add_argument(
"--warn",
dest="warn",
@@ -110,35 +123,48 @@ parser.add_argument(
args = parser.parse_args()
config = ProjectConfig()
config.version = args.version.upper()
if config.version not in VERSIONS:
sys.exit(f"Invalid version '{config.version}', expected {versions_str}")
config.version = str(args.version)
version_num = VERSIONS.index(config.version)
# Apply arguments
config.build_dir = args.build_dir
config.build_dtk_path = args.build_dtk
config.dtk_path = args.dtk
config.binutils_path = args.binutils
config.compilers_path = args.compilers
config.debug = args.debug
config.generate_map = args.map
config.non_matching = args.non_matching
config.sjiswrap_path = args.sjiswrap
if not is_windows():
config.wrapper = args.wrapper
if args.no_asm:
config.asm_dir = None
# Tool versions
config.compilers_tag = "20230715"
config.dtk_tag = "v0.7.2"
config.binutils_tag = "2.42-1"
config.compilers_tag = "20231018"
config.dtk_tag = "v0.9.2"
config.sjiswrap_tag = "v1.1.1"
config.wibo_tag = "0.6.3"
config.wibo_tag = "0.6.11"
# Project
config.config_path = Path("config") / config.version / "config.yml"
config.check_sha_path = Path("orig") / f"{config.version}.sha1"
config.linker_version = "GC/1.3.2"
config.check_sha_path = Path("config") / config.version / "build.sha1"
config.asflags = [
"-mgekko",
"--strip-local-absolute",
"-I include",
f"-I build/{config.version}/include",
f"--defsym version={version_num}",
]
config.ldflags = [
"-fp hardware",
"-nodefaults",
"-warn off", # Ignore '.note.split' warnings
# "-listclosure", # Uncomment for Wii linkers
]
# Use for any additional files that should cause a re-configure when modified
config.reconfig_deps = []
# Base flags, common to most GC/Wii games.
# Generally leave untouched, with overrides added below.
@@ -159,8 +185,9 @@ cflags_base = [
"-RTTI off",
"-fp_contract on",
"-str reuse",
"-multibyte",
"-multibyte", # For Wii compilers, replace with `-enc SJIS`
"-i include",
f"-i build/{config.version}/include",
"-i src",
"-i src/PowerPC_EABI_Support/MSL/MSL_C/MSL_Common/Include",
"-i src/PowerPC_EABI_Support/MSL/MSL_C/MSL_Common_Embedded/Math/Include",
@@ -169,6 +196,8 @@ cflags_base = [
"-i src/PowerPC_EABI_Support/Runtime/Inc",
f"-DVERSION={version_num}",
]
# Debug flags
if config.debug:
cflags_base.extend(["-sym on", "-DDEBUG=1"])
else:
@@ -185,6 +214,8 @@ cflags_runtime = [
*cflags_base,
"-use_lmw_stmw on",
"-str reuse,pool,readonly",
"-gccinc",
"-common off",
"-inline deferred,auto",
]
@@ -215,23 +246,34 @@ cflags_rel = [
"-sdata2 0",
]
config.linker_version = "GC/1.3.2"
# Helper function for single-object RELs
def Rel(status, rel_name, cpp_name, extra_cflags=[]):
# Helper function for Dolphin libraries
def DolphinLib(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
return {
"lib": rel_name,
"lib": lib_name,
"mw_version": "GC/1.2.5n",
"cflags": cflags_base,
"host": False,
"objects": objects,
}
# Helper function for REL script objects
def Rel(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
return {
"lib": lib_name,
"mw_version": "GC/1.3.2",
"cflags": cflags_rel + extra_cflags,
"cflags": cflags_rel,
"host": True,
"objects": [
Object(status, cpp_name),
],
"objects": objects,
}
# Helper function for actor RELs
def ActorRel(status, rel_name, extra_cflags=[]):
return Rel(status, rel_name, f"d/actor/{rel_name}.cpp", extra_cflags=extra_cflags)
return Rel(rel_name, [Object(status, f"d/actor/{rel_name}.cpp", extra_cflags=extra_cflags)])
# Helper function for JSystem libraries
@@ -244,23 +286,12 @@ def JSystemLib(lib_name, objects):
"objects": objects,
}
# Helper function for Dolphin libraries
def DolphinLib(lib_name, objects):
return {
"lib": lib_name,
"mw_version": "GC/1.2.5n",
"cflags": cflags_dolphin, # TODO check
"host": False,
"objects": objects,
}
Matching = True
NonMatching = False
Matching = True # Object matches and should be linked
NonMatching = False # Object does not match and should not be linked
Equivalent = config.non_matching # Object should be linked when configured with --non-matching
config.warn_missing_config = True
config.warn_missing_source = False # TODO
config.warn_missing_source = False
config.libs = [
{
"lib": "framework",
@@ -1224,7 +1255,7 @@ config.libs = [
),
],
},
Rel(Matching, "f_pc_profile_lst", "f_pc/f_pc_profile_lst.cpp"),
Rel("f_pc_profile_lst", [Object(Matching, "f_pc/f_pc_profile_lst.cpp")]),
ActorRel(Matching, "d_a_agbsw0", extra_cflags=['-pragma "nosyminline on"']),
ActorRel(Matching, "d_a_andsw0"),
ActorRel(Matching, "d_a_andsw2"),

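With the helper refactor above, Rel() and DolphinLib() take a library name plus a list of Object entries, and per-object compiler flags now live on the Object itself (which is what ActorRel forwards its extra_cflags into). A minimal sketch of declaring a REL under the new signatures; the library and source names are hypothetical placeholders, not from this commit:

# Sketch only: mirrors the Rel()/Object() signatures introduced above.
config.libs.append(
    Rel(
        "d_a_example",  # hypothetical REL name
        [
            Object(Matching, "d/actor/d_a_example.cpp", extra_cflags=['-pragma "nosyminline on"']),
        ],
    )
)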

@@ -13,6 +13,7 @@
import argparse
import os
import re
from typing import List
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.abspath(os.path.join(script_dir, ".."))
@@ -28,37 +29,40 @@ include_dirs = [
]
include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
guard_pattern = re.compile(r'^#ifndef\s+(.*)$')
guard_pattern = re.compile(r"^#ifndef\s+(.*)$")
defines = set()
def import_h_file(in_file: str, r_path: str) -> str:
def import_h_file(in_file: str, r_path: str, deps: List[str]) -> str:
rel_path = os.path.join(root_dir, r_path, in_file)
if os.path.exists(rel_path):
return import_c_file(rel_path)
else:
for inc_dir in include_dirs:
inc_path = os.path.join(inc_dir, in_file)
return import_c_file(rel_path, deps)
for include_dir in include_dirs:
inc_path = os.path.join(include_dir, in_file)
if os.path.exists(inc_path):
return import_c_file(inc_path)
return import_c_file(inc_path, deps)
else:
print("Failed to locate", in_file)
exit(1)
return ""
def import_c_file(in_file) -> str:
def import_c_file(in_file: str, deps: List[str]) -> str:
in_file = os.path.relpath(in_file, root_dir)
out_text = ''
deps.append(in_file)
out_text = ""
try:
with open(in_file, encoding="utf8") as file:
out_text += process_file(in_file, list(file))
with open(in_file, encoding="utf-8") as file:
out_text += process_file(in_file, list(file), deps)
except Exception:
with open(in_file) as file:
out_text += process_file(in_file, list(file))
out_text += process_file(in_file, list(file), deps)
return out_text
def process_file(in_file: str, lines) -> str:
out_text = ''
def process_file(in_file: str, lines: List[str], deps: List[str]) -> str:
out_text = ""
for idx, line in enumerate(lines):
guard_match = guard_pattern.match(line.strip())
if idx == 0:
@@ -69,14 +73,19 @@ def process_file(in_file: str, lines) -> str:
print("Processing file", in_file)
include_match = include_pattern.match(line.strip())
if include_match and not include_match[1].endswith(".s"):
out_text += f"/* \"{in_file}\" line {idx} \"{include_match[1]}\" */\n"
out_text += import_h_file(include_match[1], os.path.dirname(in_file))
out_text += f"/* end \"{include_match[1]}\" */\n"
out_text += f'/* "{in_file}" line {idx} "{include_match[1]}" */\n'
out_text += import_h_file(include_match[1], os.path.dirname(in_file), deps)
out_text += f'/* end "{include_match[1]}" */\n'
else:
out_text += line
return out_text
def sanitize_path(path: str) -> str:
return path.replace("\\", "/").replace(" ", "\\ ")
def main():
parser = argparse.ArgumentParser(
description="""Create a context file which can be used for decomp.me"""
@@ -85,13 +94,32 @@ def main():
"c_file",
help="""File from which to create context""",
)
parser.add_argument(
"-o",
"--output",
help="""Output file""",
default="ctx.c",
)
parser.add_argument(
"-d",
"--depfile",
help="""Dependency file""",
)
args = parser.parse_args()
output = import_c_file(args.c_file)
deps = []
output = import_c_file(args.c_file, deps)
with open(os.path.join(root_dir, "ctx.c"), "w", encoding="utf-8") as f:
with open(os.path.join(root_dir, args.output), "w", encoding="utf-8") as f:
f.write(output)
if args.depfile:
with open(os.path.join(root_dir, args.depfile), "w", encoding="utf-8") as f:
f.write(sanitize_path(args.output) + ":")
for dep in deps:
path = sanitize_path(dep)
f.write(f" \\\n\t{path}")
if __name__ == "__main__":
main()

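decompctx.py now threads a deps list through the include walker and gains -o/--output and -d/--depfile options, so it can write a Make-style dependency file next to the generated context. A sketch of the new usage and the depfile shape it produces; the input path and header name are illustrative:

python tools/decompctx.py src/m_Do/m_Do_main.cpp -o ctx.c -d ctx.d

ctx.d would then look roughly like:
ctx.c: \
	src/m_Do/m_Do_main.cpp \
	include/dolphin/os.h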

@@ -16,14 +16,31 @@ import os
import platform
import shutil
import stat
import sys
import urllib.request
import zipfile
from typing import Callable, Dict
from pathlib import Path
def dtk_url(tag):
def binutils_url(tag):
uname = platform.uname()
system = uname.system.lower()
arch = uname.machine.lower()
if system == "darwin":
system = "macos"
arch = "universal"
elif arch == "amd64":
arch = "x86_64"
repo = "https://github.com/encounter/gc-wii-binutils"
return f"{repo}/releases/download/{tag}/{system}-{arch}.zip"
def compilers_url(tag: str) -> str:
return f"https://files.decomp.dev/compilers_{tag}.zip"
def dtk_url(tag: str) -> str:
uname = platform.uname()
suffix = ""
system = uname.system.lower()
@@ -39,29 +56,26 @@ def dtk_url(tag):
return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
def sjiswrap_url(tag):
def sjiswrap_url(tag: str) -> str:
repo = "https://github.com/encounter/sjiswrap"
return f"{repo}/releases/download/{tag}/sjiswrap-windows-x86.exe"
def wibo_url(tag):
def wibo_url(tag: str) -> str:
repo = "https://github.com/decompals/wibo"
return f"{repo}/releases/download/{tag}/wibo"
def compilers_url(tag):
return f"https://files.decomp.dev/compilers_{tag}.zip"
TOOLS = {
TOOLS: Dict[str, Callable[[str], str]] = {
"binutils": binutils_url,
"compilers": compilers_url,
"dtk": dtk_url,
"sjiswrap": sjiswrap_url,
"wibo": wibo_url,
"compilers": compilers_url,
}
def main():
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("tool", help="Tool name")
parser.add_argument("output", type=Path, help="output file path")
@@ -78,7 +92,11 @@ def main():
data = io.BytesIO(response.read())
with zipfile.ZipFile(data) as f:
f.extractall(output)
output.touch(mode=0o755)
# Make all files executable
for root, _, files in os.walk(output):
for name in files:
os.chmod(os.path.join(root, name), 0o755)
output.touch(mode=0o755) # Update dir modtime
else:
with open(output, "wb") as f:
shutil.copyfileobj(response, f)

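For orientation, the URLs the new helpers resolve to with the tool tags pinned in configure.py above, assuming a Linux x86_64 host (macOS collapses to a universal binutils archive, and amd64 is normalized to x86_64):

# Sketch: resolved download URLs on Linux x86_64.
binutils_url("2.42-1")
#   -> https://github.com/encounter/gc-wii-binutils/releases/download/2.42-1/linux-x86_64.zip
compilers_url("20231018")
#   -> https://files.decomp.dev/compilers_20231018.zip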

@@ -21,50 +21,67 @@ use Python.
import re
import textwrap
import os
from io import StringIO
from pathlib import Path
from typing import Dict, List, Match, Optional, Tuple, Union
NinjaPath = Union[str, Path]
NinjaPaths = Union[
List[str],
List[Path],
List[NinjaPath],
List[Optional[str]],
List[Optional[Path]],
List[Optional[NinjaPath]],
]
NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]
def escape_path(word):
def escape_path(word: str) -> str:
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
class Writer(object):
def __init__(self, output, width=78):
def __init__(self, output: StringIO, width: int = 78) -> None:
self.output = output
self.width = width
def newline(self):
def newline(self) -> None:
self.output.write("\n")
def comment(self, text):
def comment(self, text: str) -> None:
for line in textwrap.wrap(
text, self.width - 2, break_long_words=False, break_on_hyphens=False
):
self.output.write("# " + line + "\n")
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
def variable(
self,
key: str,
value: Optional[NinjaPathOrPaths],
indent: int = 0,
) -> None:
value = " ".join(serialize_paths(value))
self._line("%s = %s" % (key, value), indent)
def pool(self, name, depth):
def pool(self, name: str, depth: int) -> None:
self._line("pool %s" % name)
self.variable("depth", depth, indent=1)
self.variable("depth", str(depth), indent=1)
def rule(
self,
name,
command,
description=None,
depfile=None,
generator=False,
pool=None,
restat=False,
rspfile=None,
rspfile_content=None,
deps=None,
):
name: str,
command: str,
description: Optional[str] = None,
depfile: Optional[NinjaPath] = None,
generator: bool = False,
pool: Optional[str] = None,
restat: bool = False,
rspfile: Optional[NinjaPath] = None,
rspfile_content: Optional[NinjaPath] = None,
deps: Optional[NinjaPathOrPaths] = None,
) -> None:
self._line("rule %s" % name)
self.variable("command", command, indent=1)
if description:
@@ -86,32 +103,39 @@ class Writer(object):
def build(
self,
outputs,
rule,
inputs=None,
implicit=None,
order_only=None,
variables=None,
implicit_outputs=None,
pool=None,
dyndep=None,
):
outputs = as_list(outputs)
outputs: NinjaPathOrPaths,
rule: str,
inputs: Optional[NinjaPathOrPaths] = None,
implicit: Optional[NinjaPathOrPaths] = None,
order_only: Optional[NinjaPathOrPaths] = None,
variables: Optional[
Union[
List[Tuple[str, Optional[NinjaPathOrPaths]]],
Dict[str, Optional[NinjaPathOrPaths]],
]
] = None,
implicit_outputs: Optional[NinjaPathOrPaths] = None,
pool: Optional[str] = None,
dyndep: Optional[NinjaPath] = None,
) -> List[str]:
outputs = serialize_paths(outputs)
out_outputs = [escape_path(x) for x in outputs]
all_inputs = [escape_path(x) for x in as_list(inputs)]
all_inputs = [escape_path(x) for x in serialize_paths(inputs)]
if implicit:
implicit = [escape_path(x) for x in as_list(implicit)]
implicit = [escape_path(x) for x in serialize_paths(implicit)]
all_inputs.append("|")
all_inputs.extend(implicit)
all_inputs.extend(map(str, implicit))
if order_only:
order_only = [escape_path(x) for x in as_list(order_only)]
order_only = [escape_path(x) for x in serialize_paths(order_only)]
all_inputs.append("||")
all_inputs.extend(order_only)
all_inputs.extend(map(str, order_only))
if implicit_outputs:
implicit_outputs = [escape_path(x) for x in as_list(implicit_outputs)]
implicit_outputs = [
escape_path(x) for x in serialize_paths(implicit_outputs)
]
out_outputs.append("|")
out_outputs.extend(implicit_outputs)
out_outputs.extend(map(str, implicit_outputs))
self._line(
"build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
@@ -119,7 +143,7 @@ class Writer(object):
if pool is not None:
self._line(" pool = %s" % pool)
if dyndep is not None:
self._line(" dyndep = %s" % dyndep)
self._line(" dyndep = %s" % serialize_path(dyndep))
if variables:
if isinstance(variables, dict):
@@ -132,16 +156,16 @@ class Writer(object):
return outputs
def include(self, path):
def include(self, path: str) -> None:
self._line("include %s" % path)
def subninja(self, path):
def subninja(self, path: str) -> None:
self._line("subninja %s" % path)
def default(self, paths):
self._line("default %s" % " ".join(as_list(paths)))
def default(self, paths: NinjaPathOrPaths) -> None:
self._line("default %s" % " ".join(serialize_paths(paths)))
def _count_dollars_before_index(self, s, i):
def _count_dollars_before_index(self, s: str, i: int) -> int:
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
@@ -150,7 +174,7 @@ class Writer(object):
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
def _line(self, text: str, indent: int = 0) -> None:
"""Write 'text' word-wrapped at self.width characters."""
leading_space = " " * indent
while len(leading_space) + len(text) > self.width:
@@ -187,19 +211,26 @@ class Writer(object):
self.output.write(leading_space + text + "\n")
def close(self):
def close(self) -> None:
self.output.close()
def as_list(input):
if input is None:
return []
def serialize_path(input: Optional[NinjaPath]) -> str:
if not input:
return ""
if isinstance(input, Path):
return str(input).replace("/", os.sep)
else:
return str(input)
def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
if isinstance(input, list):
return input
return [input]
return [serialize_path(path) for path in input if path]
return [serialize_path(input)] if input else []
def escape(string):
def escape(string: str) -> str:
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert "\n" not in string, "Ninja syntax does not allow newlines"
@@ -207,14 +238,14 @@ def escape(string):
return string.replace("$", "$$")
def expand(string, vars, local_vars={}):
def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
"""Expand a string containing $vars as Ninja would.
Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work.
"""
def exp(m):
def exp(m: Match[str]) -> str:
var = m.group(1)
if var == "$":
return "$"

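The as_list() helper is superseded by serialize_path()/serialize_paths(), which also convert pathlib.Path values to native separators and drop empty entries. A small sketch of the resulting behavior; the paths are illustrative:

# Sketch: behavior of the new ninja_syntax.py helpers.
from pathlib import Path

serialize_paths([Path("build/GZLE01/main.elf"), None, "obj/foo.o"])
# -> ["build\\GZLE01\\main.elf", "obj/foo.o"] on Windows
# -> ["build/GZLE01/main.elf", "obj/foo.o"] elsewhere
# (plain strings pass through unchanged; only Path objects are converted)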
File diff suppressed because it is too large


@@ -25,7 +25,7 @@ def in_wsl() -> bool:
return "microsoft-standard" in uname().release
def import_d_file(in_file) -> str:
def import_d_file(in_file: str) -> str:
out_text = ""
with open(in_file) as file:
@@ -60,7 +60,7 @@ def import_d_file(in_file) -> str:
return out_text
def main():
def main() -> None:
parser = argparse.ArgumentParser(
description="""Transform a .d file from Wine paths to normal paths"""
)


@@ -51,7 +51,7 @@ if __name__ == "__main__":
args = parser.parse_args()
api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
if not api_key:
raise "API key required"
raise KeyError("API key required")
url = generate_url(args)
entries = []
@@ -68,9 +68,12 @@
print("Publishing entry to", url)
json.dump(entries[0], sys.stdout, indent=4)
print()
r = requests.post(url, json={
r = requests.post(
url,
json={
"api_key": api_key,
"entries": entries,
})
},
)
r.raise_for_status()
print("Done!")
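For reference, the reformatted call still posts the same payload shape; a rough sketch of the request body, with the entries list assembled earlier in the script (not shown in this diff):

# Sketch of the JSON body sent by upload_progress.py:
# {
#     "api_key": "<--api-key value or PROGRESS_API_KEY>",
#     "entries": [ ... ]
# }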