Clean up configure.py, move common logic to tools/project.py

Luke Street 2023-09-13 17:43:26 -04:00
parent 2f0ae6fa7d
commit 9c6f228941
6 changed files with 958 additions and 730 deletions


@@ -1,21 +1,30 @@
#!/usr/bin/env python3
import os
import io
import platform
###
# Generates build files for the project.
# This file also includes the project configuration,
# such as compiler flags and the object matching status.
#
# Usage:
# python3 configure.py
# ninja
#
# Append --help to see available options.
###
import sys
import argparse
import json
from pathlib import Path
from tools import ninja_syntax
if sys.platform == "cygwin":
sys.exit(
f"Cygwin/MSYS2 is not supported."
f"\nPlease use native Windows Python instead."
f"\n(Current path: {sys.executable})"
)
from tools.project import (
Object,
ProjectConfig,
generate_build,
is_windows,
)
# Game versions
DEFAULT_VERSION = 1
VERSIONS = [
"GZLJ01", # 0
"GZLE01", # 1
@@ -31,7 +40,7 @@ parser = argparse.ArgumentParser()
parser.add_argument(
"--version",
dest="version",
default="GZLE01",
default=VERSIONS[DEFAULT_VERSION],
help=f"version to build ({versions_str})",
)
parser.add_argument(
@@ -41,17 +50,11 @@ parser.add_argument(
default=Path("build"),
help="base build directory (default: build)",
)
parser.add_argument(
"--build-dtk",
dest="build_dtk",
type=Path,
help="path to decomp-toolkit source",
)
parser.add_argument(
"--compilers",
dest="compilers",
type=Path,
help="path to compilers (default: tools/mwcc_compiler)",
help="path to compilers (optional)",
)
parser.add_argument(
"--map",
@@ -65,27 +68,61 @@ parser.add_argument(
action="store_true",
help="build with debug info (non-matching)",
)
if os.name != "nt" and not "_NT-" in os.uname().sysname:
if not is_windows():
parser.add_argument(
"--wrapper",
dest="wrapper",
type=Path,
help="path to wine (or wibo)",
help="path to wibo or wine (optional)",
)
parser.add_argument(
"--build-dtk",
dest="build_dtk",
type=Path,
help="path to decomp-toolkit source (optional)",
)
parser.add_argument(
"--sjiswrap",
dest="sjiswrap",
type=Path,
help="path to sjiswrap",
help="path to sjiswrap.exe (optional)",
)
args = parser.parse_args()
version = args.version.upper()
if version not in VERSIONS:
sys.exit(f"Invalid version '{version}', expected {versions_str}")
version_num = VERSIONS.index(version)
config = ProjectConfig()
config.version = args.version.upper()
if config.version not in VERSIONS:
sys.exit(f"Invalid version '{config.version}', expected {versions_str}")
version_num = VERSIONS.index(config.version)
CFLAGS_BASE = [
# Apply arguments
config.build_dir = args.build_dir
config.build_dtk_path = args.build_dtk
config.compilers_path = args.compilers
config.debug = args.debug
config.generate_map = args.map
config.sjiswrap_path = args.sjiswrap
if not is_windows():
config.wrapper = args.wrapper
# Tool versions
config.compilers_tag = "1"
config.dtk_tag = "v0.5.1"
config.sjiswrap_tag = "v1.1.0"
config.wibo_tag = "0.5.1"
# Project
config.config_path = Path("config") / config.version / "config.yml"
config.check_sha_path = Path("orig") / f"{config.version}.sha1"
config.linker_version = "GC/1.3.2"
config.ldflags = [
"-fp hardware",
"-nodefaults",
]
# Base flags, common to most GC/Wii games.
# Generally leave untouched, with overrides added below.
cflags_base = [
"-nodefaults",
"-proc gekko",
"-align powerpc",
@@ -106,20 +143,22 @@ CFLAGS_BASE = [
"-i include",
f"-DVERSION={version_num}",
]
if args.debug:
CFLAGS_BASE.extend(["-sym on", "-DDEBUG=1"])
if config.debug:
cflags_base.extend(["-sym on", "-DDEBUG=1"])
else:
CFLAGS_BASE.append("-DNDEBUG=1")
cflags_base.append("-DNDEBUG=1")
CFLAGS_RUNTIME = [
*CFLAGS_BASE,
# Metrowerks library flags
cflags_runtime = [
*cflags_base,
"-use_lmw_stmw on",
"-str reuse,pool,readonly",
"-inline deferred,auto",
]
CFLAGS_FRAMEWORK = [
*CFLAGS_BASE,
# Framework flags
cflags_framework = [
*cflags_base,
"-use_lmw_stmw off",
"-str reuse,pool,readonly",
"-inline noauto",
@@ -129,31 +168,20 @@ CFLAGS_FRAMEWORK = [
"-fp_contract off",
]
CFLAGS_REL = [
*CFLAGS_FRAMEWORK,
# REL flags
cflags_rel = [
*cflags_framework,
"-sdata 0",
"-sdata2 0",
]
LINKER_VERSION = "GC/1.3.2"
class Object:
def __init__(self, completed, obj_path, **options):
self.obj_path = obj_path
self.completed = completed
self.options = options
Matching = True
NonMatching = False
# Helper function for single-object RELs
def Rel(status, rel_name, cpp_name):
return {
"lib": rel_name,
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_REL,
"cflags": cflags_rel,
"host": True,
"objects": [
Object(status, cpp_name),
@@ -161,15 +189,19 @@ def Rel(status, rel_name, cpp_name):
}
# Helper function for actor RELs
def ActorRel(status, rel_name):
return Rel(status, rel_name, f"d/actor/{rel_name}.cpp")
LIBS = [
Matching = True
NonMatching = False
config.libs = [
{
"lib": "Runtime.PPCEABI.H",
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_RUNTIME,
"cflags": cflags_runtime,
"host": False,
"objects": [
Object(Matching, "Runtime/__init_cpp_exceptions.cpp"),
@@ -180,11 +212,12 @@ LIBS = [
{
"lib": "REL",
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_REL,
"cflags": cflags_rel,
"host": False,
"objects": [
Object(Matching, "REL/executor.c"),
Object(Matching,
Object(
Matching,
"REL/global_destructor_chain.c",
source="Runtime/global_destructor_chain.c",
),
@@ -193,7 +226,7 @@ LIBS = [
{
"lib": "JSystem",
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_FRAMEWORK,
"cflags": cflags_framework,
"host": True,
"objects": [
Object(NonMatching, "JSystem/JFramework/JFWDisplay.cpp"),
@@ -222,7 +255,7 @@ LIBS = [
{
"lib": "SSystem",
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_FRAMEWORK,
"cflags": cflags_framework,
"host": True,
"objects": [
Object(Matching, "SSystem/SStandard/s_basic.cpp"),
@@ -263,7 +296,7 @@ LIBS = [
{
"lib": "framework",
"mw_version": "GC/1.3.2",
"cflags": CFLAGS_FRAMEWORK,
"cflags": cflags_framework,
"host": True,
"objects": [
Object(NonMatching, "f_ap/f_ap_game.cpp"),
@@ -385,7 +418,7 @@ LIBS = [
ActorRel(NonMatching, "d_a_lamp"),
ActorRel(NonMatching, "d_a_lod_bg"),
ActorRel(NonMatching, "d_a_lwood"),
ActorRel(Matching, "d_a_magma"),
ActorRel(Matching, "d_a_magma"),
ActorRel(NonMatching, "d_a_majuu_flag"),
ActorRel(NonMatching, "d_a_mdoor"),
ActorRel(NonMatching, "d_a_msw"),
@@ -542,7 +575,7 @@ LIBS = [
ActorRel(NonMatching, "d_a_bmdfoot"),
ActorRel(NonMatching, "d_a_bmdhand"),
ActorRel(NonMatching, "d_a_bo"),
ActorRel(Matching, "d_a_boss_item"),
ActorRel(Matching, "d_a_boss_item"),
ActorRel(NonMatching, "d_a_bpw"),
ActorRel(NonMatching, "d_a_bst"),
ActorRel(NonMatching, "d_a_btd"),
@@ -763,663 +796,5 @@ LIBS = [
ActorRel(NonMatching, "d_a_movie_player"),
]
# Tool versions
COMPILERS_TAG = "1"
DTK_TAG = "v0.5.0"
SJISWRAP_TAG = "v1.1.0"
WIBO_TAG = "0.5.1"
# On Windows, we need this to use && in commands
chain = "cmd /c " if os.name == "nt" else ""
# Begin generating build.ninja
out = io.StringIO()
n = ninja_syntax.Writer(out)
n.variable("ninja_required_version", "1.3")
n.newline()
n.comment("The arguments passed to configure.py, for rerunning it.")
n.variable("configure_args", sys.argv[1:])
n.variable("python", f'"{sys.executable}"')
n.newline()
###
# Variables
###
n.comment("Variables")
version = args.version
version_num = VERSIONS.index(args.version)
build_path = args.build_dir / version
config_path = Path("config") / version / "config.yml"
tools_path = Path("tools")
ldflags = f"-fp hardware -nodefaults"
if args.map:
ldflags += f" -mapunused"
if args.debug:
ldflags += " -g"
n.variable("ldflags", ldflags)
n.variable("mw_version", LINKER_VERSION)
if sys.platform == "win32":
exe = ".exe"
wrapper = None
else:
exe = ""
wrapper = args.wrapper or "wine"
n.newline()
# Replace forward slashes with backslashes on Windows
def os_str(value):
return str(value).replace("/", os.sep)
# Stringify paths for ninja_syntax
def path(value):
if value is None:
return None
elif isinstance(value, list):
return list(map(os_str, filter(lambda x: x is not None, value)))
else:
return [os_str(value)]
###
# Tooling
###
n.comment("Tooling")
build_tools_path = args.build_dir / "tools"
download_tool = tools_path / "download_tool.py"
n.rule(
name="download_tool",
command=f"$python {download_tool} $tool $out --tag $tag",
description="TOOL $out",
)
if args.build_dtk:
dtk = build_tools_path / "release" / f"dtk{exe}"
n.rule(
name="cargo",
command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
description="CARGO $bin",
depfile=path(Path("$target") / "release" / "$bin.d"),
deps="gcc",
)
n.build(
outputs=path(dtk),
rule="cargo",
inputs=path(args.build_dtk / "Cargo.toml"),
variables={
"bin": "dtk",
"target": build_tools_path,
},
)
else:
dtk = build_tools_path / f"dtk{exe}"
n.build(
outputs=path(dtk),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "dtk",
"tag": DTK_TAG,
},
)
if args.sjiswrap:
sjiswrap = args.sjiswrap
else:
sjiswrap = build_tools_path / "sjiswrap.exe"
n.build(
outputs=path(sjiswrap),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "sjiswrap",
"tag": SJISWRAP_TAG,
},
)
# Only add an implicit dependency on wibo if we download it
wrapper_implicit = None
if (
sys.platform == "linux"
and platform.machine() in ("i386", "x86_64")
and args.wrapper is None
):
wrapper = build_tools_path / "wibo"
wrapper_implicit = wrapper
n.build(
outputs=path(wrapper),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "wibo",
"tag": WIBO_TAG,
},
)
compilers_implicit = None
if args.compilers:
compilers = args.compilers
else:
compilers = tools_path / "mwcc_compiler"
compilers_implicit = compilers
n.build(
outputs=path(compilers),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "compilers",
"tag": COMPILERS_TAG,
},
)
n.newline()
###
# Rules
###
compiler_path = compilers / "$mw_version"
mwcc = compiler_path / "mwcceppc.exe"
mwcc_implicit = [compilers_implicit or mwcc, wrapper_implicit, sjiswrap]
mwld = compiler_path / "mwldeppc.exe"
mwld_implicit = [compilers_implicit or mwld, wrapper_implicit]
wrapper_cmd = f"{wrapper} " if wrapper else ""
mwcc_cmd = f"{wrapper_cmd}{sjiswrap} {mwcc} $cflags -MMD -c $in -o $basedir"
mwld_cmd = f"{wrapper_cmd}{mwld} $ldflags -o $out @$out.rsp"
if os.name != "nt":
transform_dep = tools_path / "transform_dep.py"
mwcc_cmd += f" && $python {transform_dep} $basefile.d $basefile.d"
mwcc_implicit.append(transform_dep)
n.comment("Link ELF file")
n.rule(
name="link",
command=mwld_cmd,
description="LINK $out",
rspfile="$out.rsp",
rspfile_content="$in_newline",
)
n.newline()
n.comment("Generate DOL")
n.rule(
name="elf2dol",
command=f"{dtk} elf2dol $in $out",
description="DOL $out",
)
n.newline()
n.comment("Generate RELs")
makerel_rsp = build_path / "makerel.rsp"
n.rule(
name="makerel",
command=f"{dtk} rel make -w -c $config @{makerel_rsp}",
description="REL",
rspfile=path(makerel_rsp),
rspfile_content="$in_newline",
)
n.newline()
n.comment("MWCC build")
n.rule(
name="mwcc",
command=mwcc_cmd,
description="MWCC $out",
depfile="$basefile.d",
deps="gcc",
)
n.newline()
n.comment("Host build")
n.variable("host_cflags", "-I include -Wno-trigraphs")
n.variable(
"host_cppflags",
"-std=c++98 -I include -fno-exceptions -fno-rtti -D_CRT_SECURE_NO_WARNINGS -Wno-trigraphs -Wno-c++11-extensions",
)
n.rule(
name="host_cc",
command="clang $host_cflags -c -o $out $in",
description="CC $out",
)
n.rule(
name="host_cpp",
command="clang++ $host_cppflags -c -o $out $in",
description="CXX $out",
)
n.newline()
###
# Rules for source files
###
n.comment("Source files")
build_src_path = build_path / "src"
build_host_path = build_path / "host"
build_config_path = build_path / "config.json"
objdiff_config = {
"min_version": "0.4.3",
"custom_make": "ninja",
"build_target": False,
"watch_patterns": [
"*.c",
"*.cp",
"*.cpp",
"*.h",
"*.hpp",
"*.py",
"*.yml",
"*.txt",
"*.json",
],
"units": [],
}
def locate_unit(unit):
for lib in LIBS:
for obj in lib["objects"]:
if obj.obj_path == unit:
return [lib, obj]
return None
def map_path(path):
return path.parent / (path.name + ".MAP")
class LinkStep:
def __init__(self, config):
self.name = config["name"]
self.module_id = config["module_id"]
self.ldscript = config["ldscript"]
self.entry = config["entry"]
self.inputs = []
def add(self, obj):
self.inputs.append(obj)
def output(self):
if self.module_id == 0:
return build_path / f"{self.name}.dol"
else:
return build_path / self.name / f"{self.name}.rel"
def partial_output(self):
if self.module_id == 0:
return build_path / f"{self.name}.elf"
else:
return build_path / self.name / f"{self.name}.plf"
def write(self, n):
n.comment(f"Link {self.name}")
if self.module_id == 0:
elf_path = build_path / f"{self.name}.elf"
dol_path = build_path / f"{self.name}.dol"
elf_ldflags = f"$ldflags -lcf {self.ldscript}"
if args.map:
elf_map = map_path(elf_path)
elf_ldflags += f" -map {elf_map}"
else:
elf_map = None
n.build(
outputs=path(elf_path),
rule="link",
inputs=path(self.inputs),
implicit=path([self.ldscript, *mwld_implicit]),
implicit_outputs=path(elf_map),
variables={"ldflags": elf_ldflags},
)
n.build(
outputs=path(dol_path),
rule="elf2dol",
inputs=path(elf_path),
implicit=path(dtk),
)
else:
preplf_path = build_path / self.name / f"{self.name}.preplf"
plf_path = build_path / self.name / f"{self.name}.plf"
preplf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -r"
plf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -m {self.entry} -r1 -strip_partial -lcf {self.ldscript}"
if args.map:
preplf_map = map_path(preplf_path)
preplf_ldflags += f" -map {preplf_map}"
plf_map = map_path(plf_path)
plf_ldflags += f" -map {plf_map}"
else:
preplf_map = None
plf_map = None
n.build(
outputs=path(preplf_path),
rule="link",
inputs=path(self.inputs),
implicit=path(mwld_implicit),
implicit_outputs=path(preplf_map),
variables={"ldflags": preplf_ldflags},
)
n.build(
outputs=path(plf_path),
rule="link",
inputs=path(self.inputs),
implicit=path([self.ldscript, preplf_path, *mwld_implicit]),
implicit_outputs=path(plf_map),
variables={"ldflags": plf_ldflags},
)
n.newline()
has_units = False
if build_config_path.is_file():
has_units = True
src_path = Path("src")
link_steps = []
used_compiler_versions = set()
source_inputs = []
host_source_inputs = []
source_added = set()
def add_unit(obj, step):
obj_path, unit = obj["object"], obj["name"]
result = locate_unit(unit)
if not result:
step.add(obj_path)
base_object = Path(unit).with_suffix("")
objdiff_config["units"].append(
{
"name": str(f"{step.name}/{base_object}").replace(os.sep, "/"),
"target_path": str(obj_path).replace(os.sep, "/"),
"reverse_fn_order": False,
}
)
return
lib, data = result
lib_name = lib["lib"]
options = {
"add_to_all": True,
"mw_version": None,
"cflags": None,
"source": unit,
}
completed = data.completed
if data.options is not None:
options.update(data.options)
unit_src_path = src_path / options["source"]
if not unit_src_path.exists():
step.add(obj_path)
return
mw_version = options["mw_version"] or lib["mw_version"]
cflags = options["cflags"] or lib["cflags"]
if type(cflags) is list:
cflags_str = " ".join(cflags)
else:
cflags_str = str(cflags)
used_compiler_versions.add(mw_version)
base_object = Path(data.obj_path).with_suffix("")
src_obj_path = build_src_path / f"{base_object}.o"
src_base_path = build_src_path / base_object
if src_obj_path not in source_added:
source_added.add(src_obj_path)
n.comment(f"{unit}: {lib_name} (linked {completed})")
n.build(
outputs=path(src_obj_path),
rule="mwcc",
inputs=path(unit_src_path),
variables={
"mw_version": path(Path(mw_version)),
"cflags": cflags_str,
"basedir": os.path.dirname(src_base_path),
"basefile": path(src_base_path),
},
implicit=path(mwcc_implicit),
)
if lib["host"]:
host_obj_path = build_host_path / f"{base_object}.o"
host_base_path = build_host_path / base_object
n.build(
outputs=path(host_obj_path),
rule="host_cc" if unit_src_path.suffix == ".c" else "host_cpp",
inputs=path(unit_src_path),
variables={
"basedir": os.path.dirname(host_base_path),
"basefile": path(host_base_path),
},
)
if options["add_to_all"]:
host_source_inputs.append(host_obj_path)
if options["add_to_all"]:
source_inputs.append(src_obj_path)
reverse_fn_order = False
if type(cflags) is list:
for flag in cflags:
if not flag.startswith("-inline "):
continue
for value in flag.split(" ")[1].split(","):
if value == "deferred":
reverse_fn_order = True
elif value == "nodeferred":
reverse_fn_order = False
objdiff_config["units"].append(
{
"name": str(f"{step.name}/{base_object}").replace(os.sep, "/"),
"target_path": str(obj_path).replace(os.sep, "/"),
"base_path": str(src_obj_path).replace(os.sep, "/"),
"reverse_fn_order": reverse_fn_order,
"complete": completed,
}
)
if completed:
obj_path = src_obj_path
step.add(obj_path)
with open(build_config_path) as r:
config = json.load(r)
link_step = LinkStep(config)
for unit in config["units"]:
add_unit(unit, link_step)
link_steps.append(link_step)
for module in config["modules"]:
module_link_step = LinkStep(module)
for unit in module["units"]:
add_unit(unit, module_link_step)
link_steps.append(module_link_step)
n.newline()
# Check if all compiler versions exist
for mw_version in used_compiler_versions:
mw_path = compilers / mw_version / "mwcceppc.exe"
if args.compilers and not os.path.exists(mw_path):
sys.exit(f"Compiler {mw_path} does not exist")
# Check if linker exists
mw_path = compilers / LINKER_VERSION / "mwldeppc.exe"
if args.compilers and not os.path.exists(mw_path):
sys.exit(f"Linker {mw_path} does not exist")
###
# Link
###
for step in link_steps:
step.write(n)
n.newline()
###
# Generate RELs
###
n.comment("Generate RELs")
n.build(
outputs=path(
list(
map(
lambda step: step.output(),
filter(lambda step: step.module_id != 0, link_steps),
)
)
),
rule="makerel",
inputs=path(list(map(lambda step: step.partial_output(), link_steps))),
implicit=path([dtk, config_path]),
variables={"config": path(config_path)},
)
n.newline()
###
# Helper rule for building all source files
###
n.comment("Build all source files")
n.build(
outputs="all_source",
rule="phony",
inputs=path(source_inputs),
)
n.newline()
###
# Helper rule for building all source files, with a host compiler
###
n.comment("Build all source files with a host compiler")
n.build(
outputs="all_source_host",
rule="phony",
inputs=path(host_source_inputs),
)
n.newline()
###
# Check hash
###
n.comment("Check hash")
ok_path = build_path / "ok"
n.rule(
name="check",
command=f"{dtk} shasum -q -c $in -o $out",
description="CHECK $in",
)
n.build(
outputs=path(ok_path),
rule="check",
inputs=path(Path("orig") / f"{version}.sha1"),
implicit=path([dtk, *map(lambda step: step.output(), link_steps)]),
)
n.newline()
###
# Helper tools
###
# TODO: make these rules work for RELs too
dol_link_step = link_steps[0]
dol_elf_path = dol_link_step.partial_output()
n.comment("Check for mismatching symbols")
n.rule(
name="dol_diff",
command=f"{dtk} -L error dol diff $in",
description=f"DIFF {dol_elf_path}",
)
n.build(
inputs=path([config_path, dol_elf_path]),
outputs="dol_diff",
rule="dol_diff",
)
n.build(
outputs="diff",
rule="phony",
inputs="dol_diff",
)
n.newline()
n.comment("Apply symbols from linked ELF")
n.rule(
name="dol_apply",
command=f"{dtk} dol apply $in",
description=f"APPLY {dol_elf_path}",
)
n.build(
inputs=path([config_path, dol_elf_path]),
outputs="dol_apply",
rule="dol_apply",
implicit=path([ok_path]),
)
n.build(
outputs="apply",
rule="phony",
inputs="dol_apply",
)
###
# DOL split
###
n.comment("Generate objects from original DOL")
n.rule(
name="split",
command=f"{dtk} dol split $in $out_dir",
description="SPLIT",
depfile="$out_dir/dep",
deps="gcc",
)
n.build(
inputs=path(config_path),
outputs=path(build_config_path),
rule="split",
implicit=path(dtk),
variables={"out_dir": path(build_path)},
)
n.newline()
###
# Regenerate on change
###
n.comment("Reconfigure on change")
script = os.path.relpath(__file__)
n.rule(
name="configure",
command=f"$python {script} $configure_args",
generator=True,
description=f"RUN {script}",
)
n.build(
outputs="build.ninja",
rule="configure",
implicit=path([script, tools_path / "ninja_syntax.py", build_config_path]),
)
n.newline()
###
# Default rule
###
n.comment("Default rule")
if has_units:
n.default(path(ok_path))
else:
n.default(path(build_config_path))
###
# Write build.ninja
###
with open("build.ninja", "w") as f:
f.write(out.getvalue())
n.close()
###
# Write objdiff config
###
with open("objdiff.json", "w") as w:
json.dump(objdiff_config, w, indent=4)
# Write build.ninja and objdiff.json
generate_build(config)
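
Condensed to its essentials, the reworked configure.py now follows roughly the shape below: build a ProjectConfig, fill it from command-line arguments and project settings, then hand it to tools/project.py. This is a trimmed sketch of the flow shown above, not the full file.

from pathlib import Path
from tools.project import Object, ProjectConfig, generate_build

config = ProjectConfig()
config.version = "GZLE01"
config.config_path = Path("config") / config.version / "config.yml"
config.check_sha_path = Path("orig") / f"{config.version}.sha1"
config.linker_version = "GC/1.3.2"
config.ldflags = ["-fp hardware", "-nodefaults"]
# Tool versions to download (or override with the *_path / wrapper options)
config.dtk_tag = "v0.5.1"
config.sjiswrap_tag = "v1.1.0"
config.wibo_tag = "0.5.1"
config.compilers_tag = "1"
# Libraries and their objects; cflags list trimmed here for brevity
config.libs = [
    {
        "lib": "Runtime.PPCEABI.H",
        "mw_version": "GC/1.3.2",
        "cflags": ["-nodefaults", "-proc gekko"],
        "host": False,
        "objects": [
            # Object(completed, name, **options); options such as source=,
            # cflags=, mw_version= and shiftjis= override the library
            # defaults per object.
            Object(True, "Runtime/__init_cpp_exceptions.cpp"),
        ],
    },
]
generate_build(config)  # writes build.ninja and objdiff.json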

tools/decompctx.py Normal file → Executable file

@@ -1,5 +1,15 @@
#!/usr/bin/env python3
###
# Generates a ctx.c file, usable for "Context" on https://decomp.me.
#
# Usage:
# python3 tools/decompctx.py src/file.cpp
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import os
import re
@@ -30,7 +40,7 @@ def import_c_file(in_file) -> str:
out_text = ''
try:
with open(in_file, encoding="shift-jis") as file:
with open(in_file, encoding="utf-8") as file:
out_text += process_file(in_file, list(file))
except Exception:
with open(in_file) as file:
@@ -74,4 +84,4 @@ def main():
if __name__ == "__main__":
main()

tools/download_tool.py Normal file → Executable file

@@ -1,13 +1,26 @@
#!/usr/bin/env python3
###
# Downloads various tools from GitHub releases.
#
# Usage:
# python3 tools/download_tool.py wibo build/tools/wibo --tag 1.0.0
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import io
import os
import platform
import shutil
import stat
import sys
import urllib.request
import os
import stat
import platform
from pathlib import Path
import zipfile
import io
import shutil
from pathlib import Path
def dtk_url(tag):

tools/ninja_syntax.py

@@ -1,5 +1,3 @@
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");

tools/project.py Normal file

@@ -0,0 +1,820 @@
###
# decomp-toolkit project generator
# Generates build.ninja and objdiff.json.
#
# This generator is intentionally project-agnostic
# and shared between multiple projects. Any configuration
# specific to a project should be added to `configure.py`.
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import io
import json
import os
import platform
import sys
from pathlib import Path
from . import ninja_syntax
if sys.platform == "cygwin":
sys.exit(
f"Cygwin/MSYS2 is not supported."
f"\nPlease use native Windows Python instead."
f"\n(Current path: {sys.executable})"
)
class ProjectConfig:
# Paths
build_dir = Path("build")
src_dir = Path("src")
tools_dir = Path("tools")
# Tooling
dtk_tag = None # Git tag
build_dtk_path = None # If None, download
compilers_tag = None # 1
compilers_path = None # If None, download
wibo_tag = None # Git tag
wrapper = None # If None, download wibo on Linux
sjiswrap_tag = None # Git tag
sjiswrap_path = None # If None, download
# Project config
check_sha_path = None # Path to version.sha1
config_path = None # Path to config.yml
build_rels = True # Build REL files
debug = False # Build with debug info
generate_map = False # Generate map file(s)
ldflags = None # Linker flags
linker_version = None # mwld version
libs = None # List of libraries
version = None # Version name
def validate(self):
required_attrs = [
"build_dir",
"src_dir",
"tools_dir",
"check_sha_path",
"config_path",
"ldflags",
"linker_version",
"libs",
"version",
]
for attr in required_attrs:
if getattr(self, attr) is None:
sys.exit(f"ProjectConfig.{attr} missing")
def find_object(self, name):
for lib in self.libs:
for obj in lib["objects"]:
if obj.name == name:
return [lib, obj]
return None
def out_path(self):
return self.build_dir / self.version
class Object:
def __init__(self, completed, name, **options):
self.name = name
self.completed = completed
self.options = {
"add_to_all": True,
"cflags": None,
"mw_version": None,
"shiftjis": True,
"source": name,
}
self.options.update(options)
def is_windows():
return os.name == "nt"
# On Windows, we need this to use && in commands
CHAIN = "cmd /c " if is_windows() else ""
# Native executable extension
EXE = ".exe" if is_windows() else ""
# Replace forward slashes with backslashes on Windows
def os_str(value):
return str(value).replace("/", os.sep)
# Replace backslashes with forward slashes on Windows
def unix_str(value):
return str(value).replace(os.sep, "/")
# Stringify paths for ninja_syntax
def path(value):
if value is None:
return None
elif isinstance(value, list):
return list(map(os_str, filter(lambda x: x is not None, value)))
else:
return [os_str(value)]
# Load decomp-toolkit generated config.json
def load_build_config(config, build_config_path):
def versiontuple(v):
return tuple(map(int, (v.split("."))))
if build_config_path.is_file():
with open(build_config_path) as r:
build_config = json.load(r)
config_version = build_config.get("version")
if not config_version:
return None
dtk_version = config.dtk_tag[1:] # Strip v
if versiontuple(config_version) < versiontuple(dtk_version):
return None
return build_config
return None
# Generate build.ninja and objdiff.json
def generate_build(config):
build_config = load_build_config(config, config.out_path() / "config.json")
generate_build_ninja(config, build_config)
generate_objdiff_config(config, build_config)
# Generate build.ninja
def generate_build_ninja(config, build_config):
config.validate()
out = io.StringIO()
n = ninja_syntax.Writer(out)
n.variable("ninja_required_version", "1.3")
n.newline()
n.comment("The arguments passed to configure.py, for rerunning it.")
n.variable("configure_args", sys.argv[1:])
n.variable("python", f'"{sys.executable}"')
n.newline()
###
# Variables
###
n.comment("Variables")
ldflags = " ".join(config.ldflags)
if config.generate_map:
ldflags += " -mapunused"
if config.debug:
ldflags += " -g"
n.variable("ldflags", ldflags)
n.variable("mw_version", config.linker_version)
n.newline()
###
# Tooling
###
n.comment("Tooling")
build_path = config.out_path()
build_tools_path = config.build_dir / "tools"
download_tool = config.tools_dir / "download_tool.py"
n.rule(
name="download_tool",
command=f"$python {download_tool} $tool $out --tag $tag",
description="TOOL $out",
)
if config.build_dtk_path:
dtk = build_tools_path / "release" / f"dtk{EXE}"
n.rule(
name="cargo",
command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
description="CARGO $bin",
depfile=path(Path("$target") / "release" / "$bin.d"),
deps="gcc",
)
n.build(
outputs=path(dtk),
rule="cargo",
inputs=path(config.build_dtk_path / "Cargo.toml"),
variables={
"bin": "dtk",
"target": build_tools_path,
},
)
elif config.dtk_tag:
dtk = build_tools_path / f"dtk{EXE}"
n.build(
outputs=path(dtk),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "dtk",
"tag": config.dtk_tag,
},
)
else:
sys.exit("ProjectConfig.dtk_tag missing")
if config.sjiswrap_path:
sjiswrap = config.sjiswrap_path
elif config.sjiswrap_tag:
sjiswrap = build_tools_path / "sjiswrap.exe"
n.build(
outputs=path(sjiswrap),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "sjiswrap",
"tag": config.sjiswrap_tag,
},
)
else:
sys.exit("ProjectConfig.sjiswrap_tag missing")
# Only add an implicit dependency on wibo if we download it
wrapper = config.wrapper
wrapper_implicit = None
if (
config.wibo_tag is not None
and sys.platform == "linux"
and platform.machine() in ("i386", "x86_64")
and config.wrapper is None
):
wrapper = build_tools_path / "wibo"
wrapper_implicit = wrapper
n.build(
outputs=path(wrapper),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "wibo",
"tag": config.wibo_tag,
},
)
if not is_windows() and wrapper is None:
wrapper = "wine"
wrapper_cmd = f"{wrapper} " if wrapper else ""
compilers_implicit = None
if config.compilers_path:
compilers = config.compilers_path
elif config.compilers_tag:
compilers = config.build_dir / "compilers"
compilers_implicit = compilers
n.build(
outputs=path(compilers),
rule="download_tool",
implicit=path(download_tool),
variables={
"tool": "compilers",
"tag": config.compilers_tag,
},
)
n.newline()
###
# Build rules
###
compiler_path = compilers / "$mw_version"
# MWCC
mwcc = compiler_path / "mwcceppc.exe"
mwcc_cmd = f"{wrapper_cmd}{mwcc} $cflags -MMD -c $in -o $basedir"
mwcc_implicit = [compilers_implicit or mwcc, wrapper_implicit]
# MWCC with UTF-8 to Shift JIS wrapper
mwcc_sjis_cmd = f"{wrapper_cmd}{sjiswrap} {mwcc} $cflags -MMD -c $in -o $basedir"
mwcc_sjis_implicit = [*mwcc_implicit, sjiswrap]
# MWLD
mwld = compiler_path / "mwldeppc.exe"
mwld_cmd = f"{wrapper_cmd}{mwld} $ldflags -o $out @$out.rsp"
mwld_implicit = [compilers_implicit or mwld, wrapper_implicit]
if os.name != "nt":
transform_dep = config.tools_dir / "transform_dep.py"
mwcc_cmd += f" && $python {transform_dep} $basefile.d $basefile.d"
mwcc_implicit.append(transform_dep)
n.comment("Link ELF file")
n.rule(
name="link",
command=mwld_cmd,
description="LINK $out",
rspfile="$out.rsp",
rspfile_content="$in_newline",
)
n.newline()
n.comment("Generate DOL")
n.rule(
name="elf2dol",
command=f"{dtk} elf2dol $in $out",
description="DOL $out",
)
n.newline()
n.comment("Generate REL(s)")
makerel_rsp = build_path / "makerel.rsp"
n.rule(
name="makerel",
command=f"{dtk} rel make -w -c $config @{makerel_rsp}",
description="REL",
rspfile=path(makerel_rsp),
rspfile_content="$in_newline",
)
n.newline()
n.comment("MWCC build")
n.rule(
name="mwcc",
command=mwcc_cmd,
description="MWCC $out",
depfile="$basefile.d",
deps="gcc",
)
n.newline()
n.comment("MWCC build (with UTF-8 to Shift JIS wrapper)")
n.rule(
name="mwcc_sjis",
command=mwcc_sjis_cmd,
description="MWCC $out",
depfile="$basefile.d",
deps="gcc",
)
n.newline()
n.comment("Host build")
n.variable("host_cflags", "-I include -Wno-trigraphs")
n.variable(
"host_cppflags",
"-std=c++98 -I include -fno-exceptions -fno-rtti -D_CRT_SECURE_NO_WARNINGS -Wno-trigraphs -Wno-c++11-extensions",
)
n.rule(
name="host_cc",
command="clang $host_cflags -c -o $out $in",
description="CC $out",
)
n.rule(
name="host_cpp",
command="clang++ $host_cppflags -c -o $out $in",
description="CXX $out",
)
n.newline()
###
# Source files
###
n.comment("Source files")
build_src_path = build_path / "src"
build_host_path = build_path / "host"
build_config_path = build_path / "config.json"
def map_path(path):
return path.parent / (path.name + ".MAP")
class LinkStep:
def __init__(self, config):
self.name = config["name"]
self.module_id = config["module_id"]
self.ldscript = config["ldscript"]
self.entry = config["entry"]
self.inputs = []
def add(self, obj):
self.inputs.append(obj)
def output(self):
if self.module_id == 0:
return build_path / f"{self.name}.dol"
else:
return build_path / self.name / f"{self.name}.rel"
def partial_output(self):
if self.module_id == 0:
return build_path / f"{self.name}.elf"
else:
return build_path / self.name / f"{self.name}.plf"
def write(self, n):
n.comment(f"Link {self.name}")
if self.module_id == 0:
elf_path = build_path / f"{self.name}.elf"
dol_path = build_path / f"{self.name}.dol"
elf_ldflags = f"$ldflags -lcf {self.ldscript}"
if config.generate_map:
elf_map = map_path(elf_path)
elf_ldflags += f" -map {elf_map}"
else:
elf_map = None
n.build(
outputs=path(elf_path),
rule="link",
inputs=path(self.inputs),
implicit=path([self.ldscript, *mwld_implicit]),
implicit_outputs=path(elf_map),
variables={"ldflags": elf_ldflags},
)
n.build(
outputs=path(dol_path),
rule="elf2dol",
inputs=path(elf_path),
implicit=path(dtk),
)
else:
preplf_path = build_path / self.name / f"{self.name}.preplf"
plf_path = build_path / self.name / f"{self.name}.plf"
preplf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -r"
plf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -m {self.entry} -r1 -strip_partial -lcf {self.ldscript}"
if config.generate_map:
preplf_map = map_path(preplf_path)
preplf_ldflags += f" -map {preplf_map}"
plf_map = map_path(plf_path)
plf_ldflags += f" -map {plf_map}"
else:
preplf_map = None
plf_map = None
n.build(
outputs=path(preplf_path),
rule="link",
inputs=path(self.inputs),
implicit=path(mwld_implicit),
implicit_outputs=path(preplf_map),
variables={"ldflags": preplf_ldflags},
)
n.build(
outputs=path(plf_path),
rule="link",
inputs=path(self.inputs),
implicit=path([self.ldscript, preplf_path, *mwld_implicit]),
implicit_outputs=path(plf_map),
variables={"ldflags": plf_ldflags},
)
n.newline()
if build_config:
link_steps = []
used_compiler_versions = set()
source_inputs = []
host_source_inputs = []
source_added = set()
def add_unit(build_obj, link_step):
obj_path, obj_name = build_obj["object"], build_obj["name"]
result = config.find_object(obj_name)
if not result:
link_step.add(obj_path)
return
lib, obj = result
lib_name = lib["lib"]
options = obj.options
completed = obj.completed
unit_src_path = config.src_dir / options["source"]
if not unit_src_path.exists():
link_step.add(obj_path)
return
mw_version = options["mw_version"] or lib["mw_version"]
cflags = options["cflags"] or lib["cflags"]
if type(cflags) is list:
cflags_str = " ".join(cflags)
else:
cflags_str = str(cflags)
used_compiler_versions.add(mw_version)
base_object = Path(obj.name).with_suffix("")
src_obj_path = build_src_path / f"{base_object}.o"
src_base_path = build_src_path / base_object
if src_obj_path not in source_added:
source_added.add(src_obj_path)
n.comment(f"{obj_name}: {lib_name} (linked {completed})")
n.build(
outputs=path(src_obj_path),
rule="mwcc_sjis" if options["shiftjis"] else "mwcc",
inputs=path(unit_src_path),
variables={
"mw_version": path(Path(mw_version)),
"cflags": cflags_str,
"basedir": os.path.dirname(src_base_path),
"basefile": path(src_base_path),
},
implicit=path(
mwcc_sjis_implicit if options["shiftjis"] else mwcc_implicit
),
)
if lib["host"]:
host_obj_path = build_host_path / f"{base_object}.o"
host_base_path = build_host_path / base_object
n.build(
outputs=path(host_obj_path),
rule="host_cc" if unit_src_path.suffix == ".c" else "host_cpp",
inputs=path(unit_src_path),
variables={
"basedir": os.path.dirname(host_base_path),
"basefile": path(host_base_path),
},
)
if options["add_to_all"]:
host_source_inputs.append(host_obj_path)
n.newline()
if options["add_to_all"]:
source_inputs.append(src_obj_path)
if completed:
obj_path = src_obj_path
link_step.add(obj_path)
# Add DOL link step
link_step = LinkStep(build_config)
for unit in build_config["units"]:
add_unit(unit, link_step)
link_steps.append(link_step)
if config.build_rels:
# Add REL link steps
for module in build_config["modules"]:
module_link_step = LinkStep(module)
for unit in module["units"]:
add_unit(unit, module_link_step)
link_steps.append(module_link_step)
n.newline()
# Check if all compiler versions exist
for mw_version in used_compiler_versions:
mw_path = compilers / mw_version / "mwcceppc.exe"
if config.compilers_path and not os.path.exists(mw_path):
sys.exit(f"Compiler {mw_path} does not exist")
# Check if linker exists
mw_path = compilers / config.linker_version / "mwldeppc.exe"
if config.compilers_path and not os.path.exists(mw_path):
sys.exit(f"Linker {mw_path} does not exist")
###
# Link
###
for step in link_steps:
step.write(n)
n.newline()
###
# Generate RELs
###
n.comment("Generate RELs")
n.build(
outputs=path(
list(
map(
lambda step: step.output(),
filter(lambda step: step.module_id != 0, link_steps),
)
)
),
rule="makerel",
inputs=path(list(map(lambda step: step.partial_output(), link_steps))),
implicit=path([dtk, config.config_path]),
variables={"config": path(config.config_path)},
)
n.newline()
###
# Helper rule for building all source files
###
n.comment("Build all source files")
n.build(
outputs="all_source",
rule="phony",
inputs=path(source_inputs),
)
n.newline()
###
# Helper rule for building all source files, with a host compiler
###
n.comment("Build all source files with a host compiler")
n.build(
outputs="all_source_host",
rule="phony",
inputs=path(host_source_inputs),
)
n.newline()
###
# Check hash
###
n.comment("Check hash")
ok_path = build_path / "ok"
n.rule(
name="check",
command=f"{dtk} shasum -q -c $in -o $out",
description="CHECK $in",
)
n.build(
outputs=path(ok_path),
rule="check",
inputs=path(config.check_sha_path),
implicit=path([dtk, *map(lambda step: step.output(), link_steps)]),
)
n.newline()
###
# Helper tools
###
# TODO: make these rules work for RELs too
dol_link_step = link_steps[0]
dol_elf_path = dol_link_step.partial_output()
n.comment("Check for mismatching symbols")
n.rule(
name="dol_diff",
command=f"{dtk} -L error dol diff $in",
description=f"DIFF {dol_elf_path}",
)
n.build(
inputs=path([config.config_path, dol_elf_path]),
outputs="dol_diff",
rule="dol_diff",
)
n.build(
outputs="diff",
rule="phony",
inputs="dol_diff",
)
n.newline()
n.comment("Apply symbols from linked ELF")
n.rule(
name="dol_apply",
command=f"{dtk} dol apply $in",
description=f"APPLY {dol_elf_path}",
)
n.build(
inputs=path([config.config_path, dol_elf_path]),
outputs="dol_apply",
rule="dol_apply",
implicit=path([ok_path]),
)
n.build(
outputs="apply",
rule="phony",
inputs="dol_apply",
)
n.newline()
###
# Split DOL
###
n.comment("Split DOL into relocatable objects")
n.rule(
name="split",
command=f"{dtk} dol split $in $out_dir",
description="SPLIT $in",
depfile="$out_dir/dep",
deps="gcc",
)
n.build(
inputs=path(config.config_path),
outputs=path(build_config_path),
rule="split",
implicit=path(dtk),
variables={"out_dir": path(build_path)},
)
n.newline()
###
# Regenerate on change
###
n.comment("Reconfigure on change")
python_script = os.path.relpath(os.path.abspath(sys.argv[0]))
python_lib = os.path.relpath(__file__)
python_lib_dir = os.path.dirname(python_lib)
n.rule(
name="configure",
command=f"$python {python_script} $configure_args",
generator=True,
description=f"RUN {python_script}",
)
n.build(
outputs="build.ninja",
rule="configure",
implicit=path(
[
build_config_path,
python_script,
python_lib,
Path(python_lib_dir) / "ninja_syntax.py",
]
),
)
n.newline()
###
# Default rule
###
n.comment("Default rule")
if build_config:
n.default(path(ok_path))
else:
n.default(path(build_config_path))
# Write build.ninja
with open("build.ninja", "w", encoding="utf-8") as f:
f.write(out.getvalue())
out.close()
# Generate objdiff.json
def generate_objdiff_config(config, build_config):
if not build_config:
return
objdiff_config = {
"min_version": "0.4.3",
"custom_make": "ninja",
"build_target": False,
"watch_patterns": [
"*.c",
"*.cp",
"*.cpp",
"*.h",
"*.hpp",
"*.py",
"*.yml",
"*.txt",
"*.json",
],
"units": [],
}
build_path = config.out_path()
def add_unit(build_obj, module_name):
if build_obj["autogenerated"]:
# Skip autogenerated objects
return
obj_path, obj_name = build_obj["object"], build_obj["name"]
base_object = Path(obj_name).with_suffix("")
unit_config = {
"name": unix_str(Path(module_name) / base_object),
"target_path": unix_str(obj_path),
}
result = config.find_object(obj_name)
if not result:
objdiff_config["units"].append(unit_config)
return
lib, obj = result
unit_src_path = config.src_dir / obj.options["source"]
if not unit_src_path.exists():
objdiff_config["units"].append(unit_config)
return
cflags = obj.options["cflags"] or lib["cflags"]
src_obj_path = build_path / "src" / f"{base_object}.o"
reverse_fn_order = False
if type(cflags) is list:
for flag in cflags:
if not flag.startswith("-inline "):
continue
for value in flag.split(" ")[1].split(","):
if value == "deferred":
reverse_fn_order = True
elif value == "nodeferred":
reverse_fn_order = False
unit_config["base_path"] = unix_str(src_obj_path)
unit_config["reverse_fn_order"] = reverse_fn_order
unit_config["complete"] = obj.completed
objdiff_config["units"].append(unit_config)
# Add DOL units
for unit in build_config["units"]:
add_unit(unit, build_config["name"])
# Add REL units
for module in build_config["modules"]:
for unit in module["units"]:
add_unit(unit, module["name"])
# Write objdiff.json
with open("objdiff.json", "w", encoding="utf-8") as w:
json.dump(objdiff_config, w, indent=4)
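
For reference, generate_objdiff_config above turns each decompiled unit into an entry like the following in objdiff.json. The module name and object paths are illustrative, since they come from the dtk-generated config.json and the project's build directory.

# Illustrative only: one entry of objdiff_config["units"] for a matched
# Runtime object built with "-inline deferred,auto".
unit_config = {
    "name": "main/Runtime/__init_cpp_exceptions",
    "target_path": "build/GZLE01/obj/Runtime/__init_cpp_exceptions.o",
    "base_path": "build/GZLE01/src/Runtime/__init_cpp_exceptions.o",
    "reverse_fn_order": True,  # derived from the -inline flags in cflags
    "complete": True,          # Object(Matching, ...)
}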

tools/transform_dep.py Normal file → Executable file

@@ -1,4 +1,16 @@
#!/usr/bin/env python3
###
# Transforms .d files, converting Windows paths to Unix paths.
# Allows usage of the mwcc -MMD flag on platforms other than Windows.
#
# Usage:
# python3 tools/transform_dep.py build/src/file.d build/src/file.d
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import os
from platform import uname
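
As a rough illustration of the conversion the header describes, a minimal sketch assuming only separator rewriting; the real script's logic is more involved (it deals with the full depfile format emitted by mwcc -MMD).

# Minimal sketch of the idea, not the actual transform_dep.py implementation:
# rewrite Windows-style separators in a depfile line to Unix-style ones.
def to_unix(dep_line: str) -> str:
    return dep_line.replace("\\", "/")

print(to_unix(r"build\src\main.o: src\main.cpp"))
# -> build/src/main.o: src/main.cpp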