Mirror of https://github.com/xbret/xenoblade.git (synced 2024-11-23 06:09:47 +00:00)
parent b6c39cdd0c
commit df2bcdd2b1
.gitignore (vendored), 38 lines changed
@@ -1,14 +1,36 @@
# IDE folders
.idea/
.vs/

# Caches
__pycache__
.idea
.vscode
.ninja_*
.mypy_cache
*.exe
build
build.ninja
objdiff.json
.cache/

# Original files
orig/*/*
!orig/*/.gitkeep
*.dol
*.rel
*.elf
*.o
*.map
*.MAP

# Build files
build/
.ninja_*
build.ninja

# decompctx output
ctx.*
*.ctx

# Generated configs
objdiff.json
compile_commands.json

# Miscellaneous
/*.txt
ctx.c
*.exe
.DS_Store
.vscode/extensions.json (vendored), 16 lines changed
@@ -1,6 +1,12 @@
{
    "recommendations": [
        "ms-vscode.cpptools",
        "ms-vscode.makefile-tools"
    ]
}
    "recommendations": [
        "llvm-vs-code-extensions.vscode-clangd",
        "ms-python.black-formatter",
        "ms-python.flake8",
    ],
    "unwantedRecommendations": [
        "ms-vscode.cmake-tools",
        "ms-vscode.cpptools-extension-pack",
        "ms-vscode.cpptools",
    ]
}
.vscode/settings.json (vendored), 55 lines changed
@@ -1,36 +1,23 @@
{
    "[c]": {
        "files.encoding": "utf8"
    },
    "[cpp]": {
        "files.encoding": "utf8"
    },
    "editor.tabSize": 4,
    "files.exclude": {
        "**/CVS": false,
        "**/*.ctx": true
    },
    "files.insertFinalNewline": true,
    "files.associations": {
        "*.cp": "cpp",
        "algorithm": "cpp",
        "cmath": "cpp",
        "cstdarg": "cpp",
        "cstddef": "cpp",
        "cstdio": "cpp",
        "cstring": "cpp",
        "cwchar": "cpp",
        "exception": "cpp",
        "iterator": "cpp",
        "new": "cpp",
        "typeinfo": "cpp"
    },
    "search.useIgnoreFiles": false,
    "search.exclude": {
        "build/*/config.json": true,
        "build/**/*.MAP": true,
        "build.ninja": true,
        ".ninja_*": true,
        "objdiff.json": true
    }
    "[c]": {
        "files.encoding": "utf8",
        "editor.defaultFormatter": "llvm-vs-code-extensions.vscode-clangd"
    },
    "[cpp]": {
        "files.encoding": "utf8",
        "editor.defaultFormatter": "llvm-vs-code-extensions.vscode-clangd"
    },
    "[python]": {
        "editor.defaultFormatter": "ms-python.black-formatter"
    },
    // "editor.tabSize": 2,
    "files.autoSave": "onFocusChange",
    "files.insertFinalNewline": true,
    "files.trimFinalNewlines": true,
    "files.associations": {
        "*.inc": "c",
        ".clangd": "yaml"
    },
    // Disable C/C++ IntelliSense, use clangd instead
    "C_Cpp.intelliSenseEngine": "disabled",
}
@@ -146,9 +146,9 @@ if not config.non_matching:
# Tool versions
config.binutils_tag = "2.42-1"
config.compilers_tag = "20240706"
config.dtk_tag = "v1.1.0"
config.objdiff_tag = "v2.2.1"
config.sjiswrap_tag = "v1.1.1"
config.dtk_tag = "v1.1.4"
config.objdiff_tag = "v2.3.3"
config.sjiswrap_tag = "v1.2.0"
config.wibo_tag = "0.6.11"

# Project
@@ -28,7 +28,7 @@ include_dirs = [
    os.path.join(root_dir, "libs/RVL_SDK/src/revolution/hbm/include"),
]

include_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]$')
include_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]')
guard_pattern = re.compile(r"^#\s*ifndef\s+(.*)$")
pragmaonce_pattern = re.compile(r'^#pragma once.*$')
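Note on the include_pattern change: dropping the trailing `$` anchor lets decompctx pick up #include directives that have trailing whitespace or a comment after the closing quote or bracket. A minimal check of the difference (the header name and the trailing comment are invented for the example):

    import re

    old_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]$')
    new_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]')

    line = '#include "math_types.h"  // trailing comment'
    assert old_pattern.match(line) is None                       # old pattern required the quote to end the line
    assert new_pattern.match(line).group(1) == "math_types.h"    # new pattern still captures the path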
@@ -55,6 +55,7 @@ def dtk_url(tag: str) -> str:
    repo = "https://github.com/encounter/decomp-toolkit"
    return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"


def objdiff_cli_url(tag: str) -> str:
    uname = platform.uname()
    suffix = ""
@@ -24,17 +24,10 @@ import textwrap
import os
from io import StringIO
from pathlib import Path
from typing import Dict, List, Match, Optional, Tuple, Union
from typing import Dict, Iterable, List, Match, Optional, Tuple, Union

NinjaPath = Union[str, Path]
NinjaPaths = Union[
    List[str],
    List[Path],
    List[NinjaPath],
    List[Optional[str]],
    List[Optional[Path]],
    List[Optional[NinjaPath]],
]
NinjaPaths = Iterable[Optional[NinjaPath]]
NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]


@@ -118,8 +111,8 @@ class Writer(object):
        pool: Optional[str] = None,
        dyndep: Optional[NinjaPath] = None,
    ) -> List[str]:
        outputs = serialize_paths(outputs)
        out_outputs = [escape_path(x) for x in outputs]
        str_outputs = serialize_paths(outputs)
        out_outputs = [escape_path(x) for x in str_outputs]
        all_inputs = [escape_path(x) for x in serialize_paths(inputs)]

        if implicit:
@@ -154,7 +147,7 @@ class Writer(object):
        for key, val in iterator:
            self.variable(key, val, indent=1)

        return outputs
        return str_outputs

    def include(self, path: str) -> None:
        self._line("include %s" % path)
@@ -225,9 +218,11 @@ def serialize_path(input: Optional[NinjaPath]) -> str:


def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
    if isinstance(input, list):
    if isinstance(input, str) or isinstance(input, Path):
        return [serialize_path(input)] if input else []
    elif input is not None:
        return [serialize_path(path) for path in input if path]
    return [serialize_path(input)] if input else []
    return []


def escape(string: str) -> str:
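Note on the ninja_syntax changes: NinjaPaths is now any iterable of optional paths rather than a closed set of list types, and serialize_paths handles single values, iterables and None uniformly. A rough usage sketch (paths are invented; the import path is an assumption, inside the package the import is the relative .ninja_syntax):

    from pathlib import Path

    from tools.ninja_syntax import serialize_paths  # import path assumed

    # All of these are valid NinjaPathOrPaths values now:
    print(serialize_paths("build/main.o"))                  # single str -> one-element list
    print(serialize_paths(Path("build") / "main.elf"))      # single Path -> one-element list
    print(serialize_paths([Path("a.o"), None, "b.o"]))      # None entries are skipped
    print(serialize_paths(p for p in (Path("c.o"), None)))  # any iterable works, not just lists
    print(serialize_paths(None))                            # -> []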
tools/project.py, 479 lines changed
@@ -17,7 +17,7 @@ import os
import platform
import sys
from pathlib import Path
from typing import IO, Any, Dict, List, Optional, Set, Tuple, Union, cast
from typing import IO, Any, Dict, Iterable, List, Optional, Set, Tuple, Union, cast

from . import ninja_syntax
from .ninja_syntax import serialize_path
@@ -41,8 +41,9 @@ class Object:
            "asflags": None,
            "asm_dir": None,
            "cflags": None,
            "extra_asflags": None,
            "extra_cflags": None,
            "extra_asflags": [],
            "extra_cflags": [],
            "extra_clang_flags": [],
            "host": None,
            "lib": None,
            "mw_version": None,
@@ -81,6 +82,20 @@ class Object:
        set_default("shift_jis", config.shift_jis)
        set_default("src_dir", config.src_dir)

        # Validate progress categories
        def check_category(category: str):
            if not any(category == c.id for c in config.progress_categories):
                sys.exit(
                    f"Progress category '{category}' missing from config.progress_categories"
                )

        progress_category = obj.options["progress_category"]
        if isinstance(progress_category, list):
            for category in progress_category:
                check_category(category)
        elif progress_category is not None:
            check_category(progress_category)

        # Resolve paths
        build_dir = config.out_path()
        obj.src_path = Path(obj.options["src_dir"]) / obj.options["source"]
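Note on the new progress-category validation: every id an Object names must exist in config.progress_categories, otherwise resolve() exits with an error. A hedged sketch of a matching configure-side setup (ids, names and file paths are invented; ProgressCategory(id, name) construction is assumed to match the dtk-template configure scripts):

    from tools.project import Object, ProgressCategory, ProjectConfig  # import path assumed

    config = ProjectConfig()
    config.progress_categories = [
        ProgressCategory("game", "Game Code"),
        ProgressCategory("sdk", "SDK Code"),
    ]

    # Each of these now passes validation in Object.resolve(); an unknown id
    # such as progress_category="gaem" would terminate configure with an error.
    Object(True, "game/Title.cpp", progress_category="game")
    Object(False, "sdk/OSError.c", progress_category=["sdk", "game"])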
@@ -157,15 +172,22 @@ class ProjectConfig:
        self.custom_build_steps: Optional[Dict[str, List[Dict[str, Any]]]] = (
            None  # Custom build steps, types are ["pre-compile", "post-compile", "post-link", "post-build"]
        )
        self.generate_compile_commands: bool = (
            True  # Generate compile_commands.json for clangd
        )
        self.extra_clang_flags: List[str] = []  # Extra flags for clangd

        # Progress output, progress.json and report.json config
        self.progress = True  # Enable progress output
        self.progress = True  # Enable report.json generation and CLI progress output
        self.progress_all: bool = True  # Include combined "all" category
        self.progress_modules: bool = True  # Include combined "modules" category
        self.progress_each_module: bool = (
            False  # Include individual modules, disable for large numbers of modules
        )
        self.progress_categories: List[ProgressCategory] = []  # Additional categories
        self.print_progress_categories: Union[bool, List[str]] = (
            True  # Print additional progress categories in the CLI progress output
        )

        # Progress fancy printing
        self.progress_use_fancy: bool = False
@@ -202,9 +224,40 @@ class ProjectConfig:
            out[obj.name] = obj.resolve(self, lib)
        return out

    # Gets the output path for build-related files.
    def out_path(self) -> Path:
        return self.build_dir / str(self.version)

    # Gets the path to the compilers directory.
    # Exits the program if neither `compilers_path` nor `compilers_tag` is provided.
    def compilers(self) -> Path:
        if self.compilers_path:
            return self.compilers_path
        elif self.compilers_tag:
            return self.build_dir / "compilers"
        else:
            sys.exit("ProjectConfig.compilers_tag missing")

    # Gets the wrapper to use for compiler commands, if set.
    def compiler_wrapper(self) -> Optional[Path]:
        wrapper = self.wrapper

        if self.use_wibo():
            wrapper = self.build_dir / "tools" / "wibo"
        if not is_windows() and wrapper is None:
            wrapper = Path("wine")

        return wrapper

    # Determines whether or not to use wibo as the compiler wrapper.
    def use_wibo(self) -> bool:
        return (
            self.wibo_tag is not None
            and sys.platform == "linux"
            and platform.machine() in ("i386", "x86_64")
            and self.wrapper is None
        )


def is_windows() -> bool:
    return os.name == "nt"
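Note on the new ProjectConfig helpers: the wrapper selection that used to live inline in generate_build_ninja() is now queryable. A rough sketch of how it resolves on different hosts (behaviour summarized from compiler_wrapper()/use_wibo() above; the wibo tag value is the one set in this commit, the import path is an assumption):

    from tools.project import ProjectConfig  # import path assumed

    config = ProjectConfig()
    config.wibo_tag = "0.6.11"  # as set in this commit's configure change

    # On linux/x86_64 with no explicit config.wrapper:
    #   use_wibo() is True, so compiler_wrapper() returns config.build_dir / "tools" / "wibo"
    # On other non-Windows hosts with no wrapper set: it falls back to Path("wine")
    # On Windows: it returns None (no wrapper needed)
    # An explicitly set config.wrapper always wins.
    print(config.compiler_wrapper())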
@@ -216,13 +269,26 @@ CHAIN = "cmd /c " if is_windows() else ""
EXE = ".exe" if is_windows() else ""


def make_flags_str(flags: Optional[Union[str, List[str]]]) -> str:
def file_is_asm(path: Path) -> bool:
    return path.suffix.lower() == ".s"


def file_is_c(path: Path) -> bool:
    return path.suffix.lower() == ".c"


def file_is_cpp(path: Path) -> bool:
    return path.suffix.lower() in (".cc", ".cp", ".cpp", ".cxx")


def file_is_c_cpp(path: Path) -> bool:
    return file_is_c(path) or file_is_cpp(path)


def make_flags_str(flags: Optional[List[str]]) -> str:
    if flags is None:
        return ""
    elif isinstance(flags, list):
        return " ".join(flags)
    else:
        return flags
    return " ".join(flags)


# Load decomp-toolkit generated config.json
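Note: the suffix checks are now centralized in the file_is_* helpers, and make_flags_str only deals with flag lists (or None). A quick illustration (file names and flag values are invented; import path assumed):

    from pathlib import Path

    from tools.project import file_is_asm, file_is_c, file_is_c_cpp, make_flags_str  # import path assumed

    assert file_is_c_cpp(Path("game/Title.cpp"))
    assert file_is_c(Path("sdk/OSError.c"))
    assert file_is_asm(Path("asm/init.s"))
    assert not file_is_c_cpp(Path("config/symbols.txt"))

    assert make_flags_str(["-O4,p", "-enum int", "-fp hardware"]) == "-O4,p -enum int -fp hardware"
    assert make_flags_str(None) == ""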
@@ -255,13 +321,14 @@ def load_build_config(
    return build_config


# Generate build.ninja and objdiff.json
# Generate build.ninja, objdiff.json and compile_commands.json
def generate_build(config: ProjectConfig) -> None:
    config.validate()
    objects = config.objects()
    build_config = load_build_config(config, config.out_path() / "config.json")
    generate_build_ninja(config, objects, build_config)
    generate_objdiff_config(config, objects, build_config)
    generate_compile_commands(config, objects, build_config)


# Generate build.ninja
@@ -408,16 +475,10 @@ def generate_build_ninja(
    else:
        sys.exit("ProjectConfig.sjiswrap_tag missing")

    wrapper = config.compiler_wrapper()
    # Only add an implicit dependency on wibo if we download it
    wrapper = config.wrapper
    wrapper_implicit: Optional[Path] = None
    if (
        config.wibo_tag is not None
        and sys.platform == "linux"
        and platform.machine() in ("i386", "x86_64")
        and config.wrapper is None
    ):
        wrapper = build_tools_path / "wibo"
    if wrapper is not None and config.use_wibo():
        wrapper_implicit = wrapper
        n.build(
            outputs=wrapper,
@@ -428,15 +489,11 @@ def generate_build_ninja(
                "tag": config.wibo_tag,
            },
        )
    if not is_windows() and wrapper is None:
        wrapper = Path("wine")
    wrapper_cmd = f"{wrapper} " if wrapper else ""

    compilers = config.compilers()
    compilers_implicit: Optional[Path] = None
    if config.compilers_path:
        compilers = config.compilers_path
    elif config.compilers_tag:
        compilers = config.build_dir / "compilers"
    if config.compilers_path is None and config.compilers_tag is not None:
        compilers_implicit = compilers
        n.build(
            outputs=compilers,
@@ -447,8 +504,6 @@ def generate_build_ninja(
                "tag": config.compilers_tag,
            },
        )
    else:
        sys.exit("ProjectConfig.compilers_tag missing")

    binutils_implicit = None
    if config.binutils_path:
@@ -580,7 +635,7 @@ def generate_build_ninja(
    )
    n.newline()

    def write_custom_step(step: str) -> List[str | Path]:
    def write_custom_step(step: str, prev_step: Optional[str] = None) -> None:
        implicit: List[str | Path] = []
        if config.custom_build_steps and step in config.custom_build_steps:
            n.comment(f"Custom build steps ({step})")
@@ -604,7 +659,12 @@ def generate_build_ninja(
                    dyndep=custom_step.get("dyndep", None),
                )
                n.newline()
        return implicit
        n.build(
            outputs=step,
            rule="phony",
            inputs=implicit,
            order_only=prev_step,
        )

    n.comment("Host build")
    n.variable("host_cflags", "-I include -Wno-trigraphs")
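Note on the reworked write_custom_step(): each custom step now collapses into a named phony target, and later steps depend on it via order_only instead of threading an implicit-output list through every downstream build edge. A hand-written sketch of the resulting pattern using tools/ninja_syntax.Writer (rule and file names are invented; import path assumed):

    import io

    from tools.ninja_syntax import Writer  # import path assumed

    out = io.StringIO()
    n = Writer(out)

    # A custom step is exposed as a named phony target...
    n.build(outputs="pre-compile", rule="phony", inputs=["build/assets.bin"])
    # ...and later edges only need an order_only reference to that name.
    n.build(
        outputs="build/src/main.o",
        rule="mwcc",  # hypothetical rule name
        inputs="src/main.cpp",
        order_only="pre-compile",
    )
    print(out.getvalue())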
@@ -625,7 +685,7 @@ def generate_build_ninja(
    n.newline()

    # Add all build steps needed before we compile (e.g. processing assets)
    precompile_implicit = write_custom_step("pre-compile")
    write_custom_step("pre-compile")

    ###
    # Source files
@@ -662,7 +722,6 @@ def generate_build_ninja(
            n.comment(f"Link {self.name}")
            if self.module_id == 0:
                elf_path = build_path / f"{self.name}.elf"
                dol_path = build_path / f"{self.name}.dol"
                elf_ldflags = f"$ldflags -lcf {serialize_path(self.ldscript)}"
                if config.generate_map:
                    elf_map = map_path(elf_path)
@@ -674,13 +733,12 @@ def generate_build_ninja(
                    rule="link",
                    inputs=self.inputs,
                    implicit=[
                        *precompile_implicit,
                        self.ldscript,
                        *mwld_implicit,
                        *postcompile_implicit,
                    ],
                    implicit_outputs=elf_map,
                    variables={"ldflags": elf_ldflags},
                    order_only="post-compile",
                )
            else:
                preplf_path = build_path / self.name / f"{self.name}.preplf"
@@ -707,6 +765,7 @@ def generate_build_ninja(
                    implicit=mwld_implicit,
                    implicit_outputs=preplf_map,
                    variables={"ldflags": preplf_ldflags},
                    order_only="post-compile",
                )
                n.build(
                    outputs=plf_path,
@@ -715,6 +774,7 @@ def generate_build_ninja(
                    implicit=[self.ldscript, preplf_path, *mwld_implicit],
                    implicit_outputs=plf_map,
                    variables={"ldflags": plf_ldflags},
                    order_only="post-compile",
                )
                n.newline()
@@ -727,17 +787,33 @@ def generate_build_ninja(
    source_added: Set[Path] = set()

    def c_build(obj: Object, src_path: Path) -> Optional[Path]:
        cflags_str = make_flags_str(obj.options["cflags"])
        if obj.options["extra_cflags"] is not None:
            extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
            cflags_str += " " + extra_cflags_str
        used_compiler_versions.add(obj.options["mw_version"])

        # Avoid creating duplicate build rules
        if obj.src_obj_path is None or obj.src_obj_path in source_added:
            return obj.src_obj_path
        source_added.add(obj.src_obj_path)

        cflags = obj.options["cflags"]
        extra_cflags = obj.options["extra_cflags"]

        # Add appropriate language flag if it doesn't exist already
        # Added directly to the source so it flows to other generation tasks
        if not any(flag.startswith("-lang") for flag in cflags) and not any(
            flag.startswith("-lang") for flag in extra_cflags
        ):
            # Ensure extra_cflags is a unique instance,
            # and insert into there to avoid modifying shared sets of flags
            extra_cflags = obj.options["extra_cflags"] = list(extra_cflags)
            if file_is_cpp(src_path):
                extra_cflags.insert(0, "-lang=c++")
            else:
                extra_cflags.insert(0, "-lang=c")

        cflags_str = make_flags_str(cflags)
        if len(extra_cflags) > 0:
            extra_cflags_str = make_flags_str(extra_cflags)
            cflags_str += " " + extra_cflags_str
        used_compiler_versions.add(obj.options["mw_version"])

        # Add MWCC build rule
        lib_name = obj.options["lib"]
        n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
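Note on the c_build() change: a unit that sets no -lang flag anywhere now gets one injected into a private copy of its extra_cflags, chosen from the file extension, so shared flag lists are never mutated. A standalone restatement of that logic for illustration (inject_lang_flag is a hypothetical helper, paths are invented, import path assumed):

    from pathlib import Path

    from tools.project import file_is_cpp  # import path assumed

    def inject_lang_flag(cflags, extra_cflags, src_path):
        # Only inject -lang when it is absent everywhere, and only into a copy
        # of extra_cflags so shared flag lists stay untouched.
        if not any(f.startswith("-lang") for f in list(cflags) + list(extra_cflags)):
            extra_cflags = list(extra_cflags)
            extra_cflags.insert(0, "-lang=c++" if file_is_cpp(src_path) else "-lang=c")
        return extra_cflags

    print(inject_lang_flag(["-O4,p"], [], Path("game/Title.cpp")))     # ['-lang=c++']
    print(inject_lang_flag(["-O4,p"], [], Path("sdk/OSError.c")))      # ['-lang=c']
    print(inject_lang_flag(["-lang=c99"], [], Path("sdk/OSError.c")))  # [] (already set)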
@@ -754,6 +830,7 @@ def generate_build_ninja(
            implicit=(
                mwcc_sjis_implicit if obj.options["shift_jis"] else mwcc_implicit
            ),
            order_only="pre-compile",
        )

        # Add ctx build rule
@@ -769,12 +846,13 @@ def generate_build_ninja(
        if obj.options["host"] and obj.host_obj_path is not None:
            n.build(
                outputs=obj.host_obj_path,
                rule="host_cc" if src_path.suffix == ".c" else "host_cpp",
                rule="host_cc" if file_is_c(src_path) else "host_cpp",
                inputs=src_path,
                variables={
                    "basedir": os.path.dirname(obj.host_obj_path),
                    "basefile": obj.host_obj_path.with_suffix(""),
                },
                order_only="pre-compile",
            )
            if obj.options["add_to_all"]:
                host_source_inputs.append(obj.host_obj_path)
@@ -791,7 +869,7 @@ def generate_build_ninja(
        if obj.options["asflags"] is None:
            sys.exit("ProjectConfig.asflags missing")
        asflags_str = make_flags_str(obj.options["asflags"])
        if obj.options["extra_asflags"] is not None:
        if len(obj.options["extra_asflags"]) > 0:
            extra_asflags_str = make_flags_str(obj.options["extra_asflags"])
            asflags_str += " " + extra_asflags_str

@@ -809,6 +887,7 @@ def generate_build_ninja(
            inputs=src_path,
            variables={"asflags": asflags_str},
            implicit=gnu_as_implicit,
            order_only="pre-compile",
        )
        n.newline()

@@ -829,10 +908,10 @@ def generate_build_ninja(
        link_built_obj = obj.completed
        built_obj_path: Optional[Path] = None
        if obj.src_path is not None and obj.src_path.exists():
            if obj.src_path.suffix in (".c", ".cp", ".cpp"):
            if file_is_c_cpp(obj.src_path):
                # Add MWCC & host build rules
                built_obj_path = c_build(obj, obj.src_path)
            elif obj.src_path.suffix == ".s":
            elif file_is_asm(obj.src_path):
                # Add assembler build rule
                built_obj_path = asm_build(obj, obj.src_path, obj.src_obj_path)
            else:
@@ -898,7 +977,7 @@ def generate_build_ninja(
        sys.exit(f"Linker {mw_path} does not exist")

    # Add all build steps needed before we link and after compiling objects
    postcompile_implicit = write_custom_step("post-compile")
    write_custom_step("post-compile", "pre-compile")

    ###
    # Link
@@ -909,7 +988,7 @@ def generate_build_ninja(
    n.newline()

    # Add all build steps needed after linking and before GC/Wii native format generation
    postlink_implicit = write_custom_step("post-link")
    write_custom_step("post-link", "post-compile")

    ###
    # Generate DOL
@@ -918,7 +997,8 @@ def generate_build_ninja(
        outputs=link_steps[0].output(),
        rule="elf2dol",
        inputs=link_steps[0].partial_output(),
        implicit=[*postlink_implicit, dtk],
        implicit=dtk,
        order_only="post-link",
    )

    ###
@@ -980,11 +1060,12 @@ def generate_build_ninja(
                "rspfile": config.out_path() / f"rel{idx}.rsp",
                "names": rel_names_arg,
            },
            order_only="post-link",
        )
        n.newline()

    # Add all build steps needed post-build (re-building archives and such)
    postbuild_implicit = write_custom_step("post-build")
    write_custom_step("post-build", "post-link")

    ###
    # Helper rule for building all source files
@@ -1023,7 +1104,8 @@ def generate_build_ninja(
        outputs=ok_path,
        rule="check",
        inputs=config.check_sha_path,
        implicit=[dtk, *link_outputs, *postbuild_implicit],
        implicit=[dtk, *link_outputs],
        order_only="post-build",
    )
    n.newline()

@@ -1045,6 +1127,7 @@ def generate_build_ninja(
            python_lib,
            report_path,
        ],
        order_only="post-build",
    )

    ###
@@ -1056,11 +1139,11 @@ def generate_build_ninja(
        command=f"{objdiff} report generate -o $out",
        description="REPORT",
    )
    report_implicit: List[str | Path] = [objdiff, "all_source"]
    n.build(
        outputs=report_path,
        rule="report",
        implicit=report_implicit,
        implicit=[objdiff, "all_source"],
        order_only="post-build",
    )

    ###
@@ -1179,6 +1262,13 @@ def generate_objdiff_config(
    if build_config is None:
        return

    # Load existing objdiff.json
    existing_units = {}
    if Path("objdiff.json").is_file():
        with open("objdiff.json", "r", encoding="utf-8") as r:
            existing_config = json.load(r)
            existing_units = {unit["name"]: unit for unit in existing_config["units"]}

    objdiff_config: Dict[str, Any] = {
        "min_version": "2.0.0-beta.5",
        "custom_make": "ninja",
@@ -1236,15 +1326,27 @@ def generate_objdiff_config(
    ) -> None:
        obj_path, obj_name = build_obj["object"], build_obj["name"]
        base_object = Path(obj_name).with_suffix("")
        name = str(Path(module_name) / base_object).replace(os.sep, "/")
        unit_config: Dict[str, Any] = {
            "name": Path(module_name) / base_object,
            "name": name,
            "target_path": obj_path,
            "base_path": None,
            "scratch": None,
            "metadata": {
                "auto_generated": build_obj["autogenerated"],
                "complete": None,
                "reverse_fn_order": None,
                "source_path": None,
                "progress_categories": progress_categories,
                "auto_generated": build_obj["autogenerated"],
            },
            "symbol_mappings": None,
        }

        # Preserve existing symbol mappings
        existing_unit = existing_units.get(name)
        if existing_unit is not None:
            unit_config["symbol_mappings"] = existing_unit.get("symbol_mappings")

        obj = objects.get(obj_name)
        if obj is None:
            objdiff_config["units"].append(unit_config)
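Note on the objdiff.json regeneration: manually created symbol mappings are now looked up by unit name in the previous objdiff.json and carried over into the freshly generated unit entry. A minimal sketch of the preserved data (unit name and symbols are invented):

    existing_units = {
        "main/game/Title": {
            "name": "main/game/Title",
            "symbol_mappings": {"fn_80012345": "Title_Update"},  # mapping made by hand in objdiff
        }
    }

    unit_config = {"name": "main/game/Title", "symbol_mappings": None}
    existing_unit = existing_units.get(unit_config["name"])
    if existing_unit is not None:
        unit_config["symbol_mappings"] = existing_unit.get("symbol_mappings")

    print(unit_config["symbol_mappings"])  # {'fn_80012345': 'Title_Update'}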
@@ -1257,37 +1359,27 @@ def generate_objdiff_config(

        cflags = obj.options["cflags"]
        reverse_fn_order = False
        if type(cflags) is list:
            for flag in cflags:
                if not flag.startswith("-inline "):
                    continue
                for value in flag.split(" ")[1].split(","):
                    if value == "deferred":
                        reverse_fn_order = True
                    elif value == "nodeferred":
                        reverse_fn_order = False
        for flag in cflags:
            if not flag.startswith("-inline "):
                continue
            for value in flag.split(" ")[1].split(","):
                if value == "deferred":
                    reverse_fn_order = True
                elif value == "nodeferred":
                    reverse_fn_order = False

            # Filter out include directories
            def keep_flag(flag):
                return not flag.startswith("-i ") and not flag.startswith("-I ")
        # Filter out include directories
        def keep_flag(flag):
            return not flag.startswith("-i ") and not flag.startswith("-I ")

            cflags = list(filter(keep_flag, cflags))

            # Add appropriate lang flag
            if obj.src_path is not None and not any(
                flag.startswith("-lang") for flag in cflags
            ):
                if obj.src_path.suffix in (".cp", ".cpp"):
                    cflags.insert(0, "-lang=c++")
                else:
                    cflags.insert(0, "-lang=c")
        cflags = list(filter(keep_flag, cflags))

        compiler_version = COMPILER_MAP.get(obj.options["mw_version"])
        if compiler_version is None:
            print(f"Missing scratch compiler mapping for {obj.options['mw_version']}")
        else:
            cflags_str = make_flags_str(cflags)
            if obj.options["extra_cflags"] is not None:
            if len(obj.options["extra_cflags"]) > 0:
                extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
                cflags_str += " " + extra_cflags_str
            unit_config["scratch"] = {
@@ -1309,7 +1401,7 @@ def generate_objdiff_config(
            progress_categories.append(category_opt)
        unit_config["metadata"].update(
            {
                "complete": obj.completed,
                "complete": obj.completed if src_exists else None,
                "reverse_fn_order": reverse_fn_order,
                "progress_categories": progress_categories,
            }
@@ -1354,13 +1446,234 @@ def generate_objdiff_config(
    for category in config.progress_categories:
        add_category(category.id, category.name)

    def cleandict(d):
        if isinstance(d, dict):
            return {k: cleandict(v) for k, v in d.items() if v is not None}
        elif isinstance(d, list):
            return [cleandict(v) for v in d]
        else:
            return d

    # Write objdiff.json
    with open("objdiff.json", "w", encoding="utf-8") as w:

        def unix_path(input: Any) -> str:
            return str(input).replace(os.sep, "/") if input else ""

        json.dump(objdiff_config, w, indent=4, default=unix_path)
        json.dump(cleandict(objdiff_config), w, indent=2, default=unix_path)
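Note on cleandict(): None-valued keys are now stripped recursively before objdiff.json is written, so optional fields simply disappear instead of being emitted as null. For illustration (the unit dict is invented):

    def cleandict(d):
        # Same helper as above, restated so the snippet runs standalone.
        if isinstance(d, dict):
            return {k: cleandict(v) for k, v in d.items() if v is not None}
        elif isinstance(d, list):
            return [cleandict(v) for v in d]
        return d

    unit = {
        "name": "main/game/Title",
        "target_path": "build/obj/Title.o",
        "base_path": None,                                    # dropped
        "metadata": {"complete": True, "source_path": None},  # nested None dropped too
    }
    print(cleandict(unit))
    # -> {'name': 'main/game/Title', 'target_path': 'build/obj/Title.o', 'metadata': {'complete': True}}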
def generate_compile_commands(
    config: ProjectConfig,
    objects: Dict[str, Object],
    build_config: Optional[Dict[str, Any]],
) -> None:
    if build_config is None or not config.generate_compile_commands:
        return

    # The following code attempts to convert mwcc flags to clang flags
    # for use with clangd.

    # Flags to ignore explicitly
    CFLAG_IGNORE: Set[str] = {
        # Search order modifier
        # Has a different meaning to Clang, and would otherwise
        # be picked up by the include passthrough prefix
        "-I-",
        "-i-",
    }
    CFLAG_IGNORE_PREFIX: Tuple[str, ...] = (
        # Recursive includes are not supported by modern compilers
        "-ir ",
    )

    # Flags to replace
    CFLAG_REPLACE: Dict[str, str] = {}
    CFLAG_REPLACE_PREFIX: Tuple[Tuple[str, str], ...] = (
        # Includes
        ("-i ", "-I"),
        ("-I ", "-I"),
        ("-I+", "-I"),
        # Defines
        ("-d ", "-D"),
        ("-D ", "-D"),
        ("-D+", "-D"),
    )

    # Flags with a finite set of options
    CFLAG_REPLACE_OPTIONS: Tuple[Tuple[str, Dict[str, Tuple[str, ...]]], ...] = (
        # Exceptions
        (
            "-Cpp_exceptions",
            {
                "off": ("-fno-cxx-exceptions",),
                "on": ("-fcxx-exceptions",),
            },
        ),
        # RTTI
        (
            "-RTTI",
            {
                "off": ("-fno-rtti",),
                "on": ("-frtti",),
            },
        ),
        # Language configuration
        (
            "-lang",
            {
                "c": ("--language=c", "--std=c99"),
                "c99": ("--language=c", "--std=c99"),
                "c++": ("--language=c++", "--std=c++98"),
                "cplus": ("--language=c++", "--std=c++98"),
            },
        ),
        # Enum size
        (
            "-enum",
            {
                "min": ("-fshort-enums",),
                "int": ("-fno-short-enums",),
            },
        ),
        # Common BSS
        (
            "-common",
            {
                "off": ("-fno-common",),
                "on": ("-fcommon",),
            },
        ),
    )

    # Flags to pass through
    CFLAG_PASSTHROUGH: Set[str] = set()
    CFLAG_PASSTHROUGH_PREFIX: Tuple[str, ...] = (
        "-I",  # includes
        "-D",  # defines
    )

    clangd_config = []
    def add_unit(build_obj: Dict[str, Any]) -> None:
        obj = objects.get(build_obj["name"])
        if obj is None:
            return

        # Skip unresolved objects
        if (
            obj.src_path is None
            or obj.src_obj_path is None
            or not file_is_c_cpp(obj.src_path)
        ):
            return

        # Gather cflags for source file
        cflags: list[str] = []

        def append_cflags(flags: Iterable[str]) -> None:
            # Match a flag against either a set of concrete flags, or a set of prefixes.
            def flag_match(
                flag: str, concrete: Set[str], prefixes: Tuple[str, ...]
            ) -> bool:
                if flag in concrete:
                    return True

                for prefix in prefixes:
                    if flag.startswith(prefix):
                        return True

                return False

            # Determine whether a flag should be ignored.
            def should_ignore(flag: str) -> bool:
                return flag_match(flag, CFLAG_IGNORE, CFLAG_IGNORE_PREFIX)

            # Determine whether a flag should be passed through.
            def should_passthrough(flag: str) -> bool:
                return flag_match(flag, CFLAG_PASSTHROUGH, CFLAG_PASSTHROUGH_PREFIX)

            # Attempts replacement for the given flag.
            def try_replace(flag: str) -> bool:
                replacement = CFLAG_REPLACE.get(flag)
                if replacement is not None:
                    cflags.append(replacement)
                    return True

                for prefix, replacement in CFLAG_REPLACE_PREFIX:
                    if flag.startswith(prefix):
                        cflags.append(flag.replace(prefix, replacement, 1))
                        return True

                for prefix, options in CFLAG_REPLACE_OPTIONS:
                    if not flag.startswith(prefix):
                        continue

                    # "-lang c99" and "-lang=c99" are both generally valid option forms
                    option = flag.removeprefix(prefix).removeprefix("=").lstrip()
                    replacements = options.get(option)
                    if replacements is not None:
                        cflags.extend(replacements)

                    return True

                return False

            for flag in flags:
                # Ignore flags first
                if should_ignore(flag):
                    continue

                # Then find replacements
                if try_replace(flag):
                    continue

                # Pass flags through last
                if should_passthrough(flag):
                    cflags.append(flag)
                    continue

        append_cflags(obj.options["cflags"])
        append_cflags(obj.options["extra_cflags"])
        cflags.extend(config.extra_clang_flags)
        cflags.extend(obj.options["extra_clang_flags"])
        unit_config = {
            "directory": Path.cwd(),
            "file": obj.src_path,
            "output": obj.src_obj_path,
            "arguments": [
                "clang",
                "-nostdinc",
                "-fno-builtin",
                "--target=powerpc-eabi",
                *cflags,
                "-c",
                obj.src_path,
                "-o",
                obj.src_obj_path,
            ],
        }
        clangd_config.append(unit_config)

    # Add DOL units
    for unit in build_config["units"]:
        add_unit(unit)

    # Add REL units
    for module in build_config["modules"]:
        for unit in module["units"]:
            add_unit(unit)

    # Write compile_commands.json
    with open("compile_commands.json", "w", encoding="utf-8") as w:

        def default_format(o):
            if isinstance(o, Path):
                return o.resolve().as_posix()
            return str(o)

        json.dump(clangd_config, w, indent=2, default=default_format)
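Note on generate_compile_commands(): the tables above translate MWCC-style flags into something clangd understands before each unit is written out. A worked example of what a made-up flag list would turn into (expected output derived from the tables; not taken from a real unit):

    mwcc_flags = ["-i include", "-i-", "-DMUST_MATCH", "-Cpp_exceptions off", "-lang=c++", "-enum int"]

    # Expected result of feeding these through append_cflags() above:
    expected_clang_flags = [
        "-Iinclude",            # "-i include" via the "-i " replace prefix ("-i-" itself is ignored)
        "-DMUST_MATCH",         # passed through via the "-D" passthrough prefix
        "-fno-cxx-exceptions",  # "-Cpp_exceptions off"
        "--language=c++",       # "-lang=c++" ...
        "--std=c++98",          # ... expands to two flags
        "-fno-short-enums",     # "-enum int"
    ]
    # These end up in the unit's "arguments" list after
    # ["clang", "-nostdinc", "-fno-builtin", "--target=powerpc-eabi"].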

# Calculate, print and write progress to progress.json
@@ -1382,7 +1695,7 @@ def calculate_progress(config: ProjectConfig) -> None:
            data[key] = int(value)

    convert_numbers(report_data["measures"])
    for category in report_data["categories"]:
    for category in report_data.get("categories", []):
        convert_numbers(category["measures"])

    # Output to GitHub Actions job summary, if available
@@ -1424,8 +1737,12 @@ def calculate_progress(config: ProjectConfig) -> None:
        )

    print_category("All", report_data["measures"])
    for category in report_data["categories"]:
        print_category(category["name"], category["measures"])
    for category in report_data.get("categories", []):
        if config.print_progress_categories is True or (
            isinstance(config.print_progress_categories, list)
            and category["id"] in config.print_progress_categories
        ):
            print_category(category["name"], category["measures"])

    if config.progress_use_fancy:
        measures = report_data["measures"]
@@ -1478,8 +1795,8 @@ def calculate_progress(config: ProjectConfig) -> None:
    else:
        # Support for old behavior where "dol" was the main category
        add_category("dol", report_data["measures"])
        for category in report_data["categories"]:
        for category in report_data.get("categories", []):
            add_category(category["id"], category["measures"])

    with open(out_path / "progress.json", "w", encoding="utf-8") as w:
        json.dump(progress_json, w, indent=4)
        json.dump(progress_json, w, indent=2)