2024-07-10 00:56:28 +00:00
|
|
|
# build.py
|
|
|
|
# the main build script for building each library
|
|
|
|
|
2024-08-09 22:53:35 +00:00
|
|
|
import glob
|
|
|
|
import hashlib
|
2024-07-10 00:56:28 +00:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import pathlib
|
|
|
|
from ninja_syntax import Writer
|
2024-08-12 01:42:39 +00:00
|
|
|
import helpers
|
2024-07-10 00:56:28 +00:00
|
|
|
|
|
|
|
# Extra compiler define injected when non-matching functions should build.
nonmatching_str = ""

# NOTE(review): `clean` is never set to True in this section; it may be
# consumed elsewhere in the file, so it is kept for compatibility.
clean = False

# -non-matching: also compile functions that don't yet match the target
# binary (code guarded by NON_MATCHING in the sources).
if "-non-matching" in sys.argv:

    nonmatching_str = "-DNON_MATCHING"

    print("Non-matching functions will be compiled")

# -clean: wipe all previous ninja build outputs before rebuilding.
if "-clean" in sys.argv:

    subprocess.call("ninja -t clean", shell=True)
|
|
|
|
|
2024-08-09 22:17:00 +00:00
|
|
|
# Header search paths handed to the compiler as -I flags: the project's own
# headers, each bundled library's headers, and the toolchain's system headers.
INCLUDE_DIRS = [
    "include",
    "lib/ActionLibrary/include",
    "lib/agl/include",
    "lib/eui/include",
    "lib/nn/include",
    "lib/sead/include",
    "lib/nvn/include",
    # Toolchain (NX / aarch64) C, C++, and clang builtin headers.
    "compiler/nx/aarch64/include",
    "compiler/nx/aarch64/include/c++",
    "compiler/nx/aarch64/lib/clang/1.8.14/include"
]
|
2024-08-12 00:57:01 +00:00
|
|
|
|
2024-08-09 22:17:00 +00:00
|
|
|
# Every library that gets built. "Game" is the main target living in
# ./source; every other entry lives under lib/<name>/source.
LIBRARIES = ["Game", "ActionLibrary", "agl", "eui", "nn", "sead"]

# -I flags for the compiler command line. (`inc` instead of `dir` so the
# builtin dir() isn't shadowed inside the comprehension.)
incdirs = " ".join(f'-I {inc}' for inc in INCLUDE_DIRS)

# Flags shared by every translation unit; the invoking compiler binary is
# prepended separately (see genNinja's $compiler_path variable).
COMPILER_CMD = f"-x c++ -O3 -fno-omit-frame-pointer -mno-implicit-float -fno-cxx-exceptions -fno-strict-aliasing -std=gnu++14 -fno-common -fno-short-enums -ffunction-sections -fdata-sections -mcpu=cortex-a57+fp+simd+crypto+crc -g -fPIC -Wall -Wno-invalid-offsetof {nonmatching_str} {incdirs} -c"

# NOTE(review): this uses "compilers/800/nx/..." while INCLUDE_DIRS uses
# "compiler/nx/..." — confirm which tree the objdump binary really lives in.
# The hard-coded ".exe" also looks Windows-only despite the isNotWindows
# check elsewhere — verify on non-Windows hosts.
OBJDUMP_PATH = pathlib.Path("compilers/800/nx/aarch64/bin/llvm-objdump.exe")
|
2024-08-08 20:31:06 +00:00
|
|
|
|
2024-08-09 22:53:35 +00:00
|
|
|
# if we don't have this file, create it
|
|
|
|
# Bookkeeping files for object hashes. Built with pathlib joins so they
# resolve on any OS — the previous "data\\hashes.txt" literal was only a
# valid relative path on Windows.
HASHES_BASE_PATH = pathlib.Path("data") / "hashes.txt"

CHANGED_PATH = pathlib.Path("data") / "changed.txt"

# Make sure the data directory exists, then create an empty changed-objects
# file on first run.
os.makedirs(CHANGED_PATH.parent, exist_ok=True)

if not os.path.exists(CHANGED_PATH):
    open(CHANGED_PATH, 'a').close()

# Hashes recorded by the previous build, keyed by object file path.
start_hashes = {}

if os.path.exists(HASHES_BASE_PATH):
    with open(HASHES_BASE_PATH, "r") as f:
        for line in f:
            line = line.strip("\n")
            if not line:
                # skip stray blank lines rather than crashing on split
                continue
            # rsplit guards against "=" appearing inside the object path
            # (the hash half is always a plain hex digest), and `obj_hash`
            # avoids shadowing the builtin hash().
            obj, obj_hash = line.rsplit("=", 1)
            start_hashes[obj] = obj_hash

# True on any non-Windows host; used to adjust tool invocation elsewhere.
isNotWindows = os.name != 'nt'
|
2024-07-10 00:56:28 +00:00
|
|
|
|
2024-08-09 22:17:00 +00:00
|
|
|
def genNinja(compile_tasks):
    """Write build.ninja with one compile edge per (source, object) pair.

    compile_tasks -- iterable of (source_path, build_path) tuples.
    """
    with open('build.ninja', 'w') as out:
        writer = Writer(out)

        # A single shared rule; the concrete compiler binary is injected
        # per-edge through the $compiler_path variable.
        rule_cmd = '$compiler_path ' + COMPILER_CMD + ' $in -o $out'
        writer.rule("compile", command=rule_cmd, description='Compiling $in')

        for src, obj in compile_tasks:
            # Each source file may require a different compiler executable.
            chosen_compiler = helpers.getCompilerPath(os.path.basename(src))

            writer.build(
                outputs=[obj],
                rule="compile",
                inputs=[src],
                variables={'compiler_path': chosen_compiler},
            )
|
2024-07-10 00:56:28 +00:00
|
|
|
|
2024-08-09 22:17:00 +00:00
|
|
|
def compileLibraries(libraries):
    """Compile every source file of the given libraries via ninja, then
    regenerate each library's symbol-map files.

    libraries -- iterable of library names; "Game" maps to ./source, any
    other name maps to lib/<name>/source.
    """
    compile_tasks = []

    for name in libraries:
        path = "source" if name == "Game" else f"lib/{name}/source"

        # Collect (source, object) pairs; ninja does the actual compiling.
        for root, _dirs, files in os.walk(path):
            for file in files:
                if file.endswith((".cpp", ".c")):
                    source_path = os.path.join(root, file)

                    # os.path.splitext swaps only the real extension —
                    # unlike the old str.replace(".cpp", ".o"), which
                    # rewrote the FIRST ".cpp"/".c" occurrence anywhere in
                    # the file name.
                    stem, _ext = os.path.splitext(source_path.replace("source", "build", 1))
                    build_path = stem + ".o"

                    os.makedirs(os.path.dirname(build_path), exist_ok=True)
                    compile_tasks.append((source_path, build_path))

    genNinja(compile_tasks)
    subprocess.run(['ninja', '-f', 'build.ninja'], check=True)

    # With fresh objects on disk, rebuild the per-library symbol maps.
    for name in libraries:
        path = "source" if name == "Game" else f"lib/{name}/source"
        generateMaps(path)
|
2024-07-10 00:56:28 +00:00
|
|
|
|
2024-08-09 23:35:05 +00:00
|
|
|
def generateMaps(path):
    """For every source file under *path*, dump the matching object file's
    global symbols (via llvm-objdump -t) into a .map file under the
    mirrored map/ tree.
    """
    objdump_tasks = list()

    # now for our map files which we don't need ninja for
    for root, dirs, files in os.walk(path):
        for file in files:
            if file.endswith(".cpp") or file.endswith(".c"):
                source_path = os.path.join(root, file)

                # Mirror source/... to build/... and swap the extension.
                if file.endswith(".cpp"):
                    build_path = source_path.replace("source", "build", 1).replace(".cpp", ".o")
                elif file.endswith(".c"):
                    build_path = source_path.replace("source", "build", 1).replace(".c", ".o")

                # Map file lives in the map/ tree mirroring build/.
                map_path = build_path.replace("build", "map", 1).replace(".o", ".map")
                os.makedirs(os.path.dirname(map_path), exist_ok=True)
                objdump_tasks.append((source_path, build_path, map_path))

    for task in objdump_tasks:
        source_path, build_path, map_path = task

        # Symbol table of the object; \r stripped so Windows output splits
        # cleanly on \n below.
        mapFileOutput = subprocess.check_output([OBJDUMP_PATH, build_path, "-t"]).decode("utf-8").replace("\r", "")
        lines = mapFileOutput.split("\n")

        newOutput = []

        for line in lines:
            if line == '':
                continue

            # Skip the objdump header lines (file name echo + table title).
            if line.startswith("build") or line.startswith("SYMBOL TABLE"):
                continue

            # NOTE(review): this assumes the address and the flag field are
            # separated by exactly one space in llvm-objdump's output —
            # confirm against the toolchain's actual format.
            more_split = line.split(" ")

            # if global, it is most likely a symbol
            # gw includes weak globals
            if more_split[1] == "g" or more_split[1] == "gw":
                # symbol is always the last entry
                sym = more_split[(len(more_split) - 1)]
                newOutput.append(f"{sym}\n")

        # One symbol name per line.
        with open(map_path, "w") as w:
            w.writelines(newOutput)
|
|
|
|
|
2024-08-09 22:53:35 +00:00
|
|
|
# Kick off the full build of every library.
compileLibraries(LIBRARIES)

# Fresh MD5 of every object file just produced, keyed by object path.
obj_hashes = {}
# Objects whose hash differs from the previous run, or that are brand new.
changed_objs = []

for lib in LIBRARIES:
    # "Game" objects land in ./build, library objects in lib/<name>/build.
    if lib == "Game":
        objs = glob.glob(os.path.join("build", "**", "*.o"), recursive=True)
    else:
        objs = glob.glob(os.path.join("lib", lib, "build", "**", "*.o"), recursive=True)

    # generate our hashes
    for obj in objs:
        # Read inside a with-block so the handle closes immediately — the
        # old open(obj, 'rb').read() left the file object to the GC.
        with open(obj, 'rb') as f:
            obj_hashes[obj] = hashlib.md5(f.read()).hexdigest()

# An object counts as changed when its recorded hash differs from the new
# one; dict.get returns None for never-recorded objects, which also compares
# unequal, covering the "new object" case in one expression.
for obj, digest in obj_hashes.items():
    if start_hashes.get(obj) != digest:
        changed_objs.append(obj)

# Persist the changed list. Opening with "w" truncates first, so an empty
# changed_objs clears the file — same outcome as the old two-branch logic.
with open(CHANGED_PATH, "w") as w:
    for obj in changed_objs:
        w.write(f"{obj}\n")

# Record the new hashes as the baseline for the next run.
with open(HASHES_BASE_PATH, "w") as w:
    for obj, digest in obj_hashes.items():
        w.write(f"{obj}={digest}\n")
|