Mirror of https://github.com/shibbo/3dcomp.git (synced 2024-11-26 23:10:39 +00:00)
build system but better

parent 7369a13a3b, commit 9657876541
.gitignore (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
compiler/*
*.o
.vscode/*
.ninja_log
*.nso
*.map
__pycache__/*
build.ninja
build.py (new file, 95 lines)
@@ -0,0 +1,95 @@
# build.py
# the main build script for building each library

import subprocess
import sys
import os
import pathlib

from ninja_syntax import Writer

nonmatching_str = ""

if "-non-matching" in sys.argv:
    nonmatching_str = "-DNON_MATCHING"
    print("Non-matching functions will be compiled")

INCLUDE_DIRS = ["include", "lib\\ActionLibrary\\include", "lib\\agl\\include", "lib\\eui\\include", "lib\\nn\\include", "lib\\sead\\include", "compiler\\nx\\aarch64\\include"]
LIBRARIES = ["ActionLibrary", "agl", "eui", "nn", "sead"]

incdirs = ""
for inc_dir in INCLUDE_DIRS:
    incdirs += f'-I {inc_dir} '

COMPILER_CMD = f"-x c++ -O3 -std=gnu++1z --target=aarch64-linux-elf -mcpu=cortex-a57+fp+simd+crypto+crc -fno-exceptions -fms-extensions -mno-implicit-float -fno-strict-aliasing -fno-short-enums -fdata-sections -fPIC -g -Wall {nonmatching_str} {incdirs} -c "
COMPILER_PATH = pathlib.Path("compiler\\bin\\nx-clang++")
OBJDUMP_PATH = pathlib.Path("compiler\\nx\\aarch64\\bin\\llvm-objdump")

def genNinja(tasks):
    with open('build.ninja', 'w') as ninja_file:
        ninja_writer = Writer(ninja_file)

        # the rule only needs to be declared once; each source file becomes a build edge
        ninja_writer.rule('compile', command=f'{COMPILER_PATH} {COMPILER_CMD} $in -o $out', description='Compiling $in')

        for source_path, build_path in tasks:
            ninja_writer.build(outputs=[build_path], rule='compile', inputs=[source_path])

def compileLibrary(path):
    compile_tasks = list()

    # let's do our source files first, which we use ninja for
    for root, dirs, files in os.walk(path):
        for file in files:
            if file.endswith(".cpp"):
                source_path = os.path.join(root, file)
                build_path = source_path.replace("source", "build", 1).replace(".cpp", ".o")

                os.makedirs(os.path.dirname(build_path), exist_ok=True)
                compile_tasks.append((source_path, build_path))

    genNinja(compile_tasks)
    subprocess.run(['ninja', '-f', 'build.ninja'], check=True)
    generateMaps(path)

def generateMaps(path):
    objdump_tasks = list()

    # now for our map files, which we don't need ninja for
    for root, dirs, files in os.walk(path):
        for file in files:
            if file.endswith(".cpp"):
                source_path = os.path.join(root, file)
                build_path = source_path.replace("source", "build", 1).replace(".cpp", ".o")
                map_path = build_path.replace("build", "map", 1).replace(".o", ".map")
                os.makedirs(os.path.dirname(map_path), exist_ok=True)
                objdump_tasks.append((source_path, build_path, map_path))

    for source_path, build_path, map_path in objdump_tasks:
        mapFileOutput = subprocess.check_output([OBJDUMP_PATH, build_path, "-t"]).decode("utf-8").replace("\r", "")
        lines = mapFileOutput.split("\n")

        newOutput = []

        for line in lines:
            if line == '':
                continue

            if line.startswith("build") or line.startswith("SYMBOL TABLE"):
                continue

            more_split = line.split(" ")

            # if the symbol is flagged as global, it is most likely one we want to track
            if more_split[1] == "g":
                # the symbol name is always the last entry on the line
                sym = more_split[-1]
                newOutput.append(f"{sym}\n")

        with open(map_path, "w") as w:
            w.writelines(newOutput)

compileLibrary("source")

for lib in LIBRARIES:
    compileLibrary(f"lib\\{lib}\\source")
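For reference, the build.ninja that genNinja writes ends up looking roughly like this, with one build edge per .cpp file (the source path below is hypothetical and the command line is abridged):

rule compile
  command = compiler\bin\nx-clang++ -x c++ -O3 -std=gnu++1z --target=aarch64-linux-elf ... $in -o $out
  description = Compiling $in

build build\al\camera.o: compile source\al\camera.cpp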
check.py (new file, 175 lines)
@@ -0,0 +1,175 @@
import nso, os, sys

from colorama import Fore, Style
from capstone import *
from elftools.elf.elffile import ELFFile

LIBRARIES = ["ActionLibrary", "agl", "eui", "nn", "sead"]

def getModule(map_dir, sym):
    # each .map file is a plain list of the global symbols in the matching object
    # file, so finding the symbol tells us which object file it lives in
    for root, dirs, files in os.walk(map_dir):
        for file in files:
            if file.endswith(".map"):
                map_path = os.path.join(root, file)
                with open(map_path, "r") as f:
                    lines = f.readlines()

                for line in lines:
                    fmt = line.replace("\n", "")
                    if fmt == sym:
                        # we found where our symbol lives!
                        # we just grab the source module
                        object_path = map_path.replace("map", "build", 1)
                        object_path = object_path.replace(".map", ".o")
                        return object_path
    return ""
if len(sys.argv) < 2:
    print("python check.py [-no-diff] <mangled symbol>")
    sys.exit(1)

printDiff = True

if "-no-diff" in sys.argv:
    sym = sys.argv[2]
    printDiff = False
else:
    sym = sys.argv[1]
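# example invocation, with a mangled symbol name taken from the generated map files:
#   python check.py _ZN2al19CameraPoserParallel25checkEnableCameraApproachEv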

# first let's see if our symbol even exists somewhere
path = getModule("map", sym)

if path == "":
    for lib in LIBRARIES:
        path = getModule(f"lib\\{lib}\\map", sym)

        if path != "":
            break

if path == "":
    print("Unable to find symbol.")
    sys.exit(1)

functionSize = 0
functionAddr = 0

with open("data/main.map", "r") as f:
    lines = f.readlines()

for line in lines:
    spl = line.split("=")
    name = spl[0]
    addr = int(spl[1], 16)
    size = int(spl[2].replace("\n", ""), 16)

    if sym == name:
        functionSize = size
        # adjust from the address in the NSO to the local address in the text section
        functionAddr = addr - 0x0000007100000000
        break

with open("fury.nso", "rb") as f:
    nso_file = nso.NSO(f.read())

funcData = nso_file.getFunction(functionAddr, functionSize)

if funcData == b'':
    print("Failed to fetch function data.")
    sys.exit(1)

capstone_inst = Cs(CS_ARCH_ARM64, CS_MODE_ARM + CS_MODE_LITTLE_ENDIAN)
capstone_inst.detail = True
capstone_inst.imm_unsigned = False

original_instrs = list(capstone_inst.disasm(funcData, 0))

with open(path, "rb") as elf:
    elf_file = ELFFile(elf)
    symtab = elf_file.get_section_by_name('.symtab')

    if symtab.get_symbol_by_name(sym) is None:
        print("Could not find symbol in object file. This may be caused by the code not being compiled, the function being in the wrong C++ source file, or the function signature being wrong.")
        sys.exit(1)

    compiled_symbol = symtab.get_symbol_by_name(sym)[0]
    custom_offset = compiled_symbol["st_value"]
    custom_size = compiled_symbol['st_size']
    text = elf_file.get_section_by_name('.text')
    custom_data = text.data()[custom_offset:custom_offset + custom_size]

custom_instructions = list(capstone_inst.disasm(custom_data, 0))

orig_length = len(original_instrs)
cust_length = len(custom_instructions)

instr_equal = True
regs_equal = True

# if the two disassemblies differ in length they cannot fully match,
# and we only walk the instructions both sides actually have
if orig_length != cust_length:
    instr_equal = False

for i in range(min(orig_length, cust_length)):
    curOrigInstr = original_instrs[i]
    curCustInstr = custom_instructions[i]

    orig_operands = curOrigInstr.operands
    cust_operands = curCustInstr.operands

    if str(curOrigInstr) == str(curCustInstr):
        if printDiff:
            print(f"{Fore.GREEN}{str(curOrigInstr):<80}{curCustInstr}{Style.RESET_ALL}")
        continue

    if curOrigInstr.id != curCustInstr.id:
        print(f"{Fore.RED}{str(curOrigInstr):<80}{curCustInstr}{Style.RESET_ALL}")
        instr_equal = False
        continue

    for j in range(len(orig_operands)):
        if orig_operands[j].reg != cust_operands[j].reg:
            # ADRP, ADD, and LDR can report wrong operands because we are not linking
            if curOrigInstr.id == 9 or curOrigInstr.id == 6 or curOrigInstr.id == 162:
                print(f"{Fore.YELLOW}{str(curOrigInstr):<80}{curCustInstr}{Style.RESET_ALL}")
            # the same goes for B and BL instructions
            elif curOrigInstr.id == 21 or curOrigInstr.id == 16:
                print(f"{Fore.YELLOW}{str(curOrigInstr):<80}{curCustInstr}{Style.RESET_ALL}")
            else:
                print(f"{Fore.RED}{str(curOrigInstr):<80}{curCustInstr}{Style.RESET_ALL}")
                regs_equal = False
            break

isAlreadyMarked = False

if instr_equal and regs_equal:
    with open("data/main.map", "r") as f:
        csvData = f.readlines()

    outCsv = []

    for c in csvData:
        spl = c.split("=")

        if spl[0] == sym and spl[3] == "false\n":
            outCsv.append(f"{spl[0]}={spl[1]}={spl[2]}=true\n")
        elif spl[0] == sym and spl[3] == "true\n":
            isAlreadyMarked = True
            outCsv.append(c)
        else:
            outCsv.append(c)

    with open("data/main.map", "w") as w:
        w.writelines(outCsv)

    if isAlreadyMarked:
        print("Function is already marked as decompiled.")
    else:
        print("Function is matching! Marking as decompiled...")

elif instr_equal and not regs_equal:
    print("Function has matching instructions, but operands are not equal.")
elif not instr_equal and regs_equal:
    print("Function has matching operands, but instructions are not equal.")
else:
    print("Function does not match in either instructions or operands.")
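Both check.py and progress.py treat data/main.map as a list of '='-separated records: mangled symbol name, address in the NSO, function size, and a decompiled flag. Based on how the fields are indexed above, a line presumably looks like this (values invented for illustration):

_ZN2al19CameraPoserParallel25checkEnableCameraApproachEv=0000007100123456=92=false

Note that check.py parses the size field as hexadecimal while progress.py parses it as decimal; one of the two readings is presumably a latent bug.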
data/percent.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
    "schemaVersion": 1,
    "label": "decompiled",
    "message": "0.0%",
    "color": "blue"
}
lib/ActionLibrary/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# ActionLibrary
ActionLibrary is one of the main backend libraries used in modern 3D Mario games. It contains the base classes for actors and more.
lib/agl/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# agl
AGL is the library responsible for most of the graphics processing done through the GPU.
lib/eui/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# eui
EUI is the library that handles the layouts rendered in the game.
lib/nn/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# nn
The `nn` library contains the SDK code used in games.
lib/sead/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# sead
`sead` is the library that contains classes and functions that control heaps, strings, and other low-level operations.
nso.py (new file, 51 lines)
@@ -0,0 +1,51 @@
import struct, sys
import lz4.block

class SegmentHeader:
    def __init__(self, fileOffset, memoryOffset, decompressedSize):
        self.fileOffset = fileOffset
        self.memoryOffset = memoryOffset
        self.decompressedSize = decompressedSize

class SegmentHeaderRelative:
    def __init__(self, offset, size):
        self.offset = offset
        self.size = size

class NSO:
    def __init__(self, nsoBinary):
        self.rawData = nsoBinary

        nsoStr = struct.unpack_from("4s", self.rawData, 0)[0].decode('latin-1')

        if nsoStr != "NSO0":
            print("Invalid NSO.")
            sys.exit(1)

        # we only need two things out of the header:
        # 1) the offset and size of the text data within the binary (and its compressed size)
        # 2) whether or not we need to decompress the data
        flags = struct.unpack_from("<I", self.rawData, 0xC)[0]
        textOffs, textMemOffs, textSize = struct.unpack_from("<3I", self.rawData, 0x10)
        self.textSegmentHeader = SegmentHeader(textOffs, textMemOffs, textSize)
        textSizeComp = struct.unpack_from("<I", self.rawData, 0x60)[0]
        self.textData = self.rawData[textOffs:textOffs + textSizeComp]

        # decompress the data if the .text-is-compressed flag is set
        if flags & 0x1:
            self.textData = lz4.block.decompress(self.textData, uncompressed_size=textSize)

    def getFunction(self, addr, size):
        # addr is relative to the start of the decompressed .text section
        return self.textData[addr:addr + size]
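For context, these are the NSO0 header fields the parser above reads (offsets match the struct.unpack_from calls):

0x00   4 bytes   magic, must be "NSO0"
0x0C   4 bytes   flags; bit 0 set means the .text segment is LZ4-compressed
0x10   12 bytes  .text segment header: file offset, memory offset, decompressed size (three u32 values)
0x60   4 bytes   compressed size of .text in the file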
progress.py (new file, 37 lines)
@@ -0,0 +1,37 @@
import math

def truncate(number, digits) -> float:
    # truncate toward zero at the given digit, e.g. truncate(3.14159, 2) -> 3.14
    stepper = 10.0 ** digits
    return math.trunc(stepper * number) / stepper

done_size = 0
TOTAL_GAME_SIZE = 0xA0F5A0

with open("data/main.map", "r") as f:
    csvData = f.readlines()

for c in csvData:
    spl = c.split("=")
    isDone = spl[3].replace("\n", "")

    if isDone == "true":
        funcSize = int(spl[2])
        done_size = done_size + funcSize

prog = (done_size / TOTAL_GAME_SIZE) * 100.0
print("Progress:")
print(f"{prog}% [{done_size} / {TOTAL_GAME_SIZE}]")

print("Generating JSON...")

# generate our JSON for the badge on the GitHub page
badge_json = []
badge_json.append("{\n")
badge_json.append("\t\"schemaVersion\": 1,\n")
badge_json.append("\t\"label\": \"decompiled\",\n")
badge_json.append(f"\t\"message\": \"{truncate(prog, 4)}%\",\n")
badge_json.append("\t\"color\": \"blue\"\n")
badge_json.append("}")

with open("data/percent.json", "w") as w:
    w.writelines(badge_json)
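With no functions marked as decompiled yet, a run prints something like this (0xA0F5A0 is 10,548,640 bytes of code to account for):

Progress:
0.0% [0 / 10548640]
Generating JSON...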