Migrate to dtk-template

Luke Street 2024-09-10 00:53:29 -06:00
parent e7a50789bf
commit 79ac060c4d
93 changed files with 200843 additions and 4710 deletions

4
.flake8 Normal file

@ -0,0 +1,4 @@
[flake8]
# E203: whitespace before ':'
# E501: line too long
extend-ignore = E203,E501

3
.gitattributes vendored Executable file → Normal file

@ -8,3 +8,6 @@
*.bat text eol=crlf
*.sh text eol=lf
*.sha1 text eol=lf
# decomp-toolkit writes files with LF
config/**/*.txt text eol=lf


@ -6,50 +6,62 @@ on:
jobs:
build:
name: Build
runs-on: ubuntu-latest
container: ghcr.io/projectpiki/build:main
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version: [usa.1]
version: [GPIE01_01] # TODO: other versions
steps:
# Checkout the repository (shallow clone)
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: recursive
# Set Git config
- name: Git config
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
# Copy the original files to the workspace
- name: Prepare
run: cp -R /orig .
# Build the project
- name: Build
run: |
python configure.py -m -v ${{matrix.version}} --compilers /compilers/GC
ninja
python configure.py --map --version ${{ matrix.version }} \
--binutils /binutils --compilers /compilers
ninja build/${{ matrix.version }}/progress.json
# TODO: Enable all_source and report.json
# ninja all_source build/${{ matrix.version }}/progress.json \
# build/${{ matrix.version }}/report.json
# Upload progress if we're on the main branch
- name: Upload progress
if: github.ref == 'refs/heads/main'
continue-on-error: true
env:
PROGRESS_API_KEY: ${{secrets.PROGRESS_API_KEY}}
PROGRESS_SLUG: pikmin
PROGRESS_API_KEY: ${{ secrets.PROGRESS_API_KEY }}
run: |
python tools/upload-progress.py -b https://progress.decomp.club/ -p pikmin -v ${{matrix.version}} \
build/pikmin.${{matrix.version}}/main.dol.progress
python tools/upload_progress.py -b https://progress.decomp.club/ \
-p $PROGRESS_SLUG -v ${{ matrix.version }} \
build/${{ matrix.version }}/progress.json
# Upload map files
- name: Upload map
uses: actions/upload-artifact@v4
with:
name: build-${{matrix.version}}.map
path: build/*/build.map
name: ${{ matrix.version }}_maps
path: build/${{ matrix.version }}/**/*.MAP
build_make:
name: Build (legacy)
runs-on: ubuntu-latest
container: ghcr.io/projectpiki/build:main
strategy:
fail-fast: false
matrix:
version: [usa.1]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Git config
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: Build
run: |
python configure.py --map --compilers /compilers/GC
make -j$(nproc) MAPGENFLAG=1 VERSION=${{matrix.version}} COMPILERS=/compilers/GC
# Upload progress report
- name: Upload report
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.version }}_report
path: build/${{ matrix.version }}/report.json

51
.gitignore vendored

@ -1,39 +1,16 @@
.vscode/*.log
*.dat
*.exe
*.dll
*.idb
*.id0
*.id1
*.id2
*.nam
*.til
*.o
*.out
*.elf
*.dol
*.a
*.d
*.map
*.exe
*.dump
*.7z
*.bat
build
tools/mwcc_compiler/*
!tools/mwcc_compiler/.gitkeep
*.sln
*.vcxproj
*.user
*.filters
Debug
.vs
ctx.c
tools/dtk
tools/powerpc/
.ninja_deps
.ninja_log
objdiff.json
__pycache__
.idea
.vscode/*.log
.ninja_*
.mypy_cache
*.exe
build
build.ninja
newlines.py
objdiff.json
orig/*/*
!orig/*/.gitkeep
/*.txt
ctx.c
tools/mwcc_compiler
tools/dtk
tools/powerpc


@ -1,10 +1,10 @@
{
"[c]": {
"files.encoding": "shiftjis",
"files.encoding": "utf8",
"editor.defaultFormatter": "ms-vscode.cpptools"
},
"[cpp]": {
"files.encoding": "shiftjis",
"files.encoding": "utf8",
"editor.defaultFormatter": "ms-vscode.cpptools"
},
"editor.tabSize": 4,
@ -17,4 +17,4 @@
"source_location": "cpp",
"*.cp": "cpp",
}
}
}

230
Makefile

@ -1,230 +0,0 @@
ifneq ($(findstring MINGW,$(shell uname)),)
WINDOWS := 1
endif
ifneq ($(findstring MSYS,$(shell uname)),)
WINDOWS := 1
endif
# If 0, tells the console to chill out. (Quiets the make process.)
VERBOSE ?= 0
# If MAPGENFLAG set to 1, tells LDFLAGS to generate a mapfile, which makes linking take several minutes.
MAPGENFLAG ?= 0
# Use the all-in-one updater after successful build? (Fails on non-windows platforms)
USE_AOI ?= 0
ifeq ($(VERBOSE),0)
QUIET := @
endif
#-------------------------------------------------------------------------------
# Files
#-------------------------------------------------------------------------------
NAME := pikmin
VERSION ?= usa.1
#VERSION := usa.0
ifeq ($(VERSION), usa.1)
VERNUM = 2
else ifeq ($(VERSION), usa.0)
VERNUM = 1
else
VERNUM = 0
endif
# Use the all-in-one updater after successful build? (Fails on non-windows platforms)
ifeq ($(USE_AOI), 1)
ifeq ($(WINDOWS), 1)
USE_AOI = 1
else
@echo "aoi.exe fails on non-windows platforms."
USE_AOI = 0
endif
else
USE_AOI = 0
endif
BUILD_DIR := build/$(NAME).$(VERSION)
# Inputs
S_FILES := $(wildcard asm/*.s)
C_FILES := $(wildcard src/*.c)
CPP_FILES := $(wildcard src/*.cpp)
CPP_FILES += $(wildcard src/*.cp)
LDSCRIPT := $(BUILD_DIR)/ldscript.lcf
AOI := aoi.exe
# Outputs
DOL := $(BUILD_DIR)/main.dol
ELF := $(DOL:.dol=.elf)
MAP := $(BUILD_DIR)/build.map
ifeq ($(MAPGENFLAG),1)
MAPGEN := -map $(MAP)
endif
include obj_files.mk
O_FILES := $(SYSBOOTUP) $(JAUDIO) $(HVQM4DEC) $(SYS) $(PLUGPIKI) $(DOLPHIN)
DEPENDS := $($(filter *.o,O_FILES):.o=.d)
DEPENDS += $($(filter *.o,E_FILES):.o=.d)
# If a specific .o file is passed as a target, also process its deps
DEPENDS += $(MAKECMDGOALS:.o=.d)
#-------------------------------------------------------------------------------
# Tools
#-------------------------------------------------------------------------------
MWCC_VERSION := 1.2.5
MWLD_VERSION := 1.2.5
# Programs
POWERPC ?= tools/powerpc
ifeq ($(WINDOWS),1)
WINE :=
AS := $(POWERPC)/powerpc-eabi-as.exe
PYTHON := python
else
WIBO := $(shell command -v wibo 2> /dev/null)
ifdef WIBO
WINE ?= wibo
else
WINE ?= wine
endif
# Disable wine debug output for cleanliness
export WINEDEBUG ?= -all
AS := $(POWERPC)/powerpc-eabi-as
PYTHON := python3
endif
COMPILERS ?= tools/mwcc_compiler
CC = $(WINE) $(COMPILERS)/$(MWCC_VERSION)/mwcceppc.exe
LD := $(WINE) $(COMPILERS)/$(MWLD_VERSION)/mwldeppc.exe
DTK := tools/dtk
ELF2DOL := $(DTK) elf2dol
SHASUM := $(DTK) shasum
ifneq ($(WINDOWS),1)
TRANSFORM_DEP := tools/transform-dep.py
else
TRANSFORM_DEP := tools/transform-win.py
endif
# Options
INCLUDES := -i include/ -i include/stl/
ASM_INCLUDES := -I include/
ASFLAGS := -mgekko $(ASM_INCLUDES) --defsym version=$(VERNUM)
ifeq ($(VERBOSE),1)
# this set of LDFLAGS outputs warnings.
LDFLAGS := $(MAPGEN) -fp hard -nodefaults
endif
ifeq ($(VERBOSE),0)
# this set of LDFLAGS generates no warnings.
LDFLAGS := $(MAPGEN) -fp hard -nodefaults -w off
endif
LIBRARY_LDFLAGS := -nodefaults -fp hard -proc gekko
CFLAGS := -Cpp_exceptions off -O4,p -fp hard -proc gekko -nodefaults -RTTI on -enum int -common on -inline auto -str noreadonly $(INCLUDES)
ifeq ($(VERBOSE),0)
# this set of ASFLAGS generates no warnings.
ASFLAGS += -W
# this set of CFLAGS generates no warnings.
CFLAGS += -w off
endif
#-------------------------------------------------------------------------------
# Recipes
#-------------------------------------------------------------------------------
### Default target ###
default: all
all: $(DOL)
ALL_DIRS := $(sort $(dir $(O_FILES)))
# Make sure build directory exists before compiling anything
DUMMY != mkdir -p $(ALL_DIRS)
LDSCRIPT := ldscript.lcf
$(DOL): $(ELF) | $(DTK)
$(QUIET) $(ELF2DOL) $< $@
$(QUIET) $(SHASUM) -c sha1/$(NAME).$(VERSION).sha1
ifneq ($(findstring -map,$(LDFLAGS)),)
$(QUIET) $(PYTHON) tools/calcprogress.py $(DOL) $(MAP)
endif
ifeq ($(USE_AOI),1)
$(WINE) ./aoi.exe
endif
clean:
rm -f -d -r build
$(DTK): tools/dtk_version
@echo "Downloading $@"
$(QUIET) $(PYTHON) tools/download_dtk.py $< $@
# ELF creation makefile instructions
$(ELF): $(O_FILES) $(LDSCRIPT)
@echo Linking ELF $@
$(QUIET) @echo $(O_FILES) > build/o_files
$(QUIET) $(LD) $(LDFLAGS) -o $@ -lcf $(LDSCRIPT) @build/o_files
%.d.unix: %.d $(TRANSFORM_DEP)
@echo Processing $<
$(QUIET) $(PYTHON) $(TRANSFORM_DEP) $< $@
-include include_link.mk
DEPENDS := $(DEPENDS:.d=.d.unix)
ifneq ($(MAKECMDGOALS), clean)
-include $(DEPENDS)
endif
$(BUILD_DIR)/%.o: %.s
@echo Assembling $<
$(QUIET) mkdir -p $(dir $@)
$(QUIET) $(AS) $(ASFLAGS) -o $@ $<
# for files with capitalized .C extension
$(BUILD_DIR)/%.o: %.C
@echo "Compiling " $<
$(QUIET) mkdir -p $(dir $@)
$(QUIET) $(CC) $(CFLAGS) -c -o $(dir $@) $<
$(BUILD_DIR)/%.o: %.c
@echo "Compiling " $<
$(QUIET) mkdir -p $(dir $@)
$(QUIET) $(CC) $(CFLAGS) -c -o $(dir $@) $<
$(BUILD_DIR)/%.o: %.cp
@echo "Compiling " $<
$(QUIET) mkdir -p $(dir $@)
$(QUIET) $(CC) $(CFLAGS) -c -o $(dir $@) $<
$(BUILD_DIR)/%.o: %.cpp
@echo "Compiling " $<
$(QUIET) mkdir -p $(dir $@)
$(QUIET) $(CC) $(CFLAGS) -c -o $(dir $@) $<
### Extremely lazy recipes for generating context ###
# Example usage: make build/pikmin2.usa/src/plugProjectYamashitaU/farmMgr.h
$(BUILD_DIR)/%.h: %.c
@echo "Compiling and generating context for " $<
$(QUIET) $(CC) $(CFLAGS) -E -c -o $@ $<
$(BUILD_DIR)/%.h: %.cp
@echo "Compiling and generating context for " $<
$(QUIET) $(CC) $(CFLAGS) -E -c -o $@ $<
$(BUILD_DIR)/%.h: %.cpp
@echo "Compiling and generating context for " $<
$(QUIET) $(CC) $(CFLAGS) -E -c -o $@ $<
### Debug Print ###
print-% : ; $(info $* is a $(flavor $*) variable set to [$($*)]) @true

126
README.MD

@ -1,42 +1,108 @@
# Pikmin 1
Pikmin 1 (USA Revision 1) disassembly/decompilation to C++/C
Pikmin
[![Build Status]][actions] ![Code Progress] ![Data Progress] [![Discord Badge]][discord]
=============
```diff
- INFORMATION! -
[Build Status]: https://github.com/projectPiki/pikmin/actions/workflows/build.yml/badge.svg
[actions]: https://github.com/projectPiki/pikmin/actions/workflows/build.yml
[Code Progress]: https://img.shields.io/endpoint?label=Code&url=https%3A%2F%2Fprogress.deco.mp%2Fdata%2Fpikmin%2FGPVE01%2Fdol%2F%3Fmode%3Dshield%26measure%3Dcode
[Data Progress]: https://img.shields.io/endpoint?label=Data&url=https%3A%2F%2Fprogress.deco.mp%2Fdata%2Fpikmin%2FGPVE01%2Fdol%2F%3Fmode%3Dshield%26measure%3Ddata
[Discord Badge]: https://img.shields.io/discord/933849697485983765?color=%237289DA&logo=discord&logoColor=%23FFFFFF
[discord]: https://discord.gg/CWKqYMePX8
The ROM this repository builds can be shifted! Meaning you are able to now
add and remove code as you see fit, for modding or research purposes.
A work-in-progress decompilation of Pikmin.
- DISCLAIMER -
Supported versions:
Shiftability is tentative and not fully verified. Use at your own discretion.
- `DPIJ01`: PikiDemo / Jitsuen-you Sample
- `GPIJ01_01`: Japan (Rev 1)
- `GPIJ01_02`: Japan (Rev 2)
- `GPIE01_00`: USA (Rev 0)
- `GPIE01_01`: USA (Rev 1)
- `GPIP01_00`: Europe (Rev 0)
Index
-----
- [Dependencies](#dependencies)
- [Windows](#windows)
- [macOS](#macos)
- [Linux](#linux)
- [Building](#building)
- [Diffing](#diffing)
Dependencies
------------
### Windows
On Windows, it's **highly recommended** to use native tooling. Neither WSL nor msys2 is required.
When running under WSL, [objdiff](#diffing) is unable to get filesystem notifications for automatic rebuilds.
- Install [Python](https://www.python.org/downloads/) and add it to `%PATH%`.
- Also available from the [Windows Store](https://apps.microsoft.com/store/detail/python-311/9NRWMJP3717K).
- Download [ninja](https://github.com/ninja-build/ninja/releases) and add it to `%PATH%`.
- Quick install via pip: `pip install ninja`
### macOS
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages):
```sh
brew install ninja
```
- Install [wine-crossover](https://github.com/Gcenx/homebrew-wine):
```sh
brew install --cask --no-quarantine gcenx/wine/wine-crossover
```
After OS upgrades, if macOS complains about `Wine Crossover.app` being unverified, you can unquarantine it using:
```sh
sudo xattr -rd com.apple.quarantine '/Applications/Wine Crossover.app'
```
It builds the following DOL:
pikmin.usa.1.dol: `sha1: 02204260B7EFE8742D34572E58BA3DFECD92E4E9`
### Linux
## Building
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages).
- For non-x86(_64) platforms: Install wine from your package manager.
- For x86(_64), [wibo](https://github.com/decompals/wibo), a minimal 32-bit Windows binary wrapper, will be automatically downloaded and used.
### Required Tools
* [devkitPro](https://devkitpro.org/wiki/Getting_Started)
* python
Building
--------
### Instructions
- Clone the repository:
* WINDOWS ONLY STEP:
- Launch msys2 (provided by devkitPro) and run the following commands:
```
pacman -S msys2-keyring
pacman -S git python
```
```sh
git clone https://github.com/projectPiki/pikmin.git
```
* OPTIONAL:
- Obtain a clean DOL of Pikmin 1 USA 1.01 and place it in the base working directory and rename it to `baserom.dol`.
1. Download GC_WII_COMPILERS.zip from (https://files.decomp.dev/compilers_20230715.zip) and extract it to tools/mwcc_compiler/.
2. Run the `make` command.
- Using [Dolphin Emulator](https://dolphin-emu.org/), extract your game's system data to `orig/GPIE01_01`. (Or the appropriate version folder.)
- Right-click the game in Dolphin's game list and select `Properties`.
- Go to the `Filesystem` tab and right-click `Disc` -> `Extract System Data`.
![Dolphin filesystem extract](assets/dolphin-extract.png)
- After extraction, the following file should exist: `orig/GPIE01_01/sys/main.dol`. (You can sanity-check it as shown below.)
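As a quick sanity check before configuring, you can compare the extracted DOL's SHA-1 against the hash recorded in `config/GPIE01_01/config.yml` (a minimal sketch; `sha1sum` ships with GNU coreutils, macOS users can substitute `shasum`):
```sh
# Should print 02204260b7efe8742d34572e58ba3dfecd92e4e9 for the USA (Rev 1) DOL
sha1sum orig/GPIE01_01/sys/main.dol
```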
- Configure:
## Project Organisation
- For each namespace that types and functions are contained within, create a folder. E.g. the structure `Game::GameSystem` will go in include/Game/GameSystem.h.
- The only exception is when adding extra folders becomes pointless, for example when it would mean creating multiple empty folders; in that case you can instead create a single header file containing the namespace's definitions (see the sketch below).
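A minimal sketch of that layout convention, using the `Game::GameSystem` example above (the include-guard name and forward declaration are purely illustrative):
```sh
# One folder per enclosing namespace: Game::GameSystem -> include/Game/GameSystem.h
mkdir -p include/Game
cat > include/Game/GameSystem.h << 'EOF'
#ifndef GAME_GAMESYSTEM_H
#define GAME_GAMESYSTEM_H

namespace Game {
struct GameSystem; // declarations for the Game::GameSystem structure go here
} // namespace Game

#endif
EOF
```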
```sh
python configure.py
```
To use a version other than `GPIE01_01`, add `--version`. Add `--help` to see all available options.
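For example, to configure the Japanese Rev 1 build instead (assuming its files have been extracted to `orig/GPIJ01_01` as described above):
```sh
python configure.py --version GPIJ01_01
```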
- Build:
```sh
ninja
```
Diffing
-------
Once the initial build succeeds, an `objdiff.json` should exist in the project root.
Download the latest release from [encounter/objdiff](https://github.com/encounter/objdiff). Under project settings, set `Project directory`. The configuration should be loaded automatically.
Select an object from the left sidebar to begin diffing. The project rebuilds automatically whenever source files, headers, `configure.py`, `splits.txt`, or `symbols.txt` change.
![objdiff project configuration](assets/objdiff.png)

1
config/DPIJ01/build.sha1 Normal file

@ -0,0 +1 @@
16aa58df2658f79b1dbb0f8500afd65399253267 build/DPIJ01/main.dol

10
config/DPIJ01/config.yml Normal file

@ -0,0 +1,10 @@
object: orig/DPIJ01/sys/main.dol
hash: 16aa58df2658f79b1dbb0f8500afd65399253267
# Generated from dataDir/build.map
symbols: config/DPIJ01/symbols.txt
splits: config/DPIJ01/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3135
config/DPIJ01/splits.txt Normal file

File diff suppressed because it is too large

30105
config/DPIJ01/symbols.txt Normal file

File diff suppressed because it is too large


@ -0,0 +1 @@
30a3850f203a4a910eaccb8fdc697f518d22a687 build/GPIE01_00/main.dol


@ -0,0 +1,10 @@
object: orig/GPIE01_00/sys/main.dol
hash: 30a3850f203a4a910eaccb8fdc697f518d22a687
# Generated from dataDir/build.map
symbols: config/GPIE01_00/symbols.txt
splits: config/GPIE01_00/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3110
config/GPIE01_00/splits.txt Normal file

File diff suppressed because it is too large

29569
config/GPIE01_00/symbols.txt Normal file

File diff suppressed because it is too large


@ -0,0 +1 @@
02204260b7efe8742d34572e58ba3dfecd92e4e9 build/GPIE01_01/main.dol


@ -0,0 +1,10 @@
object: orig/GPIE01_01/sys/main.dol
hash: 02204260b7efe8742d34572e58ba3dfecd92e4e9
# Generated from dataDir/build.map
symbols: config/GPIE01_01/symbols.txt
splits: config/GPIE01_01/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3110
config/GPIE01_01/splits.txt Normal file

File diff suppressed because it is too large

29571
config/GPIE01_01/symbols.txt Normal file

File diff suppressed because it is too large


@ -0,0 +1 @@
92b0ca199a36b78ad978f54541392231ef8c0cea build/GPIJ01_01/main.dol


@ -0,0 +1,10 @@
object: orig/GPIJ01_01/sys/main.dol
hash: 92b0ca199a36b78ad978f54541392231ef8c0cea
# Generated from dataDir/build.map
symbols: config/GPIJ01_01/symbols.txt
splits: config/GPIJ01_01/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3122
config/GPIJ01_01/splits.txt Normal file

File diff suppressed because it is too large

29948
config/GPIJ01_01/symbols.txt Normal file

File diff suppressed because it is too large


@ -0,0 +1 @@
d87f92a733fc8fa5f65045730fe81a66b0c7d146 build/GPIJ01_02/main.dol


@ -0,0 +1,10 @@
object: orig/GPIJ01_02/sys/main.dol
hash: d87f92a733fc8fa5f65045730fe81a66b0c7d146
# Generated from dataDir/build.map
symbols: config/GPIJ01_02/symbols.txt
splits: config/GPIJ01_02/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3122
config/GPIJ01_02/splits.txt Normal file

File diff suppressed because it is too large

29951
config/GPIJ01_02/symbols.txt Normal file

File diff suppressed because it is too large


@ -0,0 +1 @@
e5794ffe8ac72c3ead3849edbd2f27ea5e0ed440 build/GPIP01_00/main.dol


@ -0,0 +1,10 @@
object: orig/GPIP01_00/sys/main.dol
hash: e5794ffe8ac72c3ead3849edbd2f27ea5e0ed440
# Generated from dataDir/build.map
symbols: config/GPIP01_00/symbols.txt
splits: config/GPIP01_00/splits.txt
mw_comment_version: 8 # GC Linker 1.2.5
symbols_known: true # Very fast analysis
fill_gaps: false # Alignments known

3136
config/GPIP01_00/splits.txt Normal file

File diff suppressed because it is too large

29710
config/GPIP01_00/symbols.txt Normal file

File diff suppressed because it is too large

2275
configure.py Normal file → Executable file

File diff suppressed because it is too large

1
include/.gitignore vendored

@ -1 +0,0 @@
*.s


@ -204,7 +204,7 @@ struct GenObjectActor : public GenObject {
*/
struct GenObjectBoss : public GenObject {
inline GenObjectBoss()
: GenObject('boss', "ボスを生む") // 'generate a boss'
: GenObject('boss', "ボスを生む") // 'generate a boss'
{
}
@ -285,7 +285,7 @@ struct GenObjectMapObject : public GenObject {
*/
struct GenObjectMapParts : public GenObject {
inline GenObjectMapParts()
: GenObject('mpar', "マップパーツを生む") // 'generate map parts'
: GenObject('mpar', "マップパーツを生む") // 'generate map parts'
{
}
@ -456,7 +456,7 @@ struct GenType : public GenBase {
*/
struct GenTypeAtOnce : public GenType {
inline GenTypeAtOnce()
: GenType('aton', "最初から全部生む") // 'generate everything from the beginning'
: GenType('aton', "最初から全部生む") // 'generate everything from the beginning'
, mMaxCount(this, 1, 0, 0, "p00", nullptr)
{
}
@ -477,7 +477,7 @@ struct GenTypeAtOnce : public GenType {
*/
struct GenTypeInitRand : public GenType {
inline GenTypeInitRand()
: GenType('irnd', "最初から生む(ランダム)") // 'generate from the beginning (random)'
: GenType('irnd', "最初から生む(ランダム)") // 'generate from the beginning (random)'
, _38(this, 1, 0, 0, "p00", nullptr)
, mMaxCount(this, 5, 0, 0, "p01", nullptr)
{
@ -500,7 +500,7 @@ struct GenTypeInitRand : public GenType {
*/
struct GenTypeOne : public GenType {
inline GenTypeOne()
: GenType('1one', "1つだけうむ") // 'just one thing'
: GenType('1one', "1つだけうむ") // 'just one thing'
, _38(this, 0, 0, 0, "p00", nullptr)
, _48(this, 0, 0, 0, "p01", nullptr)
, _58(this, 0, 0, 0, "p02", nullptr)


@ -34,6 +34,8 @@ struct Stream {
virtual void close(); // _4C
virtual bool getClosing() { return false; } // _50 (weak)
virtual void flush() { } // _54 (weak)
void print(char*, ...);
};
/**


@ -1,217 +0,0 @@
/*
Code sections:
.init: 0x00000100 0x80003100 0x800054C0
.text: 0x000024C0 0x80005560 0x80221F60
Data sections:
extab: 0x0021EEC0 0x800054C0 0x80005500
extabindex: 0x0021EF00 0x80005500 0x80005560
.ctors: 0x0021EF60 0x80221F60 0x80221FC0
.dtors: 0x0021EFC0 0x80221FC0 0x80221FE0
.rodata: 0x0021EFE0 0x80221FE0 0x80222DC0
.data: 0x0021FDC0 0x80222DC0 0x802E9640
.sdata: 0x002E6640 0x803DCD20 0x803E7820
.sdata2: 0x002F1140 0x803E8200 0x803EC840
BSS section:
.bss: 0x00000000 0x802E9640 0x803E81E5
.sbss:
Entry Point: 0x80003100
*/
# PowerPC Register Constants
# General Purpose Registers (GPRs)
.set r0, 0
.set r1, 1
.set r2, 2
.set r3, 3
.set r4, 4
.set r5, 5
.set r6, 6
.set r7, 7
.set r8, 8
.set r9, 9
.set r10, 10
.set r11, 11
.set r12, 12
.set r13, 13
.set r14, 14
.set r15, 15
.set r16, 16
.set r17, 17
.set r18, 18
.set r19, 19
.set r20, 20
.set r21, 21
.set r22, 22
.set r23, 23
.set r24, 24
.set r25, 25
.set r26, 26
.set r27, 27
.set r28, 28
.set r29, 29
.set r30, 30
.set r31, 31
# Floating Point Registers (FPRs)
.set f0, 0
.set f1, 1
.set f2, 2
.set f3, 3
.set f4, 4
.set f5, 5
.set f6, 6
.set f7, 7
.set f8, 8
.set f9, 9
.set f10, 10
.set f11, 11
.set f12, 12
.set f13, 13
.set f14, 14
.set f15, 15
.set f16, 16
.set f17, 17
.set f18, 18
.set f19, 19
.set f20, 20
.set f21, 21
.set f22, 22
.set f23, 23
.set f24, 24
.set f25, 25
.set f26, 26
.set f27, 27
.set f28, 28
.set f29, 29
.set f30, 30
.set f31, 31
# Graphics Quantization Registers (GQRs)
.set qr0, 0
.set qr1, 1
.set qr2, 2
.set qr3, 3
.set qr4, 4
.set qr5, 5
.set qr6, 6
.set qr7, 7
# Special Purpose Registers (SPRs)
.set XER, 1
.set LR, 8
.set CTR, 9
.set DSISR, 18
.set DAR, 19
.set DEC, 22
.set SDR1, 25
.set SRR0, 26
.set SRR1, 27
.set SPRG0, 272
.set SPRG1, 273
.set SPRG2, 274
.set SPRG3, 275
.set EAR, 282
.set PVR, 287
.set IBAT0U, 528
.set IBAT0L, 529
.set IBAT1U, 530
.set IBAT1L, 531
.set IBAT2U, 532
.set IBAT2L, 533
.set IBAT3U, 534
.set IBAT3L, 535
.set DBAT0U, 536
.set DBAT0L, 537
.set DBAT1U, 538
.set DBAT1L, 539
.set DBAT2U, 540
.set DBAT2L, 541
.set DBAT3U, 542
.set DBAT3L, 543
.set GQR0, 912
.set GQR1, 913
.set GQR2, 914
.set GQR3, 915
.set GQR4, 916
.set GQR5, 917
.set GQR6, 918
.set GQR7, 919
.set HID2, 920
.set WPAR, 921
.set DMA_U, 922
.set DMA_L, 923
.set UMMCR0, 936
.set UPMC1, 937
.set UPMC2, 938
.set USIA, 939
.set UMMCR1, 940
.set UPMC3, 941
.set UPMC4, 942
.set USDA, 943
.set MMCR0, 952
.set PMC1, 953
.set PMC2, 954
.set SIA, 955
.set MMCR1, 956
.set PMC3, 957
.set PMC4, 958
.set SDA, 959
.set HID0, 1008
.set HID1, 1009
.set IABR, 1010
.set DABR, 1013
.set L2CR, 1017
.set ICTC, 1019
.set THRM1, 1020
.set THRM2, 1021
.set THRM3, 1022
# Defines a sized symbol with function type.
# Usage:
# .fn my_function, local
# /* asm here */
# .endfn my_function
.macro .fn name, visibility=global
.\visibility "\name"
.type "\name", @function
"\name":
.endm
.macro .endfn name
.size "\name", . - "\name"
.endm
# Defines a sized symbol with object type.
# Usage:
# .obj my_object, local
# /* data here */
# .endobj my_object
.macro .obj name, visibility=global
.\visibility "\name"
.type "\name", @object
"\name":
.endm
.macro .endobj name
.size "\name", . - "\name"
.endm
# Defines a sized symbol without a type.
# Usage:
# .sym my_sym, local
# /* anything here */
# .endsym my_sym
.macro .sym name, visibility=global
.\visibility "\name"
"\name":
.endm
.macro .endsym name
.size "\name", . - "\name"
.endm
# Generates a relative relocation against a symbol.
# Usage:
# .rel my_function, .L_label
.macro .rel name, label
.4byte "\name" + ("\label" - "\name")
.endm


@ -1,31 +0,0 @@
# libraries
-include src/jaudio/Makefile
-include src/hvqm4dec/Makefile
-include src/sysCommon/Makefile
-include src/sysDolphin/Makefile
-include src/plugPikiColin/Makefile
-include src/plugPikiKando/Makefile
-include src/plugPikiNakata/Makefile
-include src/plugPikiNishimura/Makefile
-include src/plugPikiOgawa/Makefile
-include src/plugPikiYamashita/Makefile
-include src/base/Makefile
-include src/os/Makefile
-include src/db/Makefile
-include src/mtx/Makefile
-include src/dvd/Makefile
-include src/vi/Makefile
-include src/pad/Makefile
-include src/ai/Makefile
-include src/ar/Makefile
-include src/dsp/Makefile
-include src/card/Makefile
-include src/hio/Makefile
-include src/gx/Makefile
-include src/Runtime/PPCEABI/H/Makefile
-include src/MSL_C/PPCEABI/bare/H/Makefile
-include src/TRK_MINNOW_DOLPHIN/Makefile
-include src/amcExi2/Makefile
-include src/amcnotstub/Makefile
-include src/OdemuExi2/Makefile
-include src/odenotstub/Makefile


@ -1,33 +0,0 @@
MEMORY {
text : origin = 0x80003100
}
SECTIONS {
GROUP: {
.init ALIGN(0x20) : {}
._extab ALIGN(0x20) : {}
._exidx ALIGN(0x20) : {}
.text ALIGN(0x20) : {}
.ctors ALIGN(0x20) : {}
.dtors ALIGN(0x20) : {}
.rodata ALIGN(0x20) : {}
.data ALIGN(0x20) : {}
.bss ALIGN(0x20) : {}
.sdata ALIGN(0x20) : {}
.sbss ALIGN(0x20) : {}
.sdata2 ALIGN(0x20): {}
.sbss2 ALIGN(0x20) : {}
.stack ALIGN(0x100) : {}
} > text
_stack_addr = (_f_sbss2 + SIZEOF(.sbss2) + 65536 + 0x7) & ~0x7;
_stack_end = _f_sbss2 + SIZEOF(.sbss2);
_db_stack_addr = (_stack_addr + 0x2000);
_db_stack_end = _stack_addr;
__ArenaLo = (_db_stack_addr + 0x1f) & ~0x1f;
__ArenaHi = 0x81700000;
}
FORCEFILES {
uart_console_io.o
ExceptionPPC.o
__init_cpp_exceptions.o
}


@ -1,44 +0,0 @@
# Linker order for every file, passed to the Metrowerks linker.
SYSBOOTUP :=\
$(BUILD_DIR)/src/sysBootup.o\
JAUDIO :=\
$(BUILD_DIR)/src/jaudio/jaudio.o\
HVQM4DEC :=\
$(BUILD_DIR)/src/hvqm4dec/hvqm4dec.a\
SYS :=\
$(BUILD_DIR)/src/sysCommon/sysCommon.a\
$(BUILD_DIR)/src/sysDolphin/sysDolphin.a\
PLUGPIKI :=\
$(BUILD_DIR)/src/plugPikiColin/plugPikiColin.a\
$(BUILD_DIR)/src/plugPikiKando/plugPikiKando.a\
$(BUILD_DIR)/src/plugPikiNakata/plugPikiNakata.a\
$(BUILD_DIR)/src/plugPikiNishimura/plugPikiNishimura.a\
$(BUILD_DIR)/src/plugPikiOgawa/plugPikiOgawa.a\
$(BUILD_DIR)/src/plugPikiYamashita/plugPikiYamashita.a\
DOLPHIN :=\
$(BUILD_DIR)/src/base/base.a\
$(BUILD_DIR)/src/os/os.a\
$(BUILD_DIR)/src/db/db.a\
$(BUILD_DIR)/src/mtx/mtx.a\
$(BUILD_DIR)/src/dvd/dvd.a\
$(BUILD_DIR)/src/vi/vi.a\
$(BUILD_DIR)/src/pad/pad.a\
$(BUILD_DIR)/src/ai/ai.a\
$(BUILD_DIR)/src/ar/ar.a\
$(BUILD_DIR)/src/dsp/dsp.a\
$(BUILD_DIR)/src/card/card.a\
$(BUILD_DIR)/src/hio/hio.a\
$(BUILD_DIR)/src/gx/gx.a\
$(BUILD_DIR)/src/Runtime/PPCEABI/H/Runtime.PPCEABI.H.a\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/MSL_C.PPCEABI.bare.H.a\
$(BUILD_DIR)/src/TRK_MINNOW_DOLPHIN/TRK_MINNOW_DOLPHIN.a\
$(BUILD_DIR)/src/amcExi2/amcExi2.a\
$(BUILD_DIR)/src/amcnotstub/amcnotstub.a\
$(BUILD_DIR)/src/OdemuExi2/OdemuExi2.a\
$(BUILD_DIR)/src/odenotstub/odenotstub.a\


@ -1 +0,0 @@
02204260B7EFE8742D34572E58BA3DFECD92E4E9 build/pikmin.usa.1/main.dol


@ -1,50 +0,0 @@
MSL_C_FILES:=\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/abort_exit.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/errno.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/ansi_fp.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/arith.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/buffer_io.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/critical_regions.ppc_eabi.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/ctype.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/ansi_files.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/locale.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/direct_io.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/mbstring.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/mem.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/mem_funcs.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/misc_io.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/printf.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/rand.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/scanf.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/string.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/strtold.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/strtoul.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/uart_console_io.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/wchar_io.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/float.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/e_asin.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/e_atan2.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/e_pow.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/fminmaxdim.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/s_atan.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/s_copysign.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/s_frexp.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/s_ldexp.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/w_atan2.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/w_pow.o\
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/hyperbolicsf.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/inverse_trig.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/trigf.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/math_inlines.o\
$(BUILD_DIR)/asm/MSL_C/PPCEABI/bare/H/common_float_tables.o\
$(MSL_C_FILES): CFLAGS += -common off -fp_contract on
$(MSL_C_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(MSL_C_FILES:.o=.d)
$(BUILD_DIR)/src/MSL_C/PPCEABI/bare/H/MSL_C.PPCEABI.bare.H.a: $(MSL_C_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(MSL_C_FILES) > build/MSL_C_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/MSL_C_ofiles


@ -1,13 +0,0 @@
ODEMUEXI2_FILES:=\
$(BUILD_DIR)/asm/OdemuExi2/DebuggerDriver.o\
$(ODEMUEXI2_FILES): CFLAGS += -common off
$(ODEMUEXI2_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(ODEMUEXI2_FILES:.o=.d)
$(BUILD_DIR)/src/OdemuExi2/OdemuExi2.a: $(ODEMUEXI2_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(ODEMUEXI2_FILES) > build/ODEMUEXI2_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/ODEMUEXI2_ofiles


@ -1,21 +0,0 @@
RUNTIME_FILES:=\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/__mem.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/__va_arg.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/global_destructor_chain.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/CPlusLibPPC.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/NMWException.o\
$(BUILD_DIR)/src/Runtime/PPCEABI/H/ptmf.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/ExceptionPPC.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/runtime.o\
$(BUILD_DIR)/asm/Runtime/PPCEABI/H/__init_cpp_exceptions.o\
$(RUNTIME_FILES): CFLAGS += -common off
$(RUNTIME_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(RUNTIME_FILES:.o=.d)
$(BUILD_DIR)/src/Runtime/PPCEABI/H/Runtime.PPCEABI.H.a: $(RUNTIME_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(RUNTIME_FILES) > build/RUNTIME_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/RUNTIME_ofiles


@ -1,33 +0,0 @@
TRK_MINNOW_DOLPHIN_FILES:=\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/mainloop.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/nubevent.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/nubinit.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/msg.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/msgbuf.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/serpoll.o\
$(BUILD_DIR)/src/TRK_MINNOW_DOLPHIN/usr_put.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/dispatch.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/msghndlr.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/support.o\
$(BUILD_DIR)/src/TRK_MINNOW_DOLPHIN/mutex_TRK.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/notify.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/flush_cache.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/mem_TRK.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/__exception.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/targimpl.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/dolphin_trk.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/mpc_7xx_603e.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/main_TRK.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/dolphin_trk_glue.o\
$(BUILD_DIR)/asm/TRK_MINNOW_DOLPHIN/targcont.o\
$(TRK_MINNOW_DOLPHIN_FILES): CFLAGS += -common off
$(TRK_MINNOW_DOLPHIN_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(TRK_MINNOW_DOLPHIN_FILES:.o=.d)
$(BUILD_DIR)/src/TRK_MINNOW_DOLPHIN/TRK_MINNOW_DOLPHIN.a: $(TRK_MINNOW_DOLPHIN_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(TRK_MINNOW_DOLPHIN_FILES) > build/TRK_MINNOW_DOLPHIN_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/TRK_MINNOW_DOLPHIN_ofiles


@ -1,13 +0,0 @@
AI_FILES:=\
$(BUILD_DIR)/asm/ai/ai.o\
$(AI_FILES): CFLAGS += -common off
$(AI_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(AI_FILES:.o=.d)
$(BUILD_DIR)/src/ai/ai.a: $(AI_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(AI_FILES) > build/AI_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/AI_ofiles


@ -1,14 +0,0 @@
AMCEXI2_FILES:=\
$(BUILD_DIR)/asm/amcExi2/AmcExi.o\
$(BUILD_DIR)/asm/amcExi2/AmcExi2Comm.o\
$(AMCEXI2_FILES): CFLAGS += -common off
$(AMCEXI2_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(AMCEXI2_FILES:.o=.d)
$(BUILD_DIR)/src/amcExi2/amcExi2.a: $(AMCEXI2_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(AMCEXI2_FILES) > build/AMCEXI2_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/AMCEXI2_ofiles


@ -1,13 +0,0 @@
AMCNOTSTUB_FILES:=\
$(BUILD_DIR)/src/amcnotstub/amcnotstub.o\
$(AMCNOTSTUB_FILES): CFLAGS += -common off
$(AMCNOTSTUB_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(AMCNOTSTUB_FILES:.o=.d)
$(BUILD_DIR)/src/amcnotstub/amcnotstub.a: $(AMCNOTSTUB_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(AMCNOTSTUB_FILES) > build/AMCNOTSTUB_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/AMCNOTSTUB_ofiles


@ -1,14 +0,0 @@
AR_FILES:=\
$(BUILD_DIR)/asm/ar/ar.o\
$(BUILD_DIR)/asm/ar/arq.o\
$(AR_FILES): CFLAGS += -common off
$(AR_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(AR_FILES:.o=.d)
$(BUILD_DIR)/src/ar/ar.a: $(AR_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(AR_FILES) > build/AR_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/AR_ofiles


@ -1,13 +0,0 @@
BASE_FILES:=\
$(BUILD_DIR)/src/base/PPCArch.o\
$(BASE_FILES): CFLAGS += -common off
$(BASE_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(BASE_FILES:.o=.d)
$(BUILD_DIR)/src/base/base.a: $(BASE_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(BASE_FILES) > build/BASE_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/BASE_ofiles


@ -1,26 +0,0 @@
CARD_FILES:=\
$(BUILD_DIR)/asm/card/CARDBios.o\
$(BUILD_DIR)/asm/card/CARDRdwr.o\
$(BUILD_DIR)/asm/card/CARDBlock.o\
$(BUILD_DIR)/asm/card/CARDDir.o\
$(BUILD_DIR)/asm/card/CARDCheck.o\
$(BUILD_DIR)/asm/card/CARDMount.o\
$(BUILD_DIR)/asm/card/CARDFormat.o\
$(BUILD_DIR)/asm/card/CARDOpen.o\
$(BUILD_DIR)/asm/card/CARDCreate.o\
$(BUILD_DIR)/asm/card/CARDRead.o\
$(BUILD_DIR)/asm/card/CARDWrite.o\
$(BUILD_DIR)/asm/card/CARDDelete.o\
$(BUILD_DIR)/asm/card/CARDStat.o\
$(BUILD_DIR)/asm/card/CARDRename.o\
$(CARD_FILES): CFLAGS += -common off
$(CARD_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(CARD_FILES:.o=.d)
$(BUILD_DIR)/src/card/card.a: $(CARD_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(CARD_FILES) > build/CARD_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/CARD_ofiles


@ -1,13 +0,0 @@
DB_FILES:=\
$(BUILD_DIR)/asm/db/db.o\
$(DB_FILES): CFLAGS += -common off
$(DB_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(DB_FILES:.o=.d)
$(BUILD_DIR)/src/db/db.a: $(DB_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(DB_FILES) > build/DB_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/DB_ofiles


@ -1,13 +0,0 @@
DSP_FILES:=\
$(BUILD_DIR)/src/dsp/dsp.o\
$(DSP_FILES): CFLAGS += -common off
$(DSP_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(DSP_FILES:.o=.d)
$(BUILD_DIR)/src/dsp/dsp.a: $(DSP_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(DSP_FILES) > build/DSP_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/DSP_ofiles


@ -1,18 +0,0 @@
DVD_FILES:=\
$(BUILD_DIR)/asm/dvd/dvdlow.o\
$(BUILD_DIR)/asm/dvd/dvdfs.o\
$(BUILD_DIR)/asm/dvd/dvd.o\
$(BUILD_DIR)/asm/dvd/dvdqueue.o\
$(BUILD_DIR)/asm/dvd/dvderror.o\
$(BUILD_DIR)/asm/dvd/fstload.o\
$(DVD_FILES): CFLAGS += -common off
$(DVD_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(DVD_FILES:.o=.d)
$(BUILD_DIR)/src/dvd/dvd.a: $(DVD_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(DVD_FILES) > build/DVD_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/DVD_ofiles


@ -1,26 +0,0 @@
GX_FILES:=\
$(BUILD_DIR)/asm/gx/GXInit.o\
$(BUILD_DIR)/asm/gx/GXFifo.o\
$(BUILD_DIR)/asm/gx/GXAttr.o\
$(BUILD_DIR)/asm/gx/GXMisc.o\
$(BUILD_DIR)/asm/gx/GXGeometry.o\
$(BUILD_DIR)/asm/gx/GXFrameBuf.o\
$(BUILD_DIR)/asm/gx/GXLight.o\
$(BUILD_DIR)/asm/gx/GXTexture.o\
$(BUILD_DIR)/asm/gx/GXBump.o\
$(BUILD_DIR)/asm/gx/GXTev.o\
$(BUILD_DIR)/asm/gx/GXPixel.o\
$(BUILD_DIR)/src/gx/GXStubs.o\
$(BUILD_DIR)/asm/gx/GXDisplayList.o\
$(BUILD_DIR)/asm/gx/GXTransform.o\
$(GX_FILES): CFLAGS += -common off
$(GX_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(GX_FILES:.o=.d)
$(BUILD_DIR)/src/gx/gx.a: $(GX_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(GX_FILES) > build/GX_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/GX_ofiles


@ -1,13 +0,0 @@
HIO_FILES:=\
$(BUILD_DIR)/asm/hio/hio.o\
$(HIO_FILES): CFLAGS += -common off
$(HIO_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(HIO_FILES:.o=.d)
$(BUILD_DIR)/src/hio/hio.a: $(HIO_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(HIO_FILES) > build/HIO_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/HIO_ofiles


@ -1,13 +0,0 @@
HVQM4DEC_FILES:=\
$(BUILD_DIR)/asm/hvqm4dec/hvqm4dec.o\
$(HVQM4DEC_FILES): CFLAGS += -common off
$(HVQM4DEC_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(HVQM4DEC_FILES:.o=.d)
$(BUILD_DIR)/src/hvqm4dec/hvqm4dec.a: $(HVQM4DEC_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(HVQM4DEC_FILES) > build/HVQM4DEC_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/HVQM4DEC_ofiles


@ -1,72 +0,0 @@
JAUDIO_FILES:=\
$(BUILD_DIR)/src/jaudio/dummyprobe.o\
$(BUILD_DIR)/asm/jaudio/memory.o\
$(BUILD_DIR)/asm/jaudio/aictrl.o\
$(BUILD_DIR)/asm/jaudio/sample.o\
$(BUILD_DIR)/asm/jaudio/dummyrom.o\
$(BUILD_DIR)/asm/jaudio/audiothread.o\
$(BUILD_DIR)/asm/jaudio/audiothread_fakebss.o\
$(BUILD_DIR)/asm/jaudio/streamctrl.o\
$(BUILD_DIR)/asm/jaudio/dspbuf.o\
$(BUILD_DIR)/asm/jaudio/cpubuf.o\
$(BUILD_DIR)/asm/jaudio/playercall.o\
$(BUILD_DIR)/asm/jaudio/dvdthread.o\
$(BUILD_DIR)/asm/jaudio/audiomesg.o\
$(BUILD_DIR)/asm/jaudio/rate.o\
$(BUILD_DIR)/asm/jaudio/stackchecker.o\
$(BUILD_DIR)/asm/jaudio/dspboot.o\
$(BUILD_DIR)/asm/jaudio/dspproc.o\
$(BUILD_DIR)/asm/jaudio/ipldec.o\
$(BUILD_DIR)/asm/jaudio/dsp_cardunlock.o\
$(BUILD_DIR)/asm/jaudio/driverinterface.o\
$(BUILD_DIR)/asm/jaudio/dspdriver.o\
$(BUILD_DIR)/asm/jaudio/dspinterface.o\
$(BUILD_DIR)/asm/jaudio/fxinterface.o\
$(BUILD_DIR)/asm/jaudio/bankread.o\
$(BUILD_DIR)/asm/jaudio/waveread.o\
$(BUILD_DIR)/asm/jaudio/connect.o\
$(BUILD_DIR)/asm/jaudio/tables.o\
$(BUILD_DIR)/asm/jaudio/bankdrv.o\
$(BUILD_DIR)/asm/jaudio/random.o\
$(BUILD_DIR)/asm/jaudio/aramcall.o\
$(BUILD_DIR)/asm/jaudio/ja_calc.o\
$(BUILD_DIR)/asm/jaudio/fat.o\
$(BUILD_DIR)/asm/jaudio/cmdstack.o\
$(BUILD_DIR)/asm/jaudio/virload.o\
$(BUILD_DIR)/asm/jaudio/heapctrl.o\
$(BUILD_DIR)/asm/jaudio/jammain_2.o\
$(BUILD_DIR)/asm/jaudio/midplay.o\
$(BUILD_DIR)/asm/jaudio/noteon.o\
$(BUILD_DIR)/asm/jaudio/seqsetup.o\
$(BUILD_DIR)/asm/jaudio/centcalc.o\
$(BUILD_DIR)/asm/jaudio/jamosc.o\
$(BUILD_DIR)/asm/jaudio/oneshot.o\
$(BUILD_DIR)/asm/jaudio/interface.o\
$(BUILD_DIR)/asm/jaudio/verysimple.o\
$(BUILD_DIR)/asm/jaudio/app_inter.o\
$(BUILD_DIR)/asm/jaudio/pikiinter.o\
$(BUILD_DIR)/asm/jaudio/piki_player.o\
$(BUILD_DIR)/asm/jaudio/piki_bgm.o\
$(BUILD_DIR)/asm/jaudio/piki_scene.o\
$(BUILD_DIR)/asm/jaudio/pikidemo.o\
$(BUILD_DIR)/asm/jaudio/file_seq.o\
$(BUILD_DIR)/asm/jaudio/cmdqueue.o\
$(BUILD_DIR)/src/jaudio/filter3d.o\
$(BUILD_DIR)/asm/jaudio/syncstream.o\
$(BUILD_DIR)/asm/jaudio/bankloader.o\
$(BUILD_DIR)/asm/jaudio/interleave.o\
$(BUILD_DIR)/asm/jaudio/pikiseq.o\
$(BUILD_DIR)/asm/jaudio/hplaybss.o\
$(BUILD_DIR)/asm/jaudio/hplaybss2.o\
$(BUILD_DIR)/asm/jaudio/hvqm_play.o\
$(JAUDIO_FILES): CFLAGS += -common off -func_align 32 -lang c++
$(JAUDIO_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(JAUDIO_FILES:.o=.d)
$(BUILD_DIR)/src/jaudio/jaudio.o: $(JAUDIO_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(JAUDIO_FILES) > build/JAUDIO_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/JAUDIO_ofiles


@ -19,10 +19,5 @@ void Probe_Finish(void) { return; }
* Address: ........
* Size: 000050
*/
/*
void Console_printf(void)
{
// UNUSED FUNCTION
}
*/
void Console_printf(char*, ...) { return; }
}


@ -1,15 +0,0 @@
MTX_FILES:=\
$(BUILD_DIR)/asm/mtx/mtx.o\
$(BUILD_DIR)/asm/mtx/mtx44.o\
$(BUILD_DIR)/asm/mtx/vec.o\
$(MTX_FILES): CFLAGS += -common off
$(MTX_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(MTX_FILES:.o=.d)
$(BUILD_DIR)/src/mtx/mtx.a: $(MTX_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(MTX_FILES) > build/MTX_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/MTX_ofiles


@ -1,13 +0,0 @@
ODENOTSTUB_FILES:=\
$(BUILD_DIR)/src/odenotstub/odenotstub.o\
$(ODENOTSTUB_FILES): CFLAGS += -common off
$(ODENOTSTUB_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(ODENOTSTUB_FILES:.o=.d)
$(BUILD_DIR)/src/odenotstub/odenotstub.a: $(ODENOTSTUB_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(ODENOTSTUB_FILES) > build/ODENOTSTUB_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/ODENOTSTUB_ofiles


@ -1,37 +0,0 @@
OS_FILES:=\
$(BUILD_DIR)/asm/os/OS.o\
$(BUILD_DIR)/asm/os/OSAlarm.o\
$(BUILD_DIR)/src/os/OSAlloc.o\
$(BUILD_DIR)/src/os/OSArena.o\
$(BUILD_DIR)/asm/os/OSAudioSystem.o\
$(BUILD_DIR)/asm/os/OSCache.o\
$(BUILD_DIR)/asm/os/OSContext.o\
$(BUILD_DIR)/src/os/OSError.o\
$(BUILD_DIR)/asm/os/OSExi.o\
$(BUILD_DIR)/asm/os/OSFont.o\
$(BUILD_DIR)/asm/os/OSInterrupt.o\
$(BUILD_DIR)/src/os/OSLink.o\
$(BUILD_DIR)/asm/os/OSMessage.o\
$(BUILD_DIR)/asm/os/OSMutex.o\
$(BUILD_DIR)/asm/os/OSReboot.o\
$(BUILD_DIR)/asm/os/OSReset.o\
$(BUILD_DIR)/asm/os/OSResetSW.o\
$(BUILD_DIR)/asm/os/OSRtc.o\
$(BUILD_DIR)/asm/os/OSSerial.o\
$(BUILD_DIR)/asm/os/OSSync.o\
$(BUILD_DIR)/asm/os/OSThread.o\
$(BUILD_DIR)/asm/os/OSTime.o\
$(BUILD_DIR)/asm/os/OSUartExi.o\
$(BUILD_DIR)/src/os/__start.o\
$(BUILD_DIR)/asm/os/__ppc_eabi_init.o\
$(OS_FILES): CFLAGS += -common off
$(OS_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(OS_FILES:.o=.d)
$(BUILD_DIR)/src/os/os.a: $(OS_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(OS_FILES) > build/OS_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/OS_ofiles


@ -2,6 +2,14 @@
#pragma section code_type ".init"
static void __check_pad3(void)
{
if ((Pad3Button & 0x0eef) == 0x0eef) {
OSResetSystem(OS_RESET_RESTART, 0, FALSE);
}
return;
}
WEAKFUNC ASM void __start(void)
{
#ifdef __MWERKS__ // clang-format off
@ -75,6 +83,19 @@ _no_args:
_end_of_parseargs:
bl DBInit
bl OSInit
#if VERSION == 0
lis r4, 0x8000
addi r4, r4, 0x30e6
lhz r3, 0x0(r4)
andi. r5, r3, 0x8000
beq _check_pad
andi. r3, r3, 0x7fff
cmplwi r3, 0x1
bne _end
_check_pad:
bl __check_pad3
_end:
#endif
bl __init_user
mr r3, r14
mr r4, r15


@ -1,14 +0,0 @@
PAD_FILES:=\
$(BUILD_DIR)/asm/pad/Padclamp.o\
$(BUILD_DIR)/asm/pad/Pad.o\
$(PAD_FILES): CFLAGS += -common off
$(PAD_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(PAD_FILES:.o=.d)
$(BUILD_DIR)/src/pad/pad.a: $(PAD_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(PAD_FILES) > build/PAD_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/PAD_ofiles


@ -1,37 +0,0 @@
COLIN_FILES:=\
$(BUILD_DIR)/asm/plugPikiColin/cardutil.o\
$(BUILD_DIR)/asm/plugPikiColin/dynsimulator.o\
$(BUILD_DIR)/asm/plugPikiColin/animMgr.o\
$(BUILD_DIR)/asm/plugPikiColin/gameflow.o\
$(BUILD_DIR)/asm/plugPikiColin/game.o\
$(BUILD_DIR)/asm/plugPikiColin/gamePrefs.o\
$(BUILD_DIR)/asm/plugPikiColin/gameSetup.o\
$(BUILD_DIR)/asm/plugPikiColin/cardSelect.o\
$(BUILD_DIR)/asm/plugPikiColin/mapSelect.o\
$(BUILD_DIR)/asm/plugPikiColin/newPikiGame.o\
$(BUILD_DIR)/asm/plugPikiColin/introGame.o\
$(BUILD_DIR)/src/plugPikiColin/gameExit.o\
$(BUILD_DIR)/asm/plugPikiColin/gauges.o\
$(BUILD_DIR)/asm/plugPikiColin/genMapObject.o\
$(BUILD_DIR)/asm/plugPikiColin/gui.o\
$(BUILD_DIR)/asm/plugPikiColin/parameters.o\
$(BUILD_DIR)/asm/plugPikiColin/plugPiki.o\
$(BUILD_DIR)/asm/plugPikiColin/titles.o\
$(BUILD_DIR)/asm/plugPikiColin/ninLogo.o\
$(BUILD_DIR)/asm/plugPikiColin/mapMgr.o\
$(BUILD_DIR)/asm/plugPikiColin/dayMgr.o\
$(BUILD_DIR)/asm/plugPikiColin/cinePlayer.o\
$(BUILD_DIR)/asm/plugPikiColin/lightPool.o\
$(BUILD_DIR)/asm/plugPikiColin/memoryCard.o\
$(BUILD_DIR)/asm/plugPikiColin/moviePlayer.o\
$(BUILD_DIR)/asm/plugPikiColin/movSample.o\
$(COLIN_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(COLIN_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiColin/plugPikiColin.a: $(COLIN_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(COLIN_FILES) > build/COLIN_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/COLIN_ofiles


@ -1,143 +0,0 @@
KANDO_FILES:=\
$(BUILD_DIR)/asm/plugPikiKando/omake.o\
$(BUILD_DIR)/asm/plugPikiKando/radarInfo.o\
$(BUILD_DIR)/asm/plugPikiKando/interactBattle.o\
$(BUILD_DIR)/asm/plugPikiKando/interactGrab.o\
$(BUILD_DIR)/asm/plugPikiKando/interactEtc.o\
$(BUILD_DIR)/asm/plugPikiKando/interactPullout.o\
$(BUILD_DIR)/asm/plugPikiKando/saiEvents.o\
$(BUILD_DIR)/asm/plugPikiKando/simpleAI.o\
$(BUILD_DIR)/asm/plugPikiKando/formationMgr.o\
$(BUILD_DIR)/src/plugPikiKando/globalShapes.o\
$(BUILD_DIR)/asm/plugPikiKando/playerState.o\
$(BUILD_DIR)/asm/plugPikiKando/gameDemo.o\
$(BUILD_DIR)/asm/plugPikiKando/demoInvoker.o\
$(BUILD_DIR)/asm/plugPikiKando/demoEvent.o\
$(BUILD_DIR)/asm/plugPikiKando/resultFlag.o\
$(BUILD_DIR)/asm/plugPikiKando/aiConstants.o\
$(BUILD_DIR)/asm/plugPikiKando/kio.o\
$(BUILD_DIR)/asm/plugPikiKando/keyConfig.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPerf.o\
$(BUILD_DIR)/asm/plugPikiKando/courseDebug.o\
$(BUILD_DIR)/asm/plugPikiKando/memStat.o\
$(BUILD_DIR)/asm/plugPikiKando/collInfo.o\
$(BUILD_DIR)/asm/plugPikiKando/complexCreature.o\
$(BUILD_DIR)/asm/plugPikiKando/creature.o\
$(BUILD_DIR)/asm/plugPikiKando/creatureCollision.o\
$(BUILD_DIR)/asm/plugPikiKando/creatureCollPart.o\
$(BUILD_DIR)/asm/plugPikiKando/creatureMove.o\
$(BUILD_DIR)/asm/plugPikiKando/creatureStick.o\
$(BUILD_DIR)/asm/plugPikiKando/dualCreature.o\
$(BUILD_DIR)/asm/plugPikiKando/dynCreature.o\
$(BUILD_DIR)/src/plugPikiKando/eventListener.o\
$(BUILD_DIR)/asm/plugPikiKando/fastGrid.o\
$(BUILD_DIR)/asm/plugPikiKando/ropeCreature.o\
$(BUILD_DIR)/src/plugPikiKando/objectTypes.o\
$(BUILD_DIR)/asm/plugPikiKando/pelletMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/animPellet.o\
$(BUILD_DIR)/asm/plugPikiKando/genPellet.o\
$(BUILD_DIR)/asm/plugPikiKando/pelletState.o\
$(BUILD_DIR)/asm/plugPikiKando/workObject.o\
$(BUILD_DIR)/asm/plugPikiKando/routeMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/seMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/seConstants.o\
$(BUILD_DIR)/asm/plugPikiKando/soundMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/updateMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/cPlate.o\
$(BUILD_DIR)/asm/plugPikiKando/aiStone.o\
$(BUILD_DIR)/asm/plugPikiKando/aiActions.o\
$(BUILD_DIR)/asm/plugPikiKando/aiAttack.o\
$(BUILD_DIR)/asm/plugPikiKando/aiBore.o\
$(BUILD_DIR)/asm/plugPikiKando/aiBoMake.o\
$(BUILD_DIR)/asm/plugPikiKando/aiBou.o\
$(BUILD_DIR)/asm/plugPikiKando/aiBridge.o\
$(BUILD_DIR)/asm/plugPikiKando/aiBreakWall.o\
$(BUILD_DIR)/asm/plugPikiKando/aiTransport.o\
$(BUILD_DIR)/asm/plugPikiKando/aiKinoko.o\
$(BUILD_DIR)/asm/plugPikiKando/aiChase.o\
$(BUILD_DIR)/asm/plugPikiKando/aiCrowd.o\
$(BUILD_DIR)/asm/plugPikiKando/aiDecoy.o\
$(BUILD_DIR)/asm/plugPikiKando/aiEnter.o\
$(BUILD_DIR)/asm/plugPikiKando/aiEscape.o\
$(BUILD_DIR)/asm/plugPikiKando/aiExit.o\
$(BUILD_DIR)/asm/plugPikiKando/aiMine.o\
$(BUILD_DIR)/asm/plugPikiKando/aiFormation.o\
$(BUILD_DIR)/asm/plugPikiKando/aiFree.o\
$(BUILD_DIR)/asm/plugPikiKando/aiGoto.o\
$(BUILD_DIR)/asm/plugPikiKando/aiGuard.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPick.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPickCreature.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPullout.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPush.o\
$(BUILD_DIR)/asm/plugPikiKando/aiPut.o\
$(BUILD_DIR)/asm/plugPikiKando/aiRandomBoid.o\
$(BUILD_DIR)/asm/plugPikiKando/aiRescue.o\
$(BUILD_DIR)/asm/plugPikiKando/aiRope.o\
$(BUILD_DIR)/asm/plugPikiKando/aiShoot.o\
$(BUILD_DIR)/asm/plugPikiKando/aiWatch.o\
$(BUILD_DIR)/asm/plugPikiKando/aiWeed.o\
$(BUILD_DIR)/asm/plugPikiKando/aiTable.o\
$(BUILD_DIR)/asm/plugPikiKando/aiAction.o\
$(BUILD_DIR)/asm/plugPikiKando/pikiInf.o\
$(BUILD_DIR)/asm/plugPikiKando/piki.o\
$(BUILD_DIR)/src/plugPikiKando/odoMeter.o\
$(BUILD_DIR)/asm/plugPikiKando/pikidoKill.o\
$(BUILD_DIR)/asm/plugPikiKando/pikiMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/pikiState.o\
$(BUILD_DIR)/asm/plugPikiKando/viewPiki.o\
$(BUILD_DIR)/src/plugPikiKando/conditions.o\
$(BUILD_DIR)/asm/plugPikiKando/generator.o\
$(BUILD_DIR)/asm/plugPikiKando/generatorCache.o\
$(BUILD_DIR)/asm/plugPikiKando/objectMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/searchSystem.o\
$(BUILD_DIR)/src/plugPikiKando/smartPtr.o\
$(BUILD_DIR)/asm/plugPikiKando/itemGem.o\
$(BUILD_DIR)/asm/plugPikiKando/weedsItem.o\
$(BUILD_DIR)/asm/plugPikiKando/kusaItem.o\
$(BUILD_DIR)/asm/plugPikiKando/fishItem.o\
$(BUILD_DIR)/asm/plugPikiKando/ufoItem.o\
$(BUILD_DIR)/asm/plugPikiKando/ufoAnim.o\
$(BUILD_DIR)/asm/plugPikiKando/bombItem.o\
$(BUILD_DIR)/asm/plugPikiKando/goalItem.o\
$(BUILD_DIR)/asm/plugPikiKando/pikiheadItem.o\
$(BUILD_DIR)/asm/plugPikiKando/keyItem.o\
$(BUILD_DIR)/asm/plugPikiKando/ropeItem.o\
$(BUILD_DIR)/asm/plugPikiKando/seedItem.o\
$(BUILD_DIR)/asm/plugPikiKando/genItem.o\
$(BUILD_DIR)/asm/plugPikiKando/itemAI.o\
$(BUILD_DIR)/asm/plugPikiKando/itemMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/itemObject.o\
$(BUILD_DIR)/asm/plugPikiKando/mizuItem.o\
$(BUILD_DIR)/asm/plugPikiKando/paniItemAnimator.o\
$(BUILD_DIR)/asm/plugPikiKando/genNavi.o\
$(BUILD_DIR)/asm/plugPikiKando/navi.o\
$(BUILD_DIR)/asm/plugPikiKando/naviState.o\
$(BUILD_DIR)/asm/plugPikiKando/naviDemoState.o\
$(BUILD_DIR)/asm/plugPikiKando/gameCoreSection.o\
$(BUILD_DIR)/asm/plugPikiKando/gmWin.o\
$(BUILD_DIR)/asm/plugPikiKando/gameStat.o\
$(BUILD_DIR)/asm/plugPikiKando/kmath.o\
$(BUILD_DIR)/asm/plugPikiKando/uteffect.o\
$(BUILD_DIR)/asm/plugPikiKando/kontroller.o\
$(BUILD_DIR)/src/plugPikiKando/mapcode.o\
$(BUILD_DIR)/asm/plugPikiKando/utkando.o\
$(BUILD_DIR)/asm/plugPikiKando/naviMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/genMapParts.o\
$(BUILD_DIR)/asm/plugPikiKando/mapParts.o\
$(BUILD_DIR)/asm/plugPikiKando/panipikianimator.o\
$(BUILD_DIR)/asm/plugPikiKando/actor.o\
$(BUILD_DIR)/src/plugPikiKando/actorMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/genActor.o\
$(BUILD_DIR)/src/plugPikiKando/pikiInfo.o\
$(BUILD_DIR)/asm/plugPikiKando/plantMgr.o\
$(BUILD_DIR)/asm/plugPikiKando/paniPlantAnimator.o\
$(KANDO_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(KANDO_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiKando/plugPikiKando.a: $(KANDO_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(KANDO_FILES) > build/KANDO_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/KANDO_ofiles


@ -1,66 +0,0 @@
NAKATA_FILES:=\
$(BUILD_DIR)/asm/plugPikiNakata/genteki.o\
$(BUILD_DIR)/src/plugPikiNakata/nakatacode.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibfunction.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibgeometry.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibgeometry3d.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibgraphics.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibmath.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibspline.o\
$(BUILD_DIR)/asm/plugPikiNakata/nlibsystem.o\
$(BUILD_DIR)/asm/plugPikiNakata/panianimator.o\
$(BUILD_DIR)/asm/plugPikiNakata/panipikianimmgr.o\
$(BUILD_DIR)/asm/plugPikiNakata/panitekianimator.o\
$(BUILD_DIR)/asm/plugPikiNakata/panitestsection.o\
$(BUILD_DIR)/asm/plugPikiNakata/paraparameters.o\
$(BUILD_DIR)/asm/plugPikiNakata/pcamcamera.o\
$(BUILD_DIR)/asm/plugPikiNakata/pcamcameramanager.o\
$(BUILD_DIR)/asm/plugPikiNakata/pcammotionevents.o\
$(BUILD_DIR)/asm/plugPikiNakata/pcamcameraparameters.o\
$(BUILD_DIR)/asm/plugPikiNakata/peve.o\
$(BUILD_DIR)/asm/plugPikiNakata/peveconditions.o\
$(BUILD_DIR)/asm/plugPikiNakata/pevemotionevents.o\
$(BUILD_DIR)/asm/plugPikiNakata/tai.o\
$(BUILD_DIR)/asm/plugPikiNakata/taiattackactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taibasicactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taichappy.o\
$(BUILD_DIR)/asm/plugPikiNakata/taicollec.o\
$(BUILD_DIR)/asm/plugPikiNakata/taicollisionactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taieffectactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taiiwagen.o\
$(BUILD_DIR)/asm/plugPikiNakata/taijudgementactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taikinoko.o\
$(BUILD_DIR)/asm/plugPikiNakata/taimessageactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taimizinko.o\
$(BUILD_DIR)/asm/plugPikiNakata/taimotionactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taimoveactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/tainapkid.o\
$(BUILD_DIR)/asm/plugPikiNakata/taiotimoti.o\
$(BUILD_DIR)/asm/plugPikiNakata/taipalm.o\
$(BUILD_DIR)/asm/plugPikiNakata/taireactionactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taiswallow.o\
$(BUILD_DIR)/asm/plugPikiNakata/taishell.o\
$(BUILD_DIR)/asm/plugPikiNakata/taitimeractions.o\
$(BUILD_DIR)/asm/plugPikiNakata/taiwaitactions.o\
$(BUILD_DIR)/asm/plugPikiNakata/teki.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekianimationmanager.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekibteki.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekiconditions.o\
$(BUILD_DIR)/src/plugPikiNakata/tekievent.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekiinteraction.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekimgr.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekinakata.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekinteki.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekiparameters.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekipersonality.o\
$(BUILD_DIR)/asm/plugPikiNakata/tekistrategy.o\
$(NAKATA_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(NAKATA_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiNakata/plugPikiNakata.a: $(NAKATA_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(NAKATA_FILES) > build/NAKATA_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/NAKATA_ofiles


@ -1,45 +0,0 @@
NISHIMURA_FILES:=\
$(BUILD_DIR)/asm/plugPikiNishimura/genBoss.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Boss.o\
$(BUILD_DIR)/asm/plugPikiNishimura/BossAnimMgr.o\
$(BUILD_DIR)/asm/plugPikiNishimura/BossCnd.o\
$(BUILD_DIR)/asm/plugPikiNishimura/BossMgr.o\
$(BUILD_DIR)/asm/plugPikiNishimura/BossShapeObj.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Spider.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SpiderAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SpiderLeg.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Snake.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SnakeAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SnakeBody.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Slime.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SlimeAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SlimeBody.o\
$(BUILD_DIR)/asm/plugPikiNishimura/SlimeCreature.o\
$(BUILD_DIR)/asm/plugPikiNishimura/King.o\
$(BUILD_DIR)/asm/plugPikiNishimura/KingAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/KingBody.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Kogane.o\
$(BUILD_DIR)/asm/plugPikiNishimura/KoganeAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Pom.o\
$(BUILD_DIR)/asm/plugPikiNishimura/PomAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/KingBack.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Nucleus.o\
$(BUILD_DIR)/asm/plugPikiNishimura/NucleusAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/CoreNucleus.o\
$(BUILD_DIR)/asm/plugPikiNishimura/CoreNucleusAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/Mizu.o\
$(BUILD_DIR)/asm/plugPikiNishimura/MizuAi.o\
$(BUILD_DIR)/asm/plugPikiNishimura/nscalculation.o\
$(BUILD_DIR)/asm/plugPikiNishimura/RumbleData.o\
$(BUILD_DIR)/asm/plugPikiNishimura/HmRumbleMgr.o\
$(BUILD_DIR)/src/plugPikiNishimura/HmRumbleSample.o\
$(NISHIMURA_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(NISHIMURA_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiNishimura/plugPikiNishimura.a: $(NISHIMURA_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(NISHIMURA_FILES) > build/NISHIMURA_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/NISHIMURA_ofiles


@ -1,35 +0,0 @@
OGAWA_FILES:=\
$(BUILD_DIR)/asm/plugPikiOgawa/ogTest.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogSub.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogTitle.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogPause.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogTutorial.o\
$(BUILD_DIR)/src/plugPikiOgawa/ogTutorialData.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogMap.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogResult.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogRader.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogFileSelect.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogMessage.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogMemChk.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogDiary.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogMenu.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogFileChkSel.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogMakeDefault.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogTotalScore.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogSave.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogNitaku.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogFileCopy.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogFileDelete.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogGraph.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogStart.o\
$(BUILD_DIR)/asm/plugPikiOgawa/ogCallBack.o\
$(OGAWA_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(OGAWA_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiOgawa/plugPikiOgawa.a: $(OGAWA_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(OGAWA_FILES) > build/OGAWA_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/OGAWA_ofiles

View File

@ -1,92 +0,0 @@
YAMASHITA_FILES:=\
$(BUILD_DIR)/asm/plugPikiYamashita/gameCourseClear.o\
$(BUILD_DIR)/asm/plugPikiYamashita/gameStageClear.o\
$(BUILD_DIR)/asm/plugPikiYamashita/gameCredits.o\
$(BUILD_DIR)/asm/plugPikiYamashita/zenMath.o\
$(BUILD_DIR)/asm/plugPikiYamashita/effectMgr.o\
$(BUILD_DIR)/asm/plugPikiYamashita/particleGenerator.o\
$(BUILD_DIR)/asm/plugPikiYamashita/particleLoader.o\
$(BUILD_DIR)/src/plugPikiYamashita/solidField.o\
$(BUILD_DIR)/asm/plugPikiYamashita/particleManager.o\
$(BUILD_DIR)/asm/plugPikiYamashita/particleMdlManager.o\
$(BUILD_DIR)/asm/plugPikiYamashita/bBoardColourAnim.o\
$(BUILD_DIR)/asm/plugPikiYamashita/simpleParticle.o\
$(BUILD_DIR)/asm/plugPikiYamashita/tekiyteki.o\
$(BUILD_DIR)/asm/plugPikiYamashita/tekiyamashita.o\
$(BUILD_DIR)/src/plugPikiYamashita/TAIanimation.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAItank.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAImar.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAreaction.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAmove.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAmotion.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAjudge.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAattack.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DGrafContext.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DOrthoGraph.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DPerspGraph.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DPane.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DPicture.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DScreen.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DStream.o\
$(BUILD_DIR)/asm/plugPikiYamashita/PSUList.o\
$(BUILD_DIR)/asm/plugPikiYamashita/PUTRect.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DWindow.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DTextBox.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DPrint.o\
$(BUILD_DIR)/asm/plugPikiYamashita/P2DFont.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawGameInfo.o\
$(BUILD_DIR)/asm/plugPikiYamashita/zenGraphics.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawContainer.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCommon.o\
$(BUILD_DIR)/asm/plugPikiYamashita/zenController.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawHurryUp.o\
$(BUILD_DIR)/asm/plugPikiYamashita/texAnim.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawAccount.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawMenu.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIeffectAttack.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIbeatle.o\
$(BUILD_DIR)/asm/plugPikiYamashita/menuPanelMgr.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIkabekuiA.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIkabekuiB.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIkabekuiC.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAItamago.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIdororo.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIhibaA.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIAeffect.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAImiurin.o\
$(BUILD_DIR)/src/plugPikiYamashita/ptclGenPack.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawProgre.o\
$(BUILD_DIR)/asm/plugPikiYamashita/spectrumCursorMgr.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawWorldMap.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCountDown.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawGameOver.o\
$(BUILD_DIR)/asm/plugPikiYamashita/yai.o\
$(BUILD_DIR)/asm/plugPikiYamashita/effectMgr2D.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawWMPause.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIusuba.o\
$(BUILD_DIR)/asm/plugPikiYamashita/TAIotama.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCMcourseSelect.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCMtitle.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCMscore.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCMbest.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawCMresult.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawMenuBase.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawHiScore.o\
$(BUILD_DIR)/asm/plugPikiYamashita/damageEffect.o\
$(BUILD_DIR)/asm/plugPikiYamashita/alphaWipe.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawUfoParts.o\
$(BUILD_DIR)/asm/plugPikiYamashita/zenSys.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawSaveMes.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawSaveFailure.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawFinalResult.o\
$(BUILD_DIR)/asm/plugPikiYamashita/drawOptionSave.o\
$(YAMASHITA_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(YAMASHITA_FILES:.o=.d)
$(BUILD_DIR)/src/plugPikiYamashita/plugPikiYamashita.a: $(YAMASHITA_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(YAMASHITA_FILES) > build/YAMASHITA_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/YAMASHITA_ofiles

View File

@ -1,32 +0,0 @@
SYSCOMMON_FILES:=\
$(BUILD_DIR)/asm/sysCommon/ayuStack.o\
$(BUILD_DIR)/src/sysCommon/baseApp.o\
$(BUILD_DIR)/src/sysCommon/stream.o\
$(BUILD_DIR)/asm/sysCommon/streamBufferedInput.o\
$(BUILD_DIR)/src/sysCommon/string.o\
$(BUILD_DIR)/asm/sysCommon/graphics.o\
$(BUILD_DIR)/asm/sysCommon/grLight.o\
$(BUILD_DIR)/asm/sysCommon/shapeBase.o\
$(BUILD_DIR)/asm/sysCommon/shpLightFlares.o\
$(BUILD_DIR)/asm/sysCommon/shpObjColl.o\
$(BUILD_DIR)/asm/sysCommon/shpRoutes.o\
$(BUILD_DIR)/asm/sysCommon/sysMath.o\
$(BUILD_DIR)/asm/sysCommon/matMath.o\
$(BUILD_DIR)/asm/sysCommon/stdSystem.o\
$(BUILD_DIR)/asm/sysCommon/node.o\
$(BUILD_DIR)/src/sysCommon/timers.o\
$(BUILD_DIR)/src/sysCommon/controller.o\
$(BUILD_DIR)/src/sysCommon/cmdStream.o\
$(BUILD_DIR)/asm/sysCommon/camera.o\
$(BUILD_DIR)/src/sysCommon/atx.o\
$(BUILD_DIR)/src/sysCommon/id32.o\
$(SYSCOMMON_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(SYSCOMMON_FILES:.o=.d)
$(BUILD_DIR)/src/sysCommon/sysCommon.a: $(SYSCOMMON_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(SYSCOMMON_FILES) > build/SYSCOMMON_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/SYSCOMMON_ofiles

View File

@ -175,10 +175,10 @@ void Stream::writeString(String& s)
* Address: ........
* Size: 0000C4
*/
// void Stream::print(char*, ...)
// {
// // UNUSED FUNCTION
// }
void Stream::print(char*, ...)
{
// UNUSED FUNCTION
}
/*
* --INFO--

View File

@ -1,17 +0,0 @@
SYSDOLPHIN_FILES:=\
$(BUILD_DIR)/asm/sysDolphin/texture.o\
$(BUILD_DIR)/asm/sysDolphin/system.o\
$(BUILD_DIR)/asm/sysDolphin/sysNew.o\
$(BUILD_DIR)/asm/sysDolphin/controllerMgr.o\
$(BUILD_DIR)/asm/sysDolphin/dgxGraphics.o\
$(BUILD_DIR)/src/sysDolphin/gameApp.o\
$(SYSDOLPHIN_FILES): MWCC_VERSION := 1.2.5n
DEPENDS += $(SYSDOLPHIN_FILES:.o=.d)
$(BUILD_DIR)/src/sysDolphin/sysDolphin.a: $(SYSDOLPHIN_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(SYSDOLPHIN_FILES) > build/SYSDOLPHIN_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/SYSDOLPHIN_ofiles

View File

@ -1,13 +0,0 @@
VI_FILES:=\
$(BUILD_DIR)/asm/vi/vi.o\
$(VI_FILES): CFLAGS += -common off
$(VI_FILES): MWCC_VERSION := 1.2.5
DEPENDS += $(VI_FILES:.o=.d)
$(BUILD_DIR)/src/vi/vi.a: $(VI_FILES)
@echo Linking... $@
$(QUIET) mkdir -p $(dir $@)
@echo $(VI_FILES) > build/VI_ofiles
$(QUIET) $(LD) -library $(LIBRARY_LDFLAGS) -o $@ -lcf ldscript.lcf @build/VI_ofiles

3
tools/.gitignore vendored
View File

@ -1,3 +0,0 @@
# Build artifacts
*.exe
elf2dol

View File

@ -1,73 +0,0 @@
#!/usr/bin/env python
#
# Usage: ./asmsplit.py MAPFILE < ASMFILE
import os
import re
import sys
basedir = 'asm/'
macros = 'macros.inc'
filenames = {}
lastfile = None
with open(sys.argv[1]) as mapfile:
for mapline in mapfile:
match = re.match(' [0-9a-f]{8} [0-9a-f]{6} ([0-9a-f]{8}) [ 0-9][0-9] [^ ]+ \t(.+)', mapline)
if match and match.group(2) != lastfile:
lastfile = match.group(2)
addr = int(match.group(1), 16)
fname = basedir + '/'.join(map(lambda s: os.path.splitext(s)[0], match.group(2).strip().split(' '))) + '.s'
filenames[addr] = fname
curfile = open(macros, 'w')
curaddr = 0
section = ''
remainder = None
while asmline := remainder or sys.stdin.readline():
remainder = None
trim = asmline.strip()
if trim.startswith('.section'):
curaddr = int(trim[-23:-13], 0)
section = asmline
curfile.close()
else:
if trim != "":
if curfile.closed:
fname = filenames[curaddr]
if os.path.exists(fname):
curfile = open(fname, 'a')
curfile.write('\n')
else:
os.makedirs(os.path.dirname(fname), exist_ok = True)
curfile = open(fname, 'x')
curfile.write('.include "' + macros + '"\n\n')
curfile.write(section)
if trim.startswith('.skip'):
curaddr += int(trim[6:], 0)
elif trim.startswith('.incbin'):
f, a, s = asmline.split(', ')
a = int(a, 0)
s = int(s, 0)
if s < 0: raise ValueError()
elif s == 0: continue
k = 1
while (curaddr + k) not in filenames and k < s: k += 1
curaddr += k
if k < s:
asmline = f + ', 0x' + format(a, 'X') + ', 0x' + format(k, 'X') + '\n'
remainder = f + ', 0x' + format(a + k, 'X') + ', 0x' + format(s - k, 'X') + '\n'
elif not trim.startswith('.global') and not trim.endswith(':'):
curaddr += 4
if not curfile.closed: curfile.write(asmline)
if curaddr in filenames and filenames[curaddr] != curfile.name:
curfile.close()
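
For reference, a minimal sketch of the map-line match that drives the splitter above. The sample line is made up; its column layout (section offset, size, virtual address, alignment, symbol, then a tab-separated translation-unit field) is only an assumption inferred from the regex itself.

import re

# Hypothetical map line, shaped to match the regex used by asmsplit.py above.
mapline = " 00000000 000120 80005000  4 __start \tboot.c sysDolphin.a"
m = re.match(r" [0-9a-f]{8} [0-9a-f]{6} ([0-9a-f]{8}) [ 0-9][0-9] [^ ]+ \t(.+)", mapline)
if m:
    addr = int(m.group(1), 16)   # 0x80005000: start address of this unit
    unit = m.group(2).strip()    # "boot.c sysDolphin.a"
    # asmsplit.py would write this unit's code to asm/boot/sysDolphin.s
    print(hex(addr), unit)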

View File

@ -1,343 +0,0 @@
#!/usr/bin/env python3
################################################################################
# Description #
################################################################################
# calcprogress: Used to calculate the progress of the Pikmin decomp. #
# stores to CSV so that it can be used for a webpage display. #
# #
# Usage: No arguments needed #
################################################################################
###############################################
# #
# Imports #
# #
###############################################
import os
import sys
import struct
import re
import math
import csv
import json
import argparse
from datetime import datetime
###############################################
# #
# Constants #
# #
###############################################
MEM1_HI = 0x81200000
MEM1_LO = 0x80004000
MW_WII_SYMBOL_REGEX = r"^\s*"\
r"(?P<SectOfs>\w{8})\s+"\
r"(?P<Size>\w{6})\s+"\
r"(?P<VirtOfs>\w{8})\s+"\
r"(?P<FileOfs>\w{8})\s+"\
r"(\w{1,2})\s+"\
r"(?P<Symbol>[0-9A-Za-z_<>$@.*]*)\s*"\
r"(?P<Object>[\S ]*)"
MW_GC_SYMBOL_REGEX = r"^\s*"\
r"(?P<SectOfs>\w{8})\s+"\
r"(?P<Size>\w{6})\s+"\
r"(?P<VirtOfs>\w{8})\s+"\
r"(\w{1,2})\s+"\
r"(?P<Symbol>[0-9A-Za-z_<>$@.*]*)\s*"\
r"(?P<Object>[\S ]*)"
REGEX_TO_USE = MW_GC_SYMBOL_REGEX
TEXT_SECTIONS = ["init", "text"]
DATA_SECTIONS = [
"rodata", "data", "bss", "sdata", "sbss", "sdata2", "sbss2",
"ctors", "_ctors", "dtors", "ctors$99", "_ctors$99", "ctors$00", "dtors$99",
"extab_", "extabindex_", "_extab", "_exidx", "extab", "extabindex"
]
# DOL info
TEXT_SECTION_COUNT = 7
DATA_SECTION_COUNT = 11
SECTION_TEXT = 0
SECTION_DATA = 1
# Progress flavor
CODE_FRAC = 30 # total code "item" amount
DATA_FRAC = 100 # total data "item" amount
CODE_ITEM = "ship parts" # code flavor item
DATA_ITEM = "Pikmin" # data flavor item
CSV_FILE_NAME = 'progress.csv'
CSV_FILE_PATH = f'./tools/{CSV_FILE_NAME}'
###############################################
# #
# Entrypoint #
# #
###############################################
def update_csv(
code_count,
decomp_code_size,
code_completion_percentage,
data_count,
decomp_data_size,
data_completion_percentage,
sentence,
):
does_file_exist = False
are_there_changes = True
try:
with open(CSV_FILE_PATH, 'r') as file:
reader = csv.reader(file)
does_file_exist = True
latest_row = list(reader)[-1]
latest_code_size = int(latest_row[1]) # code_completion_in_bytes
latest_data_size = int(latest_row[4]) # data_completion_in_bytes
are_there_changes = not (
decomp_code_size == latest_code_size and decomp_data_size == latest_data_size
)
print(f"Successfully read {CSV_FILE_PATH}!")
except:
print(f'Failed to read {CSV_FILE_PATH}!')
if not are_there_changes:
print("No changes detected. Exiting...")
return
col_one = f"code_count_in_{CODE_ITEM.lower()}"
col_two = "code_completion_in_bytes"
col_three = "code_completion_in_percentage"
col_four = f"data_count_in_{DATA_ITEM.lower()}"
col_five = "data_completion_in_bytes"
col_six = "data_completion_in_percentage"
col_seven = "sentence"
col_eight = "created_at"
headers = [
col_one,
col_two,
col_three,
col_four,
col_five,
col_six,
col_seven,
col_eight,
]
try:
with open(CSV_FILE_PATH, 'a', newline='') as file:
writer = csv.DictWriter(file, fieldnames=headers)
# only add headers if this is the first iteration of the file
if not does_file_exist:
writer.writeheader()
writer.writerow({
col_one: code_count,
col_two: decomp_code_size,
col_three: code_completion_percentage,
col_four: data_count,
col_five: decomp_data_size,
col_six: data_completion_percentage,
col_seven: sentence,
col_eight: datetime.now(),
})
print(f"Successfully wrote to {CSV_FILE_PATH}!")
except:
print(f"Failed to write to {CSV_FILE_PATH}!")
def countDigits(n):
if n > 0:
digits = int(math.log10(n))+1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n))+2 # +1 if you don't count the '-'
return digits
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Calculate progress.")
parser.add_argument("dol", help="Path to DOL")
parser.add_argument("map", help="Path to map")
parser.add_argument("-o", "--output", help="JSON output file")
args = parser.parse_args()
# Sum up DOL section sizes
dol_handle = open(args.dol, "rb")
# Seek to virtual addresses
dol_handle.seek(0x48)
# Read virtual addresses
text_starts = list()
for i in range(TEXT_SECTION_COUNT):
text_starts.append(int.from_bytes(dol_handle.read(4), byteorder='big'))
data_starts = list()
for i in range(DATA_SECTION_COUNT):
data_starts.append(int.from_bytes(dol_handle.read(4), byteorder='big'))
# Read lengths
text_sizes = list()
for i in range(TEXT_SECTION_COUNT):
text_sizes.append(int.from_bytes(dol_handle.read(4), byteorder='big'))
data_sizes = list()
for i in range(DATA_SECTION_COUNT):
data_sizes.append(int.from_bytes(dol_handle.read(4), byteorder='big'))
# BSS address + length
bss_start = int.from_bytes(dol_handle.read(4), byteorder='big')
bss_size = int.from_bytes(dol_handle.read(4), byteorder='big')
bss_end = bss_start + bss_size
dol_code_size = 0
dol_data_size = 0
for i in range(DATA_SECTION_COUNT):
# Ignore sections inside BSS
if (data_starts[i] >= bss_start) and (data_starts[i] + data_sizes[i] <= bss_end):
continue
dol_data_size += data_sizes[i]
dol_data_size += bss_size
for i in text_sizes:
dol_code_size += i
# Open map file
mapfile = open(args.map, "r")
symbols = mapfile.readlines()
decomp_code_size = 0
decomp_data_size = 0
section_type = None
# Find first section
first_section = 0
while (symbols[first_section].startswith(".") == False and "section layout" not in symbols[first_section]):
first_section += 1
assert(first_section < len(symbols)), "Map file contains no sections!!!"
cur_object = None
cur_size = 0
j = 0
for i in range(first_section, len(symbols)):
# New section
if (symbols[i].startswith(".") == True or "section layout" in symbols[i]):
# Grab section name (i.e. ".init section layout" -> "init")
sectionName = re.search(r"\.*(?P<Name>\w+)\s", symbols[i]).group("Name")
# Determine type of section
section_type = SECTION_DATA if (sectionName in DATA_SECTIONS) else SECTION_TEXT
# Parse symbols until we hit the next section declaration
else:
if "UNUSED" in symbols[i]:
continue
if "entry of" in symbols[i]:
if j == i - 1:
if section_type == SECTION_TEXT:
decomp_code_size -= cur_size
else:
decomp_data_size -= cur_size
cur_size = 0
#print(f"Line* {j}: {symbols[j]}")
#print(f"Line {i}: {symbols[i]}")
continue
assert(section_type != None), f"Symbol found outside of a section!!!\n{symbols[i]}"
match_obj = re.search(REGEX_TO_USE, symbols[i])
# Should be a symbol in ASM (so we discard it)
if (match_obj == None):
#print(f"Line {i}: {symbols[i]}")
continue
#print(match_obj.group("Object"))
# Has the object file changed?
last_object = cur_object
cur_object = match_obj.group("Object").strip()
if last_object != cur_object or cur_object.endswith(" (asm)"):
continue
# Is the symbol a file-wide section?
symb = match_obj.group("Symbol")
if (symb.startswith("*fill*")) or (symb.startswith(".") and symb[1:] in TEXT_SECTIONS or symb[1:] in DATA_SECTIONS):
continue
# Subtract size of symbols ending in ".o", as they're assembly.
if match_obj.group("Object").endswith(".o") == True:
if j == i - 1:
if section_type == SECTION_TEXT:
decomp_code_size -= cur_size
else:
decomp_data_size -= cur_size
cur_size = 0
#print(f"Line* {j}: {symbols[j]}")
#print(f"Line {i}: {symbols[i]}")
continue
# For sections that don't start with "."
if (symb in DATA_SECTIONS):
continue
# If not, we accumulate the file size
cur_size = int(match_obj.group("Size"), 16)
j = i
if (section_type == SECTION_TEXT):
decomp_code_size += cur_size
else:
decomp_data_size += cur_size
# Calculate percentages
codeCompletionPcnt = (decomp_code_size / dol_code_size) # code completion percent
dataCompletionPcnt = (decomp_data_size / dol_data_size) # data completion percent
bytesPerCodeItem = dol_code_size / CODE_FRAC # bytes per code item
bytesPerDataItem = dol_data_size / DATA_FRAC # bytes per data item
codeCount = math.floor(decomp_code_size / bytesPerCodeItem)
dataCount = math.floor(decomp_data_size / bytesPerDataItem)
# Math for aligning percentage prints
codeDigitsD = (countDigits(decomp_code_size))
dataDigitsD = (countDigits(decomp_data_size))
codeDigits = (countDigits(dol_code_size))
dataDigits = (countDigits(dol_data_size))
maxDigitsD = (max(codeDigitsD, dataDigitsD))
maxDigits = (max(codeDigits, dataDigits))
codeStrA = "\tCode sections: "
codeStrB = f"{decomp_code_size} / "
codeStrC = f"{dol_code_size} bytes in src ({codeCompletionPcnt:%})"
dataStrA = "\tData sections: "
dataStrB = f"{decomp_data_size} / "
dataStrC = f"{dol_data_size} bytes in src ({dataCompletionPcnt:%})"
# Print progress
print("Progress:")
print(f"{codeStrA + ' ' * (maxDigitsD-codeDigitsD) + codeStrB + ' ' * (maxDigits-codeDigits) + codeStrC}")
# print(f"\tCode sections: {decomp_code_size} / {dol_code_size} bytes in src ({codeCompletionPcnt:%})")
print(f"{dataStrA + ' ' * (maxDigitsD-dataDigitsD) + dataStrB + ' ' * (maxDigits-dataDigits) + dataStrC}")
# print(f"\tData sections: {decomp_data_size} / {dol_data_size} bytes in src ({dataCompletionPcnt:%})")
sentence = f"\nYou have {codeCount} out of {CODE_FRAC} {CODE_ITEM} and {dataCount} out of {DATA_FRAC} {DATA_ITEM}."
print(sentence)
if args.output:
data = {
"dol": {
"code": decomp_code_size,
"code/total": dol_code_size,
"data": decomp_data_size,
"data/total": dol_data_size,
}
}
with open(args.output, "w") as f:
json.dump(data, f)
update_csv(
code_count=codeCount,
decomp_code_size=decomp_code_size,
code_completion_percentage=codeCompletionPcnt,
data_count=dataCount,
decomp_data_size=decomp_data_size,
data_completion_percentage=dataCompletionPcnt,
sentence=sentence
)
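
As a worked example of the progress flavor math above, with made-up byte counts (only CODE_FRAC comes from the script; the sizes below are illustrative, not real measurements):

import math

dol_code_size    = 3_000_000   # hypothetical total .text bytes in the DOL
decomp_code_size = 1_200_000   # hypothetical bytes attributed to C++ in the map
CODE_FRAC = 30                 # total code "item" amount (ship parts)

bytes_per_part = dol_code_size / CODE_FRAC             # 100000.0 bytes per ship part
parts = math.floor(decomp_code_size / bytes_per_part)  # 12
percent = decomp_code_size / dol_code_size             # 0.4
print(f"{parts} / {CODE_FRAC} ship parts ({percent:%})")  # 12 / 30 ship parts (40.000000%)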

120
tools/decompctx.py Normal file
View File

@ -0,0 +1,120 @@
#!/usr/bin/env python3
###
# Generates a ctx.c file, usable for "Context" on https://decomp.me.
#
# Usage:
# python3 tools/decompctx.py src/file.cpp
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import os
import re
from typing import List
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.abspath(os.path.join(script_dir, ".."))
src_dir = os.path.join(root_dir, "src")
include_dirs = [
os.path.join(root_dir, "include"),
os.path.join(root_dir, "include/stl"),
]
include_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]$')
guard_pattern = re.compile(r"^#\s*ifndef\s+(.*)$")
defines = set()
def import_h_file(in_file: str, r_path: str, deps: List[str]) -> str:
rel_path = os.path.join(root_dir, r_path, in_file)
if os.path.exists(rel_path):
return import_c_file(rel_path, deps)
for include_dir in include_dirs:
inc_path = os.path.join(include_dir, in_file)
if os.path.exists(inc_path):
return import_c_file(inc_path, deps)
else:
print("Failed to locate", in_file)
return ""
def import_c_file(in_file: str, deps: List[str]) -> str:
in_file = os.path.relpath(in_file, root_dir)
deps.append(in_file)
out_text = ""
try:
with open(in_file, encoding="utf-8") as file:
out_text += process_file(in_file, list(file), deps)
except Exception:
with open(in_file) as file:
out_text += process_file(in_file, list(file), deps)
return out_text
def process_file(in_file: str, lines: List[str], deps: List[str]) -> str:
out_text = ""
for idx, line in enumerate(lines):
guard_match = guard_pattern.match(line.strip())
if idx == 0:
if guard_match:
if guard_match[1] in defines:
break
defines.add(guard_match[1])
print("Processing file", in_file)
include_match = include_pattern.match(line.strip())
if include_match and not include_match[1].endswith(".s"):
out_text += f'/* "{in_file}" line {idx} "{include_match[1]}" */\n'
out_text += import_h_file(include_match[1], os.path.dirname(in_file), deps)
out_text += f'/* end "{include_match[1]}" */\n'
else:
out_text += line
return out_text
def sanitize_path(path: str) -> str:
return path.replace("\\", "/").replace(" ", "\\ ")
def main():
parser = argparse.ArgumentParser(
description="""Create a context file which can be used for decomp.me"""
)
parser.add_argument(
"c_file",
help="""File from which to create context""",
)
parser.add_argument(
"-o",
"--output",
help="""Output file""",
default="ctx.c",
)
parser.add_argument(
"-d",
"--depfile",
help="""Dependency file""",
)
args = parser.parse_args()
deps = []
output = import_c_file(args.c_file, deps)
with open(os.path.join(root_dir, args.output), "w", encoding="utf-8") as f:
f.write(output)
if args.depfile:
with open(os.path.join(root_dir, args.depfile), "w", encoding="utf-8") as f:
f.write(sanitize_path(args.output) + ":")
for dep in deps:
path = sanitize_path(dep)
f.write(f" \\\n\t{path}")
if __name__ == "__main__":
main()
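
A usage sketch for the new context generator; the source path and depfile name below are hypothetical, while the flags are the ones declared by its argument parser above.

import subprocess

# Hypothetical translation unit; any file under src/ works the same way.
subprocess.run(
    ["python3", "tools/decompctx.py", "src/sysCommon/stream.cpp",
     "-o", "ctx.c", "-d", "ctx.d"],
    check=True,
)
# ctx.c now holds the file with every #include expanded inline (deduplicated
# via the include guards), ready to paste as "Context" on https://decomp.me.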

View File

@ -1,686 +0,0 @@
#!/usr/bin/env python
#
# GameCube .dol file disassembler
# Usage: doldisasm.py DOL_FILE > assembly_file.s
#
from capstone import *
from capstone.ppc import *
import re
import sys
substitutions = (
('<', '$$0'),
('>', '$$1'),
('@', '$$2'),
('\\', '$$3'),
(',', '$$4'),
('-', '$$5')
)
def format(symbol):
for sub in substitutions:
symbol = symbol.replace(sub[0], sub[1])
return symbol
def decodeformat(symbol):
for sub in substitutions:
symbol = symbol.replace(sub[1], sub[0])
return symbol
r13_addr = None
r2_addr = None
labels = set()
labelNames = {}
#argshift = 1
#if sys.argv[argshift] == '-m':
with open('pik1excised.map', 'r') as mapfile:
for line in mapfile:
match = re.match(' [0-9a-f]{8} [0-9a-f]{6} ([0-9a-f]{8}) [ 0-9][0-9] ([^ 0-9.][^ ]*)', line)
if match:
addr = int(match.group(1), 16)
name = format(match.group(2))
labels.add(addr)
labelNames[addr] = name
#argshift += 2
argshift = 1  # The map file path is hard-coded above, so the DOL is the only argument (drag-and-drop friendly).
with open(sys.argv[argshift], 'rb') as dolfile:
filecontent = bytearray(dolfile.read())
def read_u8(offset):
return filecontent[offset]
def read_u32(offset):
return (filecontent[offset + 0] << 24) | (filecontent[offset + 1] << 16) | (filecontent[offset + 2] << 8) | filecontent[offset + 3]
def sign_extend_16(value):
if value > 0 and (value & 0x8000):
value -= 0x10000
return value
def sign_extend_12(value):
if value > 0 and (value & 0x800):
value -= 0x1000
return value
textOffsets = []
textAddresses = []
textSizes = []
dataOffsets = []
dataAddresses = []
dataSizes = []
for i in range(0, 7):
textOffsets.append(read_u32(0x00 + 4 * i))
textAddresses.append(read_u32(0x48 + 4 * i))
textSizes.append(read_u32(0x90 + 4 * i))
for i in range(0, 11):
dataOffsets.append(read_u32(0x1C + 4 * i))
dataAddresses.append(read_u32(0x64 + 4 * i))
dataSizes.append(read_u32(0xAC + 4 * i))
bssAddress = read_u32(0xD8)
bssSize = read_u32(0xDC)
entryPoint = read_u32(0xE0)
origStdout = sys.stdout
with open('asm/disasm.s', 'w') as out:
sys.stdout = out
print('/*')
print('Code sections:')
for i in range(0, 7):
if textOffsets[i] != 0 and textAddresses[i] != 0 and textSizes[i] != 0:
print('\t.text%i:\t0x%08X\t0x%08X\t0x%08X' % (i, textOffsets[i], textAddresses[i], textAddresses[i] + textSizes[i]))
print('Data sections:')
for i in range(0, 11):
if dataOffsets[i] != 0 and dataAddresses[i] != 0 and dataSizes[i] != 0:
print('\t.data%i:\t0x%08X\t0x%08X\t0x%08X' % (i, dataOffsets[i], dataAddresses[i], dataAddresses[i] + dataSizes[i]))
print('BSS section:')
print('\t.bss:\t0x%08X\t0x%08X\t0x%08X' % (0, bssAddress, bssAddress + bssSize))
print('Entry Point: 0x%08X' % entryPoint)
print('*/')
# Add entry point
labels.add(entryPoint)
labelNames[entryPoint] = '__start'
def addr_to_label(addr):
if addr in labels:
if addr in labelNames:
return labelNames[addr]
else:
return "lbl_%08X" % addr
else:
return "0x%08X" % addr
def add_label(addr, name):
labels.add(addr)
if name != None and not addr in labelNames:
labelNames[addr] = name
def is_label_candidate(addr):
for i in range(0, 7):
if addr >= textAddresses[i] and addr < textAddresses[i] + textSizes[i] and (addr & 3) == 0:
return True
for i in range(0, 11):
if addr >= dataAddresses[i] and addr < dataAddresses[i] + dataSizes[i]:
return True
if addr >= bssAddress and addr < bssAddress + bssSize:
return True
return False
# TODO: find all of them
loadStoreInsns = {
PPC_INS_LWZ,
PPC_INS_LMW,
PPC_INS_LHA,
PPC_INS_LHAU,
PPC_INS_LHZ,
PPC_INS_LHZU,
PPC_INS_LBZ,
PPC_INS_LBZU,
PPC_INS_LFD,
PPC_INS_LFDU,
PPC_INS_LFS,
PPC_INS_LFSU,
PPC_INS_STW,
PPC_INS_STWU,
PPC_INS_STMW,
PPC_INS_STH,
PPC_INS_STHU,
PPC_INS_STB,
PPC_INS_STBU,
PPC_INS_STFS,
PPC_INS_STFSU,
PPC_INS_STFD,
PPC_INS_STDU,
}
# Returns true if the instruction is a load or store with the given register as a base
def is_load_store_reg_offset(insn, reg):
return insn.id in loadStoreInsns and (reg == None or insn.operands[1].mem.base == reg)
cs = Cs(CS_ARCH_PPC, CS_MODE_32 | CS_MODE_BIG_ENDIAN)
cs.detail = True
cs.imm_unsigned = False
blacklistedInsns = {
# Unsupported instructions
PPC_INS_VMSUMSHM, PPC_INS_VMHADDSHS, PPC_INS_XXSLDWI, PPC_INS_VSEL,
PPC_INS_XVSUBSP, PPC_INS_XXSEL, PPC_INS_XVMULSP, PPC_INS_XVDIVSP,
PPC_INS_VADDUHM, PPC_INS_XXPERMDI, PPC_INS_XVMADDASP, PPC_INS_XVMADDMSP,
PPC_INS_XVCMPGTSP, PPC_INS_XXMRGHD, PPC_INS_XSMSUBMDP, PPC_INS_XSTDIVDP,
PPC_INS_XVADDSP, PPC_INS_XVCMPEQSP, PPC_INS_XVMSUBASP, PPC_INS_XVCMPGESP,
PPC_INS_VMRGHB, PPC_INS_XXSPLTW,
# Found during disassemble attempts
PPC_INS_XVCMPEQDP, PPC_INS_XVCMPEQDP, PPC_INS_XVMADDADP, PPC_INS_XVMADDMDP,
PPC_INS_VPKUHUM, PPC_INS_XSMADDMDP, PPC_INS_XSMADDADP, PPC_INS_XSCMPUDP,
PPC_INS_XSMSUBADP, PPC_INS_XSCMPODP, PPC_INS_XSCMPUDP, PPC_INS_XVMSUBMSP,
PPC_INS_XVMSUBMDP, PPC_INS_XVCMPGEDP, PPC_INS_XVMSUBADP, PPC_INS_XVCMPGTDP,
PPC_INS_XVMSUBADP, PPC_INS_XVNMSUBMSP,
# Instructions that Capstone gets wrong
PPC_INS_MFESR, PPC_INS_MFDEAR, PPC_INS_MTESR, PPC_INS_MTDEAR, PPC_INS_MFICCR, PPC_INS_MFASR,
# Sus
PPC_INS_ATTN
}
# Calls callback for every instruction in the specified code section
def disasm_iter(offset, address, size, callback):
if size == 0:
return
start = address
end = address + size
while address < end:
code = filecontent[offset + (address-start) : offset + size]
for insn in cs.disasm(code, address):
address = insn.address
if insn.id in blacklistedInsns:
callback(address, offset + address - start, None, insn.bytes)
else:
callback(address, offset + address - start, insn, insn.bytes)
address += 4
if address < end:
o = offset + address - start
callback(address, offset + address - start, None, filecontent[o : o + 4])
address += 4
lisInsns = {} # register : insn
splitDataLoads = {} # address of load insn (both high and low) : data
linkedInsns = {} # addr of lis insn : ori/addi insn
# Returns true if the instruction writes to the specified register
def reg_modified(insn, reg):
if insn.operands[0].type == PPC_OP_REG and insn.operands[0].reg == reg:
return True
else:
return False
# Computes the combined value from a lis, addi/ori instruction pair
def combine_split_load_value(hiLoadInsn, loLoadInsn):
assert hiLoadInsn.id == PPC_INS_LIS
#assert loLoadInsn.id in {PPC_INS_ADDI, PPC_INS_ORI}
#assert loLoadInsn.operands[1].reg == hiLoadInsn.operands[0].reg
# hiLoadInsn must be "lis rX, hiPart"
value = hiLoadInsn.operands[1].imm << 16
# loLoadInsn must be "addi rY, rX, loPart"
if loLoadInsn.id == PPC_INS_ORI:
value |= loLoadInsn.operands[2].imm
elif loLoadInsn.id == PPC_INS_ADDI:
value += sign_extend_16(loLoadInsn.operands[2].imm)
elif is_load_store_reg_offset(loLoadInsn, hiLoadInsn.operands[0].reg):
value += sign_extend_16(loLoadInsn.operands[1].mem.disp)
else:
assert False
return value
def is_store_insn(insn):
# TODO: all store instructions
return insn.id in {PPC_INS_STW}
# Get labels
def get_label_callback(address, offset, insn, bytes):
global r13_addr
global r2_addr
if insn == None:
return
#print("%s %s" % (insn.mnemonic, insn.op_str))
# if branch instruction
if insn.id in {PPC_INS_B, PPC_INS_BL, PPC_INS_BC, PPC_INS_BDZ, PPC_INS_BDNZ}:
lisInsns.clear()
for op in insn.operands:
if op.type == PPC_OP_IMM:
#print("label 0x%08X" % op.imm)
labels.add(op.imm)
if insn.id == PPC_INS_BL:
#labelNames[op.imm] = 'func_%08X' % op.imm
add_label(op.imm, 'func_%08X' % op.imm)
# Detect split load (high part)
# this is 'lis rX, hipart'
if insn.id == PPC_INS_LIS:
# Record instruction that loads into register with 'lis'
lisInsns[insn.operands[0].reg] = insn
# Detect split load (low part)
# this is either 'addi/ori rY, rX, lopart' or 'load/store rY, lopart(rX)'
elif (insn.id in {PPC_INS_ADDI, PPC_INS_ORI} and insn.operands[1].reg in lisInsns) \
or (is_load_store_reg_offset(insn, None) and insn.operands[1].mem.base in lisInsns):
hiLoadInsn = lisInsns[insn.operands[1].reg]
# Compute combined value
value = combine_split_load_value(hiLoadInsn, insn)
if is_label_candidate(value):
labels.add(value)
# Record linked instruction
linkedInsns[hiLoadInsn.address] = insn
splitDataLoads[hiLoadInsn.address] = value
splitDataLoads[insn.address] = value
lisInsns.pop(insn.operands[1].reg, None)
# detect r2/r13 initialization
if insn.id == PPC_INS_ORI and insn.operands[0].reg == insn.operands[1].reg:
if r2_addr == None and insn.operands[0].reg == PPC_REG_R2:
r2_addr = value
#print('# DEBUG: set r2 to 0x%08X' % value)
elif r13_addr == None and insn.operands[0].reg == PPC_REG_R13:
r13_addr = value
#print('# DEBUG: set r13 to 0x%08X' % value)
# Remove record if register is overwritten
elif (not is_store_insn(insn)) and len(insn.operands) >= 1 and insn.operands[0].type == PPC_OP_REG:
lisInsns.pop(insn.operands[0].reg, None)
# Handle r13 offset values
if r13_addr != None:
if insn.id == PPC_INS_ADDI and insn.operands[1].value.reg == PPC_REG_R13: # r13 offset
value = r13_addr + sign_extend_16(insn.operands[2].imm)
if is_label_candidate(value):
labels.add(value)
#labelNames[value] = 'r13_%08X' % value
if is_load_store_reg_offset(insn, PPC_REG_R13):
value = r13_addr + sign_extend_16(insn.operands[1].mem.disp)
if is_label_candidate(value):
labels.add(value)
#labelNames[value] = 'r13_%08X' % value
# Handle r2 offset values
if r2_addr != None:
if insn.id == PPC_INS_ADDI and insn.operands[1].value.reg == PPC_REG_R2: # r2 offset
value = r2_addr + sign_extend_16(insn.operands[2].imm)
if is_label_candidate(value):
labels.add(value)
#labelNames[value] = 'r2_%08X' % value
if is_load_store_reg_offset(insn, PPC_REG_R2):
value = r2_addr + sign_extend_16(insn.operands[1].mem.disp)
if is_label_candidate(value):
labels.add(value)
#labelNames[value] = 'r2_%08X' % value
for i in range(0, 7):
if textSizes[i] != 0:
disasm_iter(textOffsets[i], textAddresses[i], textSizes[i], get_label_callback)
# Write macros
print('# PowerPC Register Constants')
for i in range(0, 32):
print(".set r%i, %i" % (i, i))
for i in range(0, 32):
print(".set f%i, %i" % (i, i))
for i in range(0, 8):
print(".set qr%i, %i" % (i, i))
if r13_addr != None:
print('# Small Data Area (read/write) Base')
print(".set _SDA_BASE_, 0x%08X" % r13_addr)
if r2_addr != None:
print('# Small Data Area (read only) Base')
print(".set _SDA2_BASE_, 0x%08X" % r2_addr)
print('')
# Converts the instruction to a string, fixing various issues with Capstone
def insn_to_text(insn, raw):
# Probably data, not a real instruction
if insn.id == PPC_INS_BDNZ and (insn.bytes[0] & 1):
return None
if insn.id in {PPC_INS_B, PPC_INS_BL, PPC_INS_BDZ, PPC_INS_BDNZ}:
return "%s %s" % (insn.mnemonic, addr_to_label(insn.operands[0].imm))
elif insn.id == PPC_INS_BC:
branchPred = '+' if (insn.bytes[1] & 0x20) else ''
if insn.operands[0].type == PPC_OP_IMM:
return "%s%s %s" % (insn.mnemonic, branchPred, addr_to_label(insn.operands[0].imm))
elif insn.operands[1].type == PPC_OP_IMM:
return "%s%s %s, %s" % (insn.mnemonic, branchPred, insn.reg_name(insn.operands[0].value.reg), addr_to_label(insn.operands[1].imm))
# Handle split loads (high part)
if insn.address in splitDataLoads and insn.id == PPC_INS_LIS:
loLoadInsn = linkedInsns[insn.address]
#assert loLoadInsn.id in {PPC_INS_ADDI, PPC_INS_ORI}
value = splitDataLoads[insn.address]
suffix = 'h' if loLoadInsn.id == PPC_INS_ORI else 'ha'
return '%s %s, %s@%s' % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), addr_to_label(value), suffix)
# Handle split loads (low part)
elif insn.address in splitDataLoads and insn.id in {PPC_INS_ADDI, PPC_INS_ORI}:
value = splitDataLoads[insn.address]
return '%s %s, %s, %s@l' % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), insn.reg_name(insn.operands[1].reg), addr_to_label(value))
elif insn.address in splitDataLoads and is_load_store_reg_offset(insn, None):
value = splitDataLoads[insn.address]
return '%s %s, %s@l(%s)' % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), addr_to_label(value), insn.reg_name(insn.operands[1].mem.base))
# r13 offset loads
if r13_addr != None:
if insn.id == PPC_INS_ADDI and insn.operands[1].reg == PPC_REG_R13:
value = r13_addr + sign_extend_16(insn.operands[2].imm)
if value in labels:
return "%s %s, %s, %s@sda21" % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), insn.reg_name(insn.operands[1].reg), addr_to_label(value))
if is_load_store_reg_offset(insn, PPC_REG_R13):
value = r13_addr + sign_extend_16(insn.operands[1].mem.disp)
if value in labels:
return "%s %s, %s@sda21(%s)" % (insn.mnemonic, insn.reg_name(insn.operands[0].value.reg), addr_to_label(value), insn.reg_name(insn.operands[1].mem.base))
# r2 offset loads
if r2_addr != None:
if insn.id == PPC_INS_ADDI and insn.operands[1].reg == PPC_REG_R2:
value = r2_addr + sign_extend_16(insn.operands[2].imm)
if value in labels:
return "%s %s, %s, %s@sda21" % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), insn.reg_name(insn.operands[1].reg), addr_to_label(value))
if is_load_store_reg_offset(insn, PPC_REG_R2):
value = r2_addr + sign_extend_16(insn.operands[1].mem.disp)
if value in labels:
return "%s %s, %s@sda21(%s)" % (insn.mnemonic, insn.reg_name(insn.operands[0].value.reg), addr_to_label(value), insn.reg_name(insn.operands[1].mem.base))
# Sign-extend immediate values; Capstone does not do this automatically
if insn.id in {PPC_INS_ADDI, PPC_INS_ADDIC, PPC_INS_SUBFIC, PPC_INS_MULLI} and (insn.operands[2].imm & 0x8000):
return "%s %s, %s, %i" % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), insn.reg_name(insn.operands[1].value.reg), insn.operands[2].imm - 0x10000)
elif (insn.id == PPC_INS_LI or insn.id == PPC_INS_CMPWI) and (insn.operands[1].imm & 0x8000):
return "%s %s, %i" % (insn.mnemonic, insn.reg_name(insn.operands[0].reg), insn.operands[1].imm - 0x10000)
# cntlz -> cntlzw
elif insn.id == PPC_INS_CNTLZW:
return "cntlzw %s" % insn.op_str
elif insn.id == PPC_INS_MTICCR:
return 'mtictc %s' % insn.op_str
# GNU assembler does not accept this form
elif insn.id == PPC_INS_LMW and insn.operands[0].reg == PPC_REG_R0:
return '.4byte 0x%08X /* illegal %s %s */' % (raw, insn.mnemonic, insn.op_str)
return '%s %s' % (insn.mnemonic, insn.op_str)
def disasm_ps(inst):
RA = ((inst >> 16) & 0x1f)
RB = ((inst >> 11) & 0x1f)
FA = ((inst >> 16) & 0x1f)
FB = ((inst >> 11) & 0x1f)
FC = ((inst >> 6) & 0x1f)
FD = ((inst >> 21) & 0x1f)
FS = ((inst >> 21) & 0x1f)
IX = ((inst >> 7) & 0x7)
WX = ((inst >> 10) & 0x1)
opcode = (inst >> 1) & 0x1F
if opcode == 6: # doesn't seem to be used
mnemonic = 'psq_lux' if inst & 0x40 else 'psq_lx'
return '%s f%i, r%i, r%i, %i, qr%i' % (mnemonic, FD, RA, RB, WX, IX)
if opcode == 7:
mnemonic = 'psq_stux' if inst & 0x40 else 'psq_stx'
return '%s f%i, r%i, r%i, %i, qr%i' % (mnemonic, FS, RA, RB, WX, IX)
if opcode == 18:
return 'ps_div f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 20:
return 'ps_sub f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 21:
return 'ps_add f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 23:
return 'ps_sel f%i, f%i, f%i' % (FD, FA, FC)
if opcode == 24:
return 'ps_res f%i, f%i' % (FD, FB)
if opcode == 25:
return 'ps_mul f%i, f%i, f%i' % (FD, FA, FC)
if opcode == 26:
return 'ps_rsqrte f%i, f%i' % (FD, FB)
if opcode == 28:
return 'ps_msub f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 29:
return 'ps_madd f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 30:
return 'ps_nmsub f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 31:
return 'ps_nmadd f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 10:
return 'ps_sum0 f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 11:
return 'ps_sum1 f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 12:
return 'ps_muls0 f%i, f%i, f%i' % (FD, FA, FC)
if opcode == 13:
return 'ps_muls1 f%i, f%i, f%i' % (FD, FA, FC)
if opcode == 14:
return 'ps_madds0 f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
if opcode == 15:
return 'ps_madds1 f%i, f%i, f%i, f%i' % (FD, FA, FC, FB)
opcode = (inst >> 1) & 0x3FF
if opcode == 40:
return 'ps_neg f%i, f%i' % (FD, FB)
if opcode == 72:
return 'ps_mr f%i, f%i' % (FD, FB)
if opcode == 136:
return 'ps_nabs f%i, f%i' % (FD, FB)
if opcode == 264:
return 'ps_abs f%i, f%i' % (FD, FB)
if opcode in {0, 32, 64, 96}:
mnemonics = ['ps_cmpu0', 'ps_cmpo0', 'ps_cmpu1', 'ps_cmpo1']
mnemonic = mnemonics[(inst >> 6) & 3]
i = (inst & 0x03800000) >> 23
return '%s cr%i, f%i, f%i' % (mnemonic, i, FA, FB)
if opcode == 528:
return 'ps_merge00 f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 560:
return 'ps_merge01 f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 592:
return 'ps_merge10 f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 624:
return 'ps_merge11 f%i, f%i, f%i' % (FD, FA, FB)
if opcode == 1014:
if not (inst & 0x03e00000):
if (inst & 1) == 0:
return 'dcbz_l r%i, r%i' % ((inst & 0x001f0000) >> 16, (inst & 0x0000f800) >> 11)
return None
def disasm_ps_mem(inst, idx):
RA = ((inst >> 16) & 0x1f)
RS = ((inst >> 21) & 0x1f)
I = ((inst >> 12) & 0x7)
W = ((inst >> 15) & 0x1)
disp = sign_extend_12(inst & 0xFFF)
if idx == 56:
mnemonic = 'psq_l'
if idx == 57:
mnemonic = 'psq_lu'
if idx == 60:
mnemonic = 'psq_st'
if idx == 61:
mnemonic = 'psq_stu'
return '%s f%i, %i(r%i), %i, qr%i' % (mnemonic, RS, disp, RA, W, I)
def disasm_fcmp(inst):
crd = (inst & 0x03800000) >> 23
a = (inst & 0x001f0000) >> 16
b = (inst & 0x0000f800) >> 11
return 'fcmpo cr%i, f%i, f%i' % (crd, a, b)
def disasm_mspr(inst, mode):
if (inst & 1):
return None
d = (inst & 0x03e00000) >> 21
a = (inst & 0x001f0000) >> 16
b = (inst & 0x0000f800) >>11
spr = (b << 5) + a
if mode:
return 'mtspr 0x%X, r%i' % (spr, d)
else:
return 'mfspr r%i, 0x%X' % (d, spr)
def disasm_mcrxr(inst):
if (inst & 0x007ff801):
return None
crd = (inst & 0x03800000) >> 23
return 'mcrxr cr%i' % crd
# Disassemble code
def disassemble_callback(address, offset, insn, bytes):
# Output label (if any)
if address in labels:
if address in labelNames:
print("\n.global %s" % addr_to_label(address))
print("%s:" % addr_to_label(address))
prefixComment = '/* %08X %08X %02X %02X %02X %02X */' % (address, offset, bytes[0], bytes[1], bytes[2], bytes[3])
asm = None
raw = read_u32(offset)
if insn != None:
asm = insn_to_text(insn, raw)
else: # Capstone couldn't disassemble it
idx = (raw & 0xfc000000) >> 26
idx2 = (raw & 0x000007fe) >> 1
# mtspr
if idx == 31 and idx2 == 467:
asm = disasm_mspr(raw, 1)
# mfspr
elif idx == 31 and idx2 == 339:
asm = disasm_mspr(raw, 0)
# mcrxr
elif idx == 31 and idx2 == 512:
asm = disasm_mcrxr(raw)
# fcmpo
elif idx == 63 and idx2 == 32:
asm = disasm_fcmp(raw)
# Paired singles
elif idx == 4:
asm = disasm_ps(raw)
elif idx in {56, 57, 60, 61}:
asm = disasm_ps_mem(raw, idx)
if asm == None:
asm = '.4byte 0x%08X /* unknown instruction */' % raw
print('%s\t%s' % (prefixComment, asm))
for i in range(0, 7):
if textSizes[i] != 0:
print("\n.section .text%i, \"ax\" # 0x%08X - 0x%08X" % (i, textAddresses[i], textAddresses[i] + textSizes[i]))
disasm_iter(textOffsets[i], textAddresses[i], textSizes[i], disassemble_callback)
# Disassemble data
for i in range(0, 11):
offset = dataOffsets[i]
address = dataAddresses[i]
size = dataSizes[i]
start = address
end = start + size
if size == 0:
continue
print("\n.section .data%i, \"wa\" # 0x%08X - 0x%08X" % (i, start, end))
# Get a sorted list of labels in this data section
sectionLabels = []
for l in labels:
if l >= start and l < end:
sectionLabels.append(l)
sectionLabels.sort()
# Split incbins by labels
j = 0
while address < end:
if j < len(sectionLabels):
incbinSize = sectionLabels[j] - address
if incbinSize != 0:
print("\t.incbin \"baserom.dol\", 0x%X, 0x%X" % (offset, incbinSize))
l = addr_to_label(sectionLabels[j])
print(".global %s\n%s:" % (l, l))
j += 1
else:
incbinSize = end - address
if incbinSize != 0:
print("\t.incbin \"baserom.dol\", 0x%X, 0x%X" % (offset, incbinSize))
offset += incbinSize
address += incbinSize
# Remove labels to avoid duplicates in case of overlap with other sections
for l in sectionLabels:
labels.remove(l)
# Disassemble bss
start = bssAddress
end = bssAddress + bssSize
address = bssAddress
print("\n.section .bss, \"wa\" # 0x%08X - 0x%08X" % (start, end))
# Get a sorted list of labels in this bss section
sectionLabels = []
for l in labels:
if l >= start and l < end:
sectionLabels.append(l)
sectionLabels.sort()
# Split incbins by labels
j = 0
while address < end:
if j < len(sectionLabels):
gapSize = sectionLabels[j] - address
if gapSize != 0:
print("\t.skip 0x%X" % gapSize)
l = addr_to_label(sectionLabels[j])
print(".global %s\n%s:" % (l, l))
j += 1
else:
gapSize = end - address
if gapSize != 0:
print("\t.skip 0x%X" % gapSize)
address += gapSize
# Output linker script
origStdout = sys.stdout
with open('ldscript.ld', 'w') as out:
sys.stdout = out
print("ENTRY(__start)")
if r13_addr != None:
print("_SDA_BASE_ = 0x%08X;" % r13_addr)
if r2_addr != None:
print("_SDA2_BASE_ = 0x%08X;" % r2_addr)
print("PHDRS\n{")
for i in range(0, 7):
if textSizes[i] != 0:
print(" text%i PT_LOAD;" % i)
for i in range(0, 11):
if dataSizes[i] != 0:
print(" data%i PT_LOAD;" % i)
print(" bss PT_LOAD;")
print("}")
print("SECTIONS\n{")
for i in range(0, 7):
if textSizes[i] != 0:
print(" .text%i 0x%08X : { *(.text%i) } : text%i" % (i, textAddresses[i], i, i))
for i in range(0, 11):
if dataSizes[i] != 0:
print(" .data%i 0x%08X : { *(.data%i) } : data%i" % (i, dataAddresses[i], i, i))
print(" .bss 0x%08X (NOLOAD) : { *(.bss) } : bss" % bssAddress)
print("}")
sys.stdout = origStdout
# Output linker script (Metrowerks)
origStdout = sys.stdout
with open('ldscript.lcf', 'w') as out:
sys.stdout = out
if r13_addr != None:
print("_SDA_BASE_ = 0x%08X;" % r13_addr)
if r2_addr != None:
print("_SDA2_BASE_ = 0x%08X;" % r2_addr)
print("SECTIONS\n{")
for i in range(0, 7):
if textSizes[i] != 0:
print(" .text%i BIND(0x%08X) : { *(.text%i) }" % (i, textAddresses[i], i))
for i in range(0, 11):
if dataSizes[i] != 0:
print(" .data%i BIND(0x%08X) : { *(.data%i) }" % (i, dataAddresses[i], i))
print(" .bss BIND(0x%08X) : { *(.bss) }" % bssAddress)
print("}")
sys.stdout = origStdout
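
A worked example of the lis/addi split-load combination handled above, reproduced as standalone arithmetic; the register and immediate values are made up.

def sign_extend_16(value):
    # Same helper as in the disassembler above.
    if value > 0 and (value & 0x8000):
        value -= 0x10000
    return value

# Hypothetical pair:  lis r3, 0x8049  /  addi r3, r3, -0x1234
hi_imm = 0x8049
lo_imm = sign_extend_16(0xEDCC)     # -0x1234
target = (hi_imm << 16) + lo_imm    # 0x8048EDCC
print(hex(target))
# Because the low half comes from addi (sign-extended), the pair is emitted as
#   lis r3, lbl_8048EDCC@ha
#   addi r3, r3, lbl_8048EDCC@l
# whereas an ori low half would use the plain @h suffix instead.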

View File

@ -1,50 +0,0 @@
import argparse
import urllib.request
import sys
import os
import stat
import platform
from pathlib import Path
if sys.platform == "cygwin":
sys.exit(
f"Cygwin/MSYS2 is not supported."
f"\nPlease use native Windows Python instead."
f"\nPlease run pacman -R python in msys2."
f"\n(Current path: {sys.executable})"
)
REPO = "https://github.com/encounter/decomp-toolkit"
def main():
parser = argparse.ArgumentParser()
parser.add_argument("tag_file", help="file containing GitHub tag")
parser.add_argument("output", type=Path, help="output file path")
args = parser.parse_args()
with open(args.tag_file, "r") as f:
tag = f.readline().rstrip()
uname = platform.uname()
suffix = ""
system = uname.system.lower()
if system == "darwin":
system = "macos"
elif system == "windows":
suffix = ".exe"
arch = uname.machine.lower()
if arch == "amd64":
arch = "x86_64"
url = f"{REPO}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
output = args.output
# print(f"Downloading {url} to {output}")
urllib.request.urlretrieve(url, output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
if __name__ == "__main__":
main()

View File

@ -1,45 +0,0 @@
import urllib.request
import sys
import os
import stat
import tempfile
import shutil
import zipfile
if sys.platform == "cygwin":
sys.exit(
f"Cygwin/MSYS2 is not supported."
f"\nPlease use native Windows Python instead."
f"\nPlease run pacman -R python in msys2."
f"\n(Current path: {sys.executable})"
)
HARDLINK = "https://files.decomp.dev/compilers_20230715.zip"
def main() -> None:
output = f"{os.path.dirname(__file__)}/mwcc_compiler"
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_zip = f"{tmp_dir}/mwcc_compiler.zip"
tmp_gc = f"{tmp_dir}/GC"
request = urllib.request.Request(
url=HARDLINK,
headers={"User-Agent": "Mozilla/5.0"},
)
with urllib.request.urlopen(request) as src, open(tmp_zip, "wb") as dst:
shutil.copyfileobj(src, dst)
with zipfile.ZipFile(tmp_zip) as zip_file:
zip_file.extractall(tmp_dir)
shutil.move(tmp_gc, output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
if __name__ == "__main__":
main()

View File

@ -1,49 +0,0 @@
import urllib.request
import sys
import os
import stat
import platform
import tempfile
import zipfile
if sys.platform == "cygwin":
sys.exit(
f"Cygwin/MSYS2 is not supported."
f"\nPlease use native Windows Python instead."
f"\nPlease run pacman -R python in msys2."
f"\n(Current path: {sys.executable})"
)
REPO = "https://github.com/encounter/gc-wii-binutils"
def main() -> None:
output = f"{os.path.dirname(__file__)}/powerpc"
uname = platform.uname()
system = uname.system.lower()
arch = uname.machine.lower()
if system == "darwin":
system = "macos"
arch = "universal"
if arch == "amd64":
arch = "x86_64"
if arch == "x86_32":
arch = "i686"
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_zip = f"{tmp_dir}/powerpc.zip"
url = f"{REPO}/releases/latest/download/{system}-{arch}.zip"
urllib.request.urlretrieve(url, tmp_zip)
with zipfile.ZipFile(tmp_zip) as zip_file:
zip_file.extractall(output)
for filename in os.listdir(output):
f = os.path.join(output, filename)
st = os.stat(f)
os.chmod(f, st.st_mode | stat.S_IEXEC)
if __name__ == "__main__":
main()

124
tools/download_tool.py Normal file
View File

@ -0,0 +1,124 @@
#!/usr/bin/env python3
###
# Downloads various tools from GitHub releases.
#
# Usage:
# python3 tools/download_tool.py wibo build/tools/wibo --tag 1.0.0
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import io
import os
import platform
import shutil
import stat
import urllib.request
import zipfile
from typing import Callable, Dict
from pathlib import Path
def binutils_url(tag):
uname = platform.uname()
system = uname.system.lower()
arch = uname.machine.lower()
if system == "darwin":
system = "macos"
arch = "universal"
elif arch == "amd64":
arch = "x86_64"
repo = "https://github.com/encounter/gc-wii-binutils"
return f"{repo}/releases/download/{tag}/{system}-{arch}.zip"
def compilers_url(tag: str) -> str:
return f"https://files.decomp.dev/compilers_{tag}.zip"
def dtk_url(tag: str) -> str:
uname = platform.uname()
suffix = ""
system = uname.system.lower()
if system == "darwin":
system = "macos"
elif system == "windows":
suffix = ".exe"
arch = uname.machine.lower()
if arch == "amd64":
arch = "x86_64"
repo = "https://github.com/encounter/decomp-toolkit"
return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
def objdiff_cli_url(tag: str) -> str:
uname = platform.uname()
suffix = ""
system = uname.system.lower()
if system == "darwin":
system = "macos"
elif system == "windows":
suffix = ".exe"
arch = uname.machine.lower()
if arch == "amd64":
arch = "x86_64"
repo = "https://github.com/encounter/objdiff"
return f"{repo}/releases/download/{tag}/objdiff-cli-{system}-{arch}{suffix}"
def sjiswrap_url(tag: str) -> str:
repo = "https://github.com/encounter/sjiswrap"
return f"{repo}/releases/download/{tag}/sjiswrap-windows-x86.exe"
def wibo_url(tag: str) -> str:
repo = "https://github.com/decompals/wibo"
return f"{repo}/releases/download/{tag}/wibo"
TOOLS: Dict[str, Callable[[str], str]] = {
"binutils": binutils_url,
"compilers": compilers_url,
"dtk": dtk_url,
"objdiff-cli": objdiff_cli_url,
"sjiswrap": sjiswrap_url,
"wibo": wibo_url,
}
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("tool", help="Tool name")
parser.add_argument("output", type=Path, help="output file path")
parser.add_argument("--tag", help="GitHub tag", required=True)
args = parser.parse_args()
url = TOOLS[args.tool](args.tag)
output = Path(args.output)
print(f"Downloading {url} to {output}")
req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})
with urllib.request.urlopen(req) as response:
if url.endswith(".zip"):
data = io.BytesIO(response.read())
with zipfile.ZipFile(data) as f:
f.extractall(output)
# Make all files executable
for root, _, files in os.walk(output):
for name in files:
os.chmod(os.path.join(root, name), 0o755)
output.touch(mode=0o755) # Update dir modtime
else:
with open(output, "wb") as f:
shutil.copyfileobj(response, f)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
if __name__ == "__main__":
main()
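
Should another binary ever need fetching, the table above extends with one more URL builder; everything below (tool name, repository, asset naming) is hypothetical and only illustrates the shape of an entry.

import platform

def sometool_url(tag: str) -> str:
    # Hypothetical project; release assets assumed to be named per platform.
    system = platform.uname().system.lower()
    repo = "https://github.com/example/some-tool"
    return f"{repo}/releases/download/{tag}/some-tool-{system}"

# In tools/download_tool.py this would be one more entry in TOOLS:
#   TOOLS["some-tool"] = sometool_url
# after which:
#   python3 tools/download_tool.py some-tool build/tools/some-tool --tag v1.0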

View File

@ -1 +0,0 @@
v0.2.3

View File

@ -1,5 +1,3 @@
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -23,82 +21,129 @@ use Python.
import re
import textwrap
import os
from io import StringIO
from pathlib import Path
from typing import Dict, List, Match, Optional, Tuple, Union
NinjaPath = Union[str, Path]
NinjaPaths = Union[
List[str],
List[Path],
List[NinjaPath],
List[Optional[str]],
List[Optional[Path]],
List[Optional[NinjaPath]],
]
NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]
def escape_path(word: str) -> str:
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
def escape_path(word):
return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')
class Writer(object):
def __init__(self, output, width=78):
def __init__(self, output: StringIO, width: int = 78) -> None:
self.output = output
self.width = width
def newline(self):
self.output.write('\n')
def newline(self) -> None:
self.output.write("\n")
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2, break_long_words=False,
break_on_hyphens=False):
self.output.write('# ' + line + '\n')
def comment(self, text: str) -> None:
for line in textwrap.wrap(
text, self.width - 2, break_long_words=False, break_on_hyphens=False
):
self.output.write("# " + line + "\n")
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def variable(
self,
key: str,
value: Optional[NinjaPathOrPaths],
indent: int = 0,
) -> None:
value = " ".join(serialize_paths(value))
self._line("%s = %s" % (key, value), indent)
def pool(self, name, depth):
self._line('pool %s' % name)
self.variable('depth', depth, indent=1)
def pool(self, name: str, depth: int) -> None:
self._line("pool %s" % name)
self.variable("depth", str(depth), indent=1)
def rule(self, name, command, description=None, depfile=None,
generator=False, pool=None, restat=False, rspfile=None,
rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
def rule(
self,
name: str,
command: str,
description: Optional[str] = None,
depfile: Optional[NinjaPath] = None,
generator: bool = False,
pool: Optional[str] = None,
restat: bool = False,
rspfile: Optional[NinjaPath] = None,
rspfile_content: Optional[NinjaPath] = None,
deps: Optional[NinjaPathOrPaths] = None,
) -> None:
self._line("rule %s" % name)
self.variable("command", command, indent=1)
if description:
self.variable('description', description, indent=1)
self.variable("description", description, indent=1)
if depfile:
self.variable('depfile', depfile, indent=1)
self.variable("depfile", depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
self.variable("generator", "1", indent=1)
if pool:
self.variable('pool', pool, indent=1)
self.variable("pool", pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
self.variable("restat", "1", indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
self.variable("rspfile", rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
self.variable("rspfile_content", rspfile_content, indent=1)
if deps:
self.variable('deps', deps, indent=1)
self.variable("deps", deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None, implicit_outputs=None, pool=None, dyndep=None):
outputs = as_list(outputs)
def build(
self,
outputs: NinjaPathOrPaths,
rule: str,
inputs: Optional[NinjaPathOrPaths] = None,
implicit: Optional[NinjaPathOrPaths] = None,
order_only: Optional[NinjaPathOrPaths] = None,
variables: Optional[
Union[
List[Tuple[str, Optional[NinjaPathOrPaths]]],
Dict[str, Optional[NinjaPathOrPaths]],
]
] = None,
implicit_outputs: Optional[NinjaPathOrPaths] = None,
pool: Optional[str] = None,
dyndep: Optional[NinjaPath] = None,
) -> List[str]:
outputs = serialize_paths(outputs)
out_outputs = [escape_path(x) for x in outputs]
all_inputs = [escape_path(x) for x in as_list(inputs)]
all_inputs = [escape_path(x) for x in serialize_paths(inputs)]
if implicit:
implicit = [escape_path(x) for x in as_list(implicit)]
all_inputs.append('|')
all_inputs.extend(implicit)
implicit = [escape_path(x) for x in serialize_paths(implicit)]
all_inputs.append("|")
all_inputs.extend(map(str, implicit))
if order_only:
order_only = [escape_path(x) for x in as_list(order_only)]
all_inputs.append('||')
all_inputs.extend(order_only)
order_only = [escape_path(x) for x in serialize_paths(order_only)]
all_inputs.append("||")
all_inputs.extend(map(str, order_only))
if implicit_outputs:
implicit_outputs = [escape_path(x)
for x in as_list(implicit_outputs)]
out_outputs.append('|')
out_outputs.extend(implicit_outputs)
implicit_outputs = [
escape_path(x) for x in serialize_paths(implicit_outputs)
]
out_outputs.append("|")
out_outputs.extend(map(str, implicit_outputs))
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
self._line(
"build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
)
if pool is not None:
self._line(' pool = %s' % pool)
self._line(" pool = %s" % pool)
if dyndep is not None:
self._line(' dyndep = %s' % dyndep)
self._line(" dyndep = %s" % serialize_path(dyndep))
if variables:
if isinstance(variables, dict):
@ -111,89 +156,99 @@ class Writer(object):
return outputs
def include(self, path):
self._line('include %s' % path)
def include(self, path: str) -> None:
self._line("include %s" % path)
def subninja(self, path):
self._line('subninja %s' % path)
def subninja(self, path: str) -> None:
self._line("subninja %s" % path)
def default(self, paths):
self._line('default %s' % ' '.join(as_list(paths)))
def default(self, paths: NinjaPathOrPaths) -> None:
self._line("default %s" % " ".join(serialize_paths(paths)))
def _count_dollars_before_index(self, s, i):
def _count_dollars_before_index(self, s: str, i: int) -> int:
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == '$':
while dollar_index > 0 and s[dollar_index] == "$":
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
def _line(self, text: str, indent: int = 0) -> None:
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
leading_space = " " * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
available_space = self.width - len(leading_space) - len(" $")
space = available_space
while True:
space = text.rfind(' ', 0, space)
if (space < 0 or
self._count_dollars_before_index(text, space) % 2 == 0):
space = text.rfind(" ", 0, space)
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if (space < 0 or
self._count_dollars_before_index(text, space) % 2 == 0):
space = text.find(" ", space + 1)
if (
space < 0
or self._count_dollars_before_index(text, space) % 2 == 0
):
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
self.output.write(leading_space + text[0:space] + " $\n")
text = text[space + 1 :]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
leading_space = " " * (indent + 2)
self.output.write(leading_space + text + '\n')
self.output.write(leading_space + text + "\n")
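# Sketch of the resulting wrapping (not part of the original file): with
# self.width = 20, _line("build foo.o: cc foo.c bar.c") writes
#   build foo.o: cc $
#     foo.c bar.c
# i.e. each wrapped line ends in " $" and continuation lines gain two extra
# spaces of indentation.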
def close(self):
def close(self) -> None:
self.output.close()
def as_list(input):
if input is None:
return []
def serialize_path(input: Optional[NinjaPath]) -> str:
if not input:
return ""
if isinstance(input, Path):
return str(input).replace("/", os.sep)
else:
return str(input)
def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
if isinstance(input, list):
return input
return [input]
return [serialize_path(path) for path in input if path]
return [serialize_path(input)] if input else []
def escape(string):
def escape(string: str) -> str:
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert '\n' not in string, 'Ninja syntax does not allow newlines'
assert "\n" not in string, "Ninja syntax does not allow newlines"
# We only have one special metacharacter: '$'.
return string.replace('$', '$$')
return string.replace("$", "$$")
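# For example (sketch, not part of the original file):
#   escape("price is $5") -> "price is $$5"
# so the '$' survives as a literal character instead of starting a Ninja variable.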
def expand(string, vars, local_vars={}):
def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
"""Expand a string containing $vars as Ninja would.
Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work.
"""
def exp(m):
def exp(m: Match[str]) -> str:
var = m.group(1)
if var == '$':
return '$'
return local_vars.get(var, vars.get(var, ''))
return re.sub(r'\$(\$|\w*)', exp, string)
if var == "$":
return "$"
return local_vars.get(var, vars.get(var, ""))
return re.sub(r"\$(\$|\w*)", exp, string)
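A quick sketch of how expand() behaves; the variable names and values below are invented for illustration and are not taken from the project's configure.py:

vars = {"builddir": "build/GPIE01_01", "cflags": "-O4,p"}
print(expand("$builddir/src/$name.o", vars, {"name": "main"}))
# -> build/GPIE01_01/src/main.o
print(expand("$$10 off", vars))
# -> $10 off   ('$$' expands to a literal '$'; unknown variables expand to '')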

1498
tools/project.py Normal file

File diff suppressed because it is too large

View File

@ -1,206 +0,0 @@
#!/usr/bin/env python3
# ====================================
# Ptr checker v0.0.9 (by RevoSucks)
# ====================================
# WARNING, seriously, get a puke bucket. This is just a file I am writing for my own sole use.
# If you want to use it, I have some advice for you: D.O.N.T.
# It stands for dooooooonnnnnn't use it. Ever.
# ------------------
# Imports
# ------------------
import os, fnmatch, math
# ------------------
# Defines
# ------------------
# Path to baserom.dol
baserom_path = "baserom.dol"
map_path = "build/pikmin.usa.1/pikmin1.map"
# ------------------
# Methods
# ------------------
# Return a list of searchable '.s' files in a given folder recursively.
def findExt(folder):
matches = []
for root, dirnames, filenames in os.walk(folder):
for filename in filenames:
if filename.endswith('.s'):
matches.append(os.path.join(root, filename))
#print(matches)
return matches
# Name is passed in as the address initially. If we cannot find it, return the string of the address.
def getMatchingTextSymbolFromAddress(name):
# Strip the 0x from the start.
#print("DEBUG: ", str(hex(name))[2:].zfill(8))
result = str(hex(name))[2:].zfill(8)
matches = [result, '(entry of ']
# First open the .map file.
map = open(map_path, 'r')
Lines = map.readlines()
# Parse over the map until a match is found.
for line in Lines:
# Does the line have the 2 expected strings?
if all(x in line for x in matches):
# We found the line.
#print("Match found! Line: ", line)
# Split the name out and return it.
return line.split(' ')[5]
# We're done, close it.
map.close()
return hex(name)
# Build a list of the pointers and scan each word.
def dumpVirtualTable(line):
# Init the table.
table = ""
# Open baserom.dol for processing.
f = open(baserom_path, 'rb')
address = line.split(' ')[2][:-1]
size = line.split(' ')[3]
#print("ADDR: ", address)
#print("SIZE: ", size)
f.seek(int(address, 16))
entries = int(size, 16) / 4
while entries != 0:
entry = int.from_bytes(f.read(4), byteorder='big')
if entry == 0:
str_to_print = "0"
elif (entry & 0xFF000000) == 0x80000000:
str_to_print = str(getMatchingTextSymbolFromAddress(entry))
else:
str_to_print = str(hex(entry))
table = table + " .4byte " + str_to_print + "\n"
entries = entries - 1
return table
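# The returned text replaces the original baserom.dol reference with explicit
# entries, roughly like this (symbol names invented for illustration):
#   .4byte __dt__9SomeClassFv
#   .4byte 0
#   .4byte update__9SomeClassFv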
def isBaseromEntrySus(line):
with open(baserom_path, 'rb') as f: # context management to close file for us
address = line.split(' ')[2][:-1]
size = line.split(' ')[3]
lower_four = 4 * math.floor(int(size, 16)/4) # Floor the size to the lower multiple of 4 if needed.
entries = int(size, 16) / 4
f.seek(int(address, 16))
while entries > 0:
entry = int.from_bytes(f.read(4), byteorder='big', signed=False)
if (entry & 0xFF000000) == 0x80000000 and entry != 0x80000000:
print("Suspicious Ptr: ", str(hex(entry)))
return True
entries -= 1
return False
# Same as isBaseromEntrySus but check for rodata str. Man this file is a mess.
def isBaseromEntrySus_ShouldDump(line):
with open(baserom_path, 'rb') as f: # context management to close file for us
address = line.split(' ')[2][:-1]
size = line.split(' ')[3]
lower_four = 4 * math.floor(int(size, 16)/4) # Floor the size to the lower multiple of 4 if needed.
entries = int(size, 16) / 4
f.seek(int(address, 16))
while entries > 0:
entry = int.from_bytes(f.read(4), byteorder='big', signed=False)
if (entry & 0xFF000000) == 0x80000000 and entry != 0x80000000:
text_sym = getMatchingTextSymbolFromAddress(entry)
if text_sym != str(hex(entry)):
if text_sym[:2] == "$$":
return True
entries -= 1
return False
def isBaseromEntryPurePtrTable(line):
table = ""
with open(baserom_path, 'rb') as f: # context management to close file for us
address = line.split(' ')[2][:-1]
size = line.split(' ')[3]
lower_four = 4 * math.floor(int(size, 16)/4) # Floor the size to the lower multiple of 4 if needed.
entries = int(size, 16) / 4
f.seek(int(address, 16))
# If any value is neither 0, -1, nor a pointer that resolves to a named text symbol, this is not a pure pointer table; keep the original line.
while entries > 0:
entry = int.from_bytes(f.read(4), byteorder='big', signed=False)
if entry == 0:
str_to_print = "0"
elif (entry & 0xFF000000) == 0x80000000:
str_to_print = getMatchingTextSymbolFromAddress(entry)
print("DEBUG: ", str_to_print)
elif entry == 0xFFFFFFFF:
str_to_print = "-1"
else:
return line # Not a pure ptr table. Keep the entry.
if str_to_print[:2] == "0x":
return line # Not a pure ptr table. Keep the entry.
table = table + " .4byte " + str_to_print + "\n"
entries = entries - 1
return table
def dumpActorTable(line):
table = ""
with open(baserom_path, 'rb') as f: # context management to close file for us
address = line.split(' ')[2][:-1]
size = line.split(' ')[3]
lower_four = 4 * math.floor(int(size, 16)/4) # Floor the size to the lower multiple of 4 if needed.
entries = int(size, 16) / 4
f.seek(int(address, 16))
# Emit each entry as 0, -1, a named text symbol where possible, or the raw value otherwise.
while entries > 0:
entry = int.from_bytes(f.read(4), byteorder='big', signed=False)
if entry == 0:
str_to_print = "0"
elif (entry & 0xFF000000) == 0x80000000:
str_to_print = getMatchingTextSymbolFromAddress(entry)
print("DEBUG: ", str_to_print)
elif entry == 0xFFFFFFFF:
str_to_print = "-1"
else:
str_to_print = str(entry)
table = table + " .4byte " + str_to_print + "\n"
entries = entries - 1
return table
# -----------------------------------
# Config
# -----------------------------------
print_baserom_calls = True
# -----------------------------------
# ------------------
# Main code
# ------------------
asm_files = findExt("asm")
#asm_files = ['/c/sms/asm/NPC/NpcInitData.s']
print("Checking all files for possible ptrs...")
# Using readlines()
for i in asm_files:
file1 = open(i, 'r')
Lines = file1.readlines()
file1.close()
file1 = open(i, 'w')
count = 0
line_array = [] #array of current line and last two lines
line_array.append("")
line_array.append("")
line_array.append("")
file_sus = False
for line in Lines:
line_array[0] = line_array[1]
line_array[1] = line_array[2]
line_array[2] = line
# Do something with the line here. Does the line belong to a baserom.dol call for a __vt__?
if "__RTTI__" in line_array[1] and "baserom.dol" in line:
file1.write(dumpVirtualTable(line))
else:
file1.write(line)
if file_sus == True:
print("Sus: ", i, "Count: ", count, "\n")
file1.close()
print("Done")
#print(os.getcwd())

View File

@ -1,53 +0,0 @@
#!/usr/bin/env python3
# Very experimental script: reads a linker error log (errors.txt) and inserts any undefined lbl_ symbols as labels into a copy of asm/data.s (written to output.asm) while splitting.
with open("errors.txt", "r") as f:
lines = f.readlines()
lbls = []
undefcount = 0
for line in lines:
if "undefined" in line:
undefcount = undefcount + 1
splitLine = line.split(" ")
for spl in splitLine:
if spl.startswith("\'lbl_"):
localstr = spl.strip("\n")
localstr = localstr.strip("\'lbl_")
lbls.append(localstr)
with open("asm/data.s", "r") as asm:
asms = asm.readlines()
prevLine = ""
output = []
for l in asms:
l = l.strip("\n")
for lbl in lbls:
if l.startswith("/* " + lbl):
output.append("lbl_" + lbl + ":\n")
#if prevLine.startswith("func_"):
# prevLine = l
# output.append(l + "\n")
# continue
#else:
# for lbl in lbls:
# lstr = l.strip(":")
# if lstr == lbl:
# output.append(f".global {lbl}\n")
# prevLine = l
# break
output.append(l + "\n")
prevLine = l
with open("output.asm", "w") as w:
for o in output:
w.write(o)

View File

@ -1,77 +0,0 @@
#!/usr/bin/env python3
# borrowed from prime-decomp
import argparse
import os
from platform import uname
from typing import List
if os.name != 'nt':
wineprefix = os.environ.get('WINEPREFIX', os.path.join(os.environ['HOME'], '.wine'))
winedevices = os.path.join(wineprefix, 'dosdevices')
def in_wsl() -> bool:
# wsl1 has Microsoft, wsl2 has microsoft-standard
release = uname().release
return 'microsoft-standard' in release or 'Microsoft' in release
def convert_path(path: str) -> str:
# lowercase drive letter
path = path[0].lower() + path[1:]
if os.name == 'nt':
return path.replace('\\', '/')
elif path[0] == 'z':
# shortcut for z:
return path[2:].replace('\\', '/')
elif in_wsl():
if path.startswith(r'\\wsl'):
# first part could be wsl$ or wsl.localhost
pos = path.find('\\', 2)
pos = path.find('\\', pos + 1)
path = path[pos:]
return path.replace('\\', '/')
else:
path = path[0:1] + path[2:]
return os.path.join('/mnt', path.replace('\\', '/'))
else:
# use $WINEPREFIX/dosdevices to resolve path
return os.path.realpath(os.path.join(winedevices, path.replace('\\', '/')))
def import_d_file(in_file: str) -> str:
out_lines: List[str] = []
with open(in_file, 'r') as file:
it = iter(file)
line = next(it)
if line.endswith(' \\\n'):
out_lines.append(line[:-3].replace('\\', '/') + " \\\n")
else:
out_lines.append(line.replace('\\', '/'))
return ''.join(out_lines)
def main():
parser = argparse.ArgumentParser(
description="""Transform a .d file from Wine paths to normal paths"""
)
parser.add_argument(
"d_file",
help="""Dependency file in""",
)
parser.add_argument(
"d_file_out",
help="""Dependency file out""",
)
args = parser.parse_args()
output = import_d_file(args.d_file)
with open(args.d_file_out, "w", encoding="UTF-8") as f:
f.write(output)
if __name__ == "__main__":
main()

View File

@ -1,45 +0,0 @@
#!/usr/bin/env python3
import argparse
def import_d_file(in_file) -> str:
out_text = ''
with open(in_file) as file:
for idx, line in enumerate(file):
if idx != 0:
path_found_pos = line.find("include")
line = "\t" + line[path_found_pos:]
if line.endswith(' \\\n'):
out_text += line[:-3].replace('\\', '/') + " \\\n"
else:
out_text += line.replace('\\', '/')
else:
if line.endswith(' \\\n'):
out_text += line[:-3].replace('\\', '/') + " \\\n"
else:
out_text += line.replace('\\', '/')
return out_text
def main():
parser = argparse.ArgumentParser(
description="""Transform a .d file from Wine paths to normal paths"""
)
parser.add_argument(
"d_file",
help="""Dependency file in""",
)
parser.add_argument(
"d_file_out",
help="""Dependency file out""",
)
args = parser.parse_args()
output = import_d_file(args.d_file)
with open(args.d_file_out, "w", encoding="UTF-8") as f:
f.write(output)
if __name__ == "__main__":
main()

84
tools/transform_dep.py Executable file
View File

@ -0,0 +1,84 @@
#!/usr/bin/env python3
###
# Transforms .d files, converting Windows paths to Unix paths.
# Allows usage of the mwcc -MMD flag on platforms other than Windows.
#
# Usage:
# python3 tools/transform_dep.py build/src/file.d build/src/file.d
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import os
from platform import uname
wineprefix = os.path.join(os.environ["HOME"], ".wine")
if "WINEPREFIX" in os.environ:
wineprefix = os.environ["WINEPREFIX"]
winedevices = os.path.join(wineprefix, "dosdevices")
def in_wsl() -> bool:
return "microsoft-standard" in uname().release
def import_d_file(in_file: str) -> str:
out_text = ""
with open(in_file) as file:
for idx, line in enumerate(file):
if idx == 0:
if line.endswith(" \\\n"):
out_text += line[:-3].replace("\\", "/") + " \\\n"
else:
out_text += line.replace("\\", "/")
else:
suffix = ""
if line.endswith(" \\\n"):
suffix = " \\"
path = line.lstrip()[:-3]
else:
path = line.strip()
# lowercase drive letter
path = path[0].lower() + path[1:]
if path[0] == "z":
# shortcut for z:
path = path[2:].replace("\\", "/")
elif in_wsl():
path = path[0:1] + path[2:]
path = os.path.join("/mnt", path.replace("\\", "/"))
else:
# use $WINEPREFIX/dosdevices to resolve path
path = os.path.realpath(
os.path.join(winedevices, path.replace("\\", "/"))
)
out_text += "\t" + path + suffix + "\n"
return out_text
def main() -> None:
parser = argparse.ArgumentParser(
description="""Transform a .d file from Wine paths to normal paths"""
)
parser.add_argument(
"d_file",
help="""Dependency file in""",
)
parser.add_argument(
"d_file_out",
help="""Dependency file out""",
)
args = parser.parse_args()
output = import_d_file(args.d_file)
with open(args.d_file_out, "w", encoding="UTF-8") as f:
f.write(output)
if __name__ == "__main__":
main()
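As a rough before/after sketch of the transformation (paths are invented for illustration; the real ones depend on where mwcc runs under Wine):

    # .d file as written by mwcc under Wine:
    build/src/file.o: \
    	Z:\home\user\pikmin\src\file.cpp \
    	C:\compilers\GC\include\Dolphin\os.h
    # after transform_dep.py (non-WSL Linux):
    build/src/file.o: \
    	/home/user/pikmin/src/file.cpp \
    	<WINEPREFIX>/dosdevices/c:/compilers/GC/include/Dolphin/os.h   (resolved via os.path.realpath)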

View File

@ -1,60 +0,0 @@
#!/usr/bin/env python3
import argparse
import json
import os
import subprocess
from pprint import pprint
import requests
def get_git_commit_timestamp() -> int:
return int(subprocess.check_output(['git', 'show', '-s', '--format=%ct']).decode('ascii').rstrip())
def get_git_commit_sha() -> str:
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('ascii').strip()
def generate_url(args: argparse.Namespace) -> str:
url_components = [args.base_url.rstrip('/'), 'data']
for arg in [args.project, args.version.replace('.', '-')]:
if arg != "":
url_components.append(arg)
return str.join('/', url_components) + '/'
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Upload progress information.")
parser.add_argument("-b", "--base_url", help="API base URL", required=True)
parser.add_argument("-a", "--api_key", help="API key (env var PROGRESS_API_KEY)")
parser.add_argument("-p", "--project", help="Project slug", required=True)
parser.add_argument("-v", "--version", help="Version slug", required=True)
parser.add_argument("input", help="Progress JSON input")
args = parser.parse_args()
api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
if not api_key:
raise "API key required"
url = generate_url(args)
entries = []
with open(args.input, "r") as f:
data = json.load(f)
entries.append({
"timestamp": get_git_commit_timestamp(),
"git_hash": get_git_commit_sha(),
"categories": data,
})
print("Publishing entries to", url)
pprint(entries)
data = {
"api_key": api_key,
"entries": entries,
}
r = requests.post(url, json=data)
r.raise_for_status()
print("Done!")

79
tools/upload_progress.py Executable file
View File

@ -0,0 +1,79 @@
#!/usr/bin/env python3
###
# Uploads progress information to https://github.com/decompals/frogress.
#
# Usage:
# python3 tools/upload_progress.py -b https://progress.decomp.club/ -p [project] -v [version] build/[version]/progress.json
#
# If changes are made, please submit a PR to
# https://github.com/encounter/dtk-template
###
import argparse
import json
import os
import requests
import subprocess
import sys
def get_git_commit_timestamp() -> int:
return int(
subprocess.check_output(["git", "show", "-s", "--format=%ct"])
.decode("ascii")
.rstrip()
)
def get_git_commit_sha() -> str:
return subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("ascii").strip()
def generate_url(args: argparse.Namespace) -> str:
url_components = [args.base_url.rstrip("/"), "data"]
for arg in [args.project, args.version]:
if arg != "":
url_components.append(arg)
return str.join("/", url_components) + "/"
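# For example, with the values this repo's CI passes (base URL
# https://progress.decomp.club/, project "pikmin", version "GPIE01_01"),
# this returns: https://progress.decomp.club/data/pikmin/GPIE01_01/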
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Upload progress information.")
parser.add_argument("-b", "--base_url", help="API base URL", required=True)
parser.add_argument("-a", "--api_key", help="API key (env var PROGRESS_API_KEY)")
parser.add_argument("-p", "--project", help="Project slug", required=True)
parser.add_argument("-v", "--version", help="Version slug", required=True)
parser.add_argument("input", help="Progress JSON input")
args = parser.parse_args()
api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
if not api_key:
raise KeyError("API key required")
url = generate_url(args)
entries = []
with open(args.input, "r") as f:
data = json.load(f)
entries.append(
{
"timestamp": get_git_commit_timestamp(),
"git_hash": get_git_commit_sha(),
"categories": data,
}
)
print("Publishing entry to", url)
json.dump(entries[0], sys.stdout, indent=4)
print()
r = requests.post(
url,
json={
"api_key": api_key,
"entries": entries,
},
)
r.raise_for_status()
print("Done!")