Manage m2c and asm-differ via pip (#1142)

This commit is contained in:
Robin Avery 2024-01-28 06:04:40 -05:00 committed by GitHub
parent 411e2f1451
commit 011c1bb89b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
813 changed files with 46 additions and 51519 deletions

View File

@@ -1,17 +1,14 @@
-e file:tools/asm-differ#egg=asm-differ
argcomplete
beautifulsoup4
black
colorama
coverage
cxxfilt
git+https://github.com/matt-kempster/m2c.git
GitPython
graphviz
isort
humanfriendly
pycparser
isort
pcpp
pyelftools
Pygments
pyperclip
python_Levenshtein
watchdog

View File

@@ -2,34 +2,42 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile
# pip-compile --strip-extras
#
-e file:tools/asm-differ#egg=asm-differ
# via -r requirements.in
ansiwrap==0.8.4
# via asm-differ
argcomplete==3.2.2
# via -r requirements.in
beautifulsoup4==4.12.3
# via -r requirements.in
black==24.1.0
black==24.1.1
# via -r requirements.in
click==8.1.7
# via black
colorama==0.4.6
# via -r requirements.in
# via
# -r requirements.in
# asm-differ
coverage==7.4.1
# via -r requirements.in
cxxfilt==0.3.0
# via -r requirements.in
# via asm-differ
gitdb==4.0.11
# via gitpython
gitpython==3.1.41
# via -r requirements.in
graphviz==0.20.1
# via -r requirements.in
# via m2c
humanfriendly==10.0
# via -r requirements.in
isort==5.13.2
# via -r requirements.in
levenshtein==0.23.0
# via python-levenshtein
levenshtein==0.20.9
# via asm-differ
m2c @ git+https://github.com/matt-kempster/m2c.git
# via -r requirements.in
mypy-extensions==1.0.0
# via black
packaging==23.2
@@ -41,20 +49,20 @@ pcpp==1.30
platformdirs==4.1.0
# via black
pycparser==2.21
# via -r requirements.in
# via m2c
pyelftools==0.30
# via -r requirements.in
pygments==2.17.2
# via -r requirements.in
pyperclip==1.8.2
# via -r requirements.in
python-levenshtein==0.23.0
# via -r requirements.in
rapidfuzz==3.6.1
rapidfuzz==2.15.2
# via levenshtein
smmap==5.0.1
# via gitdb
soupsieve==2.5
# via beautifulsoup4
watchdog==3.0.0
# via -r requirements.in
textwrap3==0.9.2
# via ansiwrap
watchdog==2.3.1
# via asm-differ

View File

@@ -10,11 +10,10 @@ from typing import Optional, cast
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
root = Path(__file__).parent.parent
root = Path(__file__).parents[1]
dtk_root = root / "build/GALE01"
obj_root = dtk_root / "obj"
asm_root = dtk_root / "asm"
m2c_script = root / "tools/m2c/m2c.py"
ctx_file = root / "build/ctx.c"
m2ctx_script = root / "tools/m2ctx/m2ctx.py"
@@ -60,7 +59,7 @@ def run_cmd(cmd: list[str]) -> str:
return result.stdout.decode()
def gen_ctx():
def gen_ctx() -> None:
run_cmd(
[
"python",
@@ -71,7 +70,7 @@ def gen_ctx():
)
def main():
def main() -> None:
parser = argparse.ArgumentParser(description="Decomp a function using m2c")
parser.add_argument(
"function",
@@ -93,19 +92,19 @@ def main():
"--no-copy",
action="store_false",
dest="copy",
help=f"do not copy the output to the clipboard",
help="do not copy the output to the clipboard",
)
parser.add_argument(
"--no-print",
action="store_false",
dest="print",
help=f"do not print the output",
help="do not print the output",
)
parser.add_argument(
"--colorize",
action="store_true",
dest="color",
help=f"colorize the output (requires pygments)",
help="colorize the output (requires pygments)",
)
args = parser.parse_args()
@@ -115,7 +114,8 @@ def main():
m2c_cmd: list[str] = [
"python",
resolve_path(m2c_script),
"-m",
"m2c.main",
*args.m2c_args,
"--target",
"ppc-mwcc-c",
@@ -131,21 +131,29 @@
output = run_cmd(m2c_cmd)
if args.copy:
import pyperclip
try:
import pyperclip
pyperclip.copy(output)
except ModuleNotFoundError:
print("Failed to import pyperclip; could not copy", file=stderr)
pyperclip.copy(output)
if args.print:
if args.color:
try:
import colorama
colorama.just_fix_windows_console()
except ModuleNotFoundError:
pass
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import CLexer
try:
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import CLexer
output = highlight(output, CLexer(), TerminalFormatter())
output = highlight(output, CLexer(), TerminalFormatter())
except ModuleNotFoundError:
print("Failed to import pygments; could not colorize", file=stderr)
print(output, file=sys.stdout)
else:
print(f"Could not find {args.function}", file=stderr)

View File

@@ -1,34 +0,0 @@
name: Python application
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install poetry
run: pipx install poetry
- name: Setup Python 3.8
uses: actions/setup-python@v4
with:
python-version: 3.8
cache: poetry
- name: Install dependencies
run: |
poetry install --with=dev
- name: Run tests
run: |
poetry run ./run_tests.py
- name: Type-check
run: |
poetry run mypy
- name: Formatting check
run: |
poetry run black --check .

89
tools/m2c/.gitignore vendored
View File

@@ -1,89 +0,0 @@
*.o
tests/project/
*.m2c
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# pyenv
.python-version
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# mkdocs documentation
/site
# mypy
.mypy_cache/
# vscode
.vscode/
# Graphviz
*.gv
*.gv.*
# Temporary files used by the test runner
.stdout
.stderr

View File

@@ -1,5 +0,0 @@
repos:
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.12.1
hooks:
- id: black

View File

@@ -1,674 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

View File

@ -1,389 +0,0 @@
# `m2c` Decompiler
`m2c` ("*Machine code to C*") is a decompiler for MIPS and PowerPC assembly that produces C code, with partial support for C++.
This project, initially named `mips_to_c`, has the goal to support decompilation projects, which aim to write source code that yields byte-identical output when compiled with a particular build system.
It originally targeted popular compilers of the late 1990's, but it also works well with newer compilers or hand-written assembly.
`m2c` is often used in decompilation workflows with [`splat`](https://github.com/ethteck/splat), [`asm-differ`](https://github.com/simonlindholm/asm-differ), and [`decomp-permuter`](https://github.com/simonlindholm/decomp-permuter).
Its focus on finding "matching" C source differentiates it from other decompilation suites, such as IDA or Ghidra.
Right now the decompiler is fairly functional, though it sometimes generates suboptimal code (especially for loops).
The input is expected to match the GNU `as` assembly format, produced by tools like [`spimdisasm`](https://github.com/Decompollaborate/spimdisasm).
See the `tests/` directory for some example input and output.
[An online version is also available](https://simonsoftware.se/other/m2c.html).
## Install
This project requires Python 3.7 or later. To install the Python dependencies:
```bash
python3 -m pip install --upgrade pycparser
```
You might need to install `pip` first; on Ubuntu this can be done with:
```bash
sudo apt update
sudo apt install python3-pip
```
## Usage
```bash
python3 m2c.py [options] [-t <target>] [--context <context file>] [-f <function name>] <asmfile>...
```
Run with `--help` to see which options are available.
Context files provided with `--context` are parsed and cached, so subsequent runs with the same file are faster. The cache for `foo/bar.c` is stored in `foo/bar.m2c`. These files can be ignored (added to `.gitignore`), and are automatically regenerated if context files change. Caching can be disabled with the `--no-cache` argument.
### Target Architecture / Compiler / Language
`m2c` has support for both MIPS and PowerPC assembly.
It also has some compiler-specific heuristics and language-specific behavior.
For example, it can demangle C++ symbol names as used by CodeWarrior.
Collectively, the output's architecture, compiler, and source language are referred to as a *target*.
The following target triples are supported:
- `--target mips-ido-c`: MIPS (with O32 ABI), IDO toolchain, C language
- `--target mips-gcc-c`: MIPS (with O32 ABI), GCC toolchain, C language
- `--target ppc-mwcc-c`: PowerPC, MetroWerks CodeWarrior toolchain (`mwccecpp.exe`), C language
- `--target ppc-mwcc-c++`: PowerPC, MetroWerks CodeWarrior toolchain (`mwccecpp.exe`), C++ language
### Multiple functions
By default, `m2c` decompiles all functions in the text sections from the input assembly files.
`m2c` is able to perform a small amount of cross-function type inference, if the functions call each other.
You can limit the function(s) that are decompiled by providing the `-f <function name>` flags (or the "Function" dropdown on the website).
### Global Declarations & Initializers
When provided input files with `data`, `rodata`, and/or `bss` sections, `m2c` can generate the initializers for variables it knows the types of.
Qualifier hints such as `const`, `static`, and `extern` are based on which sections the symbols appear in, or if they aren't provided at all.
The output also includes prototypes for functions not declared in the context.
`m2c` cannot generate initializers for structs with bitfields (e.g. `unsigned foo: 3;`) or for symbols that it cannot infer the type of.
For the latter, you can provide a type for the symbol in the context.
This feature is controlled with the `--globals` option (or "Global declarations" on the website):
- `--globals=used` is the default behavior, global declarations are emitted for referenced symbols. Initializers are generated when the data/rodata sections are provided.
- `--globals=none` disables globals entirely; only function definitions are emitted.
- `--globals=all` includes all of the output in `used`, but also includes initializers for unreferenced symbols. This can be used to convert data/rodata files without decompiling any functions.
### Struct Field Inference
By default, `m2c` can use type information from decompiled functions to help fill in unknown struct fields.
This behavior can be disabled with `--no-unk-inference` ("Disable unknown struct/type inference" on the website).
For structs in the context, the following fields are treated as "unknown" space that can be inferred:
- `char` arrays with a name starting with `unk_`, e.g. `char unk_10[4];`
- any field with a type that starts with `UNK_` or `MIPS2C_UNK`, e.g. `UNK_TYPE4 foo;`
Currently, struct field inference only works on structs without bitfields or [unnamed union fields](https://gcc.gnu.org/onlinedocs/gcc/Unnamed-Fields.html).
The output will include declarations for any struct with at least one inferred field.
### Specifying stack variables
By default, `m2c` infers the types of stack (local) variables, and names them with the `sp` prefix based on their offset.
Internally, the stack is represented as a struct, so it is possible to manually specify the names & types of stack variables by providing a struct declaration in the context. `m2c` looks in the context for a struct with the tag name `_m2c_stack_<function name>` (e.g. `struct _m2c_stack_test` for a function `test()`).
The size of the stack must exactly match the detected frame size, or `m2c` will return an error.
If you run `m2c` with the `--stack-structs` option ("Stack struct templates" on the website), the output will include the inferred stack declaration, which can then be edited and provided as context by re-running `m2c`.
#### Example
Here is an example for specifying the stack for the `custom_stack` end-to-end test.
First, run `m2c` with the `--stack-structs` option to get the inferred struct for the `test()` function:
<details>
<summary><code>python3 m2c.py tests/end_to_end/custom_stack/irix-o2.s -f test --stack-structs</code></summary>
```c
struct _m2c_stack_test {
/* 0x00 */ char pad0[0x20];
/* 0x20 */ s8 sp20; /* inferred */
/* 0x21 */ char pad21[0x3]; /* maybe part of sp20[4]? */
/* 0x24 */ s32 sp24; /* inferred */
/* 0x28 */ s32 sp28; /* inferred */
/* 0x2C */ s8 sp2C; /* inferred */
/* 0x2D */ char pad2D[0x3]; /* maybe part of sp2C[4]? */
/* 0x30 */ s8 sp30; /* inferred */
/* 0x31 */ char pad31[0x3]; /* maybe part of sp30[4]? */
/* 0x34 */ s8 sp34; /* inferred */
/* 0x35 */ char pad35[0x2]; /* maybe part of sp34[3]? */
/* 0x37 */ s8 sp37; /* inferred */
}; /* size = 0x38 */
? func_00400090(s8 *); /* static */
s32 test(void *arg0); /* static */
s32 test(void *arg0) {
s8 sp37;
s8 sp34;
s8 sp30;
s8 sp2C;
s32 sp28;
s32 sp24;
s8 sp20;
s32 temp_t4;
func_00400090(&sp37);
func_00400090(&sp34);
func_00400090(&sp30);
func_00400090(&sp2C);
func_00400090(&sp20);
sp37 = arg0->unk0 + arg0->unk4;
sp34 = arg0->unk0 + arg0->unk8;
temp_t4 = arg0->unk4 + arg0->unk8;
sp30 = temp_t4;
sp20 = arg0->unk0 * sp37;
sp24 = arg0->unk4 * (s16) sp34;
sp28 = arg0->unk8 * temp_t4;
if (sp37 != 0) {
sp2C = arg0;
} else {
sp2C = &sp20;
}
return sp37 + (s16) sp34 + (s32) sp30 + *(s32 *) sp2C + sp24;
}
```
</details>
Now, based on the body of the `test()` function, we can make some guesses about the types of these variables, and give them more descriptive names:
```c
// Save this file as `test_context.c`
struct Vec {
s32 x, y, z;
};
struct _m2c_stack_test {
char pad0[0x20];
struct Vec vec;
struct Vec *vec_ptr;
s32 scale_z;
s16 scale_y;
char pad36[1];
s8 scale_x;
}; /* size 0x38 */
int test(struct Vec *vec_arg);
```
Finally, re-run `m2c` with our custom stack as part of the `--context`. The `--context` option can be specified multiple times to combine files.
<details>
<summary><code>python3 m2c.py tests/end_to_end/custom_stack/irix-o2.s -f test --context test_context.c</code></summary>
```c
? func_00400090(s8 *); /* static */
s32 test(struct Vec *vec_arg) {
s8 scale_x;
s16 scale_y;
s32 scale_z;
struct Vec *vec_ptr;
struct Vec vec;
s32 temp_t4;
func_00400090(&scale_x);
func_00400090((s8 *) &scale_y);
func_00400090((s8 *) &scale_z);
func_00400090((s8 *) &vec_ptr);
func_00400090((s8 *) &vec);
scale_x = vec_arg->x + vec_arg->y;
scale_y = vec_arg->x + vec_arg->z;
temp_t4 = vec_arg->y + vec_arg->z;
scale_z = temp_t4;
vec = vec_arg->x * scale_x;
vec.y = vec_arg->y * scale_y;
vec.z = vec_arg->z * temp_t4;
if (scale_x != 0) {
vec_ptr = vec_arg;
} else {
vec_ptr = &vec;
}
return scale_x + scale_y + scale_z + vec_ptr->x + vec.y;
}
```
</details>
### Formatting
The following options control the formatting details of the output, such as braces style or numeric format. See `./m2c.py --help` for more details.
(The option name on the website, if available, is in parentheses.)
- `--valid-syntax`
- `--allman` ("Allman braces")
- `--knr` ("K&R braces")
- `--pointer-style` ("`*` to the left")
- `--unk-underscore`
- `--hex-case`
- `--comment-style {multiline,oneline,none}` ("Comment style")
- `--comment-column N` ("Comment style")
- `--no-casts`
- `--zfill-constants` ("0-fill constants")
- `--deterministic-vars`
Note: `--valid-syntax` is used to produce output that is less human-readable, but is likely to directly compile without edits. This can be used to go directly from assembly to the permuter without human intervention.
### Debugging poor results (Advanced)
There are several options to `m2c` which can be used to troubleshoot poor results. Many of these options produce more "primitive" output or debugging information.
- `--no-andor` ("Disable &&/||"): Disable complex conditional detection, such as `if (a && b)`. Instead, emit each part of the conditional as a separate `if` statement. Ands, ors, nots, etc. are usually represented with `goto`s.
- `--no-switches` ("Disable irregular switch detection"): Disable "irregular" `switch` statements, where the compiler emits a single `switch` as a series of branches and/or jump tables. By default, these are coalesced into a single `switch` and marked with an `/* irregular */` comment.
- `--no-unk-inference` ("Disable unknown struct/type inference"): Disable attempting to infer struct fields/types in unknown struct sections and global symbols. See the [_Struct Field Inference_](#struct-field-inference) section above.
- `--gotos-only` ("Use gotos for everything"): Do not detect loops or complex conditionals. This format is close to a 1-1 translation of the assembly.
- Note: to use a goto for a single branch, don't use this flag, but add `# GOTO` to the assembly input.
- `--debug` ("Debug info"): include debug information inline with the code, such as basic block boundaries & labels.
- `--void` ("Force void return type"): assume that the decompiled function has return type `void`. Alternatively: provide the function prototype in the context.
#### Visualization
`m2c` can generate an SVG representation of the control flow of a function, which can sometimes be helpful to untangle complex loops or early returns.
Pass `--visualize` on the command line, or use the "Visualize" button on the website. The output will be an SVG file.
Example to produce C & assembly visualizations of `my_fn()`:
```sh
python3 ./m2c.py --visualize=c --context ctx.c -f my_fn my_asm.s > my_fn_c.svg
python3 ./m2c.py --visualize=asm --context ctx.c -f my_fn my_asm.s > my_fn_asm.svg
```
### Migrating from `mips_to_c.py`
This tool was originally known as `mips_to_c`. As part of the rename, deprecated command line arguments were removed.
When migrating to `m2c`, note the following changes to the CLI:
- Entrypoint rename: `./mips_to_c.py` becomes `./m2c.py`
- To limit decompilation to a [single function](#multiple-functions): use `-f FN, --function FN`
- `--rodata` is [no longer needed](#multiple-functions): `my_text.s --rodata my_data.s` becomes `my_text.s my_data.s`
- `--compiler` has been replaced by [`--target`](#target-architecture--compiler--language): `--compiler gcc` becomes `--target mips-gcc-c`
- `--structs` is now the [default behavior](#struct-field-inference): remove `--structs` from the arguments
## Contributing
There is much low-hanging fruit still. Take a look at the issues if you want to help out.
We use `black` to auto-format our code and `mypy` for type checking. We recommend using `pre-commit` to ensure only auto-formatted code is committed. To set these up, run:
```bash
pip install pre-commit black mypy
pre-commit install
```
Your commits will then be automatically formatted per commit. You can also manually run `black` on the command-line.
Type annotations are used for all Python code. `mypy` should pass without any errors.
To get pretty graph visualizations, install `graphviz` using `pip` and globally on your system (e.g. `sudo apt install graphviz`), and pass the `--visualize` flag.
## Tests
There is a small test suite, which works as follows:
- As you develop your commit, occasionally run `./run_tests.py` to see if any tests have changed output.
These tests run the decompiler on a small corpus of assembly.
- Before pushing your commit, run `./run_tests.py --overwrite` to write changed tests to disk, and commit resultant changes.
### Running Decompilation Project Tests
It's possible to use the entire corpus of assembly files from decompilation projects as regression tests.
For now, the output of these tests are not tracked in version control.
You need to run `./run_tests.py --overwrite ...` **before** making any code changes to create the baseline output.
As an example, if you have the `oot` project cloned locally in the parent directory containing `m2c`, the following will decompile all of its assembly files.
```bash
./run_tests.py --project ../oot --project-with-context ../oot
```
This has been tested with:
- [zeldaret/oot](https://github.com/zeldaret/oot)
- [zeldaret/mm](https://github.com/zeldaret/mm)
- See notes below, the repository needs to be partially built
- [pmret/papermario](https://github.com/pmret/papermario)
- Need to use the `ver/us` or `ver/jp` subfolder, e.g. `--project ../papermario/ver/us`
#### Creating Context Files
The following bash can be used in each decompilation project to create a "universal" context file that can be used for decompiling any assembly file in the project.
This creates `ctx.c` in the project directory.
```bash
cd mm # Or oot, papermario, etc.
find include/ src/ -type f -name "*.h" | sed -e 's/.*/#include "\0"/' > ctx_includes.c
tools/m2ctx.py ctx_includes.c
```
#### Notes for Majora's Mask
The build system in the MM decompilation project is currently being re-written.
It uses "transient" assembly that is not checked in, and in the normal build process it re-groups `.rodata` sections by function.
To use the MM project, run the following to *just* build the transient assembly files (and avoid running `split_asm.py`).
```bash
cd mm
make distclean
make setup
make asm/disasm.dep
```
The repository should be setup correctly if there are `asm/code`, `asm/boot`, and `asm/overlays` folders with `.asm` files, but there *should not* be an `asm/non_matchings` folder.
### Coverage
Code branch coverage can be computed by running `./run_tests.py --coverage`.
By default, this will generate an HTML coverage report `./htmlcov/index.html`.
### Adding an End-to-End Test
You are encouraged to add new end-to-end tests using the `./tests/add_test.py` script.
For MIPS tests, you'll need the IDO `cc` compiler and the `spimdisasm` pip package.
A good reference test to base your new test on is [`array-access`](tests/end_to_end/array-access).
Create a new directory in `tests/end_to_end`, and write the `orig.c` test case.
If you want the test to pass in C context, also add `irix-o2-flags.txt` & `irix-g-flags.txt` files.
After writing these files, run `add_test.py` with the path to the new `orig.c` file, as shown below.
This example assumes that the IDO compiler is available from the OOT decompilation project.
You should change this exported path to match your system.
```bash
export IDO_CC=$HOME/oot/tools/ido_recomp/linux/7.1/cc
./tests/add_test.py $PWD/tests/end_to_end/my-new-test/orig.c
```
This should create `irix-o2.s` and `irix-g.s` files in your test directory.
Now, run `./run_tests.py --overwrite` to invoke the decompiler and write the output to `irix-o2-out.c` and `irix-g-out.c`.
Finally, `git add` your test to track it.
```bash
./run_tests.py --overwrite
git add tests/end_to_end/my-new-test
```
For PowerPC, the `MWCC_CC` environment variable should be set to point to a PPC cc binary (mwcceppc.exe),
and on non-Windows, `WINE` set to point to wine or equivalent ([wibo](https://github.com/decompals/wibo) also works).
### Installation with Poetry
You can include `m2c` as a dependency in your project with [Poetry](https://python-poetry.org/) by adding the following to your `pyproject.toml`:
```toml
[tool.poetry.dependencies]
m2c = {git="https://github.com/matt-kempster/m2c.git"}
```

View File

@ -1,4 +0,0 @@
#!/usr/bin/env python3
from m2c.main import main
main()

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,574 +0,0 @@
import csv
from dataclasses import dataclass, field
from enum import Enum
import re
import struct
import typing
from pathlib import Path
from typing import Callable, Dict, List, Match, Optional, Set, Tuple, TypeVar, Union
from .error import DecompFailure
from .options import Options
from .asm_instruction import RegFormatter
from .instruction import (
ArchAsm,
Instruction,
InstructionMeta,
parse_instruction,
)
@dataclass(frozen=True)
class Label:
    """An assembly label, possibly carrying several merged names.

    Various pattern-matching code assumes that labels are never
    consecutive; to guarantee that, consecutive labels are merged into a
    single Label holding every name. Whenever one canonical name is
    needed, the first entry is used.
    """

    names: List[str]

    def __str__(self) -> str:
        # The first name is the canonical one.
        return self.names[0]
@dataclass
class Function:
    """A parsed assembly function: a name plus an ordered body of
    instructions and labels."""

    name: str
    body: List[Union[Instruction, Label]] = field(default_factory=list)
    reg_formatter: RegFormatter = field(default_factory=RegFormatter)

    def new_label(self, name: str) -> None:
        """Append a label to the body, collapsing immediate duplicates."""
        candidate = Label([name])
        # Skip repeated labels
        if self.body and self.body[-1] == candidate:
            return
        self.body.append(candidate)

    def new_instruction(self, instruction: Instruction) -> None:
        """Append an instruction to the body."""
        self.body.append(instruction)

    def bodyless_copy(self) -> "Function":
        """Return a copy sharing the name and register formatter, with an
        empty body."""
        return Function(name=self.name, reg_formatter=self.reg_formatter)

    def __str__(self) -> str:
        rendered = []
        for item in self.body:
            if isinstance(item, Instruction):
                rendered.append(str(item))
            else:
                rendered.append(f" {item}:")
        joined = "\n".join(rendered)
        return f"glabel {self.name}\n{joined}"
@dataclass
class AsmDataEntry:
    """The contents of one data symbol from a data/rodata/bss section.

    `data` is an ordered mix of raw byte chunks and symbol-reference
    strings (each reference contributing 4 bytes to the size).
    """

    sort_order: Tuple[str, int]
    data: List[Union[str, bytes]] = field(default_factory=list)
    is_string: bool = False
    is_readonly: bool = False
    is_bss: bool = False
    is_jtbl: bool = False

    def size_range_bytes(self) -> Tuple[int, int]:
        """Return the range of possible sizes, if padding were stripped."""
        # TODO: The data address could be used to only strip padding
        # that ends on 16-byte boundaries and is at the end of a section
        max_size = sum(4 if isinstance(chunk, str) else len(chunk) for chunk in self.data)

        # Count trailing zero bytes of the final chunk as potential padding,
        # capped at 15 (one 16-byte alignment unit minus one).
        padding = 0
        if self.data and isinstance(self.data[-1], bytes):
            assert len(self.data) == 1 or isinstance(self.data[-2], str)
            for byte in reversed(self.data[-1]):
                if byte != 0:
                    break
                padding += 1
            padding = min(padding, 15)
        assert padding <= max_size

        # Assume the size is at least 1 byte, unless `max_size == 0`
        if max_size == padding and max_size != 0:
            return 1, max_size
        return max_size - padding, max_size
@dataclass
class AsmData:
    """All data symbols collected from one or more assembly files."""

    values: Dict[str, AsmDataEntry] = field(default_factory=dict)
    mentioned_labels: Set[str] = field(default_factory=set)

    def merge_into(self, other: "AsmData") -> None:
        """Copy every entry and mentioned label into `other`."""
        other.values.update(self.values)
        other.mentioned_labels.update(self.mentioned_labels)

    def is_likely_char(self, c: int) -> bool:
        """Heuristic: printable ASCII, NUL, or a common control character."""
        return 0x20 <= c < 0x7F or c in (0, 7, 8, 9, 10, 13, 27)

    def detect_heuristic_strings(self) -> None:
        """Mark read-only, single-chunk, plausibly-ASCII blobs as strings."""
        for entry in self.values.values():
            blob = entry.data[0] if len(entry.data) == 1 else None
            if (
                entry.is_readonly
                and isinstance(blob, bytes)
                and len(blob) > 1
                and blob[0] != 0
                and all(self.is_likely_char(ch) for ch in blob)
            ):
                entry.is_string = True
@dataclass
class AsmFile:
    """Parsed contents of one assembly file: its functions and data.

    `current_function` / `current_data` track the most recently opened
    function or data symbol, so that subsequent instructions / bytes can
    be appended to it.
    """

    filename: str
    functions: List[Function] = field(default_factory=list)
    asm_data: AsmData = field(default_factory=AsmData)
    current_function: Optional[Function] = field(default=None, repr=False)
    current_data: Optional[AsmDataEntry] = field(default=None)

    def new_function(self, name: str) -> None:
        """Open a new function; later instructions/labels attach to it."""
        fn = Function(name=name)
        self.functions.append(fn)
        self.current_function = fn

    def new_instruction(self, instruction: Instruction) -> None:
        if self.current_function is None:
            # Allow (and ignore) nop instructions in the .text
            # section before any function labels
            if instruction.mnemonic != "nop":
                raise DecompFailure(
                    f"unsupported non-nop instruction outside of function ({instruction})"
                )
            return
        self.current_function.new_instruction(instruction)

    def new_label(self, label_name: str) -> None:
        assert self.current_function is not None
        self.current_function.new_label(label_name)

    def new_data_label(self, symbol_name: str, is_readonly: bool, is_bss: bool) -> None:
        """Open a new data symbol; later bytes/symbols attach to it."""
        # Sort order preserves file order across multiple input files.
        order = (self.filename, len(self.asm_data.values))
        entry = AsmDataEntry(order, is_readonly=is_readonly, is_bss=is_bss)
        self.current_data = entry
        self.asm_data.values[symbol_name] = entry

    def new_data_sym(self, sym: str) -> None:
        """Append a 4-byte symbol reference to the current data entry."""
        if self.current_data is not None:
            self.current_data.data.append(sym)
            self.asm_data.mentioned_labels.add(sym)

    def new_data_bytes(self, data: bytes, *, is_string: bool = False) -> None:
        if self.current_data is None:
            return
        if not self.current_data.data and is_string:
            self.current_data.is_string = True
        # Coalesce adjacent byte chunks into one.
        if self.current_data.data and isinstance(self.current_data.data[-1], bytes):
            self.current_data.data[-1] += data
        else:
            self.current_data.data.append(data)

    def __str__(self) -> str:
        functions_str = "\n\n".join(str(function) for function in self.functions)
        return f"# {self.filename}\n{functions_str}"
def split_arg_list(args: str) -> List[str]:
    """Split a string of comma-separated arguments, handling quotes"""
    # Delegate quote/escape handling to the csv module: one "row" in,
    # one list of fields out.
    rows = csv.reader(
        [args],
        delimiter=",",
        doublequote=False,
        escapechar="\\",
        quotechar='"',
        skipinitialspace=True,
    )
    return [token.strip() for token in next(rows)]
def parse_ascii_directive(line: str, z: bool) -> bytes:
    """Decode the quoted payload of an .ascii (z=False) or .asciz (z=True)
    directive into bytes.

    This is wrong wrt encodings; the assembler really operates on bytes
    and not chars. But for our purposes it should be good enough.
    """
    char_escapes = {
        "b": b"\b",
        "f": b"\f",
        "n": b"\n",
        "r": b"\r",
        "t": b"\t",
        "v": b"\v",
    }
    digits = "0123456789"
    hex_digits = digits + "abcdefABCDEF"

    out: List[bytes] = []
    quoted = False
    parts_seen = 0
    pos = 0
    while pos < len(line):
        ch = line[pos]
        pos += 1
        if not quoted:
            # Anything outside quotes (directive name, commas) is skipped.
            if ch == '"':
                quoted = True
                parts_seen += 1
            continue
        if ch == '"':
            quoted = False
            if z:
                # .asciz: each quoted part gets a NUL terminator.
                out.append(b"\0")
            continue
        if ch != "\\":
            out.append(ch.encode("utf-8"))
            continue
        # Backslash escape: consume the next character.
        if pos == len(line):
            raise DecompFailure(
                "backslash at end of .ascii line not supported: " + line
            )
        ch = line[pos]
        pos += 1
        if ch in char_escapes:
            out.append(char_escapes[ch])
        elif ch == "x":
            # hex literal, consume any number of hex chars, possibly none
            value = 0
            while pos < len(line) and line[pos] in hex_digits:
                value = value * 16 + int(line[pos], 16)
                pos += 1
            out.append(bytes([value & 0xFF]))
        elif ch in digits:
            # Octal literal, consume up to two more digits.
            # Using just the digits 0-7 would be more sane, but this matches GNU as.
            value = int(ch)
            extra = 0
            while pos < len(line) and line[pos] in digits and extra < 2:
                value = value * 8 + int(line[pos])
                pos += 1
                extra += 1
            out.append(bytes([value & 0xFF]))
        else:
            # Unknown escape: emit the character literally.
            out.append(ch.encode("utf-8"))

    if quoted:
        raise DecompFailure("unterminated string literal: " + line)
    if parts_seen == 0:
        raise DecompFailure(".ascii with no string: " + line)
    return b"".join(out)
def add_warning(warnings: List[str], new: str) -> None:
    """Record a warning message, skipping exact duplicates."""
    if new in warnings:
        return
    warnings.append(new)
def parse_incbin(
    args: List[str], options: Options, warnings: List[str]
) -> Optional[bytes]:
    """Read the bytes referenced by an .incbin directive.

    Supports both argument forms: `"file"` and `"file", offset, size`.
    Each directory from --incbin-dir is searched in order. Returns the
    bytes on success, or None (after recording a warning) when the file
    cannot be found or the requested range cannot be fully read.
    """
    try:
        if len(args) == 3:
            filename = args[0]
            offset = int(args[1], 0)
            size = int(args[2], 0)
        elif len(args) == 1:
            filename = args[0]
            offset = 0
            size = -1  # read() with -1 reads to end of file
        else:
            raise ValueError
    except ValueError:
        raise DecompFailure(f"Could not parse asm_data .incbin directive: {args}")

    if not options.incbin_dirs:
        # Bug fix: these warnings previously printed the literal text
        # "(unknown)" (f-strings with no placeholders); include the actual
        # filename so the user knows which .incbin was skipped.
        add_warning(
            warnings,
            f"Skipping .incbin directive for {filename}, pass --incbin-dir to set a search directory",
        )
        return None

    for incbin_dir in options.incbin_dirs:
        full_path = incbin_dir / filename
        try:
            with full_path.open("rb") as f:
                f.seek(offset)
                data = f.read(size)
        except OSError:
            # Missing/unreadable in this directory; try the next one.
            continue
        except MemoryError:
            # Absurd size request; fall through to the size check below.
            data = b""
        if size >= 0 and len(data) != size:
            add_warning(
                warnings,
                f"Unable to read {size} bytes from {full_path} at {offset:#x} (got {len(data)} bytes)",
            )
            return None
        return data

    add_warning(
        warnings,
        f"Unable to find {filename} in any of {len(options.incbin_dirs)} search paths",
    )
    return None
def parse_file(f: typing.TextIO, arch: ArchAsm, options: Options) -> AsmFile:
    """Parse an assembly source file into an AsmFile.

    Handles comment stripping, label detection, a small subset of assembler
    directives (.section/.data/.word/.ascii/..., .ifdef/.if/.else/.endif
    conditionals, .set defines), and hands remaining .text lines to the
    arch-specific instruction parser. Warnings collected along the way are
    printed as a C block comment at the end.
    """
    filename = Path(f.name).name
    asm_file: AsmFile = AsmFile(filename)
    # Non-zero while inside a false .ifdef/.if branch (or a .macro body);
    # lines are skipped while this is > 0. ifdef_levels records, per open
    # conditional, how much it contributed so .else/.endif can undo it.
    ifdef_level: int = 0
    ifdef_levels: List[int] = []
    curr_section = ".text"
    warnings: List[str] = []
    defines: Dict[str, Optional[int]] = {
        # NULL is a non-standard but common asm macro that expands to 0
        "NULL": 0,
        **options.preproc_defines,
    }

    # https://stackoverflow.com/a/241506
    def re_comment_replacer(match: Match[str]) -> str:
        s = match.group(0)
        if s[0] in "/#; \t":
            return " "
        else:
            return s

    re_comment_or_string = re.compile(r'[#;].*|/\*.*?\*/|"(?:\\.|[^\\"])*"')
    re_whitespace_or_string = re.compile(r'\s+|"(?:\\.|[^\\"])*"')
    re_local_glabel = re.compile("L(_.*_)?[0-9A-F]{8}")
    re_local_label = re.compile("loc_|locret_|def_|lbl_|LAB_|jump_")
    re_label = re.compile(r'(?:([a-zA-Z0-9_.$]+)|"([a-zA-Z0-9_.$<>@,-]+)"):')

    T = TypeVar("T")

    class LabelKind(Enum):
        GLOBAL = "global"
        LOCAL = "local"
        JUMP_TARGET = "jump_target"

    def try_parse(parser: Callable[[], T]) -> T:
        # Wraps a parse callback so a ValueError surfaces as a DecompFailure
        # mentioning the directive/line being processed (read from the
        # enclosing loop's variables at call time).
        try:
            return parser()
        except ValueError:
            raise DecompFailure(
                f"Could not parse asm_data {directive} in {curr_section}: {line}"
            )

    def parse_int(w: str) -> int:
        # .set-defined names take precedence over numeric parsing.
        var_value = defines.get(w)
        if var_value is not None:
            return var_value
        return int(w, 0)

    for lineno, line in enumerate(f, 1):
        # Check for goto markers before stripping comments
        emit_goto = any(pattern in line for pattern in options.goto_patterns)

        # Strip comments and whitespace (but not within strings)
        line = re.sub(re_comment_or_string, re_comment_replacer, line)
        line = re.sub(re_whitespace_or_string, re_comment_replacer, line)
        line = line.strip()

        def process_label(label: str, *, kind: LabelKind) -> None:
            # Dispatch a label to the right kind of declaration based on the
            # current section (data label, jump target, or new function).
            if curr_section == ".rodata":
                asm_file.new_data_label(label, is_readonly=True, is_bss=False)
            elif curr_section == ".data":
                asm_file.new_data_label(label, is_readonly=False, is_bss=False)
            elif curr_section == ".bss":
                asm_file.new_data_label(label, is_readonly=False, is_bss=True)
            elif curr_section == ".text":
                if label.startswith(".") or kind == LabelKind.JUMP_TARGET:
                    if asm_file.current_function is None:
                        raise DecompFailure(f"Label {label} is not within a function!")
                    asm_file.new_label(label)
                elif (
                    re_local_glabel.match(label)
                    or (kind != LabelKind.GLOBAL and re_local_label.match(label))
                ) and asm_file.current_function is not None:
                    # Don't treat labels as new functions if they follow a
                    # specific naming pattern. This is used for jump table
                    # targets in both IDA and old n64split output.
                    # We skip this behavior for the very first label in the
                    # file though, to avoid crashes due to unidentified
                    # functions. (Should possibly be generalized to cover any
                    # glabel that has a branch that goes across?)
                    asm_file.new_label(label)
                else:
                    asm_file.new_function(label)

        # Check for labels
        while True:
            g = re_label.match(line)
            if not g:
                break

            label = g.group(1) or g.group(2)
            if ifdef_level == 0:
                process_label(label, kind=LabelKind.LOCAL)

            line = line[len(g.group(0)) :].strip()

        if not line:
            continue

        if "=" in line:
            # Rewrite `KEY = VALUE` assignments as .set directives.
            key, value = line.split("=", 1)
            key = key.strip()
            if " " not in key:
                line = f".set {key}, {value}"

        directive = line.split()[0]
        if directive.startswith("."):
            # Assembler directive.
            if directive == ".ifdef" or directive == ".ifndef":
                macro_name = line.split()[1]
                if macro_name not in defines:
                    defines[macro_name] = None
                    add_warning(
                        warnings,
                        f"Note: assuming {macro_name} is unset for .ifdef, "
                        f"pass -D{macro_name}/-U{macro_name} to set/unset explicitly.",
                    )
                # level is 1 when this conditional's body should be skipped.
                level = 1 if defines[macro_name] is not None else 0
                if directive == ".ifdef":
                    level = 1 - level
                ifdef_level += level
                ifdef_levels.append(level)
            elif directive.startswith(".if"):
                macro_name = line.split()[1]
                if macro_name == "0":
                    level = 1
                elif macro_name == "1":
                    level = 0
                else:
                    # Arbitrary .if conditions aren't evaluated; assume true.
                    level = 0
                    add_warning(warnings, f"Note: ignoring .if {macro_name} directive")
                ifdef_level += level
                ifdef_levels.append(level)
            elif directive == ".else":
                # Invert the skip contribution of the innermost conditional.
                level = ifdef_levels.pop()
                ifdef_level -= level
                level = 1 - level
                ifdef_level += level
                ifdef_levels.append(level)
            elif directive == ".endif":
                ifdef_level -= ifdef_levels.pop()
            elif directive == ".macro":
                # Macro bodies are skipped entirely (treated like a false branch).
                ifdef_level += 1
            elif directive == ".endm":
                ifdef_level -= 1
            elif directive == ".fn":
                _, _, args_str = line.partition(" ")
                args = split_arg_list(args_str)
                asm_file.new_function(args[0])
            elif ifdef_level == 0:
                if directive == ".section":
                    curr_section = line.split()[1].split(",")[0]
                    # Normalize section aliases onto .rodata/.data/.text.
                    if curr_section in (".rdata", ".late_rodata", ".sdata2"):
                        curr_section = ".rodata"
                    elif curr_section == ".sdata":
                        curr_section = ".data"
                    elif curr_section.startswith(".text"):
                        curr_section = ".text"
                elif (
                    directive == ".rdata"
                    or directive == ".rodata"
                    or directive == ".late_rodata"
                ):
                    curr_section = ".rodata"
                elif directive == ".data":
                    curr_section = ".data"
                elif directive == ".bss":
                    curr_section = ".bss"
                elif directive == ".text":
                    curr_section = ".text"
                elif directive == ".set":
                    _, _, args_str = line.partition(" ")
                    args = split_arg_list(args_str)
                    if len(args) == 1:
                        # ".set noreorder" or similar, just ignore
                        pass
                    elif len(args) == 2:
                        defines[args[0]] = try_parse(lambda: parse_int(args[1]))
                    else:
                        raise DecompFailure(f"Could not parse {directive}: {line}")
                elif curr_section in (".rodata", ".data", ".bss"):
                    # Data-emitting directives; all values are big-endian.
                    _, _, args_str = line.partition(" ")
                    args = split_arg_list(args_str)
                    if directive in (".word", ".gpword", ".4byte"):
                        for w in args:
                            if not w or w[0].isdigit() or w[0] == "-" or w in defines:
                                ival = try_parse(lambda: parse_int(w)) & 0xFFFFFFFF
                                asm_file.new_data_bytes(struct.pack(">I", ival))
                            else:
                                # Non-numeric word: a symbol reference.
                                asm_file.new_data_sym(w)
                    elif directive in (".short", ".half", ".2byte"):
                        for w in args:
                            ival = try_parse(lambda: parse_int(w)) & 0xFFFF
                            asm_file.new_data_bytes(struct.pack(">H", ival))
                    elif directive == ".byte":
                        for w in args:
                            ival = try_parse(lambda: parse_int(w)) & 0xFF
                            asm_file.new_data_bytes(bytes([ival]))
                    elif directive == ".float":
                        for w in args:
                            fval = try_parse(lambda: float(w))
                            asm_file.new_data_bytes(struct.pack(">f", fval))
                    elif directive == ".double":
                        for w in args:
                            fval = try_parse(lambda: float(w))
                            asm_file.new_data_bytes(struct.pack(">d", fval))
                    elif directive in (".asci", ".asciz", ".ascii", ".asciiz"):
                        # Trailing "z" variants append a NUL terminator.
                        z = directive.endswith("z")
                        asm_file.new_data_bytes(
                            parse_ascii_directive(line, z), is_string=True
                        )
                    elif directive in (".space", ".skip"):
                        if len(args) == 2:
                            fill = try_parse(lambda: parse_int(args[1])) & 0xFF
                        elif len(args) == 1:
                            fill = 0
                        else:
                            raise DecompFailure(
                                f"Could not parse asm_data {directive} in {curr_section}: {line}"
                            )
                        size = try_parse(lambda: parse_int(args[0]))
                        asm_file.new_data_bytes(bytes([fill] * size))
                    elif line.startswith(".incbin"):
                        data = parse_incbin(args, options, warnings)
                        if data is not None:
                            asm_file.new_data_bytes(data)
        elif ifdef_level == 0:
            if directive == "jlabel":
                parts = line.split()
                if len(parts) >= 2:
                    process_label(parts[1], kind=LabelKind.JUMP_TARGET)
            elif directive in ("glabel", "dlabel"):
                parts = line.split()
                if len(parts) >= 2:
                    process_label(parts[1], kind=LabelKind.GLOBAL)
            elif curr_section == ".text":
                # Anything else in .text is an instruction.
                meta = InstructionMeta(
                    emit_goto=emit_goto,
                    filename=filename,
                    lineno=lineno,
                    synthetic=False,
                )
                if asm_file.current_function is not None:
                    reg_formatter = asm_file.current_function.reg_formatter
                else:
                    reg_formatter = RegFormatter()
                defined_vars = {k: v for k, v in defines.items() if v is not None}
                instr = parse_instruction(line, meta, arch, reg_formatter, defined_vars)
                asm_file.new_instruction(instr)

    if warnings:
        # Emit accumulated warnings as a C comment so they survive in output.
        print("/*")
        print("\n".join(warnings))
        print("*/")
    return asm_file

View File

@ -1,433 +0,0 @@
"""Functions and classes useful for parsing an arbitrary assembly instruction."""
import abc
from dataclasses import dataclass, field
import string
from typing import Dict, List, Optional, Union
from .error import DecompFailure
@dataclass(frozen=True)
class Register:
    """A machine register operand, rendered as `$name`."""

    # Bare register name without the leading `$`, e.g. "a0" or "f12".
    register_name: str

    def is_float(self) -> bool:
        """True for floating-point registers (f0, f1, ...).

        `fp` is the integer frame pointer, not a float register."""
        name = self.register_name
        return name.startswith("f") and name != "fp"

    def other_f64_reg(self) -> "Register":
        """Return the other half of this register's f64 even/odd pair
        (f0 <-> f1, f2 <-> f3, ...)."""
        assert (
            self.is_float()
        ), "tried to get complement reg of non-floating point register"
        index = int(self.register_name[1:])
        return Register(f"f{index ^ 1}")

    def __str__(self) -> str:
        return f"${self.register_name}"
@dataclass(frozen=True)
class AsmGlobalSymbol:
    """A bare symbol reference appearing in an operand, e.g. `some_function`."""

    # Symbol name exactly as written in the assembly source.
    symbol_name: str

    def __str__(self) -> str:
        return self.symbol_name
@dataclass(frozen=True)
class AsmSectionGlobalSymbol(AsmGlobalSymbol):
    """A synthetic symbol denoting `section + addend` (see asm_section_global_symbol)."""

    # Section the symbol points into, e.g. ".rodata".
    section_name: str
    # Byte offset into that section.
    addend: int
def asm_section_global_symbol(section_name: str, addend: int) -> AsmSectionGlobalSymbol:
    """Build the synthetic symbol used for `section + offset` references.

    For example (".rodata", 0x40) yields a symbol named `__rodata40`."""
    # Uppercase hex without the "0x" prefix, matching the historical naming.
    suffix = hex(addend)[2:].upper()
    return AsmSectionGlobalSymbol(
        symbol_name=f"__{section_name}{suffix}",
        section_name=section_name,
        addend=addend,
    )
@dataclass(frozen=True)
class Macro:
    """A relocation macro applied to an argument, e.g. %hi(sym) or sym@l."""

    # Macro/relocation name without the sigil, e.g. "hi", "lo", "sda21".
    macro_name: str
    argument: "Argument"

    def __str__(self) -> str:
        return f"%{self.macro_name}({self.argument})"
@dataclass(frozen=True)
class AsmLiteral:
    """An integer literal operand, rendered in hex."""

    value: int

    def signed_value(self) -> int:
        """Interpret the low 16 bits as a signed (sign-extended) value."""
        low = self.value & 0xFFFF
        return low - 0x10000 if low >= 0x8000 else low

    def __str__(self) -> str:
        return f"{self.value:#x}"
@dataclass(frozen=True)
class AsmAddressMode:
    """A memory operand of the form `offset(base_register)`."""

    # Offset expression; must be an AsmLiteral for lhs_as_literal().
    lhs: "Argument"
    # Base register being dereferenced.
    rhs: Register

    def lhs_as_literal(self) -> int:
        assert isinstance(self.lhs, AsmLiteral)
        return self.lhs.signed_value()

    def __str__(self) -> str:
        # A zero offset is conventionally written without the literal.
        if self.lhs == AsmLiteral(0):
            return f"({self.rhs})"
        return f"{self.lhs}({self.rhs})"
@dataclass(frozen=True)
class BinOp:
    """An (unfolded) binary operation between two argument expressions."""

    # Operator text: one of "+", "-", "*", ">>", "<<", "&".
    op: str
    lhs: "Argument"
    rhs: "Argument"

    def __str__(self) -> str:
        return f"{self.lhs} {self.op} {self.rhs}"
@dataclass(frozen=True)
class JumpTarget:
    """The destination label of a branch/jump instruction."""

    # Label name being jumped to.
    target: str

    def __str__(self) -> str:
        return self.target
Argument = Union[Register, AsmGlobalSymbol, AsmAddressMode, Macro, AsmLiteral, BinOp]
@dataclass(frozen=True)
class AsmInstruction:
    """A parsed instruction: mnemonic plus already-parsed operand arguments."""

    mnemonic: str
    args: List[Argument]

    def __str__(self) -> str:
        # Render as "mnemonic arg1, arg2, ..."; a bare mnemonic when no args.
        if not self.args:
            return self.mnemonic
        rendered = ", ".join(str(a) for a in self.args)
        return f"{self.mnemonic} {rendered}"
class ArchAsmParsing(abc.ABC):
    """Arch-specific information needed to parse asm."""

    # Every register the architecture defines.
    all_regs: List[Register]
    # Alternative register spellings mapped to their canonical Register.
    aliased_regs: Dict[str, Register]

    @abc.abstractmethod
    def normalize_instruction(self, instr: AsmInstruction) -> AsmInstruction:
        """Rewrite `instr` into the canonical form used internally."""
        ...
class NaiveParsingArch(ArchAsmParsing):
    """A fake arch that can parse asm in a naive fashion. Used by the pattern matching
    machinery to reduce arch dependence."""

    # No registers or aliases: every name parses as a plain symbol/register.
    all_regs: List[Register] = []
    aliased_regs: Dict[str, Register] = {}

    def normalize_instruction(self, instr: AsmInstruction) -> AsmInstruction:
        # Identity: no arch-specific canonicalization.
        return instr
@dataclass
class RegFormatter:
    """Converts register names used in input assembly to the internal register
    representation, saves the input's names, and converts back to the input's
    names for the output."""

    # Maps each internal register to the spelling first seen in the input.
    used_names: Dict[Register, str] = field(default_factory=dict)

    def parse(self, reg_name: str, arch: ArchAsmParsing) -> Register:
        """Resolve an input register name without recording its spelling."""
        return arch.aliased_regs.get(reg_name, Register(reg_name))

    def parse_and_store(self, reg_name: str, arch: ArchAsmParsing) -> Register:
        """Resolve an input register name and remember how it was written.

        Raises DecompFailure if the same internal register was previously
        written under a different name."""
        internal_reg = arch.aliased_regs.get(reg_name, Register(reg_name))
        # setdefault records the first spelling and returns the stored one.
        existing_reg_name = self.used_names.setdefault(internal_reg, reg_name)
        if existing_reg_name != reg_name:
            raise DecompFailure(
                f"Source uses multiple names for {internal_reg} ({existing_reg_name}, {reg_name})"
            )
        return internal_reg

    def format(self, reg: Register) -> str:
        """Render an internal register using the input's original spelling."""
        return self.used_names.get(reg, reg.register_name)
valid_word = string.ascii_letters + string.digits + "_$."
valid_number = "-xX" + string.hexdigits
def parse_word(elems: List[str], valid: str = valid_word) -> str:
    """Consume and return the longest prefix of `elems` whose characters are
    all in `valid`. `elems` is mutated in place."""
    chars: List[str] = []
    while elems and elems[0] in valid:
        chars.append(elems.pop(0))
    return "".join(chars)
def parse_quoted(elems: List[str], quote_char: str) -> str:
    """Consume characters from `elems` up to (but not including) `quote_char`.

    A backslash escapes the following character; only \\\\, \\" and \\' matter
    in this context. The closing quote is left in `elems` for the caller."""
    out: List[str] = []
    while elems and elems[0] != quote_char:
        ch = elems.pop(0)
        if ch == "\\":
            # Escaped character: emit the next char verbatim (if any remain).
            if not elems:
                break
            ch = elems.pop(0)
        out.append(ch)
    return "".join(out)
def parse_number(elems: List[str]) -> int:
    """Consume a numeric literal from `elems` and return its value.

    Accepts decimal and 0x/0X-prefixed hex (int(..., 0)). A multi-character
    literal starting with "0" must be hex: the assert deliberately rejects
    octal-looking input like "0755"."""
    number_str = parse_word(elems, valid_number)
    if number_str[0] == "0":
        assert len(number_str) == 1 or number_str[1] in "xX"
    ret = int(number_str, 0)
    return ret
def constant_fold(arg: Argument, defines: Dict[str, int]) -> Argument:
    """Recursively evaluate `arg`: substitute symbols found in `defines` and
    fold binary operations whose operands reduce to literals. Anything that
    cannot be folded is returned (partially folded) as-is."""
    if isinstance(arg, AsmGlobalSymbol) and arg.symbol_name in defines:
        return AsmLiteral(defines[arg.symbol_name])
    if not isinstance(arg, BinOp):
        return arg
    lhs = constant_fold(arg.lhs, defines)
    rhs = constant_fold(arg.rhs, defines)
    if isinstance(lhs, AsmLiteral) and isinstance(rhs, AsmLiteral):
        # Dispatch on the operator; unknown operators fall through unfolded.
        folders = {
            "+": lambda a, b: a + b,
            "-": lambda a, b: a - b,
            "*": lambda a, b: a * b,
            ">>": lambda a, b: a >> b,
            "<<": lambda a, b: a << b,
            "&": lambda a, b: a & b,
        }
        fold = folders.get(arg.op)
        if fold is not None:
            return AsmLiteral(fold(lhs.value, rhs.value))
    return BinOp(arg.op, lhs, rhs)
def replace_bare_reg(
    arg: Argument, arch: ArchAsmParsing, reg_formatter: RegFormatter
) -> Argument:
    """If `arg` is an AsmGlobalSymbol whose name matches a known or aliased register,
    convert it into a Register and return it. Otherwise, return the original `arg`."""
    if not isinstance(arg, AsmGlobalSymbol):
        return arg
    name = arg.symbol_name
    if Register(name) not in arch.all_regs and name not in arch.aliased_regs:
        return arg
    return reg_formatter.parse_and_store(name, arch)
def get_jump_target(label: Argument) -> JumpTarget:
    """Convert a parsed branch operand into a JumpTarget; it must be a bare symbol."""
    assert isinstance(label, AsmGlobalSymbol), "invalid branch target"
    return JumpTarget(label.symbol_name)
# Main parser.
def parse_arg_elems(
    arg_elems: List[str],
    arch: ArchAsmParsing,
    reg_formatter: RegFormatter,
    defines: Dict[str, int],
    *,
    top_level: bool,
) -> Optional[Argument]:
    """Parse a single operand from `arg_elems` (a mutable list of characters).

    Consumes characters up to a "," separator, a closing ")", or the end of
    input, and returns the parsed Argument (None for empty input). Recursive
    calls with top_level=False parse parenthesized/macro subexpressions;
    address modes like `4($sp)` and @-relocations are only recognized at the
    top level."""
    value: Optional[Argument] = None

    def expect(n: str) -> str:
        # Pop the next char and assert it is one of the characters in `n`.
        assert arg_elems, f"Expected one of {list(n)}, but reached end of string"
        g = arg_elems.pop(0)
        assert g in n, f"Expected one of {list(n)}, got {g} (rest: {arg_elems})"
        return g

    while arg_elems:
        tok: str = arg_elems[0]
        if tok.isspace():
            # Ignore whitespace.
            arg_elems.pop(0)
        elif tok == ",":
            expect(",")
            break
        elif tok == "$":
            # Register.
            assert value is None
            word = parse_word(arg_elems)
            reg = word[1:]
            if "$" in reg:
                # If there is a second $ in the word, it's a symbol
                value = AsmGlobalSymbol(word)
            else:
                value = Register(reg)
                value = reg_formatter.parse_and_store(value.register_name, arch)
        elif tok == ".":
            # Either a jump target (i.e. a label), or a section reference.
            assert value is None
            arg_elems.pop(0)
            word = parse_word(arg_elems)
            if word in ["data", "sdata", "rodata", "rdata", "bss", "sbss", "text"]:
                value = asm_section_global_symbol(word, 0)
            else:
                value = AsmGlobalSymbol("." + word)
        elif tok == "%":
            # A MIPS reloc macro, e.g. %hi(...) or %lo(...).
            assert value is None
            arg_elems.pop(0)
            macro_name = parse_word(arg_elems)
            assert macro_name
            expect("(")
            # Get the argument of the macro (which must exist).
            m = parse_arg_elems(
                arg_elems, arch, reg_formatter, defines, top_level=False
            )
            assert m is not None
            m = constant_fold(m, defines)
            expect(")")
            # A macro may be the lhs of an AsmAddressMode, so we don't return here.
            value = Macro(macro_name, m)
        elif tok == ")":
            # Break out to the parent of this call, since we are in parens.
            break
        elif tok in string.digits or (tok == "-" and value is None):
            # Try a number.
            assert value is None
            value = AsmLiteral(parse_number(arg_elems))
        elif tok == "(":
            if value is not None and not top_level:
                # Only allow parsing AsmAddressMode at top level. This makes us parse
                # a+b(c) as (a+b)(c) instead of a+(b(c)).
                break
            # Address mode or binary operation.
            expect("(")
            # Get what is being dereferenced.
            rhs = parse_arg_elems(
                arg_elems, arch, reg_formatter, defines, top_level=False
            )
            assert rhs is not None
            expect(")")
            if isinstance(rhs, BinOp):
                # Binary operation.
                assert value is None
                value = constant_fold(rhs, defines)
            else:
                # Address mode.
                assert top_level
                rhs = replace_bare_reg(rhs, arch, reg_formatter)
                if rhs == AsmLiteral(0):
                    # `(0)` is treated as dereferencing $zero.
                    rhs = Register("zero")
                assert isinstance(rhs, Register)
                # A missing offset (e.g. `($sp)`) counts as literal 0.
                value = constant_fold(value or AsmLiteral(0), defines)
                value = AsmAddressMode(value, rhs)
        elif tok == '"':
            # Quoted global symbol.
            expect('"')
            assert value is None
            word = parse_quoted(arg_elems, '"')
            value = AsmGlobalSymbol(word)
            expect('"')
        elif tok in valid_word:
            # Global symbol.
            assert value is None
            word = parse_word(arg_elems)
            value = AsmGlobalSymbol(word)
        elif tok in "<>+-&*":
            # Binary operators, used e.g. to modify global symbols or constants.
            assert isinstance(value, (AsmLiteral, AsmGlobalSymbol, BinOp))

            if tok in "<>":
                # bitshifts
                expect(tok)
                expect(tok)
                op = tok + tok
            else:
                op = expect("&+-*")

            if op == "-" and arg_elems[0] == "_":
                # Parse `sym-_SDA_BASE_` as a Macro, equivalently to `sym@sda21`
                reloc_name = parse_word(arg_elems)
                if reloc_name not in ("_SDA_BASE_", "_SDA2_BASE_"):
                    raise DecompFailure(
                        f"Unexpected symbol {reloc_name} in subtraction expression"
                    )
                value = Macro("sda21", value)
            else:
                rhs = parse_arg_elems(
                    arg_elems, arch, reg_formatter, defines, top_level=False
                )
                assert rhs is not None
                if isinstance(rhs, BinOp) and rhs.op == "*":
                    rhs = constant_fold(rhs, defines)
                if isinstance(rhs, BinOp) and isinstance(
                    constant_fold(rhs, defines), AsmLiteral
                ):
                    # Mixed-precedence expressions without parens are rejected
                    # rather than risk mis-folding them.
                    raise DecompFailure(
                        "Math is too complicated for m2c. Try adding parentheses."
                    )
                if (
                    op == "+"
                    and isinstance(rhs, AsmLiteral)
                    and isinstance(value, AsmSectionGlobalSymbol)
                ):
                    # Fold `__sectionX + literal` into a new section symbol.
                    value = asm_section_global_symbol(
                        value.section_name, value.addend + rhs.value
                    )
                else:
                    value = BinOp(op, value, rhs)
        elif tok == "@":
            # A relocation (e.g. (...)@ha or (...)@l).
            if not top_level:
                # Parse a+b@l as (a+b)@l, not a+(b@l)
                break
            arg_elems.pop(0)
            reloc_name = parse_word(arg_elems)
            assert reloc_name in ("h", "ha", "l", "sda2", "sda21")
            assert value
            value = Macro(reloc_name, value)
        else:
            assert False, f"Unknown token {tok} in {arg_elems}"

    return value
def parse_args(
    args: str,
    arch: ArchAsmParsing,
    reg_formatter: RegFormatter,
    defines: Dict[str, int],
) -> List[Argument]:
    """Parse a comma-separated operand list into Argument values.

    Each operand is constant-folded and bare register names are converted
    into Register values."""
    remaining: List[str] = list(args.strip())
    parsed: List[Argument] = []
    while remaining:
        arg = parse_arg_elems(remaining, arch, reg_formatter, defines, top_level=True)
        assert arg is not None
        folded = constant_fold(arg, defines)
        parsed.append(replace_bare_reg(folded, arch, reg_formatter))
    return parsed
def parse_asm_instruction(
    line: str,
    arch: ArchAsmParsing,
    reg_formatter: RegFormatter,
    defines: Dict[str, int],
) -> AsmInstruction:
    """Parse one raw asm line into an AsmInstruction, normalized for `arch`."""
    # The first whitespace-separated token is the mnemonic; the rest are operands.
    mnemonic, _, args_str = line.strip().partition(" ")
    operands = parse_args(args_str, arch, reg_formatter, defines)
    return arch.normalize_instruction(AsmInstruction(mnemonic, operands))

View File

@ -1,287 +0,0 @@
import abc
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Sequence, Set, Tuple, TypeVar, Union
from .asm_file import Label
from .asm_instruction import (
Argument,
AsmAddressMode,
AsmGlobalSymbol,
AsmInstruction,
AsmLiteral,
BinOp,
JumpTarget,
Macro,
NaiveParsingArch,
Register,
RegFormatter,
parse_asm_instruction,
)
from .instruction import (
ArchAsm,
Instruction,
InstructionMeta,
)
BodyPart = Union[Instruction, Label]
ReplacementPart = Union[AsmInstruction, Instruction, Label]
PatternPart = Union[AsmInstruction, Label, None]
Pattern = List[Tuple[PatternPart, bool]]
def make_pattern(*parts: str) -> Pattern:
    """Build a Pattern from string parts.

    Each part is one of: "*" (wildcard matching any single body part),
    "name:" (a label), or an instruction in textual form. A trailing "?"
    marks the part as optional."""
    ret: Pattern = []
    for part in parts:
        optional = part.endswith("?")
        part = part.rstrip("?")
        if part == "*":
            ret.append((None, optional))
        elif part.endswith(":"):
            ret.append((Label([part[:-1]]), optional))
        else:
            # Instructions are parsed arch-independently (NaiveParsingArch).
            ins = parse_asm_instruction(part, NaiveParsingArch(), RegFormatter(), {})
            ret.append((ins, optional))
    return ret
@dataclass
class Replacement:
    """Result of a successful pattern match: the parts to emit instead, and
    how many input parts they consume."""

    # Parts substituted into the output body.
    new_body: Sequence[ReplacementPart]
    # Number of input body parts the replacement covers.
    num_consumed: int
@dataclass
class AsmMatch:
    """Captured state from a successful AsmMatcher.try_match."""

    # The matched slice of the input body.
    body: List[BodyPart]
    # Parts matched by "*" wildcards, in order.
    wildcard_items: List[BodyPart]
    # Bindings of symbolic registers (single-letter pattern names).
    regs: Dict[str, Register]
    # Bindings of symbolic literals (uppercase pattern names).
    literals: Dict[str, int]
class AsmPattern(abc.ABC):
    """Interface for a peephole pattern applied over the asm body."""

    @abc.abstractmethod
    def match(self, matcher: "AsmMatcher") -> Optional[Replacement]:
        """Try to match at the matcher's current position; None if no match."""
        ...
class SimpleAsmPattern(AsmPattern):
    """An AsmPattern defined by a fixed Pattern plus a replace() callback."""

    @property
    @abc.abstractmethod
    def pattern(self) -> Pattern:
        """The pattern to match against (see make_pattern)."""
        ...

    @abc.abstractmethod
    def replace(self, m: "AsmMatch") -> Optional[Replacement]:
        """Build a Replacement from a match; None to decline the match."""
        ...

    def match(self, matcher: "AsmMatcher") -> Optional[Replacement]:
        m = matcher.try_match(self.pattern)
        if not m:
            return None
        return self.replace(m)
@dataclass
class TryMatchState:
    """Mutable state accumulated while matching one Pattern occurrence.

    Symbolic names in patterns (single-letter registers, uppercase literals,
    dot-prefixed labels) are bound here on first use and must stay consistent
    across the rest of the match."""

    # Pattern register name -> actual Register it was bound to.
    symbolic_registers: Dict[str, Register] = field(default_factory=dict)
    # Pattern label name -> actual Label definition.
    symbolic_labels_def: Dict[str, Label] = field(default_factory=dict)
    # Pattern label name -> set of actual label names used as branch targets.
    symbolic_labels_uses: Dict[str, Set[str]] = field(
        default_factory=lambda: defaultdict(set)
    )
    # Pattern literal name -> bound integer value.
    symbolic_literals: Dict[str, int] = field(default_factory=dict)
    # Body parts consumed by "*" wildcards, in order.
    wildcard_items: List[BodyPart] = field(default_factory=list)

    T = TypeVar("T")

    def match_var(self, var_map: Dict[str, T], key: str, value: T) -> bool:
        # Bind key on first use; afterwards the same key must match the same value.
        if key in var_map:
            if var_map[key] != value:
                return False
        else:
            var_map[key] = value
        return True

    def match_reg(self, actual: Register, exp: Register) -> bool:
        # Single-letter pattern registers are symbolic; longer names are literal.
        if len(exp.register_name) <= 1:
            return self.match_var(self.symbolic_registers, exp.register_name, actual)
        else:
            return exp.register_name == actual.register_name

    def match_label_def(self, key: str, defn: Label) -> bool:
        # A pattern label may be defined only once; all previously seen uses
        # must be among the actual label's names.
        assert key not in self.symbolic_labels_def
        self.symbolic_labels_def[key] = defn
        return self.symbolic_labels_uses[key] <= set(defn.names)

    def match_label_use(self, key: str, use: str) -> bool:
        # A label use must agree with the definition if one was already bound.
        defn = self.symbolic_labels_def.get(key, None)
        if defn is not None and use not in defn.names:
            return False
        self.symbolic_labels_uses[key].add(use)
        return True

    def eval_math(self, e: Argument) -> int:
        # Evaluate a pattern-side arithmetic expression over bound literals.
        if isinstance(e, AsmLiteral):
            return e.value
        if isinstance(e, BinOp):
            if e.op == "+":
                return self.eval_math(e.lhs) + self.eval_math(e.rhs)
            if e.op == "-":
                return self.eval_math(e.lhs) - self.eval_math(e.rhs)
            if e.op == "<<":
                return self.eval_math(e.lhs) << self.eval_math(e.rhs)
            assert False, f"bad binop in math pattern: {e}"
        elif isinstance(e, AsmGlobalSymbol):
            assert (
                e.symbol_name in self.symbolic_literals
            ), f"undefined variable in math pattern: {e.symbol_name}"
            return self.symbolic_literals[e.symbol_name]
        else:
            assert False, f"bad pattern part in math pattern: {e}"

    def match_arg(self, a: Argument, e: Argument) -> bool:
        # Compare an actual argument `a` against a pattern argument `e`.
        if isinstance(e, AsmLiteral):
            return isinstance(a, AsmLiteral) and a.value == e.value
        if isinstance(e, Register):
            return isinstance(a, Register) and self.match_reg(a, e)
        if isinstance(e, AsmGlobalSymbol):
            if e.symbol_name.startswith("."):
                # Dot-prefixed pattern symbols are symbolic label uses.
                return isinstance(a, AsmGlobalSymbol) and self.match_label_use(
                    e.symbol_name, a.symbol_name
                )
            elif e.symbol_name.isupper():
                # Uppercase pattern symbols bind to literal values.
                return isinstance(a, AsmLiteral) and self.match_var(
                    self.symbolic_literals, e.symbol_name, a.value
                )
            elif e.symbol_name == "_":
                # "_" matches anything.
                return True
            else:
                return isinstance(a, AsmGlobalSymbol) and a.symbol_name == e.symbol_name
        if isinstance(e, AsmAddressMode):
            return (
                isinstance(a, AsmAddressMode)
                and self.match_arg(a.lhs, e.lhs)
                and self.match_reg(a.rhs, e.rhs)
            )
        if isinstance(e, BinOp):
            # Pattern-side arithmetic must equal the actual literal value.
            return isinstance(a, AsmLiteral) and a.value == self.eval_math(e)
        if isinstance(e, Macro):
            return (
                isinstance(a, Macro)
                and a.macro_name == e.macro_name
                and self.match_arg(a.argument, e.argument)
            )
        assert False, f"bad pattern part: {e}"

    def match_one(self, actual: BodyPart, exp: PatternPart) -> bool:
        # Match a single body part against one pattern part.
        if exp is None:
            # Wildcard: record and accept.
            self.wildcard_items.append(actual)
            return True
        if isinstance(exp, Label):
            assert len(exp.names) == 1
            return isinstance(actual, Label) and self.match_label_def(
                exp.names[0], actual
            )
        if not isinstance(actual, Instruction):
            return False
        ins = actual
        if ins.mnemonic != exp.mnemonic:
            return False
        if exp.args:
            if len(ins.args) != len(exp.args):
                return False
            for a, e in zip(ins.args, exp.args):
                if not self.match_arg(a, e):
                    return False
        return True

    def match_meta(self, ins: AsmInstruction) -> bool:
        # Meta-instruction ".eq LHS, EXPR": check EXPR evaluates to LHS's binding.
        assert ins.mnemonic == ".eq"
        res = self.eval_math(ins.args[1])
        return self.match_arg(AsmLiteral(res), ins.args[0])
@dataclass
class AsmMatcher:
    """Streams the asm body through a set of patterns, building the rewritten
    output as replacements are applied."""

    # Original body parts being scanned.
    input: List[BodyPart]
    # All label names defined anywhere in the body.
    labels: Set[str]
    # Rewritten body accumulated so far.
    output: List[BodyPart] = field(default_factory=list)
    # Current scan position into `input`.
    index: int = 0

    def try_match(self, pattern: Pattern) -> Optional[AsmMatch]:
        """Match `pattern` at the current position; None on failure."""
        state = TryMatchState()

        start_index = index = self.index
        for pat, optional in pattern:
            # Dot-mnemonic pattern entries (".eq") are meta checks, not
            # instructions, and consume no input.
            if isinstance(pat, AsmInstruction) and pat.mnemonic[0] == ".":
                if not state.match_meta(pat) and not optional:
                    return None
            elif index < len(self.input) and state.match_one(self.input[index], pat):
                index += 1
            elif not optional:
                return None
        return AsmMatch(
            self.input[start_index:index],
            state.wildcard_items,
            state.symbolic_registers,
            state.symbolic_literals,
        )

    def derived_meta(self) -> InstructionMeta:
        """Metadata for synthesized instructions, derived from the next real one."""
        for part in self.input[self.index :]:
            if isinstance(part, Instruction):
                return part.meta.derived()
        return InstructionMeta.missing()

    def branch_target_exists(self, name: str) -> bool:
        """True if `name` is a label defined somewhere in the body."""
        return name in self.labels

    def apply(self, repl: Replacement, arch: ArchAsm) -> None:
        """Emit `repl` into the output and advance past the consumed input."""
        # Track which registers are overwritten/clobbered in the replacement asm
        repl_writes = []
        final_instr: Optional[Instruction] = None
        for part in repl.new_body:
            if isinstance(part, AsmInstruction):
                # Parse any AsmInstructions into Instructions before substituting
                part = arch.parse(part.mnemonic, part.args, self.derived_meta())
            if isinstance(part, Instruction):
                # Update list of written registers
                repl_writes.extend(part.outputs)
                repl_writes.extend(part.clobbers)
                final_instr = part
            self.output.append(part)

        # Calculate which regs are *not* written by the repl asm, but were in the input asm
        # Denote the replacement asm as "clobbering" these regs by marking the final instr
        for part in self.input[self.index : self.index + repl.num_consumed]:
            if isinstance(part, Instruction):
                for arg in part.outputs + part.clobbers:
                    assert final_instr is not None
                    if arg not in repl_writes and arg not in final_instr.clobbers:
                        final_instr.clobbers.append(arg)

        # Advance the input
        self.index += repl.num_consumed
def simplify_patterns(
    body: List[BodyPart],
    patterns: List[AsmPattern],
    arch: ArchAsm,
) -> List[BodyPart]:
    """Detect and simplify asm standard patterns emitted by known compilers. This is
    especially useful for patterns that involve branches, which are hard to deal with
    in the translate phase."""
    labels = {name for item in body if isinstance(item, Label) for name in item.names}
    matcher = AsmMatcher(body, labels)
    while matcher.index < len(matcher.input):
        # First pattern that matches at this position wins.
        for pattern in patterns:
            m = pattern.match(matcher)
            if m:
                matcher.apply(m, arch)
                break
        else:
            # No pattern matched: copy the current part through unchanged.
            matcher.apply(Replacement([matcher.input[matcher.index]], 1), arch)
    return matcher.output

View File

@ -1,839 +0,0 @@
"""This file handles variable types, function signatures and struct layouts
based on a C AST. Based on the pycparser library."""
import copy
import functools
import hashlib
import pickle
import re
from collections import defaultdict
from dataclasses import dataclass, field
from pathlib import Path
from typing import (
ClassVar,
Dict,
List,
Match,
Optional,
Set,
Tuple,
Union,
cast,
)
from pycparser import c_ast as ca
from pycparser.c_ast import ArrayDecl, FuncDecl, IdentifierType, PtrDecl, TypeDecl
from pycparser.c_generator import CGenerator
from pycparser.c_parser import CParser
from pycparser.plyparser import ParseError
from .error import DecompFailure
CType = Union[PtrDecl, ArrayDecl, TypeDecl, FuncDecl]
StructUnion = Union[ca.Struct, ca.Union]
SimpleType = Union[PtrDecl, TypeDecl]
CParserScope = Dict[str, bool]
@dataclass
class StructField:
    """A single named member of a struct/union, at a particular offset."""

    # Declared C type of the field.
    type: CType
    # Size of the field in bytes.
    size: int
    # Field name (possibly a joined path for nested anonymous members).
    name: str
@dataclass
class Struct:
    """Computed layout of a struct or union type."""

    type: CType
    # Byte offset -> fields starting at that offset (unions/nesting can
    # put several fields at the same offset).
    fields: Dict[int, List[StructField]]
    # TODO: bitfields
    has_bitfields: bool
    # Total size in bytes.
    size: int
    # Required alignment in bytes.
    align: int
@dataclass
class Array:
    """Detailed layout of an array struct member."""

    # Layout detail for the element type (None for plain scalars).
    subtype: "DetailedStructMember"
    # C type of the element.
    subctype: CType
    # Size of one element in bytes.
    subsize: int
    # Number of elements.
    dim: int
DetailedStructMember = Union[Array, Struct, None]
@dataclass
class Param:
    """One function parameter: its type and (optional) name."""

    type: CType
    # Parameter name; None for unnamed parameters.
    name: Optional[str]
@dataclass
class Function:
    """Parsed function signature (see parse_function)."""

    type: CType
    # Return type; None for void functions.
    ret_type: Optional[CType]
    # Parameters; None when the declaration had no parameter list.
    params: Optional[List[Param]]
    # True for varargs ("...") functions.
    is_variadic: bool
@dataclass(eq=False)
class Enum:
    """A C enum: its tag (if any) and value-to-name mapping."""

    # Enum tag (`enum Tag {...}`); None for anonymous enums.
    tag: Optional[str]
    # Maps member value -> member name.
    names: Dict[int, str]
@dataclass(eq=False)
class TypeMap:
    """All type information gathered from the C context, used (and cached)
    across decompilation runs."""

    # Change VERSION if TypeMap changes to invalidate all preexisting caches
    VERSION: ClassVar[int] = 4

    # Parser scope state (identifier -> flag) kept so parsing can be resumed
    # with the same typedef visibility — NOTE(review): semantics come from
    # the C parser; confirm against its usage elsewhere in this module.
    cparser_scope: CParserScope = field(default_factory=dict)
    # Hash of the source this map was built from (cache validation).
    source_hash: Optional[str] = None
    # typedef name -> underlying CType.
    typedefs: Dict[str, CType] = field(default_factory=dict)
    # Global variable name -> declared CType.
    var_types: Dict[str, CType] = field(default_factory=dict)
    # Names of globals that had initializers in the context.
    vars_with_initializers: Set[str] = field(default_factory=set)
    # Function name -> parsed signature.
    functions: Dict[str, Function] = field(default_factory=dict)
    # Struct tag or AST node -> computed layout.
    structs: Dict[Union[str, StructUnion], Struct] = field(default_factory=dict)
    # Struct tag or AST node -> a typedef'd TypeDecl naming it.
    struct_typedefs: Dict[Union[str, StructUnion], TypeDecl] = field(
        default_factory=dict
    )
    # Enum tag or AST node -> parsed Enum.
    enums: Dict[Union[str, ca.Enum], Enum] = field(default_factory=dict)
    # Enum member name -> value.
    enum_values: Dict[str, int] = field(default_factory=dict)
def to_c(node: ca.Node) -> str:
    """Render a pycparser AST node back into C source text."""
    return CGenerator().visit(node)
def basic_type(names: List[str]) -> TypeDecl:
    """Build an unnamed TypeDecl for a primitive type, e.g. ["unsigned", "int"]."""
    idtype = IdentifierType(names=names)
    return TypeDecl(declname=None, quals=[], type=idtype, align=[])
def pointer(type: CType) -> CType:
    """Wrap `type` in an unqualified pointer declarator."""
    return PtrDecl(quals=[], type=type)
def resolve_typedefs(type: CType, typemap: TypeMap) -> CType:
    """Follow single-name typedef chains until a non-typedef type is reached."""
    while isinstance(type, TypeDecl) and isinstance(type.type, IdentifierType):
        names = type.type.names
        # Only a lone identifier that is a known typedef can be expanded.
        if len(names) != 1 or names[0] not in typemap.typedefs:
            break
        type = typemap.typedefs[names[0]]
    return type
def type_of_var_decl(decl: ca.Decl) -> CType:
    """Return the CType of a variable Decl (not a struct/union/enum definition)."""
    assert not isinstance(
        decl.type, (ca.Struct, ca.Union, ca.Enum)
    ), "decls that declare variables have CType types"
    return decl.type
def type_from_global_decl(decl: ca.Decl) -> CType:
    """Get the CType of a global Decl, stripping names of function parameters."""
    tp = type_of_var_decl(decl)
    # Only function declarators with an argument list need rewriting.
    if not isinstance(tp, ca.FuncDecl) or not tp.args:
        return tp

    def anonymize_param(param: ca.Decl) -> ca.Typename:
        # Deep-copy so the original AST stays untouched, then drop the name.
        param = copy.deepcopy(param)
        param.name = None
        set_decl_name(param)
        return ca.Typename(
            name=None, quals=param.quals, type=type_of_var_decl(param), align=[]
        )

    new_params: List[Union[ca.Decl, ca.ID, ca.Typename, ca.EllipsisParam]] = [
        anonymize_param(param) if isinstance(param, ca.Decl) else param
        for param in tp.args.params
    ]
    return ca.FuncDecl(args=ca.ParamList(new_params), type=tp.type)
def is_void(type: CType) -> bool:
    """True if `type` is exactly the bare `void` type."""
    if not isinstance(type, ca.TypeDecl):
        return False
    inner = type.type
    return isinstance(inner, ca.IdentifierType) and inner.names == ["void"]
def primitive_size(type: Union[ca.Enum, ca.IdentifierType]) -> int:
    """Size in bytes of a primitive (non-aggregate) type; 0 for void."""
    if isinstance(type, ca.Enum):
        # Enums are int-sized.
        return 4
    names = type.names
    # Check keywords in priority order (e.g. "long double" must hit "double").
    for keyword, size in (
        ("double", 8),
        ("float", 4),
        ("short", 2),
        ("char", 1),
        ("void", 0),
    ):
        if keyword in names:
            return size
    # "long long" is 8 bytes; int/long/unsigned/etc. are all 4.
    return 8 if names.count("long") == 2 else 4
def primitive_range(type: Union[ca.Enum, ca.IdentifierType]) -> Optional[range]:
    """Range of representable values for a primitive integer type.

    Returns None for floating-point and void types. Enums are treated as
    signed 32-bit."""
    bits = 8 * primitive_size(type)
    signed = True
    if not isinstance(type, ca.Enum):
        names = type.names
        if "double" in names or "float" in names or "void" in names:
            return None
        if "unsigned" in names:
            signed = False
    if signed:
        # One bit is reserved for the sign.
        bits -= 1
        return range(-(2**bits), 2**bits)
    else:
        return range(0, 2**bits)
def function_arg_size_align(type: CType, typemap: TypeMap) -> Tuple[int, int]:
    """Return (size, align) in bytes for a function argument of `type`.

    Raises DecompFailure for void parameters; asserts that the argument is
    neither a function nor an incomplete struct."""
    type = resolve_typedefs(type, typemap)
    if isinstance(type, PtrDecl) or isinstance(type, ArrayDecl):
        # Pointers (and arrays, which decay to pointers) are 4-byte aligned words.
        return 4, 4
    assert not isinstance(type, FuncDecl), "Function argument can not be a function"
    inner_type = type.type
    if isinstance(inner_type, (ca.Struct, ca.Union)):
        # Structs passed by value use the struct's own size/alignment.
        struct = get_struct(inner_type, typemap)
        assert (
            struct is not None
        ), "Function argument can not be of an incomplete struct"
        return struct.size, struct.align
    size = primitive_size(inner_type)
    if size == 0:
        raise DecompFailure("Function parameter has void type")
    # Primitives are aligned to their own size.
    return size, size
def is_struct_type(type: CType, typemap: TypeMap) -> bool:
    """True if `type` (after typedef resolution) is a struct or union."""
    resolved = resolve_typedefs(type, typemap)
    return isinstance(resolved, TypeDecl) and isinstance(
        resolved.type, (ca.Struct, ca.Union)
    )
def is_unk_type(type: CType, typemap: TypeMap) -> bool:
    """Return True if `type` represents an unknown type, or undetermined struct padding."""
    # Check for types matching "char unk_N[...];" or "char padN[...];"
    if (
        isinstance(type, ArrayDecl)
        and isinstance(type.type, TypeDecl)
        and isinstance(type.type.type, IdentifierType)
        and type.type.declname is not None
        and type.type.type.names == ["char"]
    ):
        declname = type.type.declname
        if declname.startswith("unk_") or declname.startswith("pad"):
            return True

    # Check for types which are typedefs starting with "UNK_" or "M2C_UNK",
    # or are arrays/pointers to one of these types.
    while True:
        if (
            isinstance(type, TypeDecl)
            and isinstance(type.type, IdentifierType)
            and len(type.type.names) == 1
            and type.type.names[0] in typemap.typedefs
        ):
            # A typedef: test its name, then continue into its target type.
            type_name = type.type.names[0]
            if type_name.startswith("UNK_") or type_name.startswith("M2C_UNK"):
                return True
            type = typemap.typedefs[type_name]
        elif isinstance(type, (PtrDecl, ArrayDecl)):
            # Unwrap pointer/array layers and keep looking.
            type = type.type
        else:
            return False
def parse_function(fn: CType) -> Optional[Function]:
    """Build a Function descriptor from a function declarator CType.

    Returns None if `fn` is not a FuncDecl. The resulting `params` is None
    for declarations without a parameter list (pre-ANSI `int f();`), and
    `ret_type` is None for void functions. Raises DecompFailure on K&R-style
    headers."""
    if not isinstance(fn, FuncDecl):
        return None
    params: List[Param] = []
    is_variadic = False
    has_void = False
    if fn.args:
        for arg in fn.args.params:
            if isinstance(arg, ca.EllipsisParam):
                is_variadic = True
            elif isinstance(arg, ca.Decl):
                # Named parameter.
                params.append(Param(type=type_of_var_decl(arg), name=arg.name))
            elif isinstance(arg, ca.ID):
                raise DecompFailure(
                    "K&R-style function header is not supported: " + to_c(fn)
                )
            else:
                assert isinstance(arg, ca.Typename)
                if is_void(arg.type):
                    # `f(void)`: explicitly empty parameter list.
                    has_void = True
                else:
                    # Unnamed parameter.
                    params.append(Param(type=arg.type, name=None))
    maybe_params: Optional[List[Param]] = params
    if not params and not has_void and not is_variadic:
        # Function declaration without a parameter list
        maybe_params = None
    ret_type = None if is_void(fn.type) else fn.type
    return Function(
        type=fn, ret_type=ret_type, params=maybe_params, is_variadic=is_variadic
    )
def divmod_towards_zero(lhs: int, rhs: int, op: str) -> int:
    """C-style truncating division ("/") or remainder ("%").

    Python's // and % round towards negative infinity; C rounds towards
    zero, so signs are normalized first and reapplied afterwards."""
    if rhs < 0:
        # Negating both operands leaves the C result unchanged.
        lhs, rhs = -lhs, -rhs
    if lhs < 0:
        return -divmod_towards_zero(-lhs, rhs, op)
    return lhs // rhs if op == "/" else lhs % rhs
def parse_constant_int(expr: "ca.Expression", typemap: TypeMap) -> int:
    """Evaluate a compile-time constant integer expression.

    Supports integer literals, enum constants, binary arithmetic/bitwise/
    comparison operators, short-circuiting `&&`/`||`, truncating `/` and `%`,
    ternaries, comma expressions, unary `-`/`~`/`!`, `sizeof`/`_Alignof`, and
    casts (value wrapped into the target type's range).

    Raises DecompFailure for anything else, or on division by zero.
    """
    if isinstance(expr, ca.Constant):
        try:
            # Strip C integer suffixes (l/L/u/U); base 0 handles hex/octal.
            return int(expr.value.rstrip("lLuU"), 0)
        except ValueError:
            raise DecompFailure(f"Failed to parse {to_c(expr)} as an int literal")
    if isinstance(expr, ca.ID):
        if expr.name in typemap.enum_values:
            return typemap.enum_values[expr.name]
        # Unknown identifiers fall through to the failure at the bottom.
    if isinstance(expr, ca.BinaryOp):
        op = expr.op
        lhs = parse_constant_int(expr.left, typemap)
        # C short-circuit semantics: don't evaluate the right-hand side.
        if op == "&&" and lhs == 0:
            return 0
        if op == "||" and lhs != 0:
            return 1
        rhs = parse_constant_int(expr.right, typemap)
        if op == "+":
            return lhs + rhs
        if op == "-":
            return lhs - rhs
        if op == "*":
            return lhs * rhs
        if op == "<<":
            return lhs << rhs
        if op == ">>":
            return lhs >> rhs
        if op == "&":
            return lhs & rhs
        if op == "|":
            return lhs | rhs
        if op == "^":
            return lhs ^ rhs
        if op == ">=":
            return 1 if lhs >= rhs else 0
        if op == "<=":
            return 1 if lhs <= rhs else 0
        if op == ">":
            return 1 if lhs > rhs else 0
        if op == "<":
            return 1 if lhs < rhs else 0
        if op == "==":
            return 1 if lhs == rhs else 0
        if op == "!=":
            return 1 if lhs != rhs else 0
        if op in ["&&", "||"]:
            # Non-short-circuited case: result is the truthiness of rhs.
            return 1 if rhs != 0 else 0
        if op in ["/", "%"]:
            if rhs == 0:
                raise DecompFailure(
                    f"Division by zero when evaluating expression {to_c(expr)}"
                )
            return divmod_towards_zero(lhs, rhs, op)
    if isinstance(expr, ca.TernaryOp):
        cond = parse_constant_int(expr.cond, typemap) != 0
        return parse_constant_int(expr.iftrue if cond else expr.iffalse, typemap)
    if isinstance(expr, ca.ExprList) and not isinstance(expr.exprs[-1], ca.Typename):
        # Comma expression: the value is the last sub-expression.
        return parse_constant_int(expr.exprs[-1], typemap)
    if isinstance(expr, ca.UnaryOp) and not isinstance(expr.expr, ca.Typename):
        sub = parse_constant_int(expr.expr, typemap)
        if expr.op == "-":
            return -sub
        if expr.op == "~":
            return ~sub
        if expr.op == "!":
            # Logical negation: zero -> 1, nonzero -> 0.
            # (Bug fix: this previously returned 1 in both branches.)
            return 1 if sub == 0 else 0
    if isinstance(expr, ca.UnaryOp) and isinstance(expr.expr, ca.Typename):
        size, align, _ = parse_struct_member(
            expr.expr.type, f"referenced in {expr.op}", typemap, allow_unsized=False
        )
        if expr.op == "sizeof":
            return size
        if expr.op == "_Alignof":
            return align
    if isinstance(expr, ca.Cast):
        value = parse_constant_int(expr.expr, typemap)
        type = resolve_typedefs(expr.to_type.type, typemap)
        if isinstance(type, ca.TypeDecl) and isinstance(
            type.type, (ca.Enum, ca.IdentifierType)
        ):
            rng = primitive_range(type.type)
            if rng is not None:
                # Wrap the value into the target type's representable range.
                return rng.start + (value - rng.start) % (rng.stop - rng.start)
    raise DecompFailure(
        f"Failed to evaluate expression {to_c(expr)} at compile time; only simple arithmetic is supported for now"
    )
def parse_enum(enum: ca.Enum, typemap: TypeMap) -> None:
    """Record an enum's enumerator values into `typemap` for use in constant
    evaluation.

    We match IDO in treating all enums as having size 4, so size/alignment
    are not computed here. A forward declaration (no `values`) is a no-op.
    """
    if enum.values is None:
        return
    record = Enum(tag=enum.name, names={})
    typemap.enums[enum] = record
    if enum.name is not None and enum.name not in typemap.enums:
        typemap.enums[enum.name] = record
    counter = 0
    for member in enum.values.enumerators:
        value = (
            parse_constant_int(member.value, typemap) if member.value else counter
        )
        counter = value + 1
        typemap.enum_values[member.name] = value
        # If there are multiple names mapping to a single value, take the last one
        record.names[value] = member.name
def get_struct(
struct: Union[ca.Struct, ca.Union], typemap: TypeMap
) -> Optional[Struct]:
if struct.name:
return typemap.structs.get(struct.name)
else:
return typemap.structs.get(struct)
class UndefinedStructError(DecompFailure):
    """Raised when a struct/union is used before a full definition (with a
    body) has been seen."""

    pass
def parse_struct(struct: Union[ca.Struct, ca.Union], typemap: TypeMap) -> Struct:
    """Parse a struct/union definition, memoizing the result in `typemap`
    under both the tag name (if any) and the AST node.

    Raises UndefinedStructError for declarations without a body.
    """
    cached = get_struct(struct, typemap)
    if cached:
        return cached
    if struct.decls is None:
        raise UndefinedStructError(
            f"Tried to use struct {struct.name} before it is defined (does it have a definition?)."
        )
    parsed = do_parse_struct(struct, typemap)
    if struct.name:
        typemap.structs[struct.name] = parsed
    typemap.structs[struct] = parsed
    return parsed
def parse_struct_member(
    type: CType, field_name: str, typemap: TypeMap, *, allow_unsized: bool
) -> Tuple[int, int, DetailedStructMember]:
    """Compute (size, alignment, detail) for a struct member of `type`.

    `detail` is a Struct for nested aggregates, an Array for arrays, and
    None otherwise. `allow_unsized` permits void/function types (size 0),
    which are only legal in some contexts (e.g. sizeof operands are not).
    """
    type = resolve_typedefs(type, typemap)
    if isinstance(type, PtrDecl):
        # Every pointer is one 4-byte word.
        return 4, 4, None
    if isinstance(type, ArrayDecl):
        if type.dim is None:
            raise DecompFailure(f"Array field {field_name} must have a size")
        count = parse_constant_int(type.dim, typemap)
        elem_size, elem_align, elem_detail = parse_struct_member(
            type.type, field_name, typemap, allow_unsized=False
        )
        return (
            elem_size * count,
            elem_align,
            Array(elem_detail, type.type, elem_size, count),
        )
    if isinstance(type, FuncDecl):
        assert allow_unsized, "Struct can not contain a function"
        return 0, 0, None
    inner = type.type
    if isinstance(inner, (ca.Struct, ca.Union)):
        nested = parse_struct(inner, typemap)
        return nested.size, nested.align, nested
    if isinstance(inner, ca.Enum):
        parse_enum(inner, typemap)
    # Otherwise it has to be of type Enum or IdentifierType
    prim_size = primitive_size(inner)
    if prim_size == 0 and not allow_unsized:
        raise DecompFailure(f"Field {field_name} cannot be void")
    return prim_size, prim_size, None
def do_parse_struct(struct: Union[ca.Struct, ca.Union], typemap: TypeMap) -> Struct:
    """Compute a struct/union's layout: fields keyed by byte offset, total
    size, and alignment.

    Handles nested anonymous structs/unions (members flattened into the
    parent) and bitfields. Callers should go through parse_struct, which
    memoizes the result in `typemap`.
    """
    is_union = isinstance(struct, ca.Union)
    assert struct.decls is not None, "enforced by caller"
    assert struct.decls, "Empty structs are not valid C"

    # Maps byte offset -> all fields starting at that offset (overlaps come
    # from unions and from flattened anonymous members).
    fields: Dict[int, List[StructField]] = defaultdict(list)
    union_size = 0  # running max member size (unions only)
    align = 1  # running max member alignment
    offset = 0  # current byte offset within the struct
    bit_offset = 0  # bits already consumed in the current byte by bitfields
    has_bitfields = False

    def flush_bitfields() -> None:
        # Close out a partially-filled bitfield byte before laying out a
        # regular (non-bitfield) member.
        nonlocal offset, bit_offset
        if not is_union and bit_offset != 0:
            bit_offset = 0
            offset += 1

    for decl in struct.decls:
        if not isinstance(decl, ca.Decl):
            # Ignore pragmas
            continue
        type = decl.type
        if isinstance(type, (ca.Struct, ca.Union)):
            if type.decls is None:
                continue
            substruct = parse_struct(type, typemap)
            if type.name is not None:
                # Struct defined within another, which is silly but valid C.
                # parse_struct already makes sure it gets defined in the global
                # namespace, so no more to do here.
                pass
            else:
                # C extension: anonymous struct/union, whose members are flattened
                flush_bitfields()
                align = max(align, substruct.align)
                offset = (offset + substruct.align - 1) & -substruct.align
                for off, sfields in substruct.fields.items():
                    for field in sfields:
                        fields[offset + off].append(field)
                if is_union:
                    union_size = max(union_size, substruct.size)
                else:
                    offset += substruct.size
            continue
        if isinstance(type, ca.Enum):
            parse_enum(type, typemap)
            continue
        field_name = f"{struct.name}.{decl.name}"
        if decl.bitsize is not None:
            # A bitfield "type a : b;" has the following effects on struct layout:
            # - align the struct as if it contained a 'type' field.
            # - allocate the next 'b' bits of the struct, going from high bits to low
            #   within each byte.
            # - ensure that 'a' can be loaded using a single load of the size given by
            #   'type' (lw/lh/lb, unsigned counterparts). If it straddles a 'type'
            #   alignment boundary, skip all bits up to that boundary and then use the
            #   next 'b' bits from there instead.
            has_bitfields = True
            width = parse_constant_int(decl.bitsize, typemap)
            ssize, salign, substr = parse_struct_member(
                type, field_name, typemap, allow_unsized=False
            )
            align = max(align, salign)
            if width == 0:
                # A zero-width bitfield only contributes its alignment.
                continue
            if ssize != salign or substr is not None:
                raise DecompFailure(f"Bitfield {field_name} is not of primitive type")
            if width > ssize * 8:
                raise DecompFailure(f"Width of bitfield {field_name} exceeds its type")
            if is_union:
                union_size = max(union_size, ssize)
            else:
                # If the bitfield would straddle a 'type'-sized load boundary,
                # skip ahead to the next boundary before allocating its bits.
                if offset // ssize != (offset + (bit_offset + width - 1) // 8) // ssize:
                    bit_offset = 0
                    offset = (offset + ssize) & -ssize
                bit_offset += width
                offset += bit_offset // 8
                bit_offset &= 7
            continue
        if decl.name is None:
            continue
        flush_bitfields()
        ssize, salign, substr = parse_struct_member(
            type, field_name, typemap, allow_unsized=False
        )
        align = max(align, salign)
        offset = (offset + salign - 1) & -salign
        fields[offset].append(
            StructField(
                type=type,
                size=ssize,
                name=decl.name,
            )
        )
        if is_union:
            union_size = max(union_size, ssize)
        else:
            offset += ssize
    flush_bitfields()

    # If there is a typedef for this struct, prefer using that name
    if struct in typemap.struct_typedefs:
        ctype = typemap.struct_typedefs[struct]
    elif struct.name and struct.name in typemap.struct_typedefs:
        ctype = typemap.struct_typedefs[struct.name]
    else:
        ctype = TypeDecl(declname=None, quals=[], type=struct, align=[])

    size = union_size if is_union else offset
    # Round the total size up to a multiple of the struct's alignment.
    size = (size + align - 1) & -align
    return Struct(
        type=ctype, fields=fields, has_bitfields=has_bitfields, size=size, align=align
    )
def add_builtin_typedefs(source: str) -> str:
    """Prepend typedefs for m2c's fixed-width aliases (u8, s32, f32, ...) to
    `source` (m2c emits those, so pre-defining them simplifies hand-written
    C contexts). All typedefs go on a single line so that line numbers in
    the original source shift by exactly one."""
    builtin = {
        "u8": "unsigned char",
        "s8": "char",
        "u16": "unsigned short",
        "s16": "short",
        "u32": "unsigned int",
        "s32": "int",
        "u64": "unsigned long long",
        "s64": "long long",
        "f32": "float",
        "f64": "double",
    }
    decls = [f"typedef {underlying} {alias};" for alias, underlying in builtin.items()]
    return " ".join(decls) + "\n" + source
def strip_comments(text: str) -> str:
    """Blank out C/C++ comments while leaving string and char literals
    untouched. Newlines inside block comments are preserved so that line
    numbers stay accurate. (Based on https://stackoverflow.com/a/241506)"""
    pattern = re.compile(
        r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
        re.DOTALL | re.MULTILINE,
    )

    def substitute(match: Match[str]) -> str:
        token = match.group(0)
        # Quoted literals are matched only so they are skipped; keep them.
        if not token.startswith("/"):
            return token
        return " " + "\n" * token.count("\n")

    return pattern.sub(substitute, text)
def strip_macro_defs(text: str) -> str:
    """Strip all `#define` lines from C source, preserving line numbers.

    m2c does not run the preprocessor, for a bunch of reasons:
    - we don't know what include directories to use
    - it avoids a dependency on cpp, which is commonly unavailable on MacOS/Windows
    - web site security (#includes could read sensitive files)
    - performance
    Instead that responsibility is passed on to whoever generates the context
    file. But the context file may sometimes still contain macro definitions,
    if generated with the dual purpose of being used as a real include file
    that users expect to preserve macros. Under the optimistic assumption
    that the macros aren't necessary for parsing the context file itself, we
    strip all macro definitions before parsing; each one (including its
    line continuations) is replaced by the same number of newlines.
    """
    pattern = re.compile(r"^[ \t]*#[ \t]*define[ \t](\\\n|.)*", flags=re.MULTILINE)

    def blank_out(m: Match[str]) -> str:
        return "\n" * m.group(0).count("\n")

    return pattern.sub(blank_out, text)
def parse_c(
    source: str, initial_scope: CParserScope
) -> Tuple[ca.FileAST, CParserScope]:
    """Parse C `source` with pycparser, threading typedef scope across files.

    Returns the AST plus the resulting top-level scope, so subsequent files
    can see typedefs declared here. Raises DecompFailure with a
    position-annotated message on syntax errors.
    """
    # This is a modified version of `CParser.parse()` which initializes `_scope_stack`,
    # which contains the only stateful part of the parser that needs to be preserved
    # when parsing multiple files.
    c_parser = CParser()
    c_parser.clex.filename = "<source>"
    c_parser.clex.reset_lineno()
    c_parser._scope_stack = [initial_scope.copy()]
    c_parser._last_yielded_token = None
    try:
        ast = c_parser.cparser.parse(input=source, lexer=c_parser.clex)
    except ParseError as e:
        # pycparser error text looks like "<file>:<line>[:<col>]: <message>".
        msg = str(e)
        position, msg = msg.split(": ", 1)
        parts = position.split(":")
        if len(parts) >= 2:
            # Adjust the line number by 1 to correct for the added typedefs
            lineno = int(parts[1]) - 1
            posstr = f" at line {lineno}"
            if len(parts) >= 3:
                posstr += f", column {parts[2]}"
            try:
                # Quote the offending source line in the error message.
                line = source.split("\n")[lineno].rstrip()
                posstr += "\n\n" + line
            except IndexError:
                posstr += "(out of bounds?)"
        else:
            posstr = ""
        raise DecompFailure(f"Syntax error when parsing C context.\n{msg}{posstr}")
    return ast, c_parser._scope_stack[0].copy()
def build_typemap(source_paths: List[Path], use_cache: bool) -> TypeMap:
    """Build a TypeMap from the given context files (cached via lru_cache)."""
    # Wrapper to convert `source_paths` into a hashable type
    return _build_typemap(tuple(source_paths), use_cache)
@functools.lru_cache(maxsize=16)
def _build_typemap(source_paths: Tuple[Path, ...], use_cache: bool) -> TypeMap:
    """Parse each context file in order, accumulating declarations into one
    TypeMap.

    With `use_cache`, a pickled TypeMap is read/written next to each source
    (`<name>.m2c`), validated by a hash chained over all inputs so far.
    """
    typemap = TypeMap()
    for source_path in source_paths:
        source = source_path.read_text(encoding="utf-8-sig")

        # Compute a hash of the inputs to the TypeMap, which is used to check if the cached
        # version is still valid. The hashing process does not need to be cryptographically
        # secure, caching should only be enabled in trusted environments. (Unpickling files
        # can lead to arbitrary code execution.)
        hasher = hashlib.sha256()
        hasher.update(f"version={TypeMap.VERSION}\n".encode("utf-8"))
        hasher.update(f"parent={typemap.source_hash}\n".encode("utf-8"))
        hasher.update(source.encode("utf-8"))
        source_hash = hasher.hexdigest()

        cache_path = source_path.with_name(f"{source_path.name}.m2c")
        if use_cache and cache_path.exists():
            try:
                with cache_path.open("rb") as f:
                    cache = cast(TypeMap, pickle.load(f))
            except Exception as e:
                # Best-effort cache: a bad cache file just means re-parsing.
                print(
                    f"Warning: Unable to read cache file {cache_path}, skipping ({e})"
                )
            else:
                if cache.source_hash == source_hash:
                    # Cache hit: the cached TypeMap already covers this file
                    # and all previous ones (hash is chained via `parent=`).
                    typemap = cache
                    continue

        source = add_builtin_typedefs(source)
        source = strip_comments(source)
        source = strip_macro_defs(source)
        ast, result_scope = parse_c(source, typemap.cparser_scope)
        typemap.cparser_scope = result_scope
        typemap.source_hash = source_hash

        # First pass: top-level typedefs and function signatures.
        for item in ast.ext:
            if isinstance(item, ca.Typedef):
                typemap.typedefs[item.name] = resolve_typedefs(item.type, typemap)
                if isinstance(item.type, TypeDecl) and isinstance(
                    item.type.type, (ca.Struct, ca.Union)
                ):
                    # Remember typedef names for structs/unions, so struct
                    # layouts can later prefer the typedef'd name.
                    typedef = basic_type([item.name])
                    if item.type.type.name:
                        typemap.struct_typedefs[item.type.type.name] = typedef
                    typemap.struct_typedefs[item.type.type] = typedef
            if isinstance(item, ca.FuncDef):
                assert item.decl.name is not None, "cannot define anonymous function"
                fn = parse_function(type_of_var_decl(item.decl))
                assert fn is not None
                typemap.functions[item.decl.name] = fn
            if isinstance(item, ca.Decl) and isinstance(item.type, FuncDecl):
                assert item.name is not None, "cannot define anonymous function"
                fn = parse_function(item.type)
                assert fn is not None
                typemap.functions[item.name] = fn

        # Second pass: walk the whole AST to register structs, unions, enums
        # and variables, including ones nested inside other declarations.
        class Visitor(ca.NodeVisitor):
            def visit_Struct(self, struct: ca.Struct) -> None:
                if struct.decls is not None:
                    parse_struct(struct, typemap)

            def visit_Union(self, union: ca.Union) -> None:
                if union.decls is not None:
                    parse_struct(union, typemap)

            def visit_Decl(self, decl: ca.Decl) -> None:
                if decl.name is not None:
                    typemap.var_types[decl.name] = type_from_global_decl(decl)
                    if decl.init is not None:
                        typemap.vars_with_initializers.add(decl.name)
                if not isinstance(decl.type, FuncDecl):
                    self.visit(decl.type)

            def visit_Enum(self, enum: ca.Enum) -> None:
                parse_enum(enum, typemap)

            def visit_FuncDef(self, fn: ca.FuncDef) -> None:
                if fn.decl.name is not None:
                    typemap.var_types[fn.decl.name] = type_from_global_decl(fn.decl)

        Visitor().visit(ast)

        if use_cache:
            try:
                with cache_path.open("wb") as f2:
                    pickle.dump(typemap, f2)
            except Exception as e:
                print(
                    f"Warning: Unable to write cache file {cache_path}, skipping ({e})"
                )
    return typemap
def set_decl_name(decl: ca.Decl) -> None:
    """Propagate `decl.name` down to the innermost TypeDecl, which is where
    pycparser's C generator reads the declared name from."""
    inner = type_of_var_decl(decl)
    while not isinstance(inner, TypeDecl):
        inner = inner.type
    inner.declname = decl.name
def type_to_string(type: CType, name: str = "") -> str:
    """Render a CType as C source, declaring `name` if given.

    Struct/union/enum types are summarized ("struct Foo", "anon union")
    rather than expanded in full.
    """
    if isinstance(type, TypeDecl) and isinstance(
        type.type, (ca.Struct, ca.Union, ca.Enum)
    ):
        inner = type.type
        if isinstance(inner, ca.Struct):
            keyword = "struct"
        elif isinstance(inner, ca.Union):
            keyword = "union"
        else:
            keyword = "enum"
        return f"{keyword} {inner.name}" if inner.name else f"anon {keyword}"
    decl = ca.Decl(name, [], [], [], [], copy.deepcopy(type), None, None)
    set_decl_name(decl)
    return to_c(decl)
def dump_typemap(typemap: TypeMap) -> None:
    """Print a human-readable summary of a TypeMap's contents (debug aid)."""
    print("Variables:")
    for var, type in typemap.var_types.items():
        print(f"{type_to_string(type, var)};")
    print()
    print("Functions:")
    for name, fn in typemap.functions.items():
        print(f"{type_to_string(fn.type, name)};")
    print()
    print("Structs:")
    for name_or_id, struct in typemap.structs.items():
        # Structs are registered under both their tag name and the AST node;
        # only print the string-keyed entries to avoid duplicates.
        if not isinstance(name_or_id, str):
            continue
        print(f"{name_or_id}: size {hex(struct.size)}, align {struct.align}")
        for offset, fields in struct.fields.items():
            print(f" {hex(offset)}:", end="")
            for field in fields:
                print(f" {field.name} ({type_to_string(field.type)})", end="")
            print()
    print()
    print("Enums:")
    for name, value in typemap.enum_values.items():
        print(f"{name}: {value}")
    print()

View File

@ -1,613 +0,0 @@
#!/usr/bin/env python
#
# CodeWarrior demangler, supporting operators, templates (including literals), and arrays.
#
# This can be included as a library, or used as a CLI application directly:
#
# $ ./demangle_cw.py 'check__3FooCFUlR3Bar'
# Foo::check (long unsigned, Bar &) const
#
# CodeWarrior's symbol mangling and C++ ABI seems to be based on the Macintosh spec
# defined in this document, section 3.4.1.2 (page 18):
#
# https://mirror.informatimago.com/next/developer.apple.com/tools/mpw-tools/compilers/docs/abi_spec.pdf
#
# It deviates from this spec in a few key ways:
# - The <count> in <qualified_name> is an unterminated <int> (no '_' separator)
# - Parameterized types use '<', ',' and '>' characters instead of the "__PT" prefix
#
# This file, like the rest of this repository, is licensed under the GPL v3. It is based
# on `demangler.cs` by arookas, which was provided under the 3-clause BSD License below:
#
# https://gist.github.com/arookas/0c0cd842c9ca836d7a9ed06a655b1a22
#
# Copyright (c) 2017, arookas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from contextlib import contextmanager
from dataclasses import dataclass, field
from enum import Enum
from io import StringIO, TextIOBase
from typing import ClassVar, Iterator, List, Optional, Set
# Public API of this module.
__all__ = [
    "CxxName",
    "CxxSymbol",
    "CxxTerm",
    "CxxType",
    "demangle",
    "parse",
]

# These substitutions are performed by the doldisasm.py decomp tool, and not by
# the CodeWarrior compiler directly. Undo them for convenience, even though they
# aren't technically part of CW's mangling scheme.
# Each pair is (original character, doldisasm escape sequence).
DOLDISASM_SUBSTITUTIONS = [
    ("<", "$$0"),
    (">", "$$1"),
    ("@", "$$2"),
    ("\\", "$$3"),
    (",", "$$4"),
    ("-", "$$5"),
]
def read_exact(src: TextIOBase, size: int) -> str:
    """Read exactly `size` characters from `src`, or raise a ValueError.

    A short read (e.g. hitting end of input) is treated as a parse error.
    (Doc fix: the old docstring referred to a nonexistent parameter `n`.)
    """
    value = src.read(size)
    if len(value) != size:
        raise ValueError(f"Unable to read {size} bytes; got {value!r}")
    return value
@contextmanager
def peeking(src: TextIOBase) -> Iterator[None]:
    """Save `src`'s position on entry and restore it on exit, so any reads
    performed inside the context act as pure lookahead."""
    saved = src.tell()
    try:
        yield
    finally:
        src.seek(saved)
def peek(src: TextIOBase, n: int = 1) -> str:
    """Return up to `n` characters of lookahead without moving `src`'s
    offset (returns fewer characters near end of input)."""
    pos = src.tell()
    try:
        return src.read(n)
    finally:
        src.seek(pos)
@contextmanager
def as_stringio(src: str) -> Iterator[StringIO]:
    """Expose `src` as a StringIO; on normal exit, raise ValueError if any
    of the input was left unconsumed."""
    stream = StringIO(src)
    yield stream
    leftover = stream.read()
    if leftover:
        raise ValueError(f"Unable to parse full input, leftover chars: {leftover!r}")
@dataclass
class CxxName:
    """
    Represent the name of a C++ symbol, class, or namespace.
    This may be parameterized with `template_params`.
    """

    # Characters that can start a number (template literal or length prefix).
    NUMBER_CHARS: ClassVar[Set[str]] = set("-0123456789")

    name: str
    template_params: Optional[List["CxxType"]] = None

    @staticmethod
    def parse(src: TextIOBase) -> "CxxName":
        """Parse a length-prefixed name (e.g. "3Foo") or a numeric template
        literal from `src`, recursing into any `<...>` template params."""
        # Numbers: either a template literal, or a length prefix
        number_str = ""
        while peek(src) in CxxName.NUMBER_CHARS:
            number_str += read_exact(src, 1)
        if number_str == "":
            raise ValueError(
                "Unable to parse CxxName, input did not start with a number"
            )
        number = int(number_str)

        # Check if the number represents a template literal
        if peek(src) in (",", ">"):
            return CxxName(number_str)

        # Otherwise, it is the length of the name
        if number <= 0:
            raise ValueError("length must be positive")
        name = read_exact(src, number)

        # Simple case: plain identifier
        if "<" not in name:
            return CxxName(name)

        # Otherwise, parse the template parameters between the `<...>` brackets
        if name[-1] != ">":
            raise ValueError("unpaired '<'")
        base_name, sep, param_strs = name.partition("<")
        with as_stringio(param_strs) as buf:
            template_params = []
            while True:
                template_params.append(CxxType.parse(buf))
                sep = read_exact(buf, 1)
                if sep == ">":
                    break
                if sep == ",":
                    continue
                raise ValueError(f"Unexpected character while parsing CxxName: {sep}")
        return CxxName(name=base_name, template_params=template_params)

    def __str__(self) -> str:
        if self.template_params is None:
            return self.name
        return f"{self.name}<{', '.join(str(p) for p in self.template_params)}>"
@dataclass
class CxxTerm:
    """
    Represent a part of a C++ type, like a token.
    This mostly corresponds to a single letter in a mangled type, like 'v' or 'U',
    but it also can represent multicharacter components of a mangled type:
    - ARRAY: with dimension `array_dim`
    - FUNCTION: with `function_params` and `function_return` types
    - QUALIFIED name: with a list of paths in `qualified_name`
    - SYMBOL_REFERENCE: to the `symbol_reference` symbol
    """

    class Kind(Enum):
        # Each enum value is the letter used for it in the mangled form.
        CONST = "C"
        POINTER = "P"
        REFERENCE = "R"
        SIGNED = "S"
        UNSIGNED = "U"
        ELLIPSIS = "e"
        VOID = "v"
        BOOL = "b"
        CHAR = "c"
        SHORT = "s"
        INT = "i"
        LONG = "l"
        LONG_LONG = "x"
        WIDE_CHAR = "w"
        FLOAT = "f"
        DOUBLE = "d"
        LONG_DOUBLE = "r"
        ARRAY = "A"
        QUALIFIED = "Q"
        FUNCTION = "F"
        SYMBOL_REFERENCE = "&"

    # Kinds that modify a following term instead of terminating a type.
    NONTERMINATING_KINDS = {
        Kind.CONST,
        Kind.POINTER,
        Kind.REFERENCE,
        Kind.SIGNED,
        Kind.UNSIGNED,
        Kind.ARRAY,
    }

    # Mangled operator names -> C++ operator spellings.
    OPS = {
        "__nw": " new",
        "__nwa": " new[]",
        "__dl": " delete",
        "__dla": " delete[]",
        "__pl": "+",
        "__mi": "-",
        "__ml": "*",
        "__dv": "/",
        "__md": "%",
        "__er": "^",
        "__adv": "/=",
        "__ad": "&",
        "__or": "|",
        "__co": "~",
        "__nt": "!",
        "__as": "=",
        "__lt": "<",
        "__gt": ">",
        "__apl": "+=",
        "__ami": "-=",
        "__amu": "*=",
        "__amd": "%=",
        "__aer": "^=",
        "__aad": "&=",
        "__aor": "|=",
        "__ls": "<<",
        "__rs": ">>",
        "__ars": ">>=",
        "__als": "<<=",
        "__eq": "==",
        "__ne": "!=",
        "__le": "<=",
        "__ge": ">=",
        "__aa": "&&",
        "__oo": "||",
        "__pp": "++",
        "__mm": "--",
        "__cl": "()",
        "__vc": "[]",
        "__rf": "->",
        "__cm": ",",
        "__rm": "->*",
    }

    kind: Kind
    # The following fields are only populated for the corresponding `kind`s
    array_dim: Optional[int] = None
    function_params: Optional[List["CxxType"]] = None
    function_return: Optional["CxxType"] = None
    qualified_name: Optional[List["CxxName"]] = None
    symbol_reference: Optional["CxxSymbol"] = None

    @staticmethod
    def parse(src: TextIOBase) -> "CxxTerm":
        """Parse one term from `src`, recursing into qualified names,
        function signatures, array dimensions, and symbol references."""
        # A leading digit means an unqualified (length-prefixed) name.
        if peek(src) in CxxName.NUMBER_CHARS:
            return CxxTerm(
                kind=CxxTerm.Kind.QUALIFIED, qualified_name=[CxxName.parse(src)]
            )
        kind = CxxTerm.Kind(read_exact(src, 1))
        if kind == CxxTerm.Kind.QUALIFIED:
            # "Q<count>" is followed by `count` path components.
            count = int(read_exact(src, 1))
            qualified_name = []
            for _ in range(count):
                qualified_name.append(CxxName.parse(src))
            return CxxTerm(kind=kind, qualified_name=qualified_name)
        if kind == CxxTerm.Kind.FUNCTION:
            # Parameter types, then optionally "_" + return type.
            function_params = []
            function_return = None
            while peek(src) not in ("", "_", ",", ">"):
                function_params.append(CxxType.parse(src))
            if peek(src, 1) == "_":
                read_exact(src, 1)
                function_return = CxxType.parse(src)
            return CxxTerm(
                kind=kind,
                function_params=function_params,
                function_return=function_return,
            )
        if kind == CxxTerm.Kind.ARRAY:
            # "A<digits>_": decimal dimension terminated by "_".
            array_dim = 0
            while True:
                c = read_exact(src, 1)
                if c == "_":
                    break
                array_dim = (array_dim * 10) + int(c)
            return CxxTerm(kind=kind, array_dim=array_dim)
        if kind == CxxTerm.Kind.SYMBOL_REFERENCE:
            return CxxTerm(
                kind=kind,
                symbol_reference=CxxSymbol.parse(src),
            )
        return CxxTerm(kind=kind)

    def __str__(self) -> str:
        """Render this term as C++ source text."""
        if self.kind == CxxTerm.Kind.ARRAY:
            assert self.array_dim is not None
            return f"[{self.array_dim}]"
        if self.kind == CxxTerm.Kind.QUALIFIED:
            assert self.qualified_name is not None
            names = [str(q) for q in self.qualified_name]
            # The last path component may be an operator, ctor, or dtor.
            if names[-1] in CxxTerm.OPS:
                names[-1] = "operator" + CxxTerm.OPS[names[-1]]
            elif names[-1] == "__ct":
                assert len(names) >= 2
                names[-1] = names[-2]
            elif names[-1] == "__dt":
                assert len(names) >= 2
                names[-1] = f"~{names[-2]}"
            return "::".join(names)
        if self.kind == CxxTerm.Kind.FUNCTION:
            assert self.function_params is not None
            prefix = ""
            if self.function_return is not None:
                prefix = f"{self.function_return} (*) "
            args = ", ".join(str(p) for p in self.function_params)
            return f"{prefix}({args})"
        if self.kind == CxxTerm.Kind.SYMBOL_REFERENCE:
            assert self.symbol_reference is not None
            return f"&{self.symbol_reference}"
        if self.kind == CxxTerm.Kind.POINTER:
            return "*"
        if self.kind == CxxTerm.Kind.REFERENCE:
            return "&"
        if self.kind == CxxTerm.Kind.LONG_LONG:
            return "long long"
        if self.kind == CxxTerm.Kind.LONG_DOUBLE:
            return "long double"
        if self.kind == CxxTerm.Kind.WIDE_CHAR:
            return "wchar_t"
        if self.kind == CxxTerm.Kind.ELLIPSIS:
            return "..."
        # Remaining kinds print as their lowercased enum name ("int", etc.).
        return str(self.kind.name).lower()
@dataclass
class CxxType:
    """
    A complete C++ type, as an ordered list of CxxTerms.

    The final element in `terms` is the underlying primitive (such as CHAR
    or FUNCTION); working from the end, each earlier term modifies the type,
    mirroring the order in the mangled form. Example:
        `[CONST, POINTER, CHAR]` => "char * const"
        `[POINTER, CONST, CHAR]` => "char const *"
    """

    terms: List[CxxTerm] = field(default_factory=list)

    @staticmethod
    def parse(src: TextIOBase) -> "CxxType":
        """Parse modifier terms until a terminating (primitive) term or a
        template-argument delimiter is reached."""
        parsed: List[CxxTerm] = []
        while peek(src) not in (",", ">"):
            term = CxxTerm.parse(src)
            parsed.append(term)
            if term.kind not in CxxTerm.NONTERMINATING_KINDS:
                break
        return CxxType(terms=parsed)

    def __str__(self) -> str:
        return " ".join(str(a) for a in reversed(self.terms)).strip()
@dataclass
class CxxSymbol:
    """Represents a C++ symbol & its type."""

    # Special prefixes that match exactly and use a single "_" separator.
    STATIC_FUNCTIONS = {"__sinit", "__sterm"}

    name: CxxTerm
    type: CxxType

    @staticmethod
    def parse(src: TextIOBase) -> "CxxSymbol":
        """Split `src` into an unmangled base name plus mangled class/type
        info, then assemble the fully qualified symbol.

        The split point is heuristic (see inline comments); some symbols
        with internal "__" cannot be demangled correctly.
        """
        # Find the `base_name`, which is the prefix of `src` which usually represents
        # original, unmangled name of the symbol. It's typically separated from the
        # type information by the rightmost "__", but there are many edge cases.

        # The number of underscores that separate the `base_name` from the rest of the type info
        strip_underscores = 0
        # The nesting depth of "<...>" clauses in `base_name`
        template_depth = 0

        # By default, the `base_name` is the *entire* src string (with no trailing underscores)
        with peeking(src):
            base_name = src.read()
        with peeking(src):
            # `chars` is a buffer of the chars we've read from `src`
            chars = ""
            while True:
                # Try to read 1 character; if it's empty, return the `base_name` we've found so far
                c = src.read(1)
                if not c:
                    break
                # If we hit either "," or ">" but we are not parsing a template, that means we have
                # been called to parse a SYMBOL_REFERENCE inside a template, and we have hit a delimiter.
                # Otherwise, track "<" and ">" counts in `template_depth`.
                if c in (",", ">") and template_depth == 0:
                    if strip_underscores == 0:
                        base_name = chars
                    break
                elif c == "<":
                    template_depth += 1
                elif c == ">":
                    template_depth -= 1
                chars += c
                if chars in CxxSymbol.STATIC_FUNCTIONS:
                    # STATIC_FUNCTIONS are special prefixes which match exactly and only have 1 separating "_"
                    base_name = chars
                    strip_underscores = 1
                    break
                elif c == "_" and peek(src) == "_":
                    # If we're in the middle of reading a "__", then this may be where `base_name` ends.
                    # However, we only split here if the character after the "__" could be the start of
                    # of the mangled type info, or a class name.
                    #
                    # - "C" or "F" are common starts to mangled type info for functions (CONST, FUNCTION).
                    # - "Q" or a number indicates the start of a qualified name or a class name.
                    #
                    # This is a heuristic, and will fail to parse non-function unqualified symbols (such
                    # as "foo__Ul") and some functions with internal "__" characters (such as "bar__5__FooFv")
                    lookahead = peek(src, 2)
                    if len(lookahead) < 2 or lookahead[1] in "CFQ0123456789":
                        base_name = chars[:-1]
                        strip_underscores = 2

        # `base_name` is found, so remove it (and any separator underscores) from the input buffer
        read_exact(src, len(base_name) + strip_underscores)

        if base_name in CxxSymbol.STATIC_FUNCTIONS:
            # This is a special case. A function like `__sinit_Foo_cpp` is the static
            # constructor (ctor) for "Foo.cpp".
            # "Demangle" this into `void Foo_cpp::__sinit(void)`
            with as_stringio("Fv_v") as buf:
                type = CxxType.parse(buf)
            return CxxSymbol(
                name=CxxTerm(
                    CxxTerm.Kind.QUALIFIED,
                    qualified_name=[CxxName(src.read()), CxxName(base_name)],
                ),
                type=type,
            )

        # After the "__", the `base_name` is followed by the (optional) qualified class name, then the symbol's type.
        class_name: Optional[CxxType] = CxxType.parse(src)
        if peek(src) not in ("", ",", ">"):
            type = CxxType.parse(src)
        else:
            # Only one type was present, so it was the symbol's type, not a class name.
            assert class_name is not None
            type = class_name
            class_name = None

        # Combine the `base_name` with the qualified class name to build a fully qualified symbol name.
        qualified_name: List["CxxName"] = []
        if class_name is not None:
            assert len(class_name.terms) == 1
            assert class_name.terms[0].kind == CxxTerm.Kind.QUALIFIED
            assert class_name.terms[0].qualified_name is not None
            qualified_name.extend(class_name.terms[0].qualified_name)
        with as_stringio(str(len(base_name)) + base_name) as buf:
            qualified_name.append(CxxName.parse(buf))

        name = CxxTerm(CxxTerm.Kind.QUALIFIED, qualified_name=qualified_name)
        return CxxSymbol(name=name, type=type)

    def __str__(self) -> str:
        return f"{self.name} {self.type}"
def parse(mangled: str) -> CxxSymbol:
    """Demangle `mangled` into a CxxSymbol, first undoing doldisasm.py's
    character substitutions. Raises ValueError on malformed input."""
    for original, escaped in DOLDISASM_SUBSTITUTIONS:
        mangled = mangled.replace(escaped, original)
    with as_stringio(mangled) as buf:
        return CxxSymbol.parse(buf)
def demangle(mangled: str) -> str:
    """Best-effort wrapper around parse(): return the demangled rendering,
    or the input unchanged when it does not parse as a CW symbol."""
    try:
        return str(parse(mangled))
    except ValueError:
        return mangled
def test() -> bool:
    """Run the built-in demangler test cases; return True if all pass."""
    TEST_CASES = [
        # Unmangled, but with underscores
        ("__foo_bar", "__foo_bar"),
        # Namespacing
        ("get__6FoobarFi", "Foobar::get (int)"),
        # Constructor / Destructor
        (
            "__ct__10FooBarFoosFP7ArgPtrsUsPCc",
            "FooBarFoos::FooBarFoos (ArgPtrs *, short unsigned, char const *)",
        ),
        ("__dt__10FooBarFoosFv", "FooBarFoos::~FooBarFoos (void)"),
        # Overloaded operators
        ("__dl__FPv", "operator delete (void *)"),
        ("__nw__FUl", "operator new (long unsigned)"),
        ("__eq__3FooCFRC3Foo", "Foo::operator== (Foo const &) const"),
        # Namespacing & templated arguments
        (
            "do__Q214GrandFooSystem8MiniFoosFUlPC3VecP3VecfUlUlPP8LateBazzUlUc",
            "GrandFooSystem::MiniFoos::do (long unsigned, Vec const *, Vec *, float, long unsigned, long unsigned, LateBazz * *, long unsigned, char unsigned)",
        ),
        (
            "spin__11ThingieBaseFRCQ29MyLibrary8FVec3$$0f$$1RCQ29MyLibrary8FVec3$$0f$$1RCQ29MyLibrary8FVec3$$0f$$1",
            "ThingieBase::spin (MyLibrary::FVec3<float> const &, MyLibrary::FVec3<float> const &, MyLibrary::FVec3<float> const &)",
        ),
        # Templated function names
        (
            "function<&alpha,&beta,&GAMMA>__FR5Class_i",
            "function<&alpha, &beta, &GAMMA> int (*) (Class &)",
        ),
        (
            "function<&m0__5Class,&mf0__5ClassFi>__FR5Class_i",
            "function<&m0 Class, &Class::mf0 (int)> int (*) (Class &)",
        ),
        # Static functions
        ("__sinit_Foo_cpp", "Foo_cpp::__sinit void (*) (void)"),
        ("__sterm_Foo_cpp", "Foo_cpp::__sterm void (*) (void)"),
        # Confusing function names (check that we split on the last valid "__")
        ("foo__3BarFv", "Bar::foo (void)"),
        ("foo__3BarFv__3BarFv", "Bar::foo__3BarFv (void)"),
        ("foo__Q23Bar3BarFv__3BarFv", "Bar::foo__Q23Bar3BarFv (void)"),
        (
            "copy__Q23std14__copy$$0Pv$$41$$40$$1FPPvPPvPPv",
            "std::__copy<void *, 1, 0>::copy (void * *, void * *, void * *)",
        ),
        ("__init__bar__9Bar$$03Foo$$1", "__init__bar Bar<Foo>"),
        ("bar__5__BarFv", "__Bar::bar (void)"),
        # These examples we fail to demangle correctly
        ("bar__5__FooFv", "bar__5__FooFv"),  # should be "__Foo::bar (void)"
        (
            "foo__Ul",
            "foo__Ul",
        ),  # should be "foo long unsigned" (or "long unsigned foo")
    ]
    all_pass = True
    for mangled, demangled in TEST_CASES:
        output = demangle(mangled)
        if output != demangled:
            print(f"Failed: {(mangled, output)}")
            all_pass = False
    if all_pass:
        print(f"All {len(TEST_CASES)} test cases passed")
    return all_pass
def main() -> None:
    """Command-line entry point for the demangler."""
    import sys

    argv = sys.argv
    if len(argv) != 2:
        # Wrong number of arguments: show usage and fail
        print(f"usage: {argv[0]} <mangled_name>")
        sys.exit(1)
    arg = argv[1]
    if arg == "--test":
        # Run internal unit tests
        sys.exit(0 if test() else 1)
    if arg == "-":
        # Batch mode: demangle every line read from stdin
        for line in sys.stdin:
            print(demangle(line.strip()))
        sys.exit(0)
    # Default: demangle the single command-line argument
    print(demangle(arg))


if __name__ == "__main__":
    main()

View File

@ -1,14 +0,0 @@
from dataclasses import dataclass
from typing import NoReturn
@dataclass
class DecompFailure(Exception):
    """Exception for expected decompilation failures (e.g. unparseable or
    unsupported input); rendered via its `message` rather than a traceback."""

    # Human-readable description of what went wrong
    message: str

    def __str__(self) -> str:
        return self.message
def static_assert_unreachable(x: NoReturn) -> NoReturn:
    """Signal that a supposedly-unreachable code path was executed.

    The `NoReturn` parameter type doubles as a static exhaustiveness check:
    type checkers flag calls where `x` has not been narrowed away.
    """
    raise Exception(f"Unreachable: {x!r}")

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,216 +0,0 @@
import abc
from contextlib import contextmanager
from dataclasses import dataclass, replace
from typing import Callable, Dict, Iterator, List, Optional, Union
from .error import DecompFailure
from .options import Target
from .asm_instruction import (
ArchAsmParsing,
Argument,
AsmGlobalSymbol,
AsmLiteral,
BinOp,
JumpTarget,
RegFormatter,
Register,
parse_asm_instruction,
)
@dataclass(frozen=True)
class StackLocation:
    """
    Represents a word on the stack. Currently only used for pattern matching.

    `symbolic_offset` represents a label offset that is only used in patterns,
    to represent the "N" in arguments such as `(N+4)($sp)`.
    """

    offset: int
    symbolic_offset: Optional[str]

    def __str__(self) -> str:
        if self.symbolic_offset is None:
            return f"{self.offset}($sp)"
        return f"{self.symbolic_offset}+{self.offset}($sp)"

    def offset_as_arg(self) -> Argument:
        """Convert the offset back into its asm Argument form."""
        sym = self.symbolic_offset
        if sym is None:
            return AsmLiteral(self.offset)
        if self.offset == 0:
            return AsmGlobalSymbol(sym)
        return BinOp(lhs=AsmGlobalSymbol(sym), op="+", rhs=AsmLiteral(self.offset))

    @staticmethod
    def from_offset(offset: Argument) -> Optional["StackLocation"]:
        """Build a StackLocation from an asm offset Argument.

        Returns None when the argument shape is not one of: literal,
        symbol, or `symbol +/- literal`. Numeric offsets are rounded
        down to a word boundary.
        """

        def aligned(value: int) -> int:
            # Round down to a multiple of 4 (one stack word)
            return value & ~3

        if isinstance(offset, AsmLiteral):
            return StackLocation(offset=aligned(offset.value), symbolic_offset=None)
        if isinstance(offset, AsmGlobalSymbol):
            return StackLocation(offset=0, symbolic_offset=offset.symbol_name)
        if (
            isinstance(offset, BinOp)
            and offset.op in ("+", "-")
            and isinstance(offset.lhs, AsmGlobalSymbol)
            and isinstance(offset.rhs, AsmLiteral)
        ):
            literal = -offset.rhs.value if offset.op == "-" else offset.rhs.value
            return StackLocation(
                offset=aligned(literal),
                symbolic_offset=offset.lhs.symbol_name,
            )
        return None
# A Location is either a machine register or a word on the stack
Location = Union[Register, StackLocation]
@dataclass(frozen=True)
class InstructionMeta:
    """Provenance information carried by each Instruction."""

    # True if the original asm line was marked with a goto pattern
    emit_goto: bool
    # Asm source filename & line number
    filename: str
    lineno: int
    # True if the Instruction is not directly from the source asm
    synthetic: bool

    @staticmethod
    def missing() -> "InstructionMeta":
        """Placeholder metadata for instructions with no asm source."""
        return InstructionMeta(
            emit_goto=False, filename="<unknown>", lineno=0, synthetic=True
        )

    def derived(self) -> "InstructionMeta":
        """Copy of this metadata, flagged as synthetic."""
        return replace(self, synthetic=True)

    def loc_str(self) -> str:
        """Human-readable source location, e.g. `at foo.s line 12`."""
        qualifier = "at" if not self.synthetic else "near"
        return f"{qualifier} {self.filename} line {self.lineno}"
@dataclass(frozen=True, eq=False)
class Instruction:
    """A single parsed asm instruction plus its dependency information.

    `eq=False`: instances compare by identity, so two occurrences of the
    same instruction text remain distinct objects.
    """

    mnemonic: str
    args: List[Argument]
    meta: InstructionMeta
    # Track register and stack dependencies
    # An Instruction evaluates by reading from `inputs`, invalidating `clobbers`,
    # then writing to `outputs` (in that order)
    inputs: List[Location]
    clobbers: List[Location]
    outputs: List[Location]
    # This should be typed as `eval_fn: Optional[Callable[[NodeState, InstrArgs], object]]`
    # but those classes are defined in translate.py. We're unable to use correct
    # types here without creating circular dependencies.
    # The return value is ignored, but is typed as `object` so lambdas are more ergonomic.
    # This member should only be accessed by `evaluate_instruction`.
    eval_fn: Optional[Callable[..., object]]
    # Branch target (label or register) if this instruction jumps
    jump_target: Optional[Union[JumpTarget, Register]] = None
    # Callee if this instruction is a function call
    function_target: Optional[Argument] = None
    is_conditional: bool = False
    is_return: bool = False
    is_store: bool = False
    # These are for MIPS. `is_branch_likely` refers to branch instructions which
    # execute their delay slot only if the branch *is* taken. (Maybe these two
    # bools should be merged into a 3-valued enum?)
    has_delay_slot: bool = False
    is_branch_likely: bool = False
    # True if the Instruction was part of a matched IR pattern, but not elided
    in_pattern: bool = False

    def is_jump(self) -> bool:
        """True if this instruction transfers control (branch or return)."""
        return self.jump_target is not None or self.is_return

    def __str__(self) -> str:
        if not self.args:
            return self.mnemonic
        args = ", ".join(str(arg) for arg in self.args)
        return f"{self.mnemonic} {args}"

    def arch_mnemonic(self, arch: "ArchAsm") -> str:
        """Combine architecture name with mnemonic for pattern matching"""
        return f"{arch.arch}:{self.mnemonic}"

    def clone(self) -> "Instruction":
        """Copy of this instruction, with its meta marked as synthetic."""
        return replace(self, meta=self.meta.derived())
class ArchAsm(ArchAsmParsing):
    """Arch-specific information that relates to the asm level. Extends ArchAsmParsing."""

    # Which architecture this backend implements
    arch: Target.ArchEnum

    # Special-purpose registers
    stack_pointer_reg: Register
    frame_pointer_reg: Optional[Register]
    return_address_reg: Register

    # Register groupings (return values, arguments, temporaries, callee-saved)
    all_return_regs: List[Register]
    argument_regs: List[Register]
    simple_temp_regs: List[Register]
    temp_regs: List[Register]
    saved_regs: List[Register]
    all_regs: List[Register]

    @abc.abstractmethod
    def missing_return(self) -> List[Instruction]:
        """Return the instruction sequence to use for a missing return."""
        ...

    @abc.abstractmethod
    def parse(
        self, mnemonic: str, args: List[Argument], meta: InstructionMeta
    ) -> Instruction:
        """Build a full Instruction (with dependency info) from parsed asm parts."""
        ...
def parse_instruction(
    line: str,
    meta: InstructionMeta,
    arch: ArchAsm,
    reg_formatter: RegFormatter,
    defines: Dict[str, int],
) -> Instruction:
    """Parse one line of asm text into an arch-specific Instruction.

    Any failure is re-raised as a DecompFailure whose message includes
    the source location and the offending line.
    """
    try:
        parsed = parse_asm_instruction(line, arch, reg_formatter, defines)
        return arch.parse(parsed.mnemonic, parsed.args, meta)
    except Exception as exc:
        message = f"Failed to parse instruction {meta.loc_str()}: {line}"
        if isinstance(exc, DecompFailure):
            message += "\n\n" + exc.message
        raise DecompFailure(message)
@dataclass
class InstrProcessingFailure(Exception):
    """Wraps an exception raised while processing a single Instruction,
    recording which instruction was being handled at the time."""

    # The instruction being processed when the error occurred
    instr: Instruction

    def __str__(self) -> str:
        return f"Error while processing instruction:\n{self.instr}"
@contextmanager
def set_current_instr(instr: Instruction) -> Iterator[None]:
    """Context manager that attributes any exception raised in its body to
    `instr`, by wrapping it in an InstrProcessingFailure.

    Usage: `with set_current_instr(instr): ...`
    """
    try:
        yield
    except Exception as exc:
        raise InstrProcessingFailure(instr) from exc

View File

@ -1,416 +0,0 @@
import abc
from collections import defaultdict
from dataclasses import dataclass, field, replace
from typing import ClassVar, Dict, List, Optional, TypeVar
from .error import static_assert_unreachable
from .flow_graph import (
ArchFlowGraph,
BaseNode,
FlowGraph,
InstrRef,
LocationRefSetDict,
RefSet,
Reference,
TerminalNode,
build_flowgraph,
)
from .asm_file import AsmData, Function
from .asm_instruction import (
Argument,
AsmAddressMode,
AsmGlobalSymbol,
AsmInstruction,
AsmLiteral,
BinOp,
JumpTarget,
RegFormatter,
Register,
)
from .instruction import (
Instruction,
InstructionMeta,
Location,
StackLocation,
parse_instruction,
)
@dataclass(eq=False, frozen=True)
class IrPattern(abc.ABC):
    """
    Template for defining "IR" patterns that can match against input asm.
    The matching process uses the FlowGraph and register analysis to compute
    inter-instruction dependencies, so these patterns can match even when
    they have been interleaved/reordered by the compiler in the input asm.
    IrPattern subclasses *must* define `parts` and `replacement`, and can
    optionally implement `check()`.
    For now, the pattern cannot contain any branches, and the replacement
    must be a single instruction (though, it can be fictive).
    """

    # Asm instruction templates to match, in order
    parts: ClassVar[List[str]]
    # Single asm instruction that replaces the matched body
    replacement: ClassVar[str]

    def check(self, m: "IrMatch") -> bool:
        """Override to perform additional checks before replacement."""
        return True

    def compile(self, arch: ArchFlowGraph) -> "CompiledIrPattern":
        """Parse `parts`/`replacement` and build a FlowGraph fragment used
        for matching this pattern against input asm."""
        missing_meta = InstructionMeta.missing()
        regf = RegFormatter()
        replacement_instr = parse_instruction(
            self.replacement, missing_meta, arch, regf, {}
        )
        name = f"__pattern_{self.__class__.__name__}"
        func = Function(name=name)
        # Add a fictive nop instruction for each input to the replacement_instr.
        # This acts as a placeholder Reference to represent where the input was set,
        # and allows each input to be sourced from a different Reference.
        assert len(replacement_instr.inputs) == len(
            set(replacement_instr.inputs)
        ), "pattern inputs must be unique"
        for inp in replacement_instr.inputs:
            func.new_instruction(
                Instruction(
                    "in.fictive",
                    [],
                    meta=missing_meta,
                    inputs=[],
                    clobbers=[],
                    outputs=[inp],
                    eval_fn=None,
                )
            )
        # Parse the pattern body itself, then build its flow graph fragment
        for part in self.parts:
            func.new_instruction(parse_instruction(part, missing_meta, arch, regf, {}))
        asm_data = AsmData()
        flow_graph = build_flowgraph(func, asm_data, arch, fragment=True)
        return CompiledIrPattern(
            source=self,
            flow_graph=flow_graph,
            replacement_instr=replacement_instr,
        )
@dataclass(eq=False, frozen=True)
class CompiledIrPattern:
    """An IrPattern parsed into instructions plus a FlowGraph fragment,
    ready to be matched against input asm (see IrPattern.compile)."""

    # The pattern this was compiled from
    source: IrPattern
    # FlowGraph fragment built from `source.parts` (plus fictive input nops)
    flow_graph: FlowGraph
    # Parsed form of `source.replacement`
    replacement_instr: Instruction
@dataclass
class IrMatch:
    """
    IrMatch represents the matched state of an IrPattern.
    This object is considered read-only; none of its methods modify its state.
    The `map_*` methods take a pattern part and return the matched part of the original asm.
    Single-letter registers and all-uppercase symbols in patterns are symbolic,
    whereas other registers and symbols are matched literally.
    """

    # Bindings from symbolic pattern register names to matched asm registers
    symbolic_registers: Dict[str, Register] = field(default_factory=dict)
    # Bindings from symbolic pattern symbol names to matched asm arguments
    symbolic_args: Dict[str, Argument] = field(default_factory=dict)
    # Map from pattern instruction Reference to the matched asm RefSet
    ref_map: Dict[Reference, RefSet] = field(default_factory=dict)

    @staticmethod
    def _is_symbolic_reg(arg: Register) -> bool:
        # Single-letter registers are symbolic; everything else is literal
        return len(arg.register_name) <= 1

    @staticmethod
    def _is_symbolic_sym(arg: AsmGlobalSymbol) -> bool:
        # Uppercase symbols are symbolic; everything else is literal
        return arg.symbol_name.isupper()

    @staticmethod
    def _is_label_sym(arg: AsmGlobalSymbol) -> bool:
        # Symbols starting with "." are labels
        return arg.symbol_name.startswith(".")

    def eval_math(self, pat: Argument) -> Argument:
        """Evaluate constant math in a *pattern* argument, substituting
        symbolic arguments with their matched values."""
        # This function can only evaluate math in *patterns*, not candidate
        # instructions. It does not need to support arbitrary math, only
        # math used by IR patterns.
        if isinstance(pat, AsmLiteral):
            return pat
        if isinstance(pat, BinOp):
            lhs = self.eval_math(pat.lhs)
            rhs = self.eval_math(pat.rhs)
            # Only +, -, << fold to literals; other ops stay symbolic
            if isinstance(lhs, AsmLiteral) and isinstance(rhs, AsmLiteral):
                if pat.op == "+":
                    return AsmLiteral(lhs.value + rhs.value)
                if pat.op == "-":
                    return AsmLiteral(lhs.value - rhs.value)
                if pat.op == "<<":
                    return AsmLiteral(lhs.value << rhs.value)
            return BinOp(pat.op, lhs, rhs)
        elif isinstance(pat, AsmGlobalSymbol):
            assert (
                pat.symbol_name in self.symbolic_args
            ), f"undefined variable in math pattern: {pat.symbol_name}"
            lit = self.symbolic_args[pat.symbol_name]
            return lit
        else:
            assert False, f"bad pattern expr: {pat}"

    def map_reg(self, key: Register) -> Register:
        """Map a pattern register to the matched asm register."""
        if self._is_symbolic_reg(key):
            return self.symbolic_registers[key.register_name]
        return key

    def map_arg(self, key: Argument) -> Argument:
        """Map a pattern argument to the matched asm argument."""
        if isinstance(key, AsmLiteral):
            return key
        if isinstance(key, Register):
            return self.map_reg(key)
        if isinstance(key, AsmGlobalSymbol):
            assert not self._is_label_sym(key), "not supported yet"
            if self._is_symbolic_sym(key):
                return self.symbolic_args[key.symbol_name]
            return key
        if isinstance(key, AsmAddressMode):
            return AsmAddressMode(lhs=self.map_arg(key.lhs), rhs=self.map_reg(key.rhs))
        if isinstance(key, BinOp):
            return self.eval_math(key)
        assert False, f"bad pattern part: {key}"

    def map_ref(self, key: Reference) -> InstrRef:
        """Map a pattern instruction Reference to its (unique) matched InstrRef."""
        refset = self.ref_map[key]
        value = refset.get_unique()
        assert isinstance(value, InstrRef)
        return value

    def try_map_ref(self, key: Reference) -> Optional[Reference]:
        """Like map_ref, but returns None when the pattern ref has no unique match."""
        refset = self.ref_map.get(key)
        if refset is None:
            return None
        return refset.get_unique()

    def map_location(self, key: Location) -> Location:
        """Map a pattern register or stack location to the matched asm one."""
        if isinstance(key, Register):
            return self.map_reg(key)
        if isinstance(key, StackLocation):
            loc = StackLocation.from_offset(self.map_arg(key.offset_as_arg()))
            assert loc is not None
            return loc
        static_assert_unreachable(key)

    def map_asm(self, key: Instruction) -> AsmInstruction:
        """Map a whole pattern instruction into a concrete AsmInstruction."""
        return AsmInstruction(key.mnemonic, [self.map_arg(a) for a in key.args])
class TryIrMatch(IrMatch):
    """
    TryIrMatch represents the partial (in-progress) match state of an IrPattern.
    Unlike IrMatch, all of its `match_*` methods may modify its internal state.
    These all take a pair of arguments: pattern part, and candidate asm part.
    All return True on success and False on a match failure.
    """

    K = TypeVar("K")
    V = TypeVar("V")

    def _match_var(self, var_map: Dict[K, V], key: K, value: V) -> bool:
        """Bind `key` to `value` in `var_map`; fail if `key` is already
        bound to a different value."""
        if key in var_map:
            if var_map[key] != value:
                return False
        else:
            var_map[key] = value
        return True

    def match_arg(self, pat: Argument, cand: Argument) -> bool:
        """Match one pattern argument against one candidate asm argument."""
        if isinstance(pat, AsmLiteral):
            return pat == cand
        if isinstance(pat, Register):
            # Literal registers must match exactly; symbolic ones bind
            if not self._is_symbolic_reg(pat):
                return pat == cand
            if not isinstance(cand, Register):
                return False
            return self._match_var(self.symbolic_registers, pat.register_name, cand)
        if isinstance(pat, AsmGlobalSymbol):
            assert not self._is_label_sym(pat), "not supported yet"
            if self._is_symbolic_sym(pat):
                return self._match_var(self.symbolic_args, pat.symbol_name, cand)
            return pat == cand
        if isinstance(pat, AsmAddressMode):
            return (
                isinstance(cand, AsmAddressMode)
                and self.match_arg(pat.lhs, cand.lhs)
                and self.match_arg(pat.rhs, cand.rhs)
            )
        if isinstance(pat, BinOp):
            # Pattern math must evaluate to exactly the candidate value
            return self.eval_math(pat) == cand
        assert False, f"bad pattern arg: {pat}"

    def match_instr(self, pat: Instruction, cand: Instruction) -> bool:
        """Match the mnemonic and every argument of a candidate instruction."""
        if pat.mnemonic != cand.mnemonic or len(pat.args) != len(cand.args):
            return False
        return all(self.match_arg(*args) for args in zip(pat.args, cand.args))

    def match_refset(self, pat: Reference, cand_set: RefSet) -> bool:
        """Bind a pattern Reference to a candidate RefSet."""
        return self._match_var(self.ref_map, pat, cand_set.copy())

    def match_inputrefs(
        self, pat: LocationRefSetDict, cand: LocationRefSetDict
    ) -> bool:
        """Match the input dependencies of a pattern instruction against
        the candidate instruction's input dependencies."""
        for pat_loc, pat_refs in pat.items():
            cand_loc = self.map_location(pat_loc)
            cand_refs = cand.get(cand_loc)
            pat_ref = pat_refs.get_unique()
            assert pat_ref is not None, "patterns can not have phis"
            if not self.match_refset(pat_ref, cand_refs):
                return False
        return True

    def rename_reg(self, pat: Register, new_reg: Register) -> None:
        """Rebind an already-matched symbolic register to `new_reg`."""
        assert pat.register_name in self.symbolic_registers
        self.symbolic_registers[pat.register_name] = new_reg
def simplify_ir_patterns(
    arch: ArchFlowGraph, flow_graph: FlowGraph, patterns: List[IrPattern]
) -> None:
    """Match each IrPattern against `flow_graph` and rewrite matches in place.

    For every match: the pattern's final instruction is replaced by the
    pattern's replacement instruction, pattern-body instructions whose
    outputs are no longer used become nops, and fictive `move` instructions
    are inserted to preserve the replacement's inputs. Mutates `flow_graph`.
    """
    # Precompute a RefSet for each mnemonic
    # NB: It's difficult to plainly iterate over all Instruction in the flow_graph
    # while it is being modified by the pattern replacement machinery.
    refs_by_mnemonic = defaultdict(list)
    for node in flow_graph.nodes:
        for ref in node.block.instruction_refs:
            refs_by_mnemonic[ref.instruction.mnemonic].append(ref)
    # Counter used to name temporary registers
    fictive_reg_index = 0
    for pattern_base in patterns:
        pattern = pattern_base.compile(arch)
        # For now, patterns can't have branches: they should only have 2 Nodes,
        # a BaseNode and an (empty) TerminalNode.
        assert (
            len(pattern.flow_graph.nodes) == 2
        ), "branching patterns not yet supported"
        assert isinstance(pattern.flow_graph.nodes[0], BaseNode)
        assert isinstance(pattern.flow_graph.nodes[1], TerminalNode)
        pattern_node = pattern.flow_graph.nodes[0]
        pattern_refs = pattern_node.block.instruction_refs
        # Split the pattern asm into 3 disjoint sets of instructions:
        # input_refs ("in.fictive"s), body_refs, and tail_ref (the last instruction)
        n_inputs = len(pattern.replacement_instr.inputs)
        input_refs, body_refs, tail_ref = (
            pattern_refs[:n_inputs],
            pattern_refs[n_inputs:-1],
            pattern_refs[-1],
        )
        assert all(r.instruction.mnemonic == "in.fictive" for r in input_refs)
        assert all(r.instruction.mnemonic != "in.fictive" for r in body_refs)
        # For now, pattern inputs must be Registers, not StackLocations. It's not always
        # trivial to create temporary StackLocations in the same way we create temporary
        # Registers during replacement. (Also, we do not have a way to elide temporary
        # stack variables during translation like we do with registers.)
        assert all(
            isinstance(inp, Register) for inp in pattern.replacement_instr.inputs
        )
        # For now, patterns can only have 1 output register (which must be set by the
        # final instruction in the pattern). This simplifies the replacement step because
        # we can replace the final instruction and know that all the pattern inputs have
        # been assigned by there, and the output has not yet been used.
        assert len(pattern.replacement_instr.outputs) == 1
        assert pattern.replacement_instr.outputs == tail_ref.instruction.outputs
        # Start matches with a mnemonic match for the last instruction in the pattern
        for cand_tail_ref in refs_by_mnemonic.get(tail_ref.instruction.mnemonic, []):
            state = TryIrMatch()
            if not state.match_refset(tail_ref, RefSet([cand_tail_ref])):
                continue
            # Continue matching by working backwards through the pattern
            is_match = True
            for pat_ref in [tail_ref] + body_refs[::-1]:
                # By pattern construction, pat_ref should be in the state's ref_map
                # (It would be missing for "disjoint" or irrelevant instructions in the
                # pattern, like random nops: these aren't allowed)
                cand = state.try_map_ref(pat_ref)
                pat_inputs = pattern.flow_graph.instr_inputs[pat_ref]
                if not (
                    isinstance(cand, InstrRef)
                    and state.match_instr(pat_ref.instruction, cand.instruction)
                    and state.match_inputrefs(pat_inputs, flow_graph.instr_inputs[cand])
                ):
                    is_match = False
                    break
            # Perform any additional pattern-specific validation
            if not is_match or not pattern.source.check(state):
                continue
            # Create temporary registers for the inputs to the replacement_instr.
            # These retain the input register contents even if an unrelated instruction
            # overwrites the register in the middle of the pattern.
            temp_reg_refs = {}
            for pat_ref in input_refs:
                assert len(pat_ref.instruction.outputs) == 1
                input_reg = pat_ref.instruction.outputs[0]
                assert isinstance(input_reg, Register)
                # It doesn't matter which instruction we pick; the new move instruction to save
                # the register can come before any of the instructions that use it.
                input_uses = pattern.flow_graph.instr_uses[pat_ref].get(input_reg)
                input_use_ref = state.map_ref(next(iter(input_uses)))
                # Create a unique fictive register to act as a temporary
                original_reg = state.map_reg(input_reg)
                temp_reg = Register(
                    f"{original_reg.register_name}_fictive_{fictive_reg_index}"
                )
                fictive_reg_index += 1
                state.rename_reg(input_reg, temp_reg)
                move_asm = AsmInstruction("move.fictive", [temp_reg, original_reg])
                move_ref = input_use_ref.add_instruction_before(move_asm, arch)
                temp_reg_refs[temp_reg] = move_ref
                # Update the instr_inputs/instr_uses graph
                for src_ref in flow_graph.instr_inputs[input_use_ref].get(original_reg):
                    flow_graph.add_instruction_use(
                        use=move_ref, loc=original_reg, src=src_ref
                    )
            # Rewrite the final instruction with the pattern's replacement instruction.
            repl_ref = state.map_ref(tail_ref)
            repl_asm = state.map_asm(pattern.replacement_instr)
            repl_ref.replace_instruction(repl_asm, arch)
            # Reset repl_ref's dependencies, then repopulate them from temp_reg_refs
            flow_graph.clear_instruction_inputs(repl_ref)
            for temp_reg, temp_ref in temp_reg_refs.items():
                flow_graph.add_instruction_use(use=repl_ref, loc=temp_reg, src=temp_ref)
            # For the rest of the instructions in the pattern body, take any instructions
            # whose outputs aren't used later and replace them with nops.
            for pat_ref in body_refs[::-1]:
                cand_ref = state.map_ref(pat_ref)
                if flow_graph.instr_uses[cand_ref].is_empty():
                    # Replace cand_ref with a nop, and clear its dependencies
                    nop_asm = AsmInstruction("nop", [])
                    cand_ref.replace_instruction(nop_asm, arch)
                    flow_graph.clear_instruction_inputs(cand_ref)
                elif not cand_ref.instruction.in_pattern:
                    # It needs to be kept; but ensure the meta.in_pattern flag is set
                    cand_ref.instruction = replace(
                        cand_ref.instruction, in_pattern=True
                    )
        # After all of the rewrites above, verify that the instruction dependency
        # data structures are still consistent
        flow_graph.validate_instruction_graph()

View File

@ -1,647 +0,0 @@
import argparse
import gc
import sys
import traceback
from pathlib import Path
from typing import Dict, List, Optional, Union
from .c_types import build_typemap, dump_typemap
from .error import DecompFailure
from .flow_graph import FlowGraph, build_flowgraph, visualize_flowgraph
from .if_statements import get_function_text
from .options import CodingStyle, Options, Target
from .asm_file import AsmData, Function, parse_file
from .instruction import InstrProcessingFailure
from .translate import (
Arch,
FunctionInfo,
GlobalInfo,
translate_to_ast,
narrow_func_call_outputs,
)
from .types import TypePool
from .arch_mips import MipsArch
from .arch_ppc import PpcArch
def print_current_exception(sanitize: bool) -> None:
    """Print a traceback for the current exception to stdout.

    When `sanitize` is true, each frame's filename is reduced to its
    basename and its line number zeroed, which keeps test output stable."""
    if not sanitize:
        traceback.print_exc(file=sys.stdout)
        return
    tb = traceback.TracebackException(*sys.exc_info())
    # Report the underlying cause of InstrProcessingFailure wrappers
    if tb.exc_type == InstrProcessingFailure and tb.__cause__:
        tb = tb.__cause__
    for frame in tb.stack:
        frame.lineno = 0
        frame.filename = Path(frame.filename).name
    print("".join(tb.format(chain=False)), end="")
def print_exception_as_comment(
    exc: Exception, context: Optional[str], sanitize: bool
) -> None:
    """Render `exc` on stdout as a C block comment, so failures appear
    inline in the decompiled output.

    `context` (e.g. a function name) is folded into the message when given;
    `sanitize` is forwarded to print_current_exception for internal errors."""
    where = f" in {context}" if context is not None else ""
    if isinstance(exc, OSError):
        print(f"/* OSError{where}: {exc} */")
        return
    if isinstance(exc, DecompFailure):
        print("/*")
        print(f"Decompilation failure{where}:\n")
        print(exc)
        print("*/")
        return
    print("/*")
    print(f"Internal error{where}:\n")
    print_current_exception(sanitize=sanitize)
    print("*/")
def run(options: Options) -> int:
    """Run the decompiler over the input files described by `options` and
    print C output to stdout.

    Returns a process exit code: 0 on success, 1 if any step failed
    (parse errors, unknown functions, or per-function decompilation
    failures, which are printed as comments)."""
    # Pick the arch-specific backend
    arch: Arch
    if options.target.arch == Target.ArchEnum.MIPS:
        arch = MipsArch()
    elif options.target.arch == Target.ArchEnum.PPC:
        arch = PpcArch()
    else:
        raise ValueError(f"Invalid target arch: {options.target.arch}")
    # Parse the input asm files ("-" means stdin) and the C context
    all_functions: Dict[str, Function] = {}
    asm_data = AsmData()
    try:
        for filename in options.filenames:
            if filename == "-":
                asm_file = parse_file(sys.stdin, arch, options)
            else:
                with open(filename, "r", encoding="utf-8-sig") as f:
                    asm_file = parse_file(f, arch, options)
            all_functions.update((fn.name, fn) for fn in asm_file.functions)
            asm_file.asm_data.merge_into(asm_data)
        if options.heuristic_strings:
            asm_data.detect_heuristic_strings()
        typemap = build_typemap(options.c_contexts, use_cache=options.use_cache)
    except Exception as e:
        print_exception_as_comment(
            e, context=None, sanitize=options.sanitize_tracebacks
        )
        return 1
    if options.dump_typemap:
        dump_typemap(typemap)
        return 0
    # Select which functions to decompile: all of them, or those given
    # by index or name on the command line
    if not options.function_indexes_or_names:
        functions = list(all_functions.values())
    else:
        functions = []
        for index_or_name in options.function_indexes_or_names:
            if isinstance(index_or_name, int):
                if not (0 <= index_or_name < len(all_functions)):
                    print(
                        f"Function index {index_or_name} is out of bounds (must be between "
                        f"0 and {len(all_functions) - 1}).",
                        file=sys.stderr,
                    )
                    return 1
                functions.append(list(all_functions.values())[index_or_name])
            else:
                if index_or_name not in all_functions:
                    print(f"Function {index_or_name} not found.", file=sys.stderr)
                    return 1
                functions.append(all_functions[index_or_name])
    fmt = options.formatter()
    function_names = set(all_functions.keys())
    typepool = TypePool(
        unknown_field_prefix="unk_" if fmt.coding_style.unknown_underscore else "unk",
        unk_inference=options.unk_inference,
    )
    global_info = GlobalInfo(
        asm_data,
        arch,
        options.target,
        function_names,
        typemap,
        typepool,
        deterministic_vars=options.deterministic_vars,
    )
    # Build a flow graph for each selected function
    flow_graphs: List[Union[FlowGraph, Exception]] = []
    for function in functions:
        try:
            narrow_func_call_outputs(function, global_info)
            graph = build_flowgraph(
                function,
                global_info.asm_data,
                arch,
                fragment=False,
                print_warnings=options.debug,
            )
            flow_graphs.append(graph)
        except Exception as e:
            # Store the exception for later, to preserve the order in the output
            flow_graphs.append(e)
    # Perform the preliminary passes to improve type resolution, but discard the results/exceptions
    for i in range(options.passes - 1):
        preliminary_infos = []
        for function, flow_graph in zip(functions, flow_graphs):
            try:
                if isinstance(flow_graph, Exception):
                    raise flow_graph
                flow_graph.reset_block_info()
                info = translate_to_ast(function, flow_graph, options, global_info)
                preliminary_infos.append(info)
            except Exception:
                pass
        try:
            global_info.global_decls(fmt, options.global_decls, [])
        except Exception:
            pass
        for info in preliminary_infos:
            try:
                get_function_text(info, options)
            except Exception:
                pass
        # This operation can change struct field paths, so it is only performed
        # after discarding all of the translated Expressions.
        typepool.prune_structs()
    # Final translation pass; failures are kept in order and reported inline
    function_infos: List[Union[FunctionInfo, Exception]] = []
    for function, flow_graph in zip(functions, flow_graphs):
        try:
            if isinstance(flow_graph, Exception):
                raise flow_graph
            flow_graph.reset_block_info()
            info = translate_to_ast(function, flow_graph, options, global_info)
            function_infos.append(info)
        except Exception as e:
            # Store the exception for later, to preserve the order in the output
            function_infos.append(e)
    return_code = 0
    # Emit the output: optional flow-graph visualization, then type and
    # global declarations, then each function (or its failure comment)
    try:
        if options.visualize_flowgraph is not None:
            fn_info = function_infos[0]
            if isinstance(fn_info, Exception):
                raise fn_info
            print(visualize_flowgraph(fn_info.flow_graph, options.visualize_flowgraph))
            return 0
        type_decls = typepool.format_type_declarations(
            fmt, stack_structs=options.print_stack_structs
        )
        if type_decls:
            print(type_decls)
        global_decls = global_info.global_decls(
            fmt,
            options.global_decls,
            [fn for fn in function_infos if isinstance(fn, FunctionInfo)],
        )
        if global_decls:
            print(global_decls)
    except Exception as e:
        print_exception_as_comment(
            e, context=None, sanitize=options.sanitize_tracebacks
        )
        return_code = 1
    for index, (function, function_info) in enumerate(zip(functions, function_infos)):
        if index != 0:
            print()
        try:
            if options.print_assembly:
                print(function)
                print()
            if isinstance(function_info, Exception):
                raise function_info
            function_text = get_function_text(function_info, options)
            print(function_text)
        except Exception as e:
            print_exception_as_comment(
                e,
                context=f"function {function.name}",
                sanitize=options.sanitize_tracebacks,
            )
            return_code = 1
    # Surface any warnings collected during type analysis as comments
    for warning in typepool.warnings:
        print(fmt.with_comments("", comments=[warning]))
    return return_code
def parse_flags(flags: List[str]) -> Options:
parser = argparse.ArgumentParser(
description="Decompile assembly to C.",
usage="%(prog)s [-t mips-ido-c] [--context C_FILE] [-f FN ...] filename [filename ...]",
)
group = parser.add_argument_group("Input Options")
group.add_argument(
metavar="filename",
nargs="+",
dest="filenames",
help="Input asm filename(s)",
)
group.add_argument(
"--context",
metavar="C_FILE",
dest="c_contexts",
action="append",
type=Path,
default=[],
help="Read variable types/function signatures/structs from an existing C file. "
"The file must already have been processed by the C preprocessor.",
)
group.add_argument(
"--no-cache",
action="store_false",
dest="use_cache",
help="Disable caching of variable types/function signatures/structs from the parsed C context. "
"This option should be used for untrusted environments. "
'The cache for "foo/ctx_bar.c" is stored in "foo/ctx_bar.c.m2c". '
"The *.m2c files automatically regenerate when the source file change, and can be ignored.",
)
group.add_argument(
"-D",
metavar="SYM[=VALUE]",
dest="defined",
action="append",
default=[],
help="Mark preprocessor symbol as defined",
)
group.add_argument(
"-U",
metavar="SYM",
dest="undefined",
action="append",
default=[],
help="Mark preprocessor symbol as undefined",
)
group.add_argument(
"--incbin-dir",
dest="incbin_dirs",
action="append",
default=[],
type=Path,
help="Add search path for loading .incbin directives in the input asm",
)
group = parser.add_argument_group("Output Options")
group.add_argument(
"-f",
"--function",
metavar="FN",
dest="functions",
action="append",
default=[],
help="Function index or name to decompile",
)
group.add_argument(
"--globals",
dest="global_decls",
type=Options.GlobalDeclsEnum,
choices=list(Options.GlobalDeclsEnum),
default="used",
help="Control which global declarations & initializers are emitted. "
'"all" includes all globals with entries in .data/.rodata/.bss, as well as inferred symbols. '
'"used" only includes symbols used by the decompiled functions that are not in the context (default). '
'"none" does not emit any global declarations. ',
)
group.add_argument(
"--stack-structs",
dest="print_stack_structs",
action="store_true",
help=(
"Include template structs for each function's stack. These can be modified and passed back "
"into m2c with --context to set the types & names of stack vars."
),
)
group.add_argument(
"--debug",
dest="debug",
action="store_true",
help="Print debug info inline",
)
group.add_argument(
"--print-assembly",
dest="print_assembly",
action="store_true",
help="Print assembly of function to decompile",
)
group.add_argument(
"--dump-typemap",
dest="dump_typemap",
action="store_true",
help="Dump information about all functions and structs from the provided C "
"context. Mainly useful for debugging.",
)
group.add_argument(
"--visualize",
dest="visualize_flowgraph",
nargs="?",
default=None,
const=Options.VisualizeTypeEnum.C,
type=Options.VisualizeTypeEnum,
choices=list(Options.VisualizeTypeEnum),
help="Print an SVG visualization of the control flow graph using graphviz",
)
group.add_argument(
"--sanitize-tracebacks",
dest="sanitize_tracebacks",
action="store_true",
help=argparse.SUPPRESS,
)
group = parser.add_argument_group("Formatting Options")
group.add_argument(
"--valid-syntax",
dest="valid_syntax",
action="store_true",
help="Emit valid C syntax, using macros to indicate unknown types or other "
"unusual statements. Macro definitions are in `m2c_macros.h`.",
)
brace_style = group.add_mutually_exclusive_group()
brace_style.add_argument(
"--allman",
dest="allman",
action="store_true",
help="Put braces on separate lines",
)
brace_style.add_argument(
"--knr",
dest="knr",
action="store_true",
help="Put function opening braces on separate lines",
)
group.add_argument(
"--indent-switch-contents",
dest="indent_switch_contents",
action="store_true",
help="Indent switch statements' contents an extra level",
)
group.add_argument(
"--pointer-style",
dest="pointer_style",
help="Control whether to output pointer asterisks next to the type name (left) "
"or next to the variable name (right). Default: right",
choices=["left", "right"],
default="right",
)
group.add_argument(
"--unk-underscore",
dest="unknown_underscore",
help="Emit unk_X instead of unkX for unknown struct accesses",
action="store_true",
)
group.add_argument(
"--hex-case",
dest="hex_case",
help="Display case labels in hex rather than decimal",
action="store_true",
)
group.add_argument(
"--comment-style",
dest="comment_style",
type=CodingStyle.CommentStyle,
choices=list(CodingStyle.CommentStyle),
default="multiline",
help=(
"Comment formatting. "
'"multiline" for C-style `/* ... */`, '
'"oneline" for C++-style `// ...`, '
'"none" to disable comments. '
"Default: multiline"
),
)
group.add_argument(
"--comment-column",
dest="comment_column",
metavar="N",
type=int,
default=52,
help="Column number to justify comments to. Set to 0 to disable justification. Default: 52",
)
group.add_argument(
"--no-casts",
dest="skip_casts",
action="store_true",
help="Don't emit any type casts",
)
group.add_argument(
"--zfill-constants",
dest="zfill_constants",
action="store_true",
help="Pad hex constants with 0's to fill their type's width.",
)
group.add_argument(
"--force-decimal",
dest="force_decimal",
action="store_true",
help="Force decimal values",
)
group.add_argument(
"--deterministic-vars",
dest="deterministic_vars",
action="store_true",
help="Name temp and phi vars after their location in the source asm, "
"rather than using an incrementing suffix. Can help reduce diff size in tests.",
)
group = parser.add_argument_group("Analysis Options")
group.add_argument(
"-t",
"--target",
dest="target",
type=Target.parse,
default="mips-ido-c",
help="Target architecture, compiler, and language triple. "
"Supported triples: mips-ido-c, mips-gcc-c, mipsel-gcc-c, ppc-mwcc-c++, ppc-mwcc-c. "
"Default is mips-ido-c, `ppc` is an alias for ppc-mwcc-c++. ",
)
group.add_argument(
"--passes",
"-P",
dest="passes",
metavar="N",
type=int,
default=2,
help="Number of translation passes to perform. Each pass may improve type resolution and produce better "
"output, particularly when decompiling multiple functions. Default: 2",
)
group.add_argument(
"--stop-on-error",
dest="stop_on_error",
action="store_true",
help="Stop when encountering any error",
)
group.add_argument(
"--void",
dest="void",
action="store_true",
help="Assume the decompiled function returns void",
)
group.add_argument(
"--gotos-only",
dest="ifs",
action="store_false",
help="Disable control flow generation; emit gotos for everything",
)
group.add_argument(
"--no-ifs",
dest="ifs",
action="store_false",
help=argparse.SUPPRESS,
)
group.add_argument(
"--no-switches",
dest="switch_detection",
action="store_false",
help=(
"Disable detecting irregular switch statements from if trees. "
"Jump tables switches are still emitted."
),
)
group.add_argument(
"--no-andor",
dest="andor_detection",
action="store_false",
help="Disable detection of &&/||",
)
group.add_argument(
"--no-unk-inference",
dest="unk_inference",
action="store_false",
help=(
"Disable type inference on unknown struct fields & unknown global symbol types. "
"See the README for more information on unknown inference."
),
)
group.add_argument(
"--heuristic-strings",
dest="heuristic_strings",
action="store_true",
help="Heuristically detect strings in rodata even when not defined using .asci/.asciz.",
)
group.add_argument(
"--reg-vars",
metavar="REGISTERS",
dest="reg_vars",
help="Use single variables instead of temps/phis for the given "
"registers (comma separated)",
)
group.add_argument(
"--goto",
metavar="PATTERN",
dest="goto_patterns",
action="append",
default=["GOTO"],
help="Emit gotos for branches on lines containing this substring "
'(possibly within a comment). Default: "GOTO". Multiple '
"patterns are allowed.",
)
group.add_argument(
"--pdb-translate",
dest="pdb_translate",
action="store_true",
help=argparse.SUPPRESS,
)
group.add_argument(
"--disable-gc",
dest="disable_gc",
action="store_true",
help="Disable Python garbage collection. Can improve performance at "
"the risk of running out of memory.",
)
args = parser.parse_args(flags)
reg_vars = args.reg_vars.split(",") if args.reg_vars else []
preproc_defines: Dict[str, Optional[int]] = {d: None for d in args.undefined}
for d in args.defined:
parts = d.split("=", 1)
preproc_defines[parts[0]] = int(parts[1], 0) if len(parts) >= 2 else 1
coding_style = CodingStyle(
newline_after_function=args.allman or args.knr,
newline_after_if=args.allman,
newline_before_else=args.allman,
switch_indent_level=2 if args.indent_switch_contents else 1,
pointer_style_left=args.pointer_style == "left",
unknown_underscore=args.unknown_underscore,
hex_case=args.hex_case,
comment_style=args.comment_style,
comment_column=args.comment_column,
)
functions: List[Union[int, str]] = []
for fn in args.functions:
try:
functions.append(int(fn))
except ValueError:
functions.append(fn)
# The debug output interferes with the visualize output
if args.visualize_flowgraph is not None:
args.debug = False
return Options(
filenames=args.filenames,
function_indexes_or_names=functions,
debug=args.debug,
void=args.void,
ifs=args.ifs,
switch_detection=args.switch_detection,
andor_detection=args.andor_detection,
skip_casts=args.skip_casts,
zfill_constants=args.zfill_constants,
force_decimal=args.force_decimal,
heuristic_strings=args.heuristic_strings,
reg_vars=reg_vars,
goto_patterns=args.goto_patterns,
stop_on_error=args.stop_on_error,
print_assembly=args.print_assembly,
visualize_flowgraph=args.visualize_flowgraph,
c_contexts=args.c_contexts,
use_cache=args.use_cache,
dump_typemap=args.dump_typemap,
pdb_translate=args.pdb_translate,
preproc_defines=preproc_defines,
coding_style=coding_style,
sanitize_tracebacks=args.sanitize_tracebacks,
valid_syntax=args.valid_syntax,
global_decls=args.global_decls,
target=args.target,
print_stack_structs=args.print_stack_structs,
unk_inference=args.unk_inference,
passes=args.passes,
incbin_dirs=args.incbin_dirs,
deterministic_vars=args.deterministic_vars,
disable_gc=args.disable_gc,
)
def main() -> None:
    """CLI entry point: raise the recursion limit, parse flags, and run."""
    # Translating large functions can recurse far beyond CPython's default
    # limit; raise it tenfold, clamped to INT_MAX so setrecursionlimit does
    # not raise OverflowError.
    raised_limit = 10 * sys.getrecursionlimit()
    sys.setrecursionlimit(min(2**31 - 1, raised_limit))
    options = parse_flags(sys.argv[1:])
    if options.disable_gc:
        # Trade peak memory for speed by switching the collector off entirely.
        gc.disable()
        gc.set_threshold(0)
    sys.exit(run(options))


if __name__ == "__main__":
    main()

View File

@ -1,248 +0,0 @@
import contextlib
from dataclasses import dataclass
import enum
from pathlib import Path
from typing import Dict, Iterator, List, Optional, Union
class ChoicesEnum(enum.Enum):
    """Enum whose ``str()`` is its value, so argparse ``choices`` print cleanly."""

    def __str__(self) -> str:
        return f"{self.value}"
@dataclass(frozen=True)
class CodingStyle:
    """Immutable set of preferences controlling how the C output is formatted.

    Built from command-line flags in `parse_flags`; consumed by `Formatter`.
    """

    class CommentStyle(ChoicesEnum):
        MULTILINE = "multiline"  # C-style `/* ... */`
        ONELINE = "oneline"  # C++-style `// ...`
        NONE = "none"  # suppress comments entirely

    newline_after_function: bool  # function opening braces on their own line (--allman / --knr)
    newline_after_if: bool  # Allman style: `{` on its own line after `if` (--allman)
    newline_before_else: bool  # Allman style: `else` on a fresh line (--allman)
    switch_indent_level: int  # 2 with --indent-switch-contents, otherwise 1
    pointer_style_left: bool  # asterisk next to the type name (left) vs. the variable (right)
    unknown_underscore: bool  # emit `unk_X` instead of `unkX` for unknown struct accesses
    hex_case: bool  # display case labels in hex rather than decimal
    comment_style: CommentStyle
    comment_column: int  # column comments are justified to; 0 disables justification
@dataclass
class Target:
    """The architecture / compiler / source-language combination to decompile for."""

    class ArchEnum(ChoicesEnum):
        MIPS = "mips"
        PPC = "ppc"

    class EndianEnum(ChoicesEnum):
        LITTLE = "little"
        BIG = "big"

    class CompilerEnum(ChoicesEnum):
        IDO = "ido"
        GCC = "gcc"
        MWCC = "mwcc"

    class LanguageEnum(ChoicesEnum):
        C = "c"
        CXX = "c++"

    arch: ArchEnum
    endian: EndianEnum
    compiler: CompilerEnum
    language: LanguageEnum

    def is_big_endian(self) -> bool:
        """Whether the target stores multi-byte values big-endian."""
        return self.endian is Target.EndianEnum.BIG

    @staticmethod
    def parse(name: str) -> "Target":
        """Build a Target from an `arch[-compiler[-language]]` string.

        Omitted parts fall back to defaults: the compiler defaults by arch,
        the language by compiler. Thus `mips` is an alias for `mips-ido-c`
        and `ppc` for `ppc-mwcc-c++`. An `el` suffix on the arch token
        (e.g. `mipsel`) selects little-endian.
        """
        endian = Target.EndianEnum.BIG
        parts = name.split("-")
        try:
            arch_token = parts[0]
            if arch_token.endswith("el"):
                # e.g. `mipsel`: strip the suffix and flip the byte order.
                arch_token = arch_token[:-2]
                endian = Target.EndianEnum.LITTLE
            arch = Target.ArchEnum(arch_token)

            if len(parts) >= 2:
                compiler = Target.CompilerEnum(parts[1])
            else:
                compiler = (
                    Target.CompilerEnum.MWCC
                    if arch is Target.ArchEnum.PPC
                    else Target.CompilerEnum.IDO
                )

            if len(parts) >= 3:
                language = Target.LanguageEnum(parts[2])
            else:
                language = (
                    Target.LanguageEnum.CXX
                    if compiler is Target.CompilerEnum.MWCC
                    else Target.LanguageEnum.C
                )
        except ValueError as e:
            raise ValueError(f"Unable to parse Target '{name}' ({e})")
        return Target(
            arch=arch,
            endian=endian,
            compiler=compiler,
            language=language,
        )
@dataclass
class Options:
    """Fully-parsed command-line configuration (built by `parse_flags`)."""

    class GlobalDeclsEnum(ChoicesEnum):
        # Which global symbol declarations to emit in the output.
        ALL = "all"
        USED = "used"
        NONE = "none"

    class VisualizeTypeEnum(ChoicesEnum):
        # Representation used when emitting a flowgraph visualization.
        ASM = "asm"
        C = "c"

    filenames: List[str]
    function_indexes_or_names: List[Union[int, str]]  # functions selected by index or by name
    debug: bool
    void: bool  # assume the decompiled function returns void
    ifs: bool  # False: skip control-flow generation, emit gotos everywhere
    switch_detection: bool  # detect irregular switch statements from if trees
    andor_detection: bool  # detect && / || chains
    skip_casts: bool  # don't emit any type casts
    zfill_constants: bool  # zero-pad hex constants to the full type width
    force_decimal: bool  # always emit decimal literals instead of hex
    heuristic_strings: bool  # detect rodata strings not declared via .asci/.asciz
    reg_vars: List[str]  # registers to keep as single variables, not temps/phis
    goto_patterns: List[str]  # line substrings forcing a goto (default ["GOTO"])
    stop_on_error: bool  # abort on the first error encountered
    print_assembly: bool
    visualize_flowgraph: Optional[VisualizeTypeEnum]  # non-None also disables debug output
    c_contexts: List[Path]
    use_cache: bool
    dump_typemap: bool
    pdb_translate: bool  # hidden flag (argparse.SUPPRESS)
    preproc_defines: Dict[str, Optional[int]]  # None = explicitly unset
    coding_style: CodingStyle
    sanitize_tracebacks: bool
    valid_syntax: bool
    global_decls: GlobalDeclsEnum
    target: Target  # arch-compiler-language triple
    print_stack_structs: bool
    unk_inference: bool  # infer types of unknown struct fields / global symbols
    passes: int  # number of translation passes (default 2)
    incbin_dirs: List[Path]
    deterministic_vars: bool  # name temps/phis by asm location instead of a counter
    disable_gc: bool  # turn off Python GC: faster, risks higher memory use

    def formatter(self) -> "Formatter":
        """Create a `Formatter` reflecting these options' style settings."""
        return Formatter(
            self.coding_style,
            skip_casts=self.skip_casts,
            zfill_constants=self.zfill_constants,
            force_decimal=self.force_decimal,
            valid_syntax=self.valid_syntax,
        )
# Style used when no formatting flags are passed: attached braces, one-level
# switch indentation, right-hand pointer asterisks, and `/* ... */` comments
# justified to column 52 — matching the argparse defaults in `parse_flags`.
DEFAULT_CODING_STYLE: CodingStyle = CodingStyle(
    newline_after_function=False,
    newline_after_if=False,
    newline_before_else=False,
    switch_indent_level=1,
    pointer_style_left=False,
    unknown_underscore=False,
    hex_case=False,
    comment_style=CodingStyle.CommentStyle.MULTILINE,
    comment_column=52,
)
@dataclass
class Formatter:
    """Renders decompiler output fragments as indented, commented C text."""

    coding_style: CodingStyle = DEFAULT_CODING_STYLE
    indent_step: str = " " * 4
    skip_casts: bool = False
    extra_indent: int = 0
    debug: bool = False
    valid_syntax: bool = False
    line_length: int = 80
    zfill_constants: bool = False
    force_decimal: bool = False

    def indent(self, line: str, indent: int = 0) -> str:
        """Prefix `line` with `indent` (plus any ambient extra) levels of indentation."""
        levels = max(indent + self.extra_indent, 0)
        return self.indent_step * levels + line

    @contextlib.contextmanager
    def indented(self, amt: int = 1) -> Iterator[None]:
        """Raise the ambient indentation by `amt` levels for the `with` body."""
        self.extra_indent += amt
        try:
            yield
        finally:
            self.extra_indent -= amt

    def format_array(self, elements: List[str]) -> str:
        """Render `elements` as a C initializer list.

        Short, newline-free element lists go on one line; everything else
        gets one element per line, each with a trailing comma.
        """
        # `line_length` is only a rough guideline: neither the LHS of the
        # assignment nor the ambient indentation is accounted for.
        fits_inline = all(
            "\n" not in el and len(el) <= self.line_length for el in elements
        )
        if fits_inline:
            candidate = f"{{ {', '.join(elements)} }}"
            if len(candidate) < self.line_length:
                return candidate
        pieces = ["{\n"]
        for el in elements:
            # Push multi-line elements one indentation level deeper.
            reindented = el.replace("\n", "\n" + self.indent_step)
            pieces.append(self.indent(f"{reindented},\n", 1))
        pieces.append("}")
        return "".join(pieces)

    def with_comments(self, line: str, comments: List[str], *, indent: int = 0) -> str:
        """Indent `line` and append `comments` joined with ';', per the style."""
        base = self.indent(line, indent=indent)
        style = self.coding_style.comment_style
        # With no comments (or comments disabled), behave exactly like indent().
        if not comments or style == CodingStyle.CommentStyle.NONE:
            return base
        joined = "; ".join(comments)
        if style == CodingStyle.CommentStyle.ONELINE:
            comment = f"// {joined}"
        else:
            comment = f"/* {joined} */"
        # Justify to `comment_column` only when there is actual code text.
        padding = ""
        if line:
            padding = " " * max(1, self.coding_style.comment_column - len(base))
        return f"{base}{padding}{comment}"

    def format_hex(self, val: int) -> str:
        """Uppercase hex digits of `val`, with no `0x` prefix."""
        return f"{val:X}"

    def format_int(self, val: int, size_bits: Optional[int] = None) -> str:
        """Format an integer literal, using hex for larger magnitudes."""
        if self.force_decimal or -10 < val < 10:
            return str(val)
        if self.zfill_constants and size_bits is not None:
            # Zero-pad to the full width of the value's type.
            digits = f"{abs(val):0{size_bits // 4}X}"
        else:
            digits = f"{abs(val):X}"
        # Always pad 7-digit hex constants to 8 digits; they are very common
        # and easily confused with 8-digit ones.
        if len(digits) == 7:
            digits = f"0{digits}"
        sign = "-" if val < 0 else ""
        return f"{sign}0x{digits}"

View File

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,43 +0,0 @@
/*
 * This header contains macros emitted by m2c in "valid syntax" mode,
 * which can be enabled by passing `--valid-syntax` on the command line.
 *
 * In this mode, unhandled types and expressions are emitted as macros so
 * that the output is compilable without human intervention.
 *
 * NOTE(review): the s8/s16/s32/s64 typedefs used below are not defined
 * here and are assumed to come from the including project's common
 * types header — confirm against the consuming codebase.
 */
#ifndef M2C_MACROS_H
#define M2C_MACROS_H

/* Unknown types */
typedef s32 M2C_UNK;
typedef s8 M2C_UNK8;
typedef s16 M2C_UNK16;
typedef s32 M2C_UNK32;
typedef s64 M2C_UNK64;

/* Unknown field access, like `*(type_ptr) &expr->unk_offset` */
/* The intermediate `s8 *` cast makes `offset` count in bytes. */
#define M2C_FIELD(expr, type_ptr, offset) (*(type_ptr)((s8 *)(expr) + (offset)))

/* Bitwise (reinterpret) cast */
#define M2C_BITWISE(type, expr) ((type)(expr))

/* Unaligned reads; these expand to the plain expression unchanged. */
#define M2C_LWL(expr) (expr)
#define M2C_FIRST3BYTES(expr) (expr)
#define M2C_UNALIGNED32(expr) (expr)

/* Unhandled instructions; each expands to the constant 0. */
#define M2C_ERROR(desc) (0)
#define M2C_TRAP_IF(cond) (0)
#define M2C_BREAK() (0)
#define M2C_SYNC() (0)

/* Carry bit from partially-implemented instructions */
#define M2C_CARRY 0

/* Memcpy patterns; both variants map to plain memcpy here. */
#define M2C_MEMCPY_ALIGNED memcpy
#define M2C_MEMCPY_UNALIGNED memcpy

#endif

View File

@ -1,20 +0,0 @@
# Mypy configuration for the m2c tool.
# A near-strict flag set: untyped defs/calls/decorators are all rejected.
[mypy]
check_untyped_defs = True
disallow_any_generics = True
disallow_incomplete_defs = True
disallow_subclassing_any = True
disallow_untyped_calls = True
disallow_untyped_decorators = True
disallow_untyped_defs = True
no_implicit_optional = True
warn_redundant_casts = True
warn_return_any = True
warn_unused_ignores = True
# Local stub search path and the entry points to type-check.
mypy_path = stubs
files = m2c.py, run_tests.py, tests/add_test.py

# Per-module overrides for third-party packages that ship no type stubs.
[mypy-graphviz]
ignore_missing_imports = True

[mypy-capstone]
ignore_missing_imports = True

464
tools/m2c/poetry.lock generated
View File

@ -1,464 +0,0 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "black"
version = "23.12.1"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
{file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
{file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
{file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
{file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
{file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
{file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
{file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
{file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
{file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
{file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
{file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
{file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
{file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
{file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
{file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
{file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
{file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
{file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
{file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
{file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
{file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
{file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "click"
version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.4.0"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"},
{file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"},
{file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"},
{file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"},
{file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"},
{file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"},
{file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"},
{file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"},
{file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"},
{file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"},
{file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"},
{file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"},
{file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"},
{file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"},
{file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"},
{file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"},
{file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"},
{file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"},
{file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"},
{file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"},
{file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"},
{file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"},
{file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"},
{file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"},
{file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"},
{file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"},
{file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"},
{file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"},
{file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"},
{file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"},
{file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"},
{file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"},
{file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"},
{file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"},
{file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"},
{file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"},
{file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"},
{file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"},
{file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"},
{file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"},
{file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"},
{file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"},
{file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"},
{file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"},
{file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"},
{file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"},
{file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"},
{file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"},
{file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"},
{file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"},
{file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"},
{file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"},
]
[package.extras]
toml = ["tomli"]
[[package]]
name = "distlib"
version = "0.3.8"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
]
[[package]]
name = "filelock"
version = "3.13.1"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
{file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
{file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "graphviz"
version = "0.20.1"
description = "Simple Python interface for Graphviz"
optional = false
python-versions = ">=3.7"
files = [
{file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"},
{file = "graphviz-0.20.1.zip", hash = "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"},
]
[package.extras]
dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"]
docs = ["sphinx (>=5)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"]
test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>=3)"]
[[package]]
name = "identify"
version = "2.5.33"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"},
{file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"},
]
[package.extras]
license = ["ukkonen"]
[[package]]
name = "mypy"
version = "1.8.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
{file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
{file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
{file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
{file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
{file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
{file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
{file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
{file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
{file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
{file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
{file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
{file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
{file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
{file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
{file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
{file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
{file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
{file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
{file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
{file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
{file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
{file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
{file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
{file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
{file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
{file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.1.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
install-types = ["pip"]
mypyc = ["setuptools (>=50)"]
reports = ["lxml"]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "nodeenv"
version = "1.8.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]
[package.dependencies]
setuptools = "*"
[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "pathspec"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "platformdirs"
version = "4.1.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.8"
files = [
{file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"},
{file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"},
]
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
[[package]]
name = "pre-commit"
version = "2.21.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.7"
files = [
{file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"},
{file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"},
]
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "pycparser"
version = "2.21"
description = "C parser in Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
[[package]]
name = "pyyaml"
version = "6.0.1"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "setuptools"
version = "69.0.3"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"},
{file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.9.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
]
[[package]]
name = "virtualenv"
version = "20.25.0"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
{file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
]
[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "6dcd8bb529d59c853f5a10c3fd9bce4320f254523feb6bd6f975318a652793a5"

View File

@ -1,29 +0,0 @@
[tool.poetry]
name = "m2c"
version = "0.1.0"
description = "MIPS and PowerPC decompiler"
authors = [
"Matt Kempster <kidpixel@gmail.com>",
"Simon Lindholm <simon.lindholm10@gmail.com>",
"Zach Banks <zjbanks@gmail.com>",
"Ethan Roseman <ethteck@gmail.com>"
]
license = "GPL-3.0-only"
readme = "README.md"
repository = "https://github.com/matt-kempster/m2c"
packages = [{include = "m2c"}]
[tool.poetry.dependencies]
python = "^3.8"
pycparser = "^2.21"
graphviz = "^0.20.1"
[tool.poetry.group.dev.dependencies]
pre-commit = "^2.20.0"
mypy = "^1.8.0"
coverage = "^7.2.7"
black = "23.12.1"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,493 +0,0 @@
#!/usr/bin/env python3
import argparse
import contextlib
from dataclasses import dataclass, field
import difflib
import io
import logging
import multiprocessing
import re
import shlex
import sys
from coverage import Coverage
from pathlib import Path
from typing import Any, Iterator, List, Optional, Pattern, Tuple
from m2c.options import Options
# Marker written in place of decompiler output when a test case crashes.
CRASH_STRING = "CRASHED\n"
# Flags whose following argument is a path, resolved relative to the flags file.
PATH_FLAGS = {"--context", "--incbin-dir"}
@dataclass(frozen=True)
class TestOptions:
    """Run-wide settings shared by every test case in one invocation."""

    should_overwrite: bool  # rewrite expected-output files instead of just comparing
    diff_context: int  # unified-diff context lines shown on mismatch
    filter_re: Pattern[str]  # only run tests matching this; may be None despite the annotation
    fraction: Optional[int] = None  # run only every Nth collected test
    parallel: Optional[int] = None  # worker-process count; falsy means run serially
    extra_flags: List[str] = field(default_factory=list)  # extra flags appended for m2c
    coverage: Any = None  # active coverage.Coverage instance, if coverage is enabled
@dataclass(frozen=True, order=True)
class TestCase:
    """One decompilation test: an asm input plus its expected C output file."""

    name: str  # display name; first field, so also the primary sort key (order=True)
    asm_file: Path  # input assembly file handed to m2c
    output_file: Path  # expected (or, with --overwrite, rewritten) output
    brief_crashes: bool = True  # on crash, record only CRASH_STRING instead of the traceback
    flags_path: Optional[Path] = None  # optional per-test flags file (see get_test_flags)
    flags: List[str] = field(default_factory=list)  # per-test m2c flags
def set_up_logging(debug: bool) -> None:
logging.basicConfig(
format="[%(levelname)s] %(message)s",
level=logging.DEBUG if debug else logging.INFO,
)
def get_test_flags(flags_path: Path) -> List[str]:
    """Read extra m2c flags from *flags_path*; return [] when it is absent.

    Arguments of path-taking flags (PATH_FLAGS) are rewritten to be
    relative to the flags file's own directory.
    """
    if not flags_path.is_file():
        return []
    tokens = shlex.split(flags_path.read_text())
    for index, token in enumerate(tokens):
        if token not in PATH_FLAGS:
            continue
        if index + 1 >= len(tokens):
            raise Exception(f"{flags_path} contains {token} without argument")
        tokens[index + 1] = str(flags_path.parent / tokens[index + 1])
    return tokens
def decompile_and_compare(
    test_case: TestCase, test_options: TestOptions
) -> Tuple[Optional[bool], str]:
    """Decompile one test case and diff the result against its output file.

    Returns (None, message) when the case is skipped (missing expected
    output without --overwrite), (True, "") on a match, and
    (False, diff_text) when the output changed. With
    test_options.should_overwrite the output file is (re)written and the
    comparison runs against its previous contents.
    """
    # This import is deferred so it can be profiled by the coverage tool
    from m2c.main import parse_flags

    logging.debug(
        f"Decompiling {test_case.asm_file}"
        + (f" into {test_case.output_file}" if test_options.should_overwrite else "")
    )
    try:
        original_contents = test_case.output_file.read_text()
    except FileNotFoundError:
        if not test_options.should_overwrite:
            logging.error(f"{test_case.output_file} does not exist. Skipping.")
            # Fixed typo ("Skippping.") so the returned message matches the log line.
            return None, f"{test_case.output_file} does not exist. Skipping."
        original_contents = "(file did not exist)"
    # Assemble the flag list: global test flags, per-case flags, flags file,
    # the input path itself, then command-line extras (highest precedence).
    test_flags = ["--sanitize-tracebacks", "--stop-on-error"]
    test_flags.extend(test_case.flags)
    if test_case.flags_path is not None:
        test_flags.extend(get_test_flags(test_case.flags_path))
    test_flags.append(str(test_case.asm_file))
    test_flags.extend(test_options.extra_flags)
    options = parse_flags(test_flags)
    final_contents = decompile_and_capture_output(options, test_case.brief_crashes)
    if test_options.should_overwrite:
        test_case.output_file.parent.mkdir(parents=True, exist_ok=True)
        test_case.output_file.write_text(final_contents)
    changed = final_contents != original_contents
    if changed:
        return False, "\n".join(
            [
                f"Output of {test_case.asm_file} changed! Diff:",
                *difflib.unified_diff(
                    original_contents.splitlines(),
                    final_contents.splitlines(),
                    n=test_options.diff_context,
                ),
            ]
        )
    return True, ""
def decompile_and_capture_output(options: Options, brief_crashes: bool) -> str:
    """Run the decompiler and return its stdout, or a crash marker on failure."""
    # This import is deferred so it can be profiled by the coverage tool
    from m2c.main import run as decompile

    buffer = io.StringIO()
    with contextlib.redirect_stdout(buffer):
        exit_code = decompile(options)
    captured = buffer.getvalue()
    # Rewrite paths in the output to be relative (e.g. in tracebacks)
    captured = captured.replace(str(Path(__file__).parent), ".")
    if exit_code == 0:
        return captured
    return CRASH_STRING if brief_crashes else f"{CRASH_STRING}\n{captured}"
def create_e2e_tests(
    e2e_top_dir: Path,
    e2e_test_path: Path,
) -> List[TestCase]:
    """Build sorted TestCases for every .s file directly under *e2e_test_path*."""
    tests: List[TestCase] = []
    for asm_file in e2e_test_path.glob("*.s"):
        # Expected output and optional flags file live next to the input.
        tests.append(
            TestCase(
                name=f"e2e:{asm_file.relative_to(e2e_top_dir)}",
                asm_file=asm_file,
                output_file=asm_file.parent.joinpath(asm_file.stem + "-out.c"),
                brief_crashes=True,
                flags_path=asm_file.parent.joinpath(asm_file.stem + "-flags.txt"),
                flags=["--function", "test"],
            )
        )
    return sorted(tests)
def find_tests_basic(asm_dir: Path) -> Iterator[List[Path]]:
    """Yield every .s file under *asm_dir* as its own single-file test group."""
    # This has been tested with doldecomp projects for SMS, Melee, and SMB1
    yield from ([path] for path in asm_dir.rglob("*.s"))
def find_tests_oot(asm_dir: Path) -> Iterator[List[Path]]:
    """Yield each non-rodata .s file grouped with its sibling .rodata files."""
    rodata_suffixes = [".rodata.s", ".rodata2.s"]
    for candidate in asm_dir.rglob("*.s"):
        filename = candidate.name
        # .rodata files only ride along with their text file; never alone.
        if any(filename.endswith(suffix) for suffix in rodata_suffixes):
            continue
        group = [candidate]
        for suffix in rodata_suffixes:
            sibling = candidate.parent / filename.replace(".s", suffix)
            if sibling.exists():
                group.append(sibling)
        yield group
def find_tests_mm(asm_dir: Path) -> Iterator[List[Path]]:
    """Yield each .text.s file grouped with the .s files of its data directory."""
    for text_file in asm_dir.rglob("*.text.s"):
        # Map asm/ (or asm/overlays/) onto the parallel data/ tree.
        data_dir = Path(
            str(text_file).replace("/asm/overlays/", "/data/").replace("/asm/", "/data/")
        ).parent
        group = [text_file]
        group.extend(data_dir.glob("*.s"))
        yield group
def find_tests_splat(asm_dir: Path) -> Iterator[List[Path]]:
    """Yield each nonmatchings .s file with the .s files of its data directory."""
    # This has only been tested with Paper Mario, but should work with other splat projects
    for asm_file in (asm_dir / "nonmatchings").rglob("*.s"):
        # nonmatchings/<unit>/<func>/x.s maps to data/<unit>/ (two levels up).
        data_dir = Path(str(asm_file).replace("/nonmatchings/", "/data/")).parent.parent
        group = [asm_file]
        group.extend(data_dir.glob("*.s"))
        yield group
def create_project_tests(
    base_dir: Path,
    output_dir: Path,
    context_file: Optional[Path],
    name_prefix: str,
) -> List[TestCase]:
    """Build sorted TestCases for a decompilation-project checkout.

    The project layout (oot / mm / papermario / generic) is detected from
    *base_dir* and selects both the file discovery strategy and base flags.
    """
    asm_dir = base_dir / "asm"
    parts = base_dir.parts
    if "oot" in parts:
        file_iter = find_tests_oot(asm_dir)
        base_flags = ["--target=mips-ido-c", "--stack-structs", "--unk-underscore"]
    elif "mm" in parts:
        file_iter = find_tests_mm(asm_dir)
        base_flags = ["--target=mips-ido-c", "--stack-structs", "--unk-underscore"]
    elif "papermario" in parts:
        file_iter = find_tests_splat(asm_dir)
        base_flags = [
            "--target=mips-gcc-c",
            "--stack-structs",
            "--unk-underscore",
            "--pointer-style=left",
        ]
    else:
        file_iter = find_tests_basic(asm_dir)
        base_flags = [
            "--incbin-dir",
            str(base_dir),
            "--stack-structs",
            "--unk-underscore",
        ]

    cases: List[TestCase] = []
    for file_list in file_iter:
        if not file_list:
            continue
        flags = base_flags + ["--deterministic-vars"]
        if context_file is not None:
            flags += ["--context", str(context_file)]
        rel_path = file_list[0].relative_to(base_dir / "asm")
        cases.append(
            TestCase(
                name=f"{name_prefix}:{rel_path}",
                asm_file=file_list[0],
                output_file=(output_dir / rel_path).with_suffix(".c"),
                brief_crashes=False,
                # Extra files (data/rodata) are passed as additional inputs.
                flags=flags + [str(extra) for extra in file_list[1:]],
            )
        )
    return sorted(cases)
def run_test(
    test: Tuple[TestCase, TestOptions]
) -> Tuple[TestCase, Optional[bool], str]:
    """Worker entry point: run one case and return (case, did_pass, log text)."""
    case, opts = test
    if opts.coverage:
        # Tag collected coverage data with this test's name.
        opts.coverage.switch_context(case.name)
    passed, output = decompile_and_compare(case, opts)
    return case, passed, output
def main(
    project_dirs: List[Tuple[Path, bool]],
    test_options: TestOptions,
) -> int:
    """Collect and run all e2e and project tests; return a process exit code.

    project_dirs holds (directory, use_context) pairs from --project /
    --project-with-context. Returns 1 when any test failed (unless
    --overwrite was given), else 0.
    """
    # Collect tests
    test_cases: List[TestCase] = []
    e2e_top_dir = Path(__file__).parent / "tests" / "end_to_end"
    for e2e_test_path in e2e_top_dir.iterdir():
        test_cases.extend(create_e2e_tests(e2e_top_dir, e2e_test_path))
    for project_dir, use_context in project_dirs:
        # Disambiguate papermario checkouts, whose leaf directory name ("us"/"jp")
        # would otherwise be an unhelpful prefix.
        name_prefix = project_dir.name
        if project_dir.match("papermario/ver/us"):
            name_prefix = "papermario_us"
        elif project_dir.match("papermario/ver/jp"):
            name_prefix = "papermario_jp"
        context_file: Optional[Path] = None
        if use_context:
            name_prefix = f"{name_prefix}_ctx"
            context_file = project_dir / "ctx.c"
            if not context_file.exists():
                raise Exception(
                    f"{project_dir} tests require context file, but {context_file} does not exist"
                )
        output_dir = Path(__file__).parent / "tests" / "project" / name_prefix
        test_cases.extend(
            create_project_tests(
                project_dir,
                output_dir,
                context_file,
                name_prefix,
            )
        )
    passed, failed = 0, 0
    total = len(test_cases)
    if test_options.filter_re is not None:
        test_cases = [t for t in test_cases if test_options.filter_re.search(t.name)]
    if test_options.fraction is not None:
        test_cases = test_cases[:: test_options.fraction]
    # Tests excluded by --filter/--fraction count as skipped up front;
    # run-time skips (missing output files) are added in the loop below.
    skipped = total - len(test_cases)
    test_iterator: Iterator[Tuple[TestCase, Optional[bool], str]]
    if test_options.parallel:
        pool = multiprocessing.Pool(processes=test_options.parallel)
        test_iterator = pool.imap_unordered(
            run_test,
            ((t, test_options) for t in test_cases),
            chunksize=4,
        )
    else:
        # Lazy generator: tests run one at a time as the loop below consumes it.
        test_iterator = (run_test((t, test_options)) for t in test_cases)
    for test_case, did_pass, output in test_iterator:
        if did_pass is None:
            logging.info(f"[SKIP] {test_case.name}")
            skipped += 1
        elif did_pass:
            logging.info(f"[PASS] {test_case.name}")
            passed += 1
        else:
            logging.info(f"[FAIL] {test_case.name}")
            failed += 1
            if output:
                logging.info(output)
    if test_options.parallel:
        # NOTE(review): terminate() kills workers abruptly; all results have
        # been consumed by this point, so no output is lost.
        pool.terminate()
    logging.info(
        f"Test summary: {passed} passed, {skipped} skipped, {failed} failed, {passed + skipped + failed} total"
    )
    if failed > 0 and not test_options.should_overwrite:
        return 1
    return 0
if __name__ == "__main__":
    # Command-line entry point: parse flags, optionally enable coverage,
    # run the suite via main(), and exit with its status code.
    parser = argparse.ArgumentParser(
        description="Run and record end-to-end decompiler tests."
    )
    parser.add_argument(
        "--debug", dest="debug", help="print debug info", action="store_true"
    )
    parser.add_argument(
        "-j",
        "--parallel",
        metavar="N",
        dest="parallel",
        type=int,
        help=("Run tests in parallel, with this many processes."),
    )
    parser.add_argument(
        "--diff-context",
        metavar="N",
        dest="diff_context",
        default=3,
        type=int,
        help=("Number of lines of context to print with in diff output."),
    )
    parser.add_argument(
        "--overwrite",
        dest="should_overwrite",
        action="store_true",
        help=(
            "overwrite the contents of the test output files. "
            "Do this once before committing."
        ),
    )
    parser.add_argument(
        "--filter",
        metavar="REGEX",
        dest="filter_re",
        type=lambda x: re.compile(x),
        help=("Only run tests matching this regular expression."),
    )
    parser.add_argument(
        "-K",
        "--fraction",
        metavar="N",
        dest="fraction",
        type=int,
        help=("Only run 1 in every N tests."),
    )
    # --project and --project-with-context append (path, use_context) pairs
    # into the same args.project_dirs list.
    parser.add_argument(
        "--project",
        metavar="DIR",
        dest="project_dirs",
        action="append",
        default=[],
        type=lambda p: (Path(p), False),
        help=(
            "Run tests on the asm files from a decompilation project. "
            "The zeldaret/oot and zeldaret/mm projects are supported. "
            "Can be specified multiple times."
        ),
    )
    parser.add_argument(
        "extra_flags",
        nargs=argparse.REMAINDER,
        help="Additional arguments to pass to m2c. Use `--` to separate them from run_tests's flags.",
    )
    parser.add_argument(
        "--project-with-context",
        metavar="DIR",
        dest="project_dirs",
        action="append",
        default=[],
        type=lambda p: (Path(p), True),
        help=(
            "Same as --project, but use the C context file `ctx.c` "
            "from the base directory. "
            "Can be specified multiple times."
        ),
    )
    cov_group = parser.add_argument_group("Coverage")
    cov_group.add_argument(
        "--coverage",
        dest="coverage",
        action="store_true",
        help="Compute code coverage for tests",
    )
    cov_group.add_argument(
        "--coverage-html",
        metavar="DIR",
        dest="coverage_html",
        help="Output coverage HTML report to directory",
        default="htmlcov/",
    )
    cov_group.add_argument(
        "--coverage-emit-data",
        dest="coverage_emit_data",
        action="store_true",
        help="Emit a .coverage data file",
    )
    args = parser.parse_args()
    set_up_logging(args.debug)

    # Coverage must be started before main() so the deferred m2c imports
    # inside the test helpers are profiled.
    cov = None
    if args.coverage:
        logging.info("Computing code coverage.")
        coverage_data_file = None
        if args.coverage_emit_data:
            coverage_data_file = ".coverage"
            logging.info(f"Writing coverage data to {coverage_data_file}")
        cov = Coverage(include="src/*", data_file=coverage_data_file, branch=True)
        cov.start()

    if args.should_overwrite:
        logging.info("Overwriting test output files.")
    # Drop the `--` separator argparse leaves in the REMAINDER args.
    if "--" in args.extra_flags:
        args.extra_flags.remove("--")

    test_options = TestOptions(
        should_overwrite=args.should_overwrite,
        diff_context=args.diff_context,
        filter_re=args.filter_re,
        fraction=args.fraction,
        parallel=args.parallel,
        extra_flags=args.extra_flags,
        coverage=cov,
    )
    ret = main(args.project_dirs, test_options)

    if cov is not None:
        cov.stop()
        cov.html_report(
            directory=args.coverage_html, show_contexts=True, skip_empty=True
        )
        logging.info(f"Wrote html coverage report to {args.coverage_html}")
    sys.exit(ret)

View File

@ -1,34 +0,0 @@
# Minimal stubs for Capstone
from typing import Any, Iterator, List, Optional
# Opaque namespace of PPC-specific constants; stubbed as Any.
ppc: Any = ...
# Capstone represents mnemonic and register IDs as plain ints; these
# aliases document which is which in the signatures below.
_CsMnemonic = int
_CsRegister = int
CS_ARCH_PPC: int = ...
CS_MODE_32: int = ...
CS_MODE_BIG_ENDIAN: int = ...
class CsInsn:
    """Stub of Capstone's disassembled-instruction type (signatures only)."""

    def __init__(self, cs: Any, all_info: Any) -> None: ...
    @property
    def id(self) -> _CsMnemonic: ...
    @property
    def address(self) -> int: ...
    @property
    def bytes(self) -> bytes: ...
    @property
    def mnemonic(self) -> str: ...
    @property
    def op_str(self) -> str: ...
    @property
    def operands(self) -> List[Any]: ...
    def reg_name(self, reg_id: _CsRegister, default: Optional[str] = ...) -> str: ...
class Cs:
    """Stub of the Capstone disassembler engine."""

    detail: bool  # whether to populate detailed operand info
    imm_unsigned: bool  # print immediates as unsigned
    def __init__(self, arch: int, mode: int) -> None: ...
    def disasm(self, data: bytes, base_address: int) -> Iterator[CsInsn]: ...

View File

@ -1,26 +0,0 @@
# -----------------------------------------------------------------
# pycparser: __init__.py
#
# This package file exports some convenience functions for
# interacting with pycparser
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
# -----------------------------------------------------------------
__all__ = ["c_parser", "c_ast"]
# Version of pycparser these stubs were written against.
__version__ = "2.19"
from typing import Any, List, Union
from . import c_ast
from .c_parser import CParser
def preprocess_file(
    filename: str, cpp_path: str = "cpp", cpp_args: Union[List[str], str] = ""
) -> str: ...  # stub: runs the C preprocessor over *filename*, returns its output
def parse_file(
    filename: str,
    use_cpp: bool = False,
    cpp_path: str = "cpp",
    cpp_args: str = "",
    parser: Any = None,
) -> c_ast.FileAST: ...  # stub: parses *filename* (optionally preprocessed) into an AST

View File

@ -1,546 +0,0 @@
# -----------------------------------------------------------------
# pycparser: c_ast.py
#
# AST Node classes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
# -----------------------------------------------------------------
from typing import TextIO, Iterable, List, Any, Optional, Union as Union_
from .plyparser import Coord
import sys
class Node(object):
    """Stub for the common base class of all pycparser AST nodes."""

    # Source location, when pycparser recorded one.
    coord: Optional[Coord]
    def __repr__(self) -> str: ...
    def __iter__(self) -> Iterable[Node]: ...
    def children(self) -> Iterable[Node]: ...
    def show(
        self,
        buf: TextIO = sys.stdout,
        offset: int = 0,
        attrnames: bool = False,
        nodenames: bool = False,
        showcoord: bool = False,
    ) -> None: ...
# Union aliases grouping the concrete node classes by grammatical role.
# (typing.Union is imported as Union_ to avoid clashing with the Union node class.)
Expression = Union_[
    "ArrayRef",
    "Assignment",
    "BinaryOp",
    "Cast",
    "CompoundLiteral",
    "Constant",
    "ExprList",
    "FuncCall",
    "ID",
    "StructRef",
    "TernaryOp",
    "UnaryOp",
]
Statement = Union_[
    Expression,
    "Break",
    "Case",
    "Compound",
    "Continue",
    "Decl",
    "Default",
    "DoWhile",
    "EmptyStatement",
    "For",
    "Goto",
    "If",
    "Label",
    "Return",
    "Switch",
    "Typedef",
    "While",
    "Pragma",
]
# Declarator / type-specifier node groupings.
Type = Union_["PtrDecl", "ArrayDecl", "FuncDecl", "TypeDecl"]
InnerType = Union_["IdentifierType", "Struct", "Union", "Enum"]
ExternalDeclaration = Union_["FuncDef", "Decl", "Typedef", "Pragma"]
# Every node class in this module.
AnyNode = Union_[
    Statement,
    Type,
    InnerType,
    "Alignas",
    "FuncDef",
    "EllipsisParam",
    "Enumerator",
    "EnumeratorList",
    "FileAST",
    "InitList",
    "NamedInitializer",
    "ParamList",
    "Typename",
]
class NodeVisitor:
    """Stub of pycparser's visitor base: one visit_<Class> hook per node type."""

    def visit(self, node: Node) -> None: ...
    def generic_visit(self, node: Node) -> None: ...
    def visit_Alignas(self, node: Alignas) -> None: ...
    def visit_ArrayDecl(self, node: ArrayDecl) -> None: ...
    def visit_ArrayRef(self, node: ArrayRef) -> None: ...
    def visit_Assignment(self, node: Assignment) -> None: ...
    def visit_BinaryOp(self, node: BinaryOp) -> None: ...
    def visit_Break(self, node: Break) -> None: ...
    def visit_Case(self, node: Case) -> None: ...
    def visit_Cast(self, node: Cast) -> None: ...
    def visit_Compound(self, node: Compound) -> None: ...
    def visit_CompoundLiteral(self, node: CompoundLiteral) -> None: ...
    def visit_Constant(self, node: Constant) -> None: ...
    def visit_Continue(self, node: Continue) -> None: ...
    def visit_Decl(self, node: Decl) -> None: ...
    def visit_DeclList(self, node: DeclList) -> None: ...
    def visit_Default(self, node: Default) -> None: ...
    def visit_DoWhile(self, node: DoWhile) -> None: ...
    def visit_EllipsisParam(self, node: EllipsisParam) -> None: ...
    def visit_EmptyStatement(self, node: EmptyStatement) -> None: ...
    def visit_Enum(self, node: Enum) -> None: ...
    def visit_Enumerator(self, node: Enumerator) -> None: ...
    def visit_EnumeratorList(self, node: EnumeratorList) -> None: ...
    def visit_ExprList(self, node: ExprList) -> None: ...
    def visit_FileAST(self, node: FileAST) -> None: ...
    def visit_For(self, node: For) -> None: ...
    def visit_FuncCall(self, node: FuncCall) -> None: ...
    def visit_FuncDecl(self, node: FuncDecl) -> None: ...
    def visit_FuncDef(self, node: FuncDef) -> None: ...
    def visit_Goto(self, node: Goto) -> None: ...
    def visit_ID(self, node: ID) -> None: ...
    def visit_IdentifierType(self, node: IdentifierType) -> None: ...
    def visit_If(self, node: If) -> None: ...
    def visit_InitList(self, node: InitList) -> None: ...
    def visit_Label(self, node: Label) -> None: ...
    def visit_NamedInitializer(self, node: NamedInitializer) -> None: ...
    def visit_ParamList(self, node: ParamList) -> None: ...
    def visit_PtrDecl(self, node: PtrDecl) -> None: ...
    def visit_Return(self, node: Return) -> None: ...
    def visit_Struct(self, node: Struct) -> None: ...
    def visit_StructRef(self, node: StructRef) -> None: ...
    def visit_Switch(self, node: Switch) -> None: ...
    def visit_TernaryOp(self, node: TernaryOp) -> None: ...
    def visit_TypeDecl(self, node: TypeDecl) -> None: ...
    def visit_Typedef(self, node: Typedef) -> None: ...
    def visit_Typename(self, node: Typename) -> None: ...
    def visit_UnaryOp(self, node: UnaryOp) -> None: ...
    def visit_Union(self, node: Union) -> None: ...
    def visit_While(self, node: While) -> None: ...
    def visit_Pragma(self, node: Pragma) -> None: ...
# Stubs for pycparser AST node classes.  Constructor parameter types are kept
# consistent with the declared attribute types: the originals used the looser
# base class `Node` in some __init__ signatures (ArrayDecl.dim, ArrayRef,
# BinaryOp) where the corresponding attribute is typed as `Expression`.
class Alignas(Node):
    alignment: Union_[Expression, Typename]
    coord: Optional[Coord]
    def __init__(
        self,
        alignment: Union_[Expression, Typename],
        coord: Optional[Coord] = None,
    ): ...
class ArrayDecl(Node):
    type: Type
    dim: Optional[Expression]
    dim_quals: List[str]
    def __init__(
        self,
        type: Type,
        # was Optional[Node]; the `dim` attribute is Optional[Expression]
        dim: Optional[Expression],
        dim_quals: List[str],
        coord: Optional[Coord] = None,
    ): ...
class ArrayRef(Node):
    name: Expression
    subscript: Expression
    def __init__(
        self, name: Expression, subscript: Expression, coord: Optional[Coord] = None
    ): ...
class Assignment(Node):
    op: str
    lvalue: Expression
    rvalue: Expression
    def __init__(
        self,
        op: str,
        lvalue: Expression,
        rvalue: Expression,
        coord: Optional[Coord] = None,
    ): ...
class BinaryOp(Node):
    op: str
    left: Expression
    right: Expression
    def __init__(
        self,
        op: str,
        left: Expression,
        right: Expression,
        coord: Optional[Coord] = None,
    ): ...
class Break(Node):
    def __init__(self, coord: Optional[Coord] = None): ...
class Case(Node):
    expr: Expression
    stmts: List[Statement]
    def __init__(
        self, expr: Expression, stmts: List[Statement], coord: Optional[Coord] = None
    ): ...
class Cast(Node):
    to_type: "Typename"
    expr: Expression
    def __init__(
        self, to_type: "Typename", expr: Expression, coord: Optional[Coord] = None
    ): ...
# Stub declarations for the remaining pycparser AST node classes.  Bodies are
# intentionally `...`: only attribute and constructor types matter here.
class Compound(Node):
    block_items: Optional[List[Statement]]
    def __init__(
        self, block_items: Optional[List[Statement]], coord: Optional[Coord] = None
    ): ...
class CompoundLiteral(Node):
    type: "Typename"
    init: "InitList"
    def __init__(
        self, type: "Typename", init: "InitList", coord: Optional[Coord] = None
    ): ...
class Constant(Node):
    type: str
    value: str
    def __init__(self, type: str, value: str, coord: Optional[Coord] = None): ...
class Continue(Node):
    def __init__(self, coord: Optional[Coord] = None): ...
class Decl(Node):
    name: Optional[str]
    quals: List[str] # e.g. const
    align: List[Alignas]
    storage: List[str] # e.g. register
    funcspec: List[str] # e.g. inline
    type: Union_[Type, "Struct", "Enum", "Union"]
    init: Optional[Union_[Expression, "InitList"]]
    bitsize: Optional[Expression]
    def __init__(
        self,
        name: Optional[str],
        quals: List[str],
        align: List[Alignas],
        storage: List[str],
        funcspec: List[str],
        type: Union_[Type, "Struct", "Enum", "Union"],
        init: Optional[Union_[Expression, "InitList"]],
        bitsize: Optional[Expression],
        coord: Optional[Coord] = None,
    ): ...
class DeclList(Node):
    decls: List[Decl]
    def __init__(self, decls: List[Decl], coord: Optional[Coord] = None): ...
class Default(Node):
    stmts: List[Statement]
    def __init__(self, stmts: List[Statement], coord: Optional[Coord] = None): ...
class DoWhile(Node):
    cond: Expression
    stmt: Statement
    def __init__(
        self, cond: Expression, stmt: Statement, coord: Optional[Coord] = None
    ): ...
class EllipsisParam(Node):
    def __init__(self, coord: Optional[Coord] = None): ...
class EmptyStatement(Node):
    def __init__(self, coord: Optional[Coord] = None): ...
class Enum(Node):
    name: Optional[str]
    values: "Optional[EnumeratorList]"
    def __init__(
        self,
        name: Optional[str],
        values: "Optional[EnumeratorList]",
        coord: Optional[Coord] = None,
    ): ...
class Enumerator(Node):
    name: str
    value: Optional[Expression]
    def __init__(
        self, name: str, value: Optional[Expression], coord: Optional[Coord] = None
    ): ...
class EnumeratorList(Node):
    enumerators: List[Enumerator]
    def __init__(
        self, enumerators: List[Enumerator], coord: Optional[Coord] = None
    ): ...
class ExprList(Node):
    exprs: List[Union_[Expression, Typename]] # typename only for offsetof
    def __init__(
        self, exprs: List[Union_[Expression, Typename]], coord: Optional[Coord] = None
    ): ...
class FileAST(Node):
    ext: List[ExternalDeclaration]
    def __init__(
        self, ext: List[ExternalDeclaration], coord: Optional[Coord] = None
    ): ...
class For(Node):
    init: Union_[None, Expression, DeclList]
    cond: Optional[Expression]
    next: Optional[Expression]
    stmt: Statement
    def __init__(
        self,
        init: Union_[None, Expression, DeclList],
        cond: Optional[Expression],
        next: Optional[Expression],
        stmt: Statement,
        coord: Optional[Coord] = None,
    ): ...
class FuncCall(Node):
    name: Expression
    args: Optional[ExprList]
    def __init__(
        self, name: Expression, args: Optional[ExprList], coord: Optional[Coord] = None
    ): ...
class FuncDecl(Node):
    args: Optional[ParamList]
    type: Type # return type
    def __init__(
        self, args: Optional[ParamList], type: Type, coord: Optional[Coord] = None
    ): ...
class FuncDef(Node):
    decl: Decl
    param_decls: Optional[List[Decl]]
    body: Compound
    def __init__(
        self,
        decl: Decl,
        param_decls: Optional[List[Decl]],
        body: Compound,
        coord: Optional[Coord] = None,
    ): ...
class Goto(Node):
    name: str
    def __init__(self, name: str, coord: Optional[Coord] = None): ...
class ID(Node):
    name: str
    def __init__(self, name: str, coord: Optional[Coord] = None): ...
class IdentifierType(Node):
    names: List[str] # e.g. ['long', 'int']
    def __init__(self, names: List[str], coord: Optional[Coord] = None): ...
class If(Node):
    cond: Expression
    iftrue: Statement
    iffalse: Optional[Statement]
    def __init__(
        self,
        cond: Expression,
        iftrue: Statement,
        iffalse: Optional[Statement],
        coord: Optional[Coord] = None,
    ): ...
class InitList(Node):
    exprs: List[Union_[Expression, "NamedInitializer"]]
    def __init__(
        self,
        exprs: List[Union_[Expression, "NamedInitializer"]],
        coord: Optional[Coord] = None,
    ): ...
class Label(Node):
    name: str
    stmt: Statement
    def __init__(self, name: str, stmt: Statement, coord: Optional[Coord] = None): ...
class NamedInitializer(Node):
    name: List[Expression] # [ID(x), Constant(4)] for {.x[4] = ...}
    expr: Expression
    def __init__(
        self, name: List[Expression], expr: Expression, coord: Optional[Coord] = None
    ): ...
class ParamList(Node):
    params: List[Union_[Decl, ID, Typename, EllipsisParam]]
    def __init__(
        self,
        params: List[Union_[Decl, ID, Typename, EllipsisParam]],
        coord: Optional[Coord] = None,
    ): ...
class PtrDecl(Node):
    quals: List[str]
    type: Type
    def __init__(self, quals: List[str], type: Type, coord: Optional[Coord] = None): ...
class Return(Node):
    expr: Optional[Expression]
    def __init__(self, expr: Optional[Expression], coord: Optional[Coord] = None): ...
class Struct(Node):
    name: Optional[str]
    decls: Optional[List[Union_[Decl, Pragma]]]
    def __init__(
        self,
        name: Optional[str],
        decls: Optional[List[Union_[Decl, Pragma]]],
        coord: Optional[Coord] = None,
    ): ...
class StructRef(Node):
    name: Expression
    type: str
    field: ID
    def __init__(
        self, name: Expression, type: str, field: ID, coord: Optional[Coord] = None
    ): ...
class Switch(Node):
    cond: Expression
    stmt: Statement
    def __init__(
        self, cond: Expression, stmt: Statement, coord: Optional[Coord] = None
    ): ...
class TernaryOp(Node):
    cond: Expression
    iftrue: Expression
    iffalse: Expression
    def __init__(
        self,
        cond: Expression,
        iftrue: Expression,
        iffalse: Expression,
        coord: Optional[Coord] = None,
    ): ...
class TypeDecl(Node):
    declname: Optional[str]
    quals: List[str]
    type: InnerType
    align: List[Alignas]
    # NOTE: parameter order (declname, quals, align, type) differs from the
    # attribute listing order above.
    def __init__(
        self,
        declname: Optional[str],
        quals: List[str],
        align: List[Alignas],
        type: InnerType,
        coord: Optional[Coord] = None,
    ): ...
class Typedef(Node):
    name: str
    quals: List[str]
    storage: List[str]
    type: Type
    def __init__(
        self,
        name: str,
        quals: List[str],
        storage: List[str],
        type: Type,
        coord: Optional[Coord] = None,
    ): ...
class Typename(Node):
    name: None
    quals: List[str]
    align: List[Alignas]
    type: Type
    def __init__(
        self,
        name: None,
        quals: List[str],
        align: List[Alignas],
        type: Type,
        coord: Optional[Coord] = None,
    ): ...
class UnaryOp(Node):
    op: str
    expr: Union_[Expression, Typename]
    def __init__(
        self, op: str, expr: Union_[Expression, Typename], coord: Optional[Coord] = None
    ): ...
class Union(Node):
    name: Optional[str]
    decls: Optional[List[Union_[Decl, Pragma]]]
    def __init__(
        self,
        name: Optional[str],
        decls: Optional[List[Union_[Decl, Pragma]]],
        coord: Optional[Coord] = None,
    ): ...
class While(Node):
    cond: Expression
    stmt: Statement
    def __init__(
        self, cond: Expression, stmt: Statement, coord: Optional[Coord] = None
    ): ...
class Pragma(Node):
    string: str
    def __init__(self, string: str, coord: Optional[Coord] = None): ...

View File

@ -1,13 +0,0 @@
# ------------------------------------------------------------------------------
# pycparser: c_generator.py
#
# C code generator from pycparser AST nodes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
# ------------------------------------------------------------------------------
from . import c_ast
class CGenerator:
    """Stub for pycparser's c_generator.CGenerator: renders an AST back to C.

    `visit` returns the C source text for `node`.
    """
    def __init__(self) -> None: ...
    def visit(self, node: c_ast.Node) -> str: ...

View File

@ -1,22 +0,0 @@
# ------------------------------------------------------------------------------
# pycparser: c_parser.py
#
# CParser class: Parser and AST builder for the C language
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
# ------------------------------------------------------------------------------
from . import c_ast
from typing import Any, Dict, List, Optional
class CParser:
    """Stub for pycparser's c_parser.CParser: parses C source into a FileAST."""
    clex: Any # CLexer
    cparser: Any # LRParser
    _scope_stack: List[Dict[str, bool]]
    _last_yielded_token: Optional[Any]
    def __init__(self) -> None: ...
    def parse(
        self, text: str, filename: str = "", debuglevel: int = 0
    ) -> c_ast.FileAST: ...

View File

@ -1,22 +0,0 @@
# -----------------------------------------------------------------
# plyparser.py
#
# PLYParser class and other utilities for simplifying programming
# parsers with PLY
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
# -----------------------------------------------------------------
from typing import Optional
class Coord:
    """Stub for pycparser's plyparser.Coord: a source location (file/line/column)."""
    file: str
    line: int
    column: Optional[int]
    def __init__(self, file: str, line: int, column: Optional[int] = None): ...
    def __str__(self) -> str: ...
class ParseError(Exception):
    # Raised by the parser on invalid C input.
    pass

View File

@ -1,244 +0,0 @@
#!/usr/bin/env python3
import argparse
import logging
import os
import subprocess
import sys
from contextlib import ExitStack
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass, field, replace
from ppc_disasm import disassemble_ppc_elf
logger = logging.getLogger(__name__)
def set_up_logging(debug: bool) -> None:
    """Configure root logging; DEBUG level when `debug` is set, else INFO."""
    level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(
        format="[%(levelname)s] %(message)s",
        level=level,
    )
@dataclass
class PathsToBinaries:
    """Resolved toolchain paths; any of them may be None when not configured."""
    IDO_CC: Optional[Path]
    MWCC_CC: Optional[Path]
    WINE: Optional[Path]
def get_environment_variables() -> PathsToBinaries:
    """Locate compiler binaries from environment variables.

    Each missing or invalid variable is logged and yields None; callers
    decide which toolchains are usable from what is present.
    """
    def load(env_var_name: str, error_message: str) -> Optional[Path]:
        # Resolve one env var to an existing path, or None (with a log line).
        env_var = os.environ.get(env_var_name)
        if env_var is None:
            logger.error(error_message)
            return None
        path = Path(env_var)
        if not path.exists():
            logger.error(error_message + " (path does not exist)")
            return None
        return path
    IDO_CC = load(
        "IDO_CC",
        "env variable IDO_CC should point to recompiled IDO cc binary",
    )
    MWCC_CC = load(
        "MWCC_CC", "env variable MWCC_CC should point to a PPC cc binary (mwcceppc.exe)"
    )
    # Bug fix: WINE was previously assigned only inside the `if`, so the
    # `return` below raised NameError whenever MWCC_CC was unset or the host
    # was not Linux.  WINE is only needed to run the Windows MWCC on Linux.
    WINE: Optional[Path] = None
    if MWCC_CC and sys.platform.startswith("linux"):
        WINE = load("WINE", "env variable WINE should point to wine or wibo binary")
    return PathsToBinaries(IDO_CC=IDO_CC, MWCC_CC=MWCC_CC, WINE=WINE)
@dataclass
class Compiler:
    """A named compiler plus the argv prefix used to invoke it."""
    name: str
    cc_command: List[str]
    def with_cc_flags(self, flags: List[str]) -> "Compiler":
        """Return a copy of this compiler with `flags` appended to its command."""
        extended_command = [*self.cc_command, *flags]
        return replace(self, cc_command=extended_command)
def get_ido_compilers(paths: PathsToBinaries) -> List[Tuple[str, Compiler]]:
    """Return (asm-file-stem, Compiler) pairs for the IDO (MIPS) toolchain.

    Returns an empty list (with a warning) when IDO_CC was not found.
    """
    if paths.IDO_CC is not None:
        ido = Compiler(
            name="ido",
            cc_command=[
                str(paths.IDO_CC),
                "-c",
                "-Wab,-r4300_mul",
                "-non_shared",
                "-G",
                "0",
                "-Xcpluscomm",
                "-fullwarn",
                "-wlint",
                "-woff",
                "819,820,852,821,827,826",
                "-signed",
            ],
        )
        return [
            ("irix-g", ido.with_cc_flags(["-g", "-mips2"])),
            ("irix-o2", ido.with_cc_flags(["-O2", "-mips2"])),
            # mips1 variants are currently disabled; the -g one should use -g:
            # ("irix-g-mips1", ido.with_cc_flags(["-g", "-mips1"]))
            # ("irix-o2-mips1", ido.with_cc_flags(["-O2", "-mips1"]))
        ]
    logger.warning("IDO tools not found; skipping MIPS compilers")
    return []
def get_mwcc_compilers(paths: PathsToBinaries) -> List[Tuple[str, Compiler]]:
    """Return (asm-file-stem, Compiler) pairs for the MWCC (PPC) toolchain.

    Returns an empty list (with a warning) when MWCC_CC was not found, or when
    the Windows .exe would need wine on Linux but WINE was not configured.
    """
    if paths.MWCC_CC is not None:
        cc_command = [
            str(paths.MWCC_CC),
            "-c",
            "-Cpp_exceptions",
            "off",
            "-proc",
            "gekko",
            "-fp",
            "hard",
            "-enum",
            "int",
            "-nodefaults",
        ]
        ok = True
        # mwcceppc.exe is a Windows binary; on Linux it must run under wine/wibo.
        if paths.MWCC_CC.suffix == ".exe" and sys.platform.startswith("linux"):
            if paths.WINE:
                cc_command.insert(0, str(paths.WINE))
            else:
                ok = False
        if ok:
            mwcc = Compiler(
                name="mwcc",
                cc_command=cc_command,
            )
            return [
                ("mwcc-o4p", mwcc.with_cc_flags(["-O4,p"])),
                # ("mwcc-o4p-s0", mwcc.with_cc_flags(["-O4,p", "-sdata", "0", "-sdata2", "0"]))
            ]
    logger.warning("MWCC tools not found; skipping PPC compilers")
    return []
def get_compilers(paths: PathsToBinaries) -> List[Tuple[str, Compiler]]:
    """Collect every compiler configuration available in this environment."""
    return [*get_ido_compilers(paths), *get_mwcc_compilers(paths)]
def disassemble_mips_elf(temp_out_file: str) -> bytes:
    """Disassemble a MIPS ELF object with spimdisasm and return the asm bytes.

    The trailing "-" argument makes spimdisasm write to stdout, which is
    captured and returned.  Raises CalledProcessError if spimdisasm fails.
    """
    return subprocess.run(
        [
            sys.executable,
            "-m",
            "spimdisasm.elfObjDisasm",
            "--Mreg-names",
            "o32",
            "--quiet",
            temp_out_file,
            "-",
        ],
        stdout=subprocess.PIPE,
        check=True,
    ).stdout
def do_compilation_step(
    temp_o_file: str,
    in_file: str,
    compiler: Compiler,
) -> None:
    """Compile `in_file` into the object file `temp_o_file` using `compiler`.

    Raises:
        subprocess.CalledProcessError: if the compiler exits non-zero.
            (Previously a failing compile was silently ignored, which
            surfaced later as a confusing disassembly error instead.)
    """
    args = compiler.cc_command + [
        "-o",
        temp_o_file,
        in_file,
    ]
    # check=True surfaces compiler failures immediately; the caller chain
    # (add_test_from_file) already catches and logs exceptions per test.
    subprocess.run(args, check=True)
def run_compile(in_file: Path, out_file: Path, compiler: Compiler) -> None:
    """Compile `in_file` and write its disassembly to `out_file`.

    The object file is only needed transiently, so it lives in a temporary
    file.  Dispatch on compiler.name selects the disassembler: "ido" uses
    spimdisasm (MIPS), "mwcc" uses ppc_disasm (PPC).

    NOTE(review): re-opening a NamedTemporaryFile by name while it is still
    open does not work on Windows; this script appears to assume a POSIX
    host — confirm.
    """
    flags_str = " ".join(compiler.cc_command)
    logger.info(f"Compiling {in_file} to {out_file} using: {flags_str}")
    with NamedTemporaryFile(suffix=".o") as temp_o_file:
        logger.debug(f"Compiling {in_file} using: {flags_str}")
        do_compilation_step(temp_o_file.name, str(in_file), compiler)
        if compiler.name == "ido":
            final_asm = disassemble_mips_elf(temp_o_file.name)
            out_file.write_bytes(final_asm)
        elif compiler.name == "mwcc":
            with open(temp_o_file.name, "rb") as o_f:
                with out_file.open("w") as out_f:
                    disassemble_ppc_elf(o_f, out_f)
        else:
            # Unknown compiler names are a programming error, not user input.
            assert False, compiler.name
    logger.info(f"Successfully wrote disassembly to {out_file}.")
def add_test_from_file(
    orig_file: Path, env_vars: PathsToBinaries, compilers: List[Tuple[str, Compiler]]
) -> None:
    """Write one `<stem>.s` disassembly per compiler next to `orig_file`.

    A failure for one compiler is logged and does not stop the others.
    NOTE(review): `env_vars` is unused in this body (the compilers were
    already built from it); kept for interface stability — confirm before
    removing.
    """
    test_dir = orig_file.parent
    for asm_filename, compiler in compilers:
        asm_file_path = test_dir / (asm_filename + ".s")
        try:
            run_compile(orig_file, asm_file_path, compiler)
            if compiler.name == "mwcc":
                # If the flags file doesn't exist, initialize it with the correct --target
                ppc_flags = test_dir / (asm_filename + "-flags.txt")
                if not ppc_flags.exists():
                    ppc_flags.write_text("--target ppc-mwcc-c\n")
        except Exception:
            logger.exception(f"Failed to compile {asm_file_path}")
def main() -> int:
    """Parse CLI args and (re)generate expected asm for each given test file.

    Returns a process exit status: 0 on success, 2 when no compiler
    toolchain could be found.
    """
    parser = argparse.ArgumentParser(
        description="Add or update end-to-end decompiler tests."
    )
    parser.add_argument(
        "files",
        help=(
            "Files containing C code to compile (then decompile). "
            "Each one must have a path of the form "
            "`tests/end_to_end/TEST_NAME/orig.c`."
        ),
        nargs="+",
    )
    parser.add_argument(
        "--debug", dest="debug", help="print debug info", action="store_true"
    )
    args = parser.parse_args()
    set_up_logging(args.debug)
    env_vars = get_environment_variables()
    compilers = get_compilers(env_vars)
    if not compilers:
        # Nothing to do without at least one working toolchain.
        return 2
    for orig_filename in args.files:
        orig_file = Path(orig_filename).resolve()
        if not orig_file.is_file():
            logger.error(f"{orig_file} does not exist. Skipping.")
            continue
        # Enforce the tests/end_to_end/TEST_NAME/orig.c(pp) layout so the asm
        # files land next to the C source they were generated from.
        expected_c_file = (
            Path(__file__).parent / "end_to_end" / orig_file.parent.name / "orig.c"
        ).resolve()
        expected_cpp_file = expected_c_file.with_suffix(".cpp")
        if orig_file != expected_c_file and orig_file != expected_cpp_file:
            logger.error(
                f"`{orig_file}` does not have a path of the form `{expected_c_file}` or `{expected_cpp_file}`! Skipping."
            )
            continue
        add_test_from_file(orig_file, env_vars, compilers)
    return 0
# Script entry point; exit status 2 means no compiler toolchain was available.
if __name__ == "__main__":
    sys.exit(main())

View File

@ -1,222 +0,0 @@
import struct
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Tuple
# Based on the ELF file parser in simonlindholm's asm-differ ("diff.py")
# https://github.com/simonlindholm/asm-differ/blob/4b38c884c1efdc3bfa8b14f13015a69368a8d3a2/diff.py#L1137-L1247
@dataclass(order=True)
class ElfSymbol:
    """A symbol-table entry: offset within `section` (None when the symbol
    has no associated parsed section, e.g. undefined/reserved indices)."""
    offset: int
    name: str
    section: Optional["ElfSection"]
@dataclass(order=True)
class ElfRelocation:
    """A relocation at `section_offset`, pointing at `symbol` + `symbol_offset`."""
    section_offset: int
    symbol: ElfSymbol
    symbol_offset: int
    relocation_type: int  # machine-specific r_type value
@dataclass
class ElfSection:
    """A parsed section plus its relocations and the symbols defined in it."""
    address: int
    name: str
    data: bytes = field(repr=False)
    # Both maps are keyed by offset within this section.
    relocations: Dict[int, ElfRelocation] = field(default_factory=dict, repr=False)
    symbols: Dict[int, ElfSymbol] = field(default_factory=dict, repr=False)
@dataclass
class ElfFile:
    """Minimal parser for relocatable ELF objects (32/64-bit, either endianness)."""
    sections: Dict[str, ElfSection] = field(default_factory=dict)
    symbols: Dict[str, ElfSymbol] = field(default_factory=dict)
    @staticmethod
    def parse(data: bytes) -> "ElfFile":
        """Parse raw ELF bytes; raises ValueError for empty or non-ELF input."""
        if not data:
            raise ValueError("Input data is empty")
        e_ident = data[:16]
        if e_ident[:4] != b"\x7FELF":
            raise ValueError(
                f"Input data is not an ELF file (magic number is {e_ident[:4]!r})"
            )
        # Section-header type constants from the ELF specification.
        SHT_PROGBITS = 1
        SHT_SYMTAB = 2
        SHT_STRTAB = 3
        SHT_NOBITS = 8
        SHT_REL = 9
        SHT_RELA = 4
        is_32bit = e_ident[4] == 1
        is_little_endian = e_ident[5] == 1
        str_end = "<" if is_little_endian else ">"
        str_off = "I" if is_32bit else "Q"
        def read(spec: str, offset: int) -> Tuple[int, ...]:
            # "P" in `spec` stands for a pointer-sized field (I or Q).
            spec = spec.replace("P", str_off)
            size = struct.calcsize(spec)
            return struct.unpack(str_end + spec, data[offset : offset + size])
        def read_string(base: int, offset: int) -> str:
            # Read a NUL-terminated string from the string table at `base`.
            if base == 0 or offset == 0:
                return ""
            offset += base
            return data[offset : data.index(b"\0", offset)].decode("latin1")
        (
            e_type,
            e_machine,
            e_version,
            e_entry,
            e_phoff,
            e_shoff,
            e_flags,
            e_ehsize,
            e_phentsize,
            e_phnum,
            e_shentsize,
            e_shnum,
            e_shstrndx,
        ) = read("HHIPPPIHHHHHH", 16)
        if e_type != 1: # relocatable
            raise ValueError(f"Input elf is not relocatable (e_type = {e_type})")
        assert e_shoff != 0
        assert e_shnum != 0 # don't support > 0xFF00 sections
        assert e_shstrndx != 0
        @dataclass
        class Section:
            # Raw section-header fields, named as in the ELF spec.
            sh_name: int
            sh_type: int
            sh_flags: int
            sh_addr: int
            sh_offset: int
            sh_size: int
            sh_link: int
            sh_info: int
            sh_addralign: int
            sh_entsize: int
        sections = [
            Section(*read("IIPPPPIIPP", e_shoff + i * e_shentsize))
            for i in range(e_shnum)
        ]
        shstrtab_offset = sections[e_shstrndx].sh_offset
        sec_names = [read_string(shstrtab_offset, s.sh_name) for s in sections]
        # Exactly one string table (besides .shstrtab) and one symbol table
        # are expected in these objects.
        strtab_sections = [
            i
            for i in range(e_shnum)
            if sections[i].sh_type == SHT_STRTAB and i != e_shstrndx
        ]
        assert len(strtab_sections) == 1
        strtab_offset = sections[strtab_sections[0]].sh_offset
        symtab_sections = [
            i for i in range(e_shnum) if sections[i].sh_type == SHT_SYMTAB
        ]
        assert len(symtab_sections) == 1
        symtab = sections[symtab_sections[0]]
        elf = ElfFile()
        # Parse SHT_PROGBIT/SHT_NOBITS sections (.text, .data, .bss, etc.)
        for s, name in zip(sections, sec_names):
            if s.sh_type == SHT_PROGBITS:
                if name == ".comment" or name.startswith(".note"):
                    continue
                section_data = data[s.sh_offset : s.sh_offset + s.sh_size]
                elf.sections[name] = ElfSection(
                    address=s.sh_addr, name=name, data=section_data
                )
            elif s.sh_type == SHT_NOBITS and s.sh_size != 0:
                # NOBITS sections have no file contents; model them as zeros.
                section_data = b"\0" * s.sh_size
                elf.sections[name] = ElfSection(
                    address=s.sh_addr, name=name, data=section_data
                )
        # Parse SHT_SYMTAB section (symbol table)
        symbols_by_index: List[ElfSymbol] = []
        for offset in range(0, symtab.sh_size, symtab.sh_entsize):
            # 32- and 64-bit ELF lay out the Elf_Sym fields in different orders.
            if is_32bit:
                sym_offset = symtab.sh_offset + offset
                st_name, st_value, st_size, st_info, st_other, st_shndx = read(
                    "IIIBBH", sym_offset
                )
            else:
                sym_offset = symtab.sh_offset + offset
                st_name, st_info, st_other, st_shndx, st_value, st_size = read(
                    "IBBHQQ", sym_offset
                )
            sym_name = read_string(strtab_offset, st_name)
            if st_shndx == 0 or st_shndx >= 0xFF00:
                section = None
            else:
                section = elf.sections.get(sec_names[st_shndx])
            symbol = ElfSymbol(offset=st_value, name=sym_name, section=section)
            elf.symbols[sym_name] = symbol
            symbols_by_index.append(symbol)
            # Do not overwrite existing symbols at this address, and skip
            # empty names or names starting with "..." (such as `...bss@0`).
            if (
                section is not None
                and st_value not in section.symbols
                and sym_name
                and not sym_name.startswith("...")
            ):
                section.symbols[st_value] = symbol
        # Parse SHT_REL/SHT_RELA sections (relocations)
        for s, name in zip(sections, sec_names):
            if s.sh_type == SHT_REL or s.sh_type == SHT_RELA:
                # sh_info is the index of the section these relocations apply to.
                sec_name = sec_names[s.sh_info]
                section = elf.sections.get(sec_name)
                if section is None:
                    continue
                sec_base = sections[s.sh_info].sh_offset
                for i in range(0, s.sh_size, s.sh_entsize):
                    if s.sh_type == SHT_REL:
                        r_offset, r_info = read("PP", s.sh_offset + i)
                    else:
                        r_offset, r_info, r_addend = read("PPP", s.sh_offset + i)
                    if is_32bit:
                        r_sym = r_info >> 8
                        r_type = r_info & 0xFF
                    else:
                        r_sym = r_info >> 32
                        r_type = r_info & 0xFFFFFFFF
                    symbol = symbols_by_index[r_sym]
                    # Canonicalize the symbol, in case there are multiple symbols at the same offset
                    if symbol.section is not None:
                        symbol = symbol.section.symbols[symbol.offset]
                    if s.sh_type == SHT_REL:
                        # NB: This isn't needed for PPC, but we may want to re-use this code later.
                        # We will also need to add support for R_MIPS_{LO16,HI16,26}.
                        # (PPC uses RELA relocations, which embed the addend in the relocation
                        # instead of in the relocated section.)
                        if e_machine == 8 and r_type == 2: # R_MIPS_32
                            (r_addend,) = read("I", sec_base + r_offset)
                        else:
                            continue
                    reloc = ElfRelocation(
                        section_offset=r_offset,
                        symbol=symbol,
                        symbol_offset=r_addend,
                        relocation_type=r_type,
                    )
                    section.relocations[r_offset] = reloc
        return elf

View File

@ -1,21 +0,0 @@
f32 test(s32 arg0) {
f64 sp8;
s32 sp4;
f64 var_ft3;
s32 temp_t9;
sp8 = 1.0;
sp4 = arg0;
if (sp4 != 0) {
do {
var_ft3 = (f64) sp4;
if (sp4 < 0) {
var_ft3 += 4294967296.0;
}
sp8 *= var_ft3;
temp_t9 = sp4 - 1;
sp4 = temp_t9;
} while (temp_t9 != 0);
}
return (f32) sp8;
}

View File

@ -1,46 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 27BDFFF0 */ addiu $sp, $sp, -0x10
/* 000094 00400094 3C013FF0 */ lui $at, 0x3ff0
/* 000098 00400098 44812800 */ mtc1 $at, $ft0f
/* 00009C 0040009C 44802000 */ mtc1 $zero, $ft0
/* 0000A0 004000A0 00000000 */ nop
/* 0000A4 004000A4 F7A40008 */ sdc1 $ft0, 8($sp)
/* 0000A8 004000A8 AFA40004 */ sw $a0, 4($sp)
/* 0000AC 004000AC 8FAE0004 */ lw $t6, 4($sp)
/* 0000B0 004000B0 11C00011 */ beqz $t6, .L004000F8
/* 0000B4 004000B4 00000000 */ nop
.L004000B8:
/* 0000B8 004000B8 8FAF0004 */ lw $t7, 4($sp)
/* 0000BC 004000BC D7A60008 */ ldc1 $ft1, 8($sp)
/* 0000C0 004000C0 448F4000 */ mtc1 $t7, $ft2
/* 0000C4 004000C4 05E10006 */ bgez $t7, .L004000E0
/* 0000C8 004000C8 468042A1 */ cvt.d.w $ft3, $ft2
/* 0000CC 004000CC 3C0141F0 */ lui $at, 0x41f0
/* 0000D0 004000D0 44818800 */ mtc1 $at, $ft4f
/* 0000D4 004000D4 44808000 */ mtc1 $zero, $ft4
/* 0000D8 004000D8 00000000 */ nop
/* 0000DC 004000DC 46305280 */ add.d $ft3, $ft3, $ft4
.L004000E0:
/* 0000E0 004000E0 462A3482 */ mul.d $ft5, $ft1, $ft3
/* 0000E4 004000E4 F7B20008 */ sdc1 $ft5, 8($sp)
/* 0000E8 004000E8 8FB80004 */ lw $t8, 4($sp)
/* 0000EC 004000EC 2719FFFF */ addiu $t9, $t8, -1
/* 0000F0 004000F0 1720FFF1 */ bnez $t9, .L004000B8
/* 0000F4 004000F4 AFB90004 */ sw $t9, 4($sp)
.L004000F8:
/* 0000F8 004000F8 D7A40008 */ ldc1 $ft0, 8($sp)
/* 0000FC 004000FC 10000003 */ b .L0040010C
/* 000100 00400100 46202020 */ cvt.s.d $fv0, $ft0
/* 000104 00400104 10000001 */ b .L0040010C
/* 000108 00400108 00000000 */ nop
.L0040010C:
/* 00010C 0040010C 03E00008 */ jr $ra
/* 000110 00400110 27BD0010 */ addiu $sp, $sp, 0x10
/* 000114 00400114 00000000 */ nop
/* 000118 00400118 00000000 */ nop
/* 00011C 0040011C 00000000 */ nop

View File

@ -1,62 +0,0 @@
f32 test(s32 arg0) {
f64 var_ft1;
f64 var_ft1_2;
f64 var_ft2;
f64 var_ft4;
f64 var_ft5;
f64 var_fv1;
s32 temp_a1;
s32 temp_ft0;
s32 temp_ft3;
s32 temp_t6;
s32 temp_t7;
s32 temp_t8;
s32 var_v0;
var_fv1 = 0.0;
var_v0 = arg0;
if (arg0 != 0) {
temp_a1 = -(arg0 & 3);
if (temp_a1 != 0) {
do {
var_ft1 = (f64) var_v0;
if (var_v0 < 0) {
var_ft1 += 4294967296.0;
}
var_fv1 *= var_ft1;
var_v0 -= 1;
} while ((temp_a1 + arg0) != var_v0);
if (var_v0 != 0) {
goto loop_6;
}
} else {
do {
loop_6:
temp_t6 = var_v0 - 1;
var_ft4 = (f64) var_v0;
if (var_v0 < 0) {
var_ft4 += 4294967296.0;
}
temp_t7 = var_v0 - 2;
temp_t8 = var_v0 - 3;
var_ft2 = (f64) temp_t6;
if (temp_t6 < 0) {
var_ft2 += 4294967296.0;
}
temp_ft3 = temp_t7;
temp_ft0 = temp_t8;
var_v0 -= 4;
var_ft5 = (f64) temp_ft3;
if (temp_t7 < 0) {
var_ft5 += 4294967296.0;
}
var_ft1_2 = (f64) temp_ft0;
if (temp_t8 < 0) {
var_ft1_2 += 4294967296.0;
}
var_fv1 = var_fv1 * var_ft4 * var_ft2 * var_ft5 * var_ft1_2;
} while (var_v0 != 0);
}
}
return (f32) var_fv1;
}

View File

@ -1,83 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 3C013FF0 */ lui $at, 0x3ff0
/* 000094 00400094 44811800 */ mtc1 $at, $fv1f
/* 000098 00400098 44801000 */ mtc1 $zero, $fv1
/* 00009C 0040009C 1080003D */ beqz $a0, .L00400194
/* 0000A0 004000A0 00801025 */ move $v0, $a0
/* 0000A4 004000A4 30850003 */ andi $a1, $a0, 3
/* 0000A8 004000A8 00052823 */ negu $a1, $a1
/* 0000AC 004000AC 10A0000F */ beqz $a1, .L004000EC
/* 0000B0 004000B0 00A41821 */ addu $v1, $a1, $a0
/* 0000B4 004000B4 44822000 */ mtc1 $v0, $ft0
.L004000B8:
/* 0000B8 004000B8 3C0141F0 */ lui $at, 0x41f0
/* 0000BC 004000BC 04410005 */ bgez $v0, .L004000D4
/* 0000C0 004000C0 468021A1 */ cvt.d.w $ft1, $ft0
/* 0000C4 004000C4 44814800 */ mtc1 $at, $ft2f
/* 0000C8 004000C8 44804000 */ mtc1 $zero, $ft2
/* 0000CC 004000CC 00000000 */ nop
/* 0000D0 004000D0 46283180 */ add.d $ft1, $ft1, $ft2
.L004000D4:
/* 0000D4 004000D4 46261082 */ mul.d $fv1, $fv1, $ft1
/* 0000D8 004000D8 2442FFFF */ addiu $v0, $v0, -1
/* 0000DC 004000DC 5462FFF6 */ bnel $v1, $v0, .L004000B8
/* 0000E0 004000E0 44822000 */ mtc1 $v0, $ft0
/* 0000E4 004000E4 1040002B */ beqz $v0, .L00400194
/* 0000E8 004000E8 00000000 */ nop
.L004000EC:
/* 0000EC 004000EC 44825000 */ mtc1 $v0, $ft3
.L004000F0:
/* 0000F0 004000F0 244EFFFF */ addiu $t6, $v0, -1
/* 0000F4 004000F4 04410006 */ bgez $v0, .L00400110
/* 0000F8 004000F8 46805421 */ cvt.d.w $ft4, $ft3
/* 0000FC 004000FC 3C0141F0 */ lui $at, 0x41f0
/* 000100 00400100 44819800 */ mtc1 $at, $ft5f
/* 000104 00400104 44809000 */ mtc1 $zero, $ft5
/* 000108 00400108 00000000 */ nop
/* 00010C 0040010C 46328400 */ add.d $ft4, $ft4, $ft5
.L00400110:
/* 000110 00400110 448E2000 */ mtc1 $t6, $ft0
/* 000114 00400114 46301082 */ mul.d $fv1, $fv1, $ft4
/* 000118 00400118 244FFFFE */ addiu $t7, $v0, -2
/* 00011C 0040011C 2458FFFD */ addiu $t8, $v0, -3
/* 000120 00400120 3C0141F0 */ lui $at, 0x41f0
/* 000124 00400124 05C10005 */ bgez $t6, .L0040013C
/* 000128 00400128 46802221 */ cvt.d.w $ft2, $ft0
/* 00012C 0040012C 44813800 */ mtc1 $at, $ft1f
/* 000130 00400130 44803000 */ mtc1 $zero, $ft1
/* 000134 00400134 00000000 */ nop
/* 000138 00400138 46264200 */ add.d $ft2, $ft2, $ft1
.L0040013C:
/* 00013C 0040013C 448F5000 */ mtc1 $t7, $ft3
/* 000140 00400140 46281082 */ mul.d $fv1, $fv1, $ft2
/* 000144 00400144 44982000 */ mtc1 $t8, $ft0
/* 000148 00400148 2442FFFC */ addiu $v0, $v0, -4
/* 00014C 0040014C 3C0141F0 */ lui $at, 0x41f0
/* 000150 00400150 05E10005 */ bgez $t7, .L00400168
/* 000154 00400154 468054A1 */ cvt.d.w $ft5, $ft3
/* 000158 00400158 44818800 */ mtc1 $at, $ft4f
/* 00015C 0040015C 44808000 */ mtc1 $zero, $ft4
/* 000160 00400160 00000000 */ nop
/* 000164 00400164 46309480 */ add.d $ft5, $ft5, $ft4
.L00400168:
/* 000168 00400168 46321082 */ mul.d $fv1, $fv1, $ft5
/* 00016C 0040016C 3C0141F0 */ lui $at, 0x41f0
/* 000170 00400170 07010005 */ bgez $t8, .L00400188
/* 000174 00400174 468021A1 */ cvt.d.w $ft1, $ft0
/* 000178 00400178 44814800 */ mtc1 $at, $ft2f
/* 00017C 0040017C 44804000 */ mtc1 $zero, $ft2
/* 000180 00400180 00000000 */ nop
/* 000184 00400184 46283180 */ add.d $ft1, $ft1, $ft2
.L00400188:
/* 000188 00400188 46261082 */ mul.d $fv1, $fv1, $ft1
/* 00018C 0040018C 5440FFD8 */ bnezl $v0, .L004000F0
/* 000190 00400190 44825000 */ mtc1 $v0, $ft3
.L00400194:
/* 000194 00400194 03E00008 */ jr $ra
/* 000198 00400198 46201020 */ cvt.s.d $fv0, $fv1
/* 00019C 0040019C 00000000 */ nop

View File

@ -1,10 +0,0 @@
float test(unsigned int y) {
double ret = 1.0;
unsigned int i;
for (i = y; i != 0; i--) {
ret *= i;
}
return ret;
}

View File

@ -1,6 +0,0 @@
f32 test(f32 arg0) {
f64 sp10;
sp10 = (f64) sqrtf(fabsf(arg0));
return (f32) sqrt(fabs(sp10));
}

View File

@ -1,22 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 27BDFFE8 */ addiu $sp, $sp, -0x18
/* 000094 00400094 46006005 */ abs.s $f0, $f12
/* 000098 00400098 46000386 */ mov.s $f14, $f0
/* 00009C 0040009C 46007004 */ sqrt.s $f0, $f14
/* 0000A0 004000A0 46000121 */ cvt.d.s $f4, $f0
/* 0000A4 004000A4 F7A40010 */ sdc1 $f4, 0x10($sp)
/* 0000A8 004000A8 D7A00010 */ ldc1 $f0, 0x10($sp)
/* 0000AC 004000AC 46200005 */ abs.d $f0, $f0
/* 0000B0 004000B0 46200406 */ mov.d $f16, $f0
/* 0000B4 004000B4 46208004 */ sqrt.d $f0, $f16
/* 0000B8 004000B8 10000003 */ b .L004000C8
/* 0000BC 004000BC 46200020 */ cvt.s.d $f0, $f0
/* 0000C0 004000C0 10000001 */ b .L004000C8
/* 0000C4 004000C4 00000000 */ nop
.L004000C8:
/* 0000C8 004000C8 03E00008 */ jr $ra
/* 0000CC 004000CC 27BD0018 */ addiu $sp, $sp, 0x18

View File

@ -1,3 +0,0 @@
f32 test(f32 arg0) {
return (f32) sqrt(fabs((f64) sqrtf(fabsf(arg0))));
}

View File

@ -1,14 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 46006005 */ abs.s $f0, $f12
/* 000094 00400094 46000004 */ sqrt.s $f0, $f0
/* 000098 00400098 46000021 */ cvt.d.s $f0, $f0
/* 00009C 0040009C 46200005 */ abs.d $f0, $f0
/* 0000A0 004000A0 46200004 */ sqrt.d $f0, $f0
/* 0000A4 004000A4 03E00008 */ jr $ra
/* 0000A8 004000A8 46200020 */ cvt.s.d $f0, $f0
/* 0000AC 004000AC 00000000 */ nop

View File

@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@ -1,11 +0,0 @@
? fabs(); /* extern */
? fabsf(); /* extern */
f64 sqrt(); /* extern */
? sqrtf(); /* extern */
f32 test(void) {
fabsf();
sqrtf();
fabs();
return (f32) sqrt();
}

View File

@ -1,19 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0x30
.global test
test:
/* 00000000 00000000 7C 08 02 A6 */ mflr r0
/* 00000004 00000004 90 01 00 04 */ stw r0, 4(r1)
/* 00000008 00000008 94 21 FF F8 */ stwu r1, -8(r1)
/* 0000000C 0000000C 48 00 00 01 */ bl fabsf
/* 00000010 00000010 48 00 00 01 */ bl sqrtf
/* 00000014 00000014 48 00 00 01 */ bl fabs
/* 00000018 00000018 48 00 00 01 */ bl sqrt
/* 0000001C 0000001C 80 01 00 0C */ lwz r0, 0xc(r1)
/* 00000020 00000020 FC 20 08 18 */ frsp f1, f1
/* 00000024 00000024 38 21 00 08 */ addi r1, r1, 8
/* 00000028 00000028 7C 08 03 A6 */ mtlr r0
/* 0000002C 0000002C 4E 80 00 20 */ blr

View File

@ -1,13 +0,0 @@
float fabsf(float x);
double fabs(double x);
float sqrtf(float x);
double sqrt(double x);
#pragma intrinsic (fabsf)
#pragma intrinsic (fabs)
#pragma intrinsic (sqrtf)
#pragma intrinsic (sqrt)
float test(float a) {
double r = (double)sqrtf(fabsf(a));
return (float)sqrt(fabs(r));
}

View File

@ -1,47 +0,0 @@
s32 func_00400090(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp24;
s32 sp20;
s32 sp1C;
s32 temp_t6;
s32 temp_t9;
sp24 = arg0 + arg1;
sp20 = arg1 + arg2;
sp1C = 0;
if ((sp24 != 0) || (sp20 != 0) || (sp20 = func_00400090(sp20), (sp20 != 0)) || (sp24 = 2, (arg3 != 0))) {
sp1C = 1;
} else if (arg0 != 0) {
sp1C = -1;
} else {
sp1C = -2;
}
sp1C += arg2;
if ((sp24 != 0) && (sp20 != 0)) {
temp_t6 = sp24 + sp20;
sp24 = temp_t6;
sp20 = func_00400090(temp_t6);
if ((sp20 != 0) && (arg3 != 0)) {
if (sp1C < 5) {
do {
sp1C += 1;
sp1C *= 2;
} while (sp1C < 5);
}
sp1C += 5;
}
}
if ((sp24 != 0) && (sp20 != 0) && (temp_t9 = sp24 + sp20, sp24 = temp_t9, sp20 = func_00400090(temp_t9), (sp20 != 0)) && (arg3 != 0)) {
if (sp1C < 5) {
do {
sp1C += 1;
sp1C *= 2;
} while (sp1C < 5);
}
sp1C += 5;
} else {
sp1C += 6;
}
return sp1C;
}

View File

@ -1,158 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
/* 000098 00400098 03E00008 */ jr $ra
/* 00009C 0040009C 00000000 */ nop
/* 0000A0 004000A0 03E00008 */ jr $ra
/* 0000A4 004000A4 00000000 */ nop
glabel test
/* 0000A8 004000A8 27BDFFD8 */ addiu $sp, $sp, -0x28
/* 0000AC 004000AC AFBF0014 */ sw $ra, 0x14($sp)
/* 0000B0 004000B0 AFA40028 */ sw $a0, 0x28($sp)
/* 0000B4 004000B4 AFA5002C */ sw $a1, 0x2c($sp)
/* 0000B8 004000B8 AFA60030 */ sw $a2, 0x30($sp)
/* 0000BC 004000BC AFA70034 */ sw $a3, 0x34($sp)
/* 0000C0 004000C0 8FAE0028 */ lw $t6, 0x28($sp)
/* 0000C4 004000C4 8FAF002C */ lw $t7, 0x2c($sp)
/* 0000C8 004000C8 01CFC021 */ addu $t8, $t6, $t7
/* 0000CC 004000CC AFB80024 */ sw $t8, 0x24($sp)
/* 0000D0 004000D0 8FB9002C */ lw $t9, 0x2c($sp)
/* 0000D4 004000D4 8FA80030 */ lw $t0, 0x30($sp)
/* 0000D8 004000D8 03284821 */ addu $t1, $t9, $t0
/* 0000DC 004000DC AFA90020 */ sw $t1, 0x20($sp)
/* 0000E0 004000E0 AFA0001C */ sw $zero, 0x1c($sp)
/* 0000E4 004000E4 8FAA0024 */ lw $t2, 0x24($sp)
/* 0000E8 004000E8 1540000F */ bnez $t2, .L00400128
/* 0000EC 004000EC 00000000 */ nop
/* 0000F0 004000F0 8FAB0020 */ lw $t3, 0x20($sp)
/* 0000F4 004000F4 1560000C */ bnez $t3, .L00400128
/* 0000F8 004000F8 00000000 */ nop
/* 0000FC 004000FC 0C100024 */ jal func_00400090
/* 000100 00400100 01602025 */ move $a0, $t3
/* 000104 00400104 AFA20020 */ sw $v0, 0x20($sp)
/* 000108 00400108 8FAC0020 */ lw $t4, 0x20($sp)
/* 00010C 0040010C 15800006 */ bnez $t4, .L00400128
/* 000110 00400110 00000000 */ nop
/* 000114 00400114 8FAE0034 */ lw $t6, 0x34($sp)
/* 000118 00400118 240D0002 */ addiu $t5, $zero, 2
/* 00011C 0040011C AFAD0024 */ sw $t5, 0x24($sp)
/* 000120 00400120 11C00004 */ beqz $t6, .L00400134
/* 000124 00400124 00000000 */ nop
.L00400128:
/* 000128 00400128 240F0001 */ addiu $t7, $zero, 1
/* 00012C 0040012C 10000009 */ b .L00400154
/* 000130 00400130 AFAF001C */ sw $t7, 0x1c($sp)
.L00400134:
/* 000134 00400134 8FB80028 */ lw $t8, 0x28($sp)
/* 000138 00400138 13000004 */ beqz $t8, .L0040014C
/* 00013C 0040013C 00000000 */ nop
/* 000140 00400140 2419FFFF */ addiu $t9, $zero, -1
/* 000144 00400144 10000003 */ b .L00400154
/* 000148 00400148 AFB9001C */ sw $t9, 0x1c($sp)
.L0040014C:
/* 00014C 0040014C 2408FFFE */ addiu $t0, $zero, -2
/* 000150 00400150 AFA8001C */ sw $t0, 0x1c($sp)
.L00400154:
/* 000154 00400154 8FA9001C */ lw $t1, 0x1c($sp)
/* 000158 00400158 8FAA0030 */ lw $t2, 0x30($sp)
/* 00015C 0040015C 012A5821 */ addu $t3, $t1, $t2
/* 000160 00400160 AFAB001C */ sw $t3, 0x1c($sp)
/* 000164 00400164 8FAC0024 */ lw $t4, 0x24($sp)
/* 000168 00400168 11800020 */ beqz $t4, .L004001EC
/* 00016C 0040016C 00000000 */ nop
/* 000170 00400170 8FAD0020 */ lw $t5, 0x20($sp)
/* 000174 00400174 11A0001D */ beqz $t5, .L004001EC
/* 000178 00400178 00000000 */ nop
/* 00017C 0040017C 018D7021 */ addu $t6, $t4, $t5
/* 000180 00400180 AFAE0024 */ sw $t6, 0x24($sp)
/* 000184 00400184 0C100024 */ jal func_00400090
/* 000188 00400188 01C02025 */ move $a0, $t6
/* 00018C 0040018C AFA20020 */ sw $v0, 0x20($sp)
/* 000190 00400190 8FAF0020 */ lw $t7, 0x20($sp)
/* 000194 00400194 11E00015 */ beqz $t7, .L004001EC
/* 000198 00400198 00000000 */ nop
/* 00019C 0040019C 8FB80034 */ lw $t8, 0x34($sp)
/* 0001A0 004001A0 13000012 */ beqz $t8, .L004001EC
/* 0001A4 004001A4 00000000 */ nop
/* 0001A8 004001A8 8FB9001C */ lw $t9, 0x1c($sp)
/* 0001AC 004001AC 2B210005 */ slti $at, $t9, 5
/* 0001B0 004001B0 1020000B */ beqz $at, .L004001E0
/* 0001B4 004001B4 00000000 */ nop
.L004001B8:
/* 0001B8 004001B8 8FA8001C */ lw $t0, 0x1c($sp)
/* 0001BC 004001BC 25090001 */ addiu $t1, $t0, 1
/* 0001C0 004001C0 AFA9001C */ sw $t1, 0x1c($sp)
/* 0001C4 004001C4 8FAA001C */ lw $t2, 0x1c($sp)
/* 0001C8 004001C8 000A5840 */ sll $t3, $t2, 1
/* 0001CC 004001CC AFAB001C */ sw $t3, 0x1c($sp)
/* 0001D0 004001D0 8FAC001C */ lw $t4, 0x1c($sp)
/* 0001D4 004001D4 29810005 */ slti $at, $t4, 5
/* 0001D8 004001D8 1420FFF7 */ bnez $at, .L004001B8
/* 0001DC 004001DC 00000000 */ nop
.L004001E0:
/* 0001E0 004001E0 8FAD001C */ lw $t5, 0x1c($sp)
/* 0001E4 004001E4 25AE0005 */ addiu $t6, $t5, 5
/* 0001E8 004001E8 AFAE001C */ sw $t6, 0x1c($sp)
.L004001EC:
/* 0001EC 004001EC 8FAF0024 */ lw $t7, 0x24($sp)
/* 0001F0 004001F0 11E00021 */ beqz $t7, .L00400278
/* 0001F4 004001F4 00000000 */ nop
/* 0001F8 004001F8 8FB80020 */ lw $t8, 0x20($sp)
/* 0001FC 004001FC 1300001E */ beqz $t8, .L00400278
/* 000200 00400200 00000000 */ nop
/* 000204 00400204 01F8C821 */ addu $t9, $t7, $t8
/* 000208 00400208 AFB90024 */ sw $t9, 0x24($sp)
/* 00020C 0040020C 0C100024 */ jal func_00400090
/* 000210 00400210 03202025 */ move $a0, $t9
/* 000214 00400214 AFA20020 */ sw $v0, 0x20($sp)
/* 000218 00400218 8FA80020 */ lw $t0, 0x20($sp)
/* 00021C 0040021C 11000016 */ beqz $t0, .L00400278
/* 000220 00400220 00000000 */ nop
/* 000224 00400224 8FA90034 */ lw $t1, 0x34($sp)
/* 000228 00400228 11200013 */ beqz $t1, .L00400278
/* 00022C 0040022C 00000000 */ nop
/* 000230 00400230 8FAA001C */ lw $t2, 0x1c($sp)
/* 000234 00400234 29410005 */ slti $at, $t2, 5
/* 000238 00400238 1020000B */ beqz $at, .L00400268
/* 00023C 0040023C 00000000 */ nop
.L00400240:
/* 000240 00400240 8FAB001C */ lw $t3, 0x1c($sp)
/* 000244 00400244 256C0001 */ addiu $t4, $t3, 1
/* 000248 00400248 AFAC001C */ sw $t4, 0x1c($sp)
/* 00024C 0040024C 8FAD001C */ lw $t5, 0x1c($sp)
/* 000250 00400250 000D7040 */ sll $t6, $t5, 1
/* 000254 00400254 AFAE001C */ sw $t6, 0x1c($sp)
/* 000258 00400258 8FAF001C */ lw $t7, 0x1c($sp)
/* 00025C 0040025C 29E10005 */ slti $at, $t7, 5
/* 000260 00400260 1420FFF7 */ bnez $at, .L00400240
/* 000264 00400264 00000000 */ nop
.L00400268:
/* 000268 00400268 8FB8001C */ lw $t8, 0x1c($sp)
/* 00026C 0040026C 27190005 */ addiu $t9, $t8, 5
/* 000270 00400270 10000004 */ b .L00400284
/* 000274 00400274 AFB9001C */ sw $t9, 0x1c($sp)
.L00400278:
/* 000278 00400278 8FA8001C */ lw $t0, 0x1c($sp)
/* 00027C 0040027C 25090006 */ addiu $t1, $t0, 6
/* 000280 00400280 AFA9001C */ sw $t1, 0x1c($sp)
.L00400284:
/* 000284 00400284 10000003 */ b .L00400294
/* 000288 00400288 8FA2001C */ lw $v0, 0x1c($sp)
/* 00028C 0040028C 10000001 */ b .L00400294
/* 000290 00400290 00000000 */ nop
.L00400294:
/* 000294 00400294 8FBF0014 */ lw $ra, 0x14($sp)
/* 000298 00400298 27BD0028 */ addiu $sp, $sp, 0x28
/* 00029C 0040029C 03E00008 */ jr $ra
/* 0002A0 004002A0 00000000 */ nop
/* 0002A4 004002A4 00000000 */ nop
/* 0002A8 004002A8 00000000 */ nop
/* 0002AC 004002AC 00000000 */ nop

View File

@ -1 +0,0 @@
--deterministic-vars

View File

@ -1,57 +0,0 @@
s32 func_00400090(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp24;
s32 sp20;
s32 temp_t3_60;
s32 temp_t5_82;
s32 temp_t7_13;
s32 temp_v0_22;
s32 temp_v0_47;
s32 var_s0_12;
s32 var_t0_19;
s32 var_v1_34;
s32 var_v1_42;
s32 var_v1_88;
var_s0_12 = arg0 + arg1;
temp_t7_13 = arg1 + arg2;
sp20 = temp_t7_13;
var_t0_19 = temp_t7_13;
if ((var_s0_12 != 0) || (temp_t7_13 != 0) || (temp_v0_22 = func_00400090(temp_t7_13), var_t0_19 = temp_v0_22, (temp_v0_22 != 0)) || (var_s0_12 = 2, (arg3 != 0))) {
var_v1_34 = 1;
} else {
var_v1_34 = -2;
if (arg0 != 0) {
var_v1_34 = -1;
}
}
var_v1_42 = var_v1_34 + arg2;
if ((var_s0_12 != 0) && (var_t0_19 != 0)) {
var_s0_12 += var_t0_19;
sp24 = var_v1_42;
temp_v0_47 = func_00400090(var_s0_12);
var_t0_19 = temp_v0_47;
if ((temp_v0_47 != 0) && (arg3 != 0)) {
if (var_v1_42 < 5) {
do {
temp_t3_60 = (var_v1_42 + 1) * 2;
var_v1_42 = temp_t3_60;
} while (temp_t3_60 < 5);
}
var_v1_42 += 5;
}
}
if ((var_s0_12 != 0) && (var_t0_19 != 0) && (sp24 = var_v1_42, (func_00400090(var_s0_12 + var_t0_19) != 0)) && (arg3 != 0)) {
if (var_v1_42 < 5) {
do {
temp_t5_82 = (var_v1_42 + 1) * 2;
var_v1_42 = temp_t5_82;
} while (temp_t5_82 < 5);
}
var_v1_88 = var_v1_42 + 5;
} else {
var_v1_88 = var_v1_42 + 6;
}
return var_v1_88;
}

View File

@ -1,96 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
glabel test
/* 000098 00400098 27BDFFD0 */ addiu $sp, $sp, -0x30
/* 00009C 0040009C AFB00018 */ sw $s0, 0x18($sp)
/* 0000A0 004000A0 00858021 */ addu $s0, $a0, $a1
/* 0000A4 004000A4 00A67821 */ addu $t7, $a1, $a2
/* 0000A8 004000A8 AFBF001C */ sw $ra, 0x1c($sp)
/* 0000AC 004000AC AFA40030 */ sw $a0, 0x30($sp)
/* 0000B0 004000B0 AFA7003C */ sw $a3, 0x3c($sp)
/* 0000B4 004000B4 AFAF0020 */ sw $t7, 0x20($sp)
/* 0000B8 004000B8 1600000D */ bnez $s0, .L004000F0
/* 0000BC 004000BC 01E04025 */ move $t0, $t7
/* 0000C0 004000C0 15E0000B */ bnez $t7, .L004000F0
/* 0000C4 004000C4 01E02025 */ move $a0, $t7
/* 0000C8 004000C8 0C100024 */ jal func_00400090
/* 0000CC 004000CC AFA60038 */ sw $a2, 0x38($sp)
/* 0000D0 004000D0 8FA60038 */ lw $a2, 0x38($sp)
/* 0000D4 004000D4 14400006 */ bnez $v0, .L004000F0
/* 0000D8 004000D8 00404025 */ move $t0, $v0
/* 0000DC 004000DC 8FB9003C */ lw $t9, 0x3c($sp)
/* 0000E0 004000E0 24100002 */ addiu $s0, $zero, 2
/* 0000E4 004000E4 8FA90030 */ lw $t1, 0x30($sp)
/* 0000E8 004000E8 13200003 */ beqz $t9, .L004000F8
/* 0000EC 004000EC 00000000 */ nop
.L004000F0:
/* 0000F0 004000F0 10000005 */ b .L00400108
/* 0000F4 004000F4 24030001 */ addiu $v1, $zero, 1
.L004000F8:
/* 0000F8 004000F8 11200003 */ beqz $t1, .L00400108
/* 0000FC 004000FC 2403FFFE */ addiu $v1, $zero, -2
/* 000100 00400100 10000001 */ b .L00400108
/* 000104 00400104 2403FFFF */ addiu $v1, $zero, -1
.L00400108:
/* 000108 00400108 12000016 */ beqz $s0, .L00400164
/* 00010C 0040010C 00661821 */ addu $v1, $v1, $a2
/* 000110 00400110 11000014 */ beqz $t0, .L00400164
/* 000114 00400114 00000000 */ nop
/* 000118 00400118 02088021 */ addu $s0, $s0, $t0
/* 00011C 0040011C 02002025 */ move $a0, $s0
/* 000120 00400120 0C100024 */ jal func_00400090
/* 000124 00400124 AFA30024 */ sw $v1, 0x24($sp)
/* 000128 00400128 8FA30024 */ lw $v1, 0x24($sp)
/* 00012C 0040012C 1040000D */ beqz $v0, .L00400164
/* 000130 00400130 00404025 */ move $t0, $v0
/* 000134 00400134 8FAA003C */ lw $t2, 0x3c($sp)
/* 000138 00400138 28610005 */ slti $at, $v1, 5
/* 00013C 0040013C 11400009 */ beqz $t2, .L00400164
/* 000140 00400140 00000000 */ nop
/* 000144 00400144 50200007 */ beql $at, $zero, .L00400164
/* 000148 00400148 24630005 */ addiu $v1, $v1, 5
.L0040014C:
/* 00014C 0040014C 24630001 */ addiu $v1, $v1, 1
/* 000150 00400150 00035840 */ sll $t3, $v1, 1
/* 000154 00400154 29610005 */ slti $at, $t3, 5
/* 000158 00400158 1420FFFC */ bnez $at, .L0040014C
/* 00015C 0040015C 01601825 */ move $v1, $t3
/* 000160 00400160 24630005 */ addiu $v1, $v1, 5
.L00400164:
/* 000164 00400164 52000015 */ beql $s0, $zero, .L004001BC
/* 000168 00400168 24630006 */ addiu $v1, $v1, 6
/* 00016C 0040016C 11000012 */ beqz $t0, .L004001B8
/* 000170 00400170 02082021 */ addu $a0, $s0, $t0
/* 000174 00400174 0C100024 */ jal func_00400090
/* 000178 00400178 AFA30024 */ sw $v1, 0x24($sp)
/* 00017C 0040017C 1040000E */ beqz $v0, .L004001B8
/* 000180 00400180 8FA30024 */ lw $v1, 0x24($sp)
/* 000184 00400184 8FAC003C */ lw $t4, 0x3c($sp)
/* 000188 00400188 28610005 */ slti $at, $v1, 5
/* 00018C 0040018C 5180000B */ beql $t4, $zero, .L004001BC
/* 000190 00400190 24630006 */ addiu $v1, $v1, 6
/* 000194 00400194 10200006 */ beqz $at, .L004001B0
/* 000198 00400198 00000000 */ nop
.L0040019C:
/* 00019C 0040019C 24630001 */ addiu $v1, $v1, 1
/* 0001A0 004001A0 00036840 */ sll $t5, $v1, 1
/* 0001A4 004001A4 29A10005 */ slti $at, $t5, 5
/* 0001A8 004001A8 1420FFFC */ bnez $at, .L0040019C
/* 0001AC 004001AC 01A01825 */ move $v1, $t5
.L004001B0:
/* 0001B0 004001B0 10000002 */ b .L004001BC
/* 0001B4 004001B4 24630005 */ addiu $v1, $v1, 5
.L004001B8:
/* 0001B8 004001B8 24630006 */ addiu $v1, $v1, 6
.L004001BC:
/* 0001BC 004001BC 8FBF001C */ lw $ra, 0x1c($sp)
/* 0001C0 004001C0 8FB00018 */ lw $s0, 0x18($sp)
/* 0001C4 004001C4 27BD0030 */ addiu $sp, $sp, 0x30
/* 0001C8 004001C8 03E00008 */ jr $ra
/* 0001CC 004001CC 00601025 */ move $v0, $v1

View File

@ -1,98 +0,0 @@
s32 func_00400090(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp2C;
s32 sp24;
s32 sp20;
s32 sp1C;
s32 temp_a0;
s32 temp_a0_2;
s32 temp_t0;
s32 temp_t5;
s32 temp_t7;
s32 temp_t9;
s32 temp_v0;
s32 temp_v0_2;
s32 var_t1;
s32 var_v1;
s32 var_v1_2;
s32 var_v1_3;
temp_t0 = arg0 + arg1;
temp_t7 = arg1 + arg2;
sp2C = temp_t0;
sp1C = temp_t7;
var_t1 = temp_t7;
if (temp_t0 == 0) {
if (temp_t7 == 0) {
sp20 = temp_t0;
temp_v0 = func_00400090(temp_t7);
var_t1 = temp_v0;
if (temp_v0 == 0) {
if (arg3 != 0) {
goto block_4;
}
var_v1_2 = -2;
if (arg0 != 0) {
var_v1_2 = -1;
}
} else {
goto block_4;
}
} else {
goto block_4;
}
} else {
block_4:
var_v1_2 = 1;
}
var_v1_3 = var_v1_2 + arg2;
if (temp_t0 != 0) {
temp_a0 = temp_t0 + var_t1;
if (var_t1 != 0) {
sp2C = temp_a0;
sp24 = var_v1_3;
temp_v0_2 = func_00400090(temp_a0);
var_t1 = temp_v0_2;
if (temp_v0_2 != 0) {
if (arg3 != 0) {
if (var_v1_3 < 5) {
do {
temp_t5 = (var_v1_3 + 1) * 2;
var_v1_3 = temp_t5;
} while (temp_t5 < 5);
}
var_v1_3 += 5;
}
}
}
}
if (sp2C != 0) {
temp_a0_2 = sp2C + var_t1;
if (var_t1 != 0) {
sp2C = temp_a0_2;
sp24 = var_v1_3;
if (func_00400090(temp_a0_2) != 0) {
if (arg3 != 0) {
if (var_v1_3 < 5) {
do {
temp_t9 = (var_v1_3 + 1) * 2;
var_v1_3 = temp_t9;
} while (temp_t9 < 5);
}
var_v1 = var_v1_3 + 5;
} else {
goto block_21;
}
} else {
goto block_21;
}
} else {
goto block_21;
}
} else {
block_21:
var_v1 = var_v1_3 + 6;
}
return var_v1;
}

View File

@ -1,102 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
glabel test
/* 000098 00400098 27BDFFD0 */ addiu $sp, $sp, -0x30
/* 00009C 0040009C 00854021 */ addu $t0, $a0, $a1
/* 0000A0 004000A0 00A67821 */ addu $t7, $a1, $a2
/* 0000A4 004000A4 AFBF0014 */ sw $ra, 0x14($sp)
/* 0000A8 004000A8 AFA40030 */ sw $a0, 0x30($sp)
/* 0000AC 004000AC AFA7003C */ sw $a3, 0x3c($sp)
/* 0000B0 004000B0 AFA8002C */ sw $t0, 0x2c($sp)
/* 0000B4 004000B4 AFAF001C */ sw $t7, 0x1c($sp)
/* 0000B8 004000B8 1500000E */ bnez $t0, .L004000F4
/* 0000BC 004000BC 01E04825 */ move $t1, $t7
/* 0000C0 004000C0 15E0000C */ bnez $t7, .L004000F4
/* 0000C4 004000C4 01E02025 */ move $a0, $t7
/* 0000C8 004000C8 AFA60038 */ sw $a2, 0x38($sp)
/* 0000CC 004000CC 0C100024 */ jal func_00400090
/* 0000D0 004000D0 AFA80020 */ sw $t0, 0x20($sp)
/* 0000D4 004000D4 8FA60038 */ lw $a2, 0x38($sp)
/* 0000D8 004000D8 8FA80020 */ lw $t0, 0x20($sp)
/* 0000DC 004000DC 14400005 */ bnez $v0, .L004000F4
/* 0000E0 004000E0 00404825 */ move $t1, $v0
/* 0000E4 004000E4 8FB9003C */ lw $t9, 0x3c($sp)
/* 0000E8 004000E8 8FAA0030 */ lw $t2, 0x30($sp)
/* 0000EC 004000EC 13200003 */ beqz $t9, .L004000FC
/* 0000F0 004000F0 00000000 */ nop
.L004000F4:
/* 0000F4 004000F4 10000005 */ b .L0040010C
/* 0000F8 004000F8 24030001 */ addiu $v1, $zero, 1
.L004000FC:
/* 0000FC 004000FC 11400003 */ beqz $t2, .L0040010C
/* 000100 00400100 2403FFFE */ addiu $v1, $zero, -2
/* 000104 00400104 10000001 */ b .L0040010C
/* 000108 00400108 2403FFFF */ addiu $v1, $zero, -1
.L0040010C:
/* 00010C 0040010C 11000015 */ beqz $t0, .L00400164
/* 000110 00400110 00661821 */ addu $v1, $v1, $a2
/* 000114 00400114 11200013 */ beqz $t1, .L00400164
/* 000118 00400118 01092021 */ addu $a0, $t0, $t1
/* 00011C 0040011C AFA4002C */ sw $a0, 0x2c($sp)
/* 000120 00400120 0C100024 */ jal func_00400090
/* 000124 00400124 AFA30024 */ sw $v1, 0x24($sp)
/* 000128 00400128 8FA30024 */ lw $v1, 0x24($sp)
/* 00012C 0040012C 1040000D */ beqz $v0, .L00400164
/* 000130 00400130 00404825 */ move $t1, $v0
/* 000134 00400134 8FAC003C */ lw $t4, 0x3c($sp)
/* 000138 00400138 28610005 */ slti $at, $v1, 5
/* 00013C 0040013C 5180000A */ beql $t4, $zero, .L00400168
/* 000140 00400140 8FAE002C */ lw $t6, 0x2c($sp)
/* 000144 00400144 50200007 */ beql $at, $zero, .L00400164
/* 000148 00400148 24630005 */ addiu $v1, $v1, 5
.L0040014C:
/* 00014C 0040014C 24630001 */ addiu $v1, $v1, 1
/* 000150 00400150 00036840 */ sll $t5, $v1, 1
/* 000154 00400154 29A10005 */ slti $at, $t5, 5
/* 000158 00400158 1420FFFC */ bnez $at, .L0040014C
/* 00015C 0040015C 01A01825 */ move $v1, $t5
/* 000160 00400160 24630005 */ addiu $v1, $v1, 5
.L00400164:
/* 000164 00400164 8FAE002C */ lw $t6, 0x2c($sp)
.L00400168:
/* 000168 00400168 51C00016 */ beql $t6, $zero, .L004001C4
/* 00016C 0040016C 24630006 */ addiu $v1, $v1, 6
/* 000170 00400170 11200013 */ beqz $t1, .L004001C0
/* 000174 00400174 01C92021 */ addu $a0, $t6, $t1
/* 000178 00400178 AFA4002C */ sw $a0, 0x2c($sp)
/* 00017C 0040017C 0C100024 */ jal func_00400090
/* 000180 00400180 AFA30024 */ sw $v1, 0x24($sp)
/* 000184 00400184 1040000E */ beqz $v0, .L004001C0
/* 000188 00400188 8FA30024 */ lw $v1, 0x24($sp)
/* 00018C 0040018C 8FB8003C */ lw $t8, 0x3c($sp)
/* 000190 00400190 28610005 */ slti $at, $v1, 5
/* 000194 00400194 5300000B */ beql $t8, $zero, .L004001C4
/* 000198 00400198 24630006 */ addiu $v1, $v1, 6
/* 00019C 0040019C 10200006 */ beqz $at, .L004001B8
/* 0001A0 004001A0 00000000 */ nop
.L004001A4:
/* 0001A4 004001A4 24630001 */ addiu $v1, $v1, 1
/* 0001A8 004001A8 0003C840 */ sll $t9, $v1, 1
/* 0001AC 004001AC 2B210005 */ slti $at, $t9, 5
/* 0001B0 004001B0 1420FFFC */ bnez $at, .L004001A4
/* 0001B4 004001B4 03201825 */ move $v1, $t9
.L004001B8:
/* 0001B8 004001B8 10000002 */ b .L004001C4
/* 0001BC 004001BC 24630005 */ addiu $v1, $v1, 5
.L004001C0:
/* 0001C0 004001C0 24630006 */ addiu $v1, $v1, 6
.L004001C4:
/* 0001C4 004001C4 8FBF0014 */ lw $ra, 0x14($sp)
/* 0001C8 004001C8 27BD0030 */ addiu $sp, $sp, 0x30
/* 0001CC 004001CC 00601025 */ move $v0, $v1
/* 0001D0 004001D0 03E00008 */ jr $ra
/* 0001D4 004001D4 00000000 */ nop
/* 0001D8 004001D8 00000000 */ nop
/* 0001DC 004001DC 00000000 */ nop

View File

@ -1,57 +0,0 @@
s32 func_00400090(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp24;
s32 sp20;
s32 temp_t3;
s32 temp_t5;
s32 temp_t7;
s32 temp_v0;
s32 temp_v0_2;
s32 var_s0;
s32 var_t0;
s32 var_v1;
s32 var_v1_2;
s32 var_v1_3;
var_s0 = arg0 + arg1;
temp_t7 = arg1 + arg2;
sp20 = temp_t7;
var_t0 = temp_t7;
if ((var_s0 != 0) || (temp_t7 != 0) || (temp_v0 = func_00400090(temp_t7), var_t0 = temp_v0, (temp_v0 != 0)) || (var_s0 = 2, (arg3 != 0))) {
var_v1_2 = 1;
} else {
var_v1_2 = -2;
if (arg0 != 0) {
var_v1_2 = -1;
}
}
var_v1_3 = var_v1_2 + arg2;
if ((var_s0 != 0) && (var_t0 != 0)) {
var_s0 += var_t0;
sp24 = var_v1_3;
temp_v0_2 = func_00400090(var_s0);
var_t0 = temp_v0_2;
if ((temp_v0_2 != 0) && (arg3 != 0)) {
if (var_v1_3 < 5) {
do {
temp_t3 = (var_v1_3 + 1) * 2;
var_v1_3 = temp_t3;
} while (temp_t3 < 5);
}
var_v1_3 += 5;
}
}
if ((var_s0 != 0) && (var_t0 != 0) && (sp24 = var_v1_3, (func_00400090(var_s0 + var_t0) != 0)) && (arg3 != 0)) {
if (var_v1_3 < 5) {
do {
temp_t5 = (var_v1_3 + 1) * 2;
var_v1_3 = temp_t5;
} while (temp_t5 < 5);
}
var_v1 = var_v1_3 + 5;
} else {
var_v1 = var_v1_3 + 6;
}
return var_v1;
}

View File

@ -1,96 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
glabel test
/* 000098 00400098 27BDFFD0 */ addiu $sp, $sp, -0x30
/* 00009C 0040009C AFB00018 */ sw $s0, 0x18($sp)
/* 0000A0 004000A0 00858021 */ addu $s0, $a0, $a1
/* 0000A4 004000A4 00A67821 */ addu $t7, $a1, $a2
/* 0000A8 004000A8 AFBF001C */ sw $ra, 0x1c($sp)
/* 0000AC 004000AC AFA40030 */ sw $a0, 0x30($sp)
/* 0000B0 004000B0 AFA7003C */ sw $a3, 0x3c($sp)
/* 0000B4 004000B4 AFAF0020 */ sw $t7, 0x20($sp)
/* 0000B8 004000B8 1600000D */ bnez $s0, .L004000F0
/* 0000BC 004000BC 01E04025 */ move $t0, $t7
/* 0000C0 004000C0 15E0000B */ bnez $t7, .L004000F0
/* 0000C4 004000C4 01E02025 */ move $a0, $t7
/* 0000C8 004000C8 0C100024 */ jal func_00400090
/* 0000CC 004000CC AFA60038 */ sw $a2, 0x38($sp)
/* 0000D0 004000D0 8FA60038 */ lw $a2, 0x38($sp)
/* 0000D4 004000D4 14400006 */ bnez $v0, .L004000F0
/* 0000D8 004000D8 00404025 */ move $t0, $v0
/* 0000DC 004000DC 8FB9003C */ lw $t9, 0x3c($sp)
/* 0000E0 004000E0 24100002 */ addiu $s0, $zero, 2
/* 0000E4 004000E4 8FA90030 */ lw $t1, 0x30($sp)
/* 0000E8 004000E8 13200003 */ beqz $t9, .L004000F8
/* 0000EC 004000EC 00000000 */ nop
.L004000F0:
/* 0000F0 004000F0 10000005 */ b .L00400108
/* 0000F4 004000F4 24030001 */ addiu $v1, $zero, 1
.L004000F8:
/* 0000F8 004000F8 11200003 */ beqz $t1, .L00400108
/* 0000FC 004000FC 2403FFFE */ addiu $v1, $zero, -2
/* 000100 00400100 10000001 */ b .L00400108
/* 000104 00400104 2403FFFF */ addiu $v1, $zero, -1
.L00400108:
/* 000108 00400108 12000016 */ beqz $s0, .L00400164
/* 00010C 0040010C 00661821 */ addu $v1, $v1, $a2
/* 000110 00400110 11000014 */ beqz $t0, .L00400164
/* 000114 00400114 00000000 */ nop
/* 000118 00400118 02088021 */ addu $s0, $s0, $t0
/* 00011C 0040011C 02002025 */ move $a0, $s0
/* 000120 00400120 0C100024 */ jal func_00400090
/* 000124 00400124 AFA30024 */ sw $v1, 0x24($sp)
/* 000128 00400128 8FA30024 */ lw $v1, 0x24($sp)
/* 00012C 0040012C 1040000D */ beqz $v0, .L00400164
/* 000130 00400130 00404025 */ move $t0, $v0
/* 000134 00400134 8FAA003C */ lw $t2, 0x3c($sp)
/* 000138 00400138 28610005 */ slti $at, $v1, 5
/* 00013C 0040013C 11400009 */ beqz $t2, .L00400164
/* 000140 00400140 00000000 */ nop
/* 000144 00400144 50200007 */ beql $at, $zero, .L00400164
/* 000148 00400148 24630005 */ addiu $v1, $v1, 5
.L0040014C:
/* 00014C 0040014C 24630001 */ addiu $v1, $v1, 1
/* 000150 00400150 00035840 */ sll $t3, $v1, 1
/* 000154 00400154 29610005 */ slti $at, $t3, 5
/* 000158 00400158 1420FFFC */ bnez $at, .L0040014C
/* 00015C 0040015C 01601825 */ move $v1, $t3
/* 000160 00400160 24630005 */ addiu $v1, $v1, 5
.L00400164:
/* 000164 00400164 52000015 */ beql $s0, $zero, .L004001BC
/* 000168 00400168 24630006 */ addiu $v1, $v1, 6
/* 00016C 0040016C 11000012 */ beqz $t0, .L004001B8
/* 000170 00400170 02082021 */ addu $a0, $s0, $t0
/* 000174 00400174 0C100024 */ jal func_00400090
/* 000178 00400178 AFA30024 */ sw $v1, 0x24($sp)
/* 00017C 0040017C 1040000E */ beqz $v0, .L004001B8
/* 000180 00400180 8FA30024 */ lw $v1, 0x24($sp)
/* 000184 00400184 8FAC003C */ lw $t4, 0x3c($sp)
/* 000188 00400188 28610005 */ slti $at, $v1, 5
/* 00018C 0040018C 5180000B */ beql $t4, $zero, .L004001BC
/* 000190 00400190 24630006 */ addiu $v1, $v1, 6
/* 000194 00400194 10200006 */ beqz $at, .L004001B0
/* 000198 00400198 00000000 */ nop
.L0040019C:
/* 00019C 0040019C 24630001 */ addiu $v1, $v1, 1
/* 0001A0 004001A0 00036840 */ sll $t5, $v1, 1
/* 0001A4 004001A4 29A10005 */ slti $at, $t5, 5
/* 0001A8 004001A8 1420FFFC */ bnez $at, .L0040019C
/* 0001AC 004001AC 01A01825 */ move $v1, $t5
.L004001B0:
/* 0001B0 004001B0 10000002 */ b .L004001BC
/* 0001B4 004001B4 24630005 */ addiu $v1, $v1, 5
.L004001B8:
/* 0001B8 004001B8 24630006 */ addiu $v1, $v1, 6
.L004001BC:
/* 0001BC 004001BC 8FBF001C */ lw $ra, 0x1c($sp)
/* 0001C0 004001C0 8FB00018 */ lw $s0, 0x18($sp)
/* 0001C4 004001C4 27BD0030 */ addiu $sp, $sp, 0x30
/* 0001C8 004001C8 03E00008 */ jr $ra
/* 0001CC 004001CC 00601025 */ move $v0, $v1

View File

@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@ -1,47 +0,0 @@
s32 foo(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp14;
s32 temp_r0;
s32 var_r30;
s32 var_r30_2;
s32 var_r30_3;
s32 var_r31;
s32 var_r3;
temp_r0 = arg0 + arg1;
var_r3 = arg1 + arg2;
sp14 = arg3;
var_r31 = temp_r0;
if ((temp_r0 != 0) || (var_r3 != 0) || (var_r3 = foo(), ((var_r3 == 0) == 0)) || (var_r31 = 2, ((sp14 == 0) == 0))) {
var_r30_2 = 1;
} else if (arg0 != 0) {
var_r30_2 = -1;
} else {
var_r30_2 = -2;
}
var_r30_3 = var_r30_2 + arg2;
if ((var_r31 != 0) && (var_r3 != 0)) {
var_r31 += var_r3;
var_r3 = foo(var_r31);
if ((var_r3 != 0) && (sp14 != 0)) {
loop_14:
if (var_r30_3 < 5) {
var_r30_3 = (var_r30_3 + 1) * 2;
goto loop_14;
}
var_r30_3 += 5;
}
}
if ((var_r31 != 0) && (var_r3 != 0) && (foo(var_r31 + var_r3) != 0) && (sp14 != 0)) {
loop_22:
if (var_r30_3 < 5) {
var_r30_3 = (var_r30_3 + 1) * 2;
goto loop_22;
}
var_r30 = var_r30_3 + 5;
} else {
var_r30 = var_r30_3 + 6;
}
return var_r30;
}

View File

@ -1,99 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0x138
.global foo
foo:
/* 00000000 00000000 38 63 00 01 */ addi r3, r3, 1
/* 00000004 00000004 4E 80 00 20 */ blr
.global test
test:
/* 00000008 00000008 7C 08 02 A6 */ mflr r0
/* 0000000C 0000000C 90 01 00 04 */ stw r0, 4(r1)
/* 00000010 00000010 94 21 FF D0 */ stwu r1, -0x30(r1)
/* 00000014 00000014 93 E1 00 2C */ stw r31, 0x2c(r1)
/* 00000018 00000018 93 C1 00 28 */ stw r30, 0x28(r1)
/* 0000001C 0000001C 7C 7E 1B 78 */ mr r30, r3
/* 00000020 00000020 7C 1E 22 15 */ add. r0, r30, r4
/* 00000024 00000024 93 A1 00 24 */ stw r29, 0x24(r1)
/* 00000028 00000028 3B A5 00 00 */ addi r29, r5, 0
/* 0000002C 0000002C 7C 64 EA 14 */ add r3, r4, r29
/* 00000030 00000030 90 C1 00 14 */ stw r6, 0x14(r1)
/* 00000034 00000034 7C 1F 03 78 */ mr r31, r0
/* 00000038 00000038 40 82 00 28 */ bne .L00000060
/* 0000003C 0000003C 2C 03 00 00 */ cmpwi r3, 0
/* 00000040 00000040 40 82 00 20 */ bne .L00000060
/* 00000044 00000044 48 00 00 01 */ bl foo
/* 00000048 00000048 2C 03 00 00 */ cmpwi r3, 0
/* 0000004C 0000004C 40 82 00 14 */ bne .L00000060
/* 00000050 00000050 80 01 00 14 */ lwz r0, 0x14(r1)
/* 00000054 00000054 3B E0 00 02 */ li r31, 2
/* 00000058 00000058 2C 00 00 00 */ cmpwi r0, 0
/* 0000005C 0000005C 41 82 00 0C */ beq .L00000068
.L00000060:
/* 00000060 00000060 3B C0 00 01 */ li r30, 1
/* 00000064 00000064 48 00 00 18 */ b .L0000007C
.L00000068:
/* 00000068 00000068 2C 1E 00 00 */ cmpwi r30, 0
/* 0000006C 0000006C 41 82 00 0C */ beq .L00000078
/* 00000070 00000070 3B C0 FF FF */ li r30, -1
/* 00000074 00000074 48 00 00 08 */ b .L0000007C
.L00000078:
/* 00000078 00000078 3B C0 FF FE */ li r30, -2
.L0000007C:
/* 0000007C 0000007C 2C 1F 00 00 */ cmpwi r31, 0
/* 00000080 00000080 7F DE EA 14 */ add r30, r30, r29
/* 00000084 00000084 41 82 00 44 */ beq .L000000C8
/* 00000088 00000088 2C 03 00 00 */ cmpwi r3, 0
/* 0000008C 0000008C 41 82 00 3C */ beq .L000000C8
/* 00000090 00000090 7F FF 1A 14 */ add r31, r31, r3
/* 00000094 00000094 38 7F 00 00 */ addi r3, r31, 0
/* 00000098 00000098 48 00 00 01 */ bl foo
/* 0000009C 0000009C 2C 03 00 00 */ cmpwi r3, 0
/* 000000A0 000000A0 41 82 00 28 */ beq .L000000C8
/* 000000A4 000000A4 80 01 00 14 */ lwz r0, 0x14(r1)
/* 000000A8 000000A8 2C 00 00 00 */ cmpwi r0, 0
/* 000000AC 000000AC 41 82 00 1C */ beq .L000000C8
/* 000000B0 000000B0 48 00 00 0C */ b .L000000BC
.L000000B4:
/* 000000B4 000000B4 3B DE 00 01 */ addi r30, r30, 1
/* 000000B8 000000B8 57 DE 08 3C */ slwi r30, r30, 1
.L000000BC:
/* 000000BC 000000BC 2C 1E 00 05 */ cmpwi r30, 5
/* 000000C0 000000C0 41 80 FF F4 */ blt .L000000B4
/* 000000C4 000000C4 3B DE 00 05 */ addi r30, r30, 5
.L000000C8:
/* 000000C8 000000C8 2C 1F 00 00 */ cmpwi r31, 0
/* 000000CC 000000CC 41 82 00 48 */ beq .L00000114
/* 000000D0 000000D0 2C 03 00 00 */ cmpwi r3, 0
/* 000000D4 000000D4 41 82 00 40 */ beq .L00000114
/* 000000D8 000000D8 7F FF 1A 14 */ add r31, r31, r3
/* 000000DC 000000DC 38 7F 00 00 */ addi r3, r31, 0
/* 000000E0 000000E0 48 00 00 01 */ bl foo
/* 000000E4 000000E4 2C 03 00 00 */ cmpwi r3, 0
/* 000000E8 000000E8 41 82 00 2C */ beq .L00000114
/* 000000EC 000000EC 80 01 00 14 */ lwz r0, 0x14(r1)
/* 000000F0 000000F0 2C 00 00 00 */ cmpwi r0, 0
/* 000000F4 000000F4 41 82 00 20 */ beq .L00000114
/* 000000F8 000000F8 48 00 00 0C */ b .L00000104
.L000000FC:
/* 000000FC 000000FC 3B DE 00 01 */ addi r30, r30, 1
/* 00000100 00000100 57 DE 08 3C */ slwi r30, r30, 1
.L00000104:
/* 00000104 00000104 2C 1E 00 05 */ cmpwi r30, 5
/* 00000108 00000108 41 80 FF F4 */ blt .L000000FC
/* 0000010C 0000010C 3B DE 00 05 */ addi r30, r30, 5
/* 00000110 00000110 48 00 00 08 */ b .L00000118
.L00000114:
/* 00000114 00000114 3B DE 00 06 */ addi r30, r30, 6
.L00000118:
/* 00000118 00000118 80 01 00 34 */ lwz r0, 0x34(r1)
/* 0000011C 0000011C 7F C3 F3 78 */ mr r3, r30
/* 00000120 00000120 83 E1 00 2C */ lwz r31, 0x2c(r1)
/* 00000124 00000124 83 C1 00 28 */ lwz r30, 0x28(r1)
/* 00000128 00000128 7C 08 03 A6 */ mtlr r0
/* 0000012C 0000012C 83 A1 00 24 */ lwz r29, 0x24(r1)
/* 00000130 00000130 38 21 00 30 */ addi r1, r1, 0x30
/* 00000134 00000134 4E 80 00 20 */ blr

View File

@ -1,42 +0,0 @@
int foo(int arg) {
return arg + 1;
}
int test(int a, int b, int c, int d) {
int var1;
int var2;
int ret;
var1 = a + b;
var2 = b + c;
ret = 0;
if (var1 || var2 || (var2 = foo(var2)) || (var1 = 2, d)) {
ret = 1;
} else if (a) {
ret = -1;
} else {
ret = -2;
}
ret += c;
if (var1 && var2 && ((var1 += var2), (var2 = foo(var1))) && d) {
while (ret < 5) {
ret += 1;
ret *= 2;
}
ret += 5;
}
if (var1 && var2 && ((var1 += var2), (var2 = foo(var1))) && d) {
while (ret < 5) {
ret += 1;
ret *= 2;
}
ret += 5;
} else {
ret += 6;
}
return ret;
}

View File

@ -1,23 +0,0 @@
s32 func_00400090(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp24;
s32 sp20;
s32 sp1C;
sp24 = arg0 + arg1;
sp20 = arg1 + arg2;
sp1C = arg2 + arg3;
if ((sp24 != 0) && (sp20 != 0) && (sp1C != 0)) {
sp24 = func_00400090(sp24 + arg0);
if (sp24 >= 0xB) {
sp24 = func_00400090(sp24 + arg1);
sp20 = func_00400090(sp20 + arg2);
sp1C = func_00400090(sp1C + arg3);
if ((sp24 != 0) && (sp20 != 0) && (sp1C != 0)) {
return 1;
}
}
}
return 0;
}

View File

@ -1,87 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
/* 000098 00400098 03E00008 */ jr $ra
/* 00009C 0040009C 00000000 */ nop
/* 0000A0 004000A0 03E00008 */ jr $ra
/* 0000A4 004000A4 00000000 */ nop
glabel test
/* 0000A8 004000A8 27BDFFD8 */ addiu $sp, $sp, -0x28
/* 0000AC 004000AC AFBF0014 */ sw $ra, 0x14($sp)
/* 0000B0 004000B0 AFA40028 */ sw $a0, 0x28($sp)
/* 0000B4 004000B4 AFA5002C */ sw $a1, 0x2c($sp)
/* 0000B8 004000B8 AFA60030 */ sw $a2, 0x30($sp)
/* 0000BC 004000BC AFA70034 */ sw $a3, 0x34($sp)
/* 0000C0 004000C0 8FAE0028 */ lw $t6, 0x28($sp)
/* 0000C4 004000C4 8FAF002C */ lw $t7, 0x2c($sp)
/* 0000C8 004000C8 01CFC021 */ addu $t8, $t6, $t7
/* 0000CC 004000CC AFB80024 */ sw $t8, 0x24($sp)
/* 0000D0 004000D0 8FB9002C */ lw $t9, 0x2c($sp)
/* 0000D4 004000D4 8FA80030 */ lw $t0, 0x30($sp)
/* 0000D8 004000D8 03284821 */ addu $t1, $t9, $t0
/* 0000DC 004000DC AFA90020 */ sw $t1, 0x20($sp)
/* 0000E0 004000E0 8FAA0030 */ lw $t2, 0x30($sp)
/* 0000E4 004000E4 8FAB0034 */ lw $t3, 0x34($sp)
/* 0000E8 004000E8 014B6021 */ addu $t4, $t2, $t3
/* 0000EC 004000EC AFAC001C */ sw $t4, 0x1c($sp)
/* 0000F0 004000F0 8FAD0024 */ lw $t5, 0x24($sp)
/* 0000F4 004000F4 11A0002A */ beqz $t5, .L004001A0
/* 0000F8 004000F8 00000000 */ nop
/* 0000FC 004000FC 8FAE0020 */ lw $t6, 0x20($sp)
/* 000100 00400100 11C00027 */ beqz $t6, .L004001A0
/* 000104 00400104 00000000 */ nop
/* 000108 00400108 8FAF001C */ lw $t7, 0x1c($sp)
/* 00010C 0040010C 11E00024 */ beqz $t7, .L004001A0
/* 000110 00400110 00000000 */ nop
/* 000114 00400114 8FB80024 */ lw $t8, 0x24($sp)
/* 000118 00400118 8FB90028 */ lw $t9, 0x28($sp)
/* 00011C 0040011C 0C100024 */ jal func_00400090
/* 000120 00400120 03192021 */ addu $a0, $t8, $t9
/* 000124 00400124 AFA20024 */ sw $v0, 0x24($sp)
/* 000128 00400128 8FA80024 */ lw $t0, 0x24($sp)
/* 00012C 0040012C 2901000B */ slti $at, $t0, 0xb
/* 000130 00400130 1420001B */ bnez $at, .L004001A0
/* 000134 00400134 00000000 */ nop
/* 000138 00400138 8FA90024 */ lw $t1, 0x24($sp)
/* 00013C 0040013C 8FAA002C */ lw $t2, 0x2c($sp)
/* 000140 00400140 0C100024 */ jal func_00400090
/* 000144 00400144 012A2021 */ addu $a0, $t1, $t2
/* 000148 00400148 AFA20024 */ sw $v0, 0x24($sp)
/* 00014C 0040014C 8FAB0020 */ lw $t3, 0x20($sp)
/* 000150 00400150 8FAC0030 */ lw $t4, 0x30($sp)
/* 000154 00400154 0C100024 */ jal func_00400090
/* 000158 00400158 016C2021 */ addu $a0, $t3, $t4
/* 00015C 0040015C AFA20020 */ sw $v0, 0x20($sp)
/* 000160 00400160 8FAD001C */ lw $t5, 0x1c($sp)
/* 000164 00400164 8FAE0034 */ lw $t6, 0x34($sp)
/* 000168 00400168 0C100024 */ jal func_00400090
/* 00016C 0040016C 01AE2021 */ addu $a0, $t5, $t6
/* 000170 00400170 AFA2001C */ sw $v0, 0x1c($sp)
/* 000174 00400174 8FAF0024 */ lw $t7, 0x24($sp)
/* 000178 00400178 11E00009 */ beqz $t7, .L004001A0
/* 00017C 0040017C 00000000 */ nop
/* 000180 00400180 8FB80020 */ lw $t8, 0x20($sp)
/* 000184 00400184 13000006 */ beqz $t8, .L004001A0
/* 000188 00400188 00000000 */ nop
/* 00018C 0040018C 8FB9001C */ lw $t9, 0x1c($sp)
/* 000190 00400190 13200003 */ beqz $t9, .L004001A0
/* 000194 00400194 00000000 */ nop
/* 000198 00400198 10000005 */ b .L004001B0
/* 00019C 0040019C 24020001 */ addiu $v0, $zero, 1
.L004001A0:
/* 0001A0 004001A0 10000003 */ b .L004001B0
/* 0001A4 004001A4 00001025 */ move $v0, $zero
/* 0001A8 004001A8 10000001 */ b .L004001B0
/* 0001AC 004001AC 00000000 */ nop
.L004001B0:
/* 0001B0 004001B0 8FBF0014 */ lw $ra, 0x14($sp)
/* 0001B4 004001B4 27BD0028 */ addiu $sp, $sp, 0x28
/* 0001B8 004001B8 03E00008 */ jr $ra
/* 0001BC 004001BC 00000000 */ nop

View File

@ -1,36 +0,0 @@
/* Expected decompiler (m2c) output fixture for the optimized MIPS build.
NOTE(review): func_00400090 is declared with four parameters but called
with one, two, or four arguments -- presumably an artifact of register
reuse in the optimized code that the decompiler reproduces verbatim. */
s32 func_00400090(s32, s32, s32, s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp2C;
s32 sp28;
s32 sp1C;
s32 sp18;
s32 temp_a3;
s32 temp_v0;
s32 temp_v0_2;
s32 temp_v0_3;
s32 temp_v1;
temp_v0 = arg0 + arg1;
if (temp_v0 != 0) {
temp_a3 = arg1 + arg2;
if (temp_a3 != 0) {
temp_v1 = arg2 + arg3;
if (temp_v1 != 0) {
/* Spills of caller-saved registers around the calls. */
sp18 = temp_v1;
sp1C = temp_a3;
temp_v0_2 = func_00400090(temp_v0 + arg0, temp_a3);
if (temp_v0_2 >= 0xB) {
sp1C = temp_a3;
sp2C = func_00400090(temp_v0_2 + arg1, arg1, arg2, temp_a3);
sp28 = func_00400090(temp_a3 + arg2);
temp_v0_3 = func_00400090(sp18 + arg3);
if ((sp2C != 0) && (sp28 != 0) && (temp_v0_3 != 0)) {
return 1;
}
}
}
}
}
return 0;
}

View File

@ -1,66 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel func_00400090
/* 000090 00400090 03E00008 */ jr $ra
/* 000094 00400094 24820001 */ addiu $v0, $a0, 1
glabel test
/* 000098 00400098 27BDFFD0 */ addiu $sp, $sp, -0x30
/* 00009C 0040009C 00851021 */ addu $v0, $a0, $a1
/* 0000A0 004000A0 AFBF0014 */ sw $ra, 0x14($sp)
/* 0000A4 004000A4 AFA40030 */ sw $a0, 0x30($sp)
/* 0000A8 004000A8 10400029 */ beqz $v0, .L00400150
/* 0000AC 004000AC AFA7003C */ sw $a3, 0x3c($sp)
/* 0000B0 004000B0 00A63821 */ addu $a3, $a1, $a2
/* 0000B4 004000B4 10E00026 */ beqz $a3, .L00400150
/* 0000B8 004000B8 8FAF003C */ lw $t7, 0x3c($sp)
/* 0000BC 004000BC 00CF1821 */ addu $v1, $a2, $t7
/* 0000C0 004000C0 10600023 */ beqz $v1, .L00400150
/* 0000C4 004000C4 00442021 */ addu $a0, $v0, $a0
/* 0000C8 004000C8 AFA30018 */ sw $v1, 0x18($sp)
/* 0000CC 004000CC AFA50034 */ sw $a1, 0x34($sp)
/* 0000D0 004000D0 AFA60038 */ sw $a2, 0x38($sp)
/* 0000D4 004000D4 0C100024 */ jal func_00400090
/* 0000D8 004000D8 AFA7001C */ sw $a3, 0x1c($sp)
/* 0000DC 004000DC 2841000B */ slti $at, $v0, 0xb
/* 0000E0 004000E0 8FA50034 */ lw $a1, 0x34($sp)
/* 0000E4 004000E4 8FA60038 */ lw $a2, 0x38($sp)
/* 0000E8 004000E8 14200019 */ bnez $at, .L00400150
/* 0000EC 004000EC 8FA7001C */ lw $a3, 0x1c($sp)
/* 0000F0 004000F0 00452021 */ addu $a0, $v0, $a1
/* 0000F4 004000F4 AFA60038 */ sw $a2, 0x38($sp)
/* 0000F8 004000F8 0C100024 */ jal func_00400090
/* 0000FC 004000FC AFA7001C */ sw $a3, 0x1c($sp)
/* 000100 00400100 8FA60038 */ lw $a2, 0x38($sp)
/* 000104 00400104 8FA7001C */ lw $a3, 0x1c($sp)
/* 000108 00400108 AFA2002C */ sw $v0, 0x2c($sp)
/* 00010C 0040010C 0C100024 */ jal func_00400090
/* 000110 00400110 00E62021 */ addu $a0, $a3, $a2
/* 000114 00400114 8FB80018 */ lw $t8, 0x18($sp)
/* 000118 00400118 8FB9003C */ lw $t9, 0x3c($sp)
/* 00011C 0040011C AFA20028 */ sw $v0, 0x28($sp)
/* 000120 00400120 0C100024 */ jal func_00400090
/* 000124 00400124 03192021 */ addu $a0, $t8, $t9
/* 000128 00400128 8FA8002C */ lw $t0, 0x2c($sp)
/* 00012C 0040012C 8FA90028 */ lw $t1, 0x28($sp)
/* 000130 00400130 51000008 */ beql $t0, $zero, .L00400154
/* 000134 00400134 00001025 */ move $v0, $zero
/* 000138 00400138 51200006 */ beql $t1, $zero, .L00400154
/* 00013C 0040013C 00001025 */ move $v0, $zero
/* 000140 00400140 50400004 */ beql $v0, $zero, .L00400154
/* 000144 00400144 00001025 */ move $v0, $zero
/* 000148 00400148 10000002 */ b .L00400154
/* 00014C 0040014C 24020001 */ addiu $v0, $zero, 1
.L00400150:
/* 000150 00400150 00001025 */ move $v0, $zero
.L00400154:
/* 000154 00400154 8FBF0014 */ lw $ra, 0x14($sp)
/* 000158 00400158 27BD0030 */ addiu $sp, $sp, 0x30
/* 00015C 0040015C 03E00008 */ jr $ra
/* 000160 00400160 00000000 */ nop
/* 000164 00400164 00000000 */ nop
/* 000168 00400168 00000000 */ nop
/* 00016C 0040016C 00000000 */ nop

View File

@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@ -1,27 +0,0 @@
/* Expected decompiler (m2c) output fixture for the PPC (mwcc) build.
temp_rNN names presumably track the PowerPC registers holding each
value -- TODO confirm against the paired assembly listing. */
s32 foo(s32); /* static */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 temp_r0;
s32 temp_r29;
s32 temp_r30;
s32 temp_r31;
s32 temp_r31_2;
s32 temp_r3;
s32 temp_r3_2;
temp_r0 = arg0 + arg1;
temp_r31 = arg1 + arg2;
temp_r29 = arg2 + arg3;
if ((temp_r0 != 0) && (temp_r31 != 0) && (temp_r29 != 0)) {
temp_r3 = foo(temp_r0 + arg0);
/* Note: rendered as > 0xA here, versus >= 0xB in the MIPS fixture;
the two predicates are equivalent for integers. */
if (temp_r3 > 0xA) {
temp_r30 = foo(temp_r3 + arg1);
temp_r31_2 = foo(temp_r31 + arg2);
temp_r3_2 = foo(temp_r29 + arg3);
if ((temp_r30 != 0) && (temp_r31_2 != 0) && (temp_r3_2 != 0)) {
return 1;
}
}
}
return 0;
}

View File

@ -1,55 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0xac
.global foo
foo:
/* 00000000 00000000 38 63 00 01 */ addi r3, r3, 1
/* 00000004 00000004 4E 80 00 20 */ blr
.global test
test:
/* 00000008 00000008 7C 08 02 A6 */ mflr r0
/* 0000000C 0000000C 90 01 00 04 */ stw r0, 4(r1)
/* 00000010 00000010 94 21 FF D0 */ stwu r1, -0x30(r1)
/* 00000014 00000014 BF 61 00 1C */ stmw r27, 0x1c(r1)
/* 00000018 00000018 3B C4 00 00 */ addi r30, r4, 0
/* 0000001C 0000001C 3B 65 00 00 */ addi r27, r5, 0
/* 00000020 00000020 3B 86 00 00 */ addi r28, r6, 0
/* 00000024 00000024 7C 03 F2 15 */ add. r0, r3, r30
/* 00000028 00000028 7F FE DA 14 */ add r31, r30, r27
/* 0000002C 0000002C 7F BB E2 14 */ add r29, r27, r28
/* 00000030 00000030 41 82 00 64 */ beq .L00000094
/* 00000034 00000034 2C 1F 00 00 */ cmpwi r31, 0
/* 00000038 00000038 41 82 00 5C */ beq .L00000094
/* 0000003C 0000003C 2C 1D 00 00 */ cmpwi r29, 0
/* 00000040 00000040 41 82 00 54 */ beq .L00000094
/* 00000044 00000044 7C 60 1A 14 */ add r3, r0, r3
/* 00000048 00000048 48 00 00 01 */ bl foo
/* 0000004C 0000004C 2C 03 00 0A */ cmpwi r3, 0xa
/* 00000050 00000050 40 81 00 44 */ ble .L00000094
/* 00000054 00000054 7C 63 F2 14 */ add r3, r3, r30
/* 00000058 00000058 48 00 00 01 */ bl foo
/* 0000005C 0000005C 3B C3 00 00 */ addi r30, r3, 0
/* 00000060 00000060 7C 7F DA 14 */ add r3, r31, r27
/* 00000064 00000064 48 00 00 01 */ bl foo
/* 00000068 00000068 3B E3 00 00 */ addi r31, r3, 0
/* 0000006C 0000006C 7C 7D E2 14 */ add r3, r29, r28
/* 00000070 00000070 48 00 00 01 */ bl foo
/* 00000074 00000074 2C 1E 00 00 */ cmpwi r30, 0
/* 00000078 00000078 41 82 00 1C */ beq .L00000094
/* 0000007C 0000007C 2C 1F 00 00 */ cmpwi r31, 0
/* 00000080 00000080 41 82 00 14 */ beq .L00000094
/* 00000084 00000084 2C 03 00 00 */ cmpwi r3, 0
/* 00000088 00000088 41 82 00 0C */ beq .L00000094
/* 0000008C 0000008C 38 60 00 01 */ li r3, 1
/* 00000090 00000090 48 00 00 08 */ b .L00000098
.L00000094:
/* 00000094 00000094 38 60 00 00 */ li r3, 0
.L00000098:
/* 00000098 00000098 BB 61 00 1C */ lmw r27, 0x1c(r1)
/* 0000009C 0000009C 80 01 00 34 */ lwz r0, 0x34(r1)
/* 000000A0 000000A0 38 21 00 30 */ addi r1, r1, 0x30
/* 000000A4 000000A4 7C 08 03 A6 */ mtlr r0
/* 000000A8 000000A8 4E 80 00 20 */ blr

View File

@ -1,26 +0,0 @@
/* Trivial helper: returns arg + 1. Gives test() real call sites. */
int foo(int arg) {
return arg + 1;
}
/*
* Decompiler test fixture: nested short-circuit && chains with helper
* calls between the guards. Returns 1 only when every guard passes,
* otherwise 0.
*/
int test(int a, int b, int c, int d) {
int var1;
int var2;
int var3;
var1 = a + b;
var2 = b + c;
var3 = c + d;
if (var1 && var2 && var3) {
var1 = foo(var1 + a);
if (var1 > 10) {
/* All three variables are refreshed before the final test. */
var1 = foo(var1 + b);
var2 = foo(var2 + c);
var3 = foo(var3 + d);
if (var1 && var2 && var3) {
return 1;
}
}
}
return 0;
}

View File

@ -1,65 +0,0 @@
/* Expected decompiler (m2c) output fixture (-O0 MIPS, loop fixture).
The goto/label shapes (loop_N, block_N) are the decompiler's literal
rendering of the source's while loops and continue/break edges; they
must stay exactly as emitted. */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 sp4;
s32 sp0;
s32 temp_t4;
sp0 = 0;
/* while (arg0 && (arg1 || arg2)) rendered as guarded goto loop. */
if ((arg0 != 0) && ((arg1 != 0) || (arg2 != 0))) {
loop_3:
sp0 += 1;
if (arg0 != 0) {
if ((arg1 != 0) || (arg2 != 0)) {
goto loop_3;
}
}
}
if ((arg0 != 0) || ((arg1 != 0) && (arg2 != 0))) {
loop_9:
sp0 += 1;
if (arg0 != 0) {
goto loop_9;
}
if ((arg1 != 0) && (arg2 != 0)) {
goto loop_9;
}
}
if (arg0 != 0) {
loop_13:
sp0 += 1;
/* These two gotos correspond to continue edges... */
if ((arg1 != 0) && ((arg2 != 0) || (arg3 != 0))) {
goto block_30;
}
sp0 += 1;
if ((arg1 != 0) || ((arg2 != 0) && (arg3 != 0))) {
goto block_30;
}
sp0 += 1;
/* ...while these empty then-branches correspond to break edges. */
if ((arg1 != 0) && ((arg2 != 0) || (arg3 != 0))) {
} else {
sp0 += 1;
if ((arg1 != 0) || ((arg2 != 0) && (arg3 != 0))) {
} else {
sp0 += 1;
block_30:
if (arg0 != 0) {
goto loop_13;
}
}
}
}
sp4 = 0;
/* for (sp4 = 0; sp4 < 10 && (arg0 || arg1); sp4 += arg2 + arg3) */
if ((arg0 != 0) || (arg1 != 0)) {
loop_33:
sp0 += 1;
temp_t4 = sp4 + (arg2 + arg3);
sp4 = temp_t4;
if (temp_t4 < 0xA) {
if ((arg0 != 0) || (arg1 != 0)) {
goto loop_33;
}
}
}
return sp0;
}

View File

@ -1,132 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 27BDFFF8 */ addiu $sp, $sp, -8
/* 000094 00400094 AFA00000 */ sw $zero, ($sp)
/* 000098 00400098 1080000E */ beqz $a0, .L004000D4
/* 00009C 0040009C 00000000 */ nop
/* 0000A0 004000A0 14A00003 */ bnez $a1, .L004000B0
/* 0000A4 004000A4 00000000 */ nop
/* 0000A8 004000A8 10C0000A */ beqz $a2, .L004000D4
/* 0000AC 004000AC 00000000 */ nop
.L004000B0:
/* 0000B0 004000B0 8FAE0000 */ lw $t6, ($sp)
/* 0000B4 004000B4 25CF0001 */ addiu $t7, $t6, 1
/* 0000B8 004000B8 AFAF0000 */ sw $t7, ($sp)
/* 0000BC 004000BC 10800005 */ beqz $a0, .L004000D4
/* 0000C0 004000C0 00000000 */ nop
/* 0000C4 004000C4 14A0FFFA */ bnez $a1, .L004000B0
/* 0000C8 004000C8 00000000 */ nop
/* 0000CC 004000CC 14C0FFF8 */ bnez $a2, .L004000B0
/* 0000D0 004000D0 00000000 */ nop
.L004000D4:
/* 0000D4 004000D4 14800005 */ bnez $a0, .L004000EC
/* 0000D8 004000D8 00000000 */ nop
/* 0000DC 004000DC 10A0000C */ beqz $a1, .L00400110
/* 0000E0 004000E0 00000000 */ nop
/* 0000E4 004000E4 10C0000A */ beqz $a2, .L00400110
/* 0000E8 004000E8 00000000 */ nop
.L004000EC:
/* 0000EC 004000EC 8FB80000 */ lw $t8, ($sp)
/* 0000F0 004000F0 27190001 */ addiu $t9, $t8, 1
/* 0000F4 004000F4 AFB90000 */ sw $t9, ($sp)
/* 0000F8 004000F8 1480FFFC */ bnez $a0, .L004000EC
/* 0000FC 004000FC 00000000 */ nop
/* 000100 00400100 10A00003 */ beqz $a1, .L00400110
/* 000104 00400104 00000000 */ nop
/* 000108 00400108 14C0FFF8 */ bnez $a2, .L004000EC
/* 00010C 0040010C 00000000 */ nop
.L00400110:
/* 000110 00400110 10800032 */ beqz $a0, .L004001DC
/* 000114 00400114 00000000 */ nop
.L00400118:
/* 000118 00400118 8FA80000 */ lw $t0, ($sp)
/* 00011C 0040011C 25090001 */ addiu $t1, $t0, 1
/* 000120 00400120 AFA90000 */ sw $t1, ($sp)
/* 000124 00400124 10A00007 */ beqz $a1, .L00400144
/* 000128 00400128 00000000 */ nop
/* 00012C 0040012C 14C00003 */ bnez $a2, .L0040013C
/* 000130 00400130 00000000 */ nop
/* 000134 00400134 10E00003 */ beqz $a3, .L00400144
/* 000138 00400138 00000000 */ nop
.L0040013C:
/* 00013C 0040013C 10000025 */ b .L004001D4
/* 000140 00400140 00000000 */ nop
.L00400144:
/* 000144 00400144 8FAA0000 */ lw $t2, ($sp)
/* 000148 00400148 254B0001 */ addiu $t3, $t2, 1
/* 00014C 0040014C AFAB0000 */ sw $t3, ($sp)
/* 000150 00400150 14A00005 */ bnez $a1, .L00400168
/* 000154 00400154 00000000 */ nop
/* 000158 00400158 10C00005 */ beqz $a2, .L00400170
/* 00015C 0040015C 00000000 */ nop
/* 000160 00400160 10E00003 */ beqz $a3, .L00400170
/* 000164 00400164 00000000 */ nop
.L00400168:
/* 000168 00400168 1000001A */ b .L004001D4
/* 00016C 0040016C 00000000 */ nop
.L00400170:
/* 000170 00400170 8FAC0000 */ lw $t4, ($sp)
/* 000174 00400174 258D0001 */ addiu $t5, $t4, 1
/* 000178 00400178 AFAD0000 */ sw $t5, ($sp)
/* 00017C 0040017C 10A00007 */ beqz $a1, .L0040019C
/* 000180 00400180 00000000 */ nop
/* 000184 00400184 14C00003 */ bnez $a2, .L00400194
/* 000188 00400188 00000000 */ nop
/* 00018C 0040018C 10E00003 */ beqz $a3, .L0040019C
/* 000190 00400190 00000000 */ nop
.L00400194:
/* 000194 00400194 10000011 */ b .L004001DC
/* 000198 00400198 00000000 */ nop
.L0040019C:
/* 00019C 0040019C 8FAE0000 */ lw $t6, ($sp)
/* 0001A0 004001A0 25CF0001 */ addiu $t7, $t6, 1
/* 0001A4 004001A4 AFAF0000 */ sw $t7, ($sp)
/* 0001A8 004001A8 14A00005 */ bnez $a1, .L004001C0
/* 0001AC 004001AC 00000000 */ nop
/* 0001B0 004001B0 10C00005 */ beqz $a2, .L004001C8
/* 0001B4 004001B4 00000000 */ nop
/* 0001B8 004001B8 10E00003 */ beqz $a3, .L004001C8
/* 0001BC 004001BC 00000000 */ nop
.L004001C0:
/* 0001C0 004001C0 10000006 */ b .L004001DC
/* 0001C4 004001C4 00000000 */ nop
.L004001C8:
/* 0001C8 004001C8 8FB80000 */ lw $t8, ($sp)
/* 0001CC 004001CC 27190001 */ addiu $t9, $t8, 1
/* 0001D0 004001D0 AFB90000 */ sw $t9, ($sp)
.L004001D4:
/* 0001D4 004001D4 1480FFD0 */ bnez $a0, .L00400118
/* 0001D8 004001D8 00000000 */ nop
.L004001DC:
/* 0001DC 004001DC 14800003 */ bnez $a0, .L004001EC
/* 0001E0 004001E0 AFA00004 */ sw $zero, 4($sp)
/* 0001E4 004001E4 10A0000E */ beqz $a1, .L00400220
/* 0001E8 004001E8 00000000 */ nop
.L004001EC:
/* 0001EC 004001EC 8FA80000 */ lw $t0, ($sp)
/* 0001F0 004001F0 25090001 */ addiu $t1, $t0, 1
/* 0001F4 004001F4 AFA90000 */ sw $t1, ($sp)
/* 0001F8 004001F8 8FAA0004 */ lw $t2, 4($sp)
/* 0001FC 004001FC 00C75821 */ addu $t3, $a2, $a3
/* 000200 00400200 014B6021 */ addu $t4, $t2, $t3
/* 000204 00400204 2981000A */ slti $at, $t4, 0xa
/* 000208 00400208 10200005 */ beqz $at, .L00400220
/* 00020C 0040020C AFAC0004 */ sw $t4, 4($sp)
/* 000210 00400210 1480FFF6 */ bnez $a0, .L004001EC
/* 000214 00400214 00000000 */ nop
/* 000218 00400218 14A0FFF4 */ bnez $a1, .L004001EC
/* 00021C 0040021C 00000000 */ nop
.L00400220:
/* 000220 00400220 10000003 */ b .L00400230
/* 000224 00400224 8FA20000 */ lw $v0, ($sp)
/* 000228 00400228 10000001 */ b .L00400230
/* 00022C 0040022C 00000000 */ nop
.L00400230:
/* 000230 00400230 03E00008 */ jr $ra
/* 000234 00400234 27BD0008 */ addiu $sp, $sp, 8
/* 000238 00400238 00000000 */ nop
/* 00023C 0040023C 00000000 */ nop

View File

@ -1,56 +0,0 @@
/* Expected decompiler (m2c) output fixture (optimized MIPS, loop
fixture). Note the comma expression (var_v1 += 1, (arg1 == 0)) inside
the condition: the decompiler folds a branch-delay-slot-style increment
into the guard. Structure is golden output; do not restructure. */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 var_v0;
s32 var_v1;
var_v1 = 0;
if ((arg0 != 0) && ((arg1 != 0) || (arg2 != 0))) {
loop_3:
var_v1 += 1;
if (arg0 != 0) {
if ((arg1 != 0) || (arg2 != 0)) {
goto loop_3;
}
}
}
if ((arg0 != 0) || ((arg1 != 0) && (arg2 != 0))) {
loop_9:
var_v1 += 1;
if (arg0 != 0) {
goto loop_9;
}
if ((arg1 != 0) && (arg2 != 0)) {
goto loop_9;
}
}
if (arg0 != 0) {
loop_13:
var_v1 += 1;
if (((arg1 == 0) || ((arg2 == 0) && (arg3 == 0))) && (var_v1 += 1, (arg1 == 0)) && ((arg2 == 0) || (arg3 == 0))) {
var_v1 += 1;
if ((arg1 == 0) || ((arg2 == 0) && (arg3 == 0))) {
var_v1 += 1;
if ((arg1 == 0) && ((arg2 == 0) || (arg3 == 0))) {
var_v1 += 1;
goto block_26;
}
}
} else {
block_26:
if (arg0 != 0) {
goto loop_13;
}
}
}
var_v0 = 0;
if ((arg0 != 0) || (arg1 != 0)) {
loop_29:
var_v0 = var_v0 + arg2 + arg3;
var_v1 += 1;
if (var_v0 < 0xA) {
if ((arg0 != 0) || (arg1 != 0)) {
goto loop_29;
}
}
}
return var_v1;
}

View File

@ -1,87 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 1080000B */ beqz $a0, .L004000C0
/* 000094 00400094 00001825 */ move $v1, $zero
/* 000098 00400098 14A00003 */ bnez $a1, .L004000A8
/* 00009C 0040009C 00000000 */ nop
/* 0000A0 004000A0 10C00007 */ beqz $a2, .L004000C0
/* 0000A4 004000A4 00000000 */ nop
.L004000A8:
/* 0000A8 004000A8 10800005 */ beqz $a0, .L004000C0
/* 0000AC 004000AC 24630001 */ addiu $v1, $v1, 1
/* 0000B0 004000B0 14A0FFFD */ bnez $a1, .L004000A8
/* 0000B4 004000B4 00000000 */ nop
/* 0000B8 004000B8 14C0FFFB */ bnez $a2, .L004000A8
/* 0000BC 004000BC 00000000 */ nop
.L004000C0:
/* 0000C0 004000C0 14800005 */ bnez $a0, .L004000D8
/* 0000C4 004000C4 00000000 */ nop
/* 0000C8 004000C8 10A00009 */ beqz $a1, .L004000F0
/* 0000CC 004000CC 00000000 */ nop
/* 0000D0 004000D0 10C00007 */ beqz $a2, .L004000F0
/* 0000D4 004000D4 00000000 */ nop
.L004000D8:
/* 0000D8 004000D8 1480FFFF */ bnez $a0, .L004000D8
/* 0000DC 004000DC 24630001 */ addiu $v1, $v1, 1
/* 0000E0 004000E0 10A00003 */ beqz $a1, .L004000F0
/* 0000E4 004000E4 00000000 */ nop
/* 0000E8 004000E8 14C0FFFB */ bnez $a2, .L004000D8
/* 0000EC 004000EC 00000000 */ nop
.L004000F0:
/* 0000F0 004000F0 1080001C */ beqz $a0, .L00400164
/* 0000F4 004000F4 00000000 */ nop
.L004000F8:
/* 0000F8 004000F8 10A00005 */ beqz $a1, .L00400110
/* 0000FC 004000FC 24630001 */ addiu $v1, $v1, 1
/* 000100 00400100 14C00016 */ bnez $a2, .L0040015C
/* 000104 00400104 00000000 */ nop
/* 000108 00400108 14E00014 */ bnez $a3, .L0040015C
/* 00010C 0040010C 00000000 */ nop
.L00400110:
/* 000110 00400110 14A00012 */ bnez $a1, .L0040015C
/* 000114 00400114 24630001 */ addiu $v1, $v1, 1
/* 000118 00400118 10C00003 */ beqz $a2, .L00400128
/* 00011C 0040011C 00000000 */ nop
/* 000120 00400120 14E0000E */ bnez $a3, .L0040015C
/* 000124 00400124 00000000 */ nop
.L00400128:
/* 000128 00400128 10A00005 */ beqz $a1, .L00400140
/* 00012C 0040012C 24630001 */ addiu $v1, $v1, 1
/* 000130 00400130 14C0000C */ bnez $a2, .L00400164
/* 000134 00400134 00000000 */ nop
/* 000138 00400138 14E0000A */ bnez $a3, .L00400164
/* 00013C 0040013C 00000000 */ nop
.L00400140:
/* 000140 00400140 14A00008 */ bnez $a1, .L00400164
/* 000144 00400144 24630001 */ addiu $v1, $v1, 1
/* 000148 00400148 50C00004 */ beql $a2, $zero, .L0040015C
/* 00014C 0040014C 24630001 */ addiu $v1, $v1, 1
/* 000150 00400150 14E00004 */ bnez $a3, .L00400164
/* 000154 00400154 00000000 */ nop
/* 000158 00400158 24630001 */ addiu $v1, $v1, 1
.L0040015C:
/* 00015C 0040015C 1480FFE6 */ bnez $a0, .L004000F8
/* 000160 00400160 00000000 */ nop
.L00400164:
/* 000164 00400164 14800002 */ bnez $a0, .L00400170
/* 000168 00400168 00001025 */ move $v0, $zero
/* 00016C 0040016C 10A00009 */ beqz $a1, .L00400194
.L00400170:
/* 000170 00400170 00467021 */ addu $t6, $v0, $a2
.L00400174:
/* 000174 00400174 01C71021 */ addu $v0, $t6, $a3
/* 000178 00400178 2841000A */ slti $at, $v0, 0xa
/* 00017C 0040017C 10200005 */ beqz $at, .L00400194
/* 000180 00400180 24630001 */ addiu $v1, $v1, 1
/* 000184 00400184 5480FFFB */ bnezl $a0, .L00400174
/* 000188 00400188 00467021 */ addu $t6, $v0, $a2
/* 00018C 0040018C 54A0FFF9 */ bnezl $a1, .L00400174
/* 000190 00400190 00467021 */ addu $t6, $v0, $a2
.L00400194:
/* 000194 00400194 03E00008 */ jr $ra
/* 000198 00400198 00601025 */ move $v0, $v1
/* 00019C 0040019C 00000000 */ nop

View File

@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@ -1,52 +0,0 @@
/* Expected decompiler (m2c) output fixture (PPC mwcc, loop fixture).
Empty then-branches with populated else-branches, and the redundant
((arg1 == 0) != 0) comparison, are deliberate golden-output artifacts
of the decompiler's condition rendering. */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 var_r5;
s32 var_r7;
var_r7 = 0;
loop_2:
if (arg0 != 0) {
if ((arg1 == 0) && (arg2 == 0)) {
} else {
var_r7 += 1;
goto loop_2;
}
}
loop_7:
if (arg0 != 0) {
block_6:
var_r7 += 1;
goto loop_7;
}
if (arg1 != 0) {
if (arg2 == 0) {
} else {
goto block_6;
}
}
loop_24:
if (arg0 != 0) {
var_r7 += 1;
if (((arg1 == 0) || ((arg2 == 0) && (arg3 == 0))) && (var_r7 += 1, ((arg1 == 0) != 0)) && ((arg2 == 0) || (arg3 == 0))) {
var_r7 += 1;
if ((arg1 == 0) || ((arg2 == 0) && (arg3 == 0))) {
var_r7 += 1;
if ((arg1 == 0) && ((arg2 == 0) || (arg3 == 0))) {
var_r7 += 1;
goto loop_24;
}
}
} else {
goto loop_24;
}
}
var_r5 = 0;
loop_27:
if ((var_r5 < 0xA) && ((arg0 != 0) || (arg1 != 0))) {
var_r5 += arg2 + arg3;
var_r7 += 1;
goto loop_27;
}
return var_r7;
}

View File

@ -1,83 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0xf8
.global test
test:
/* 00000000 00000000 38 E0 00 00 */ li r7, 0
/* 00000004 00000004 48 00 00 08 */ b .L0000000C
.L00000008:
/* 00000008 00000008 38 E7 00 01 */ addi r7, r7, 1
.L0000000C:
/* 0000000C 0000000C 2C 03 00 00 */ cmpwi r3, 0
/* 00000010 00000010 41 82 00 1C */ beq .L0000002C
/* 00000014 00000014 2C 04 00 00 */ cmpwi r4, 0
/* 00000018 00000018 40 82 FF F0 */ bne .L00000008
/* 0000001C 0000001C 2C 05 00 00 */ cmpwi r5, 0
/* 00000020 00000020 40 82 FF E8 */ bne .L00000008
/* 00000024 00000024 48 00 00 08 */ b .L0000002C
.L00000028:
/* 00000028 00000028 38 E7 00 01 */ addi r7, r7, 1
.L0000002C:
/* 0000002C 0000002C 2C 03 00 00 */ cmpwi r3, 0
/* 00000030 00000030 40 82 FF F8 */ bne .L00000028
/* 00000034 00000034 2C 04 00 00 */ cmpwi r4, 0
/* 00000038 00000038 41 82 00 84 */ beq .L000000BC
/* 0000003C 0000003C 2C 05 00 00 */ cmpwi r5, 0
/* 00000040 00000040 40 82 FF E8 */ bne .L00000028
/* 00000044 00000044 48 00 00 78 */ b .L000000BC
.L00000048:
/* 00000048 00000048 2C 04 00 00 */ cmpwi r4, 0
/* 0000004C 0000004C 38 E7 00 01 */ addi r7, r7, 1
/* 00000050 00000050 41 82 00 14 */ beq .L00000064
/* 00000054 00000054 2C 05 00 00 */ cmpwi r5, 0
/* 00000058 00000058 40 82 00 64 */ bne .L000000BC
/* 0000005C 0000005C 2C 06 00 00 */ cmpwi r6, 0
/* 00000060 00000060 40 82 00 5C */ bne .L000000BC
.L00000064:
/* 00000064 00000064 2C 04 00 00 */ cmpwi r4, 0
/* 00000068 00000068 38 E7 00 01 */ addi r7, r7, 1
/* 0000006C 0000006C 40 82 00 50 */ bne .L000000BC
/* 00000070 00000070 2C 05 00 00 */ cmpwi r5, 0
/* 00000074 00000074 41 82 00 0C */ beq .L00000080
/* 00000078 00000078 2C 06 00 00 */ cmpwi r6, 0
/* 0000007C 0000007C 40 82 00 40 */ bne .L000000BC
.L00000080:
/* 00000080 00000080 2C 04 00 00 */ cmpwi r4, 0
/* 00000084 00000084 38 E7 00 01 */ addi r7, r7, 1
/* 00000088 00000088 41 82 00 14 */ beq .L0000009C
/* 0000008C 0000008C 2C 05 00 00 */ cmpwi r5, 0
/* 00000090 00000090 40 82 00 34 */ bne .L000000C4
/* 00000094 00000094 2C 06 00 00 */ cmpwi r6, 0
/* 00000098 00000098 40 82 00 2C */ bne .L000000C4
.L0000009C:
/* 0000009C 0000009C 2C 04 00 00 */ cmpwi r4, 0
/* 000000A0 000000A0 38 E7 00 01 */ addi r7, r7, 1
/* 000000A4 000000A4 40 82 00 20 */ bne .L000000C4
/* 000000A8 000000A8 2C 05 00 00 */ cmpwi r5, 0
/* 000000AC 000000AC 41 82 00 0C */ beq .L000000B8
/* 000000B0 000000B0 2C 06 00 00 */ cmpwi r6, 0
/* 000000B4 000000B4 40 82 00 10 */ bne .L000000C4
.L000000B8:
/* 000000B8 000000B8 38 E7 00 01 */ addi r7, r7, 1
.L000000BC:
/* 000000BC 000000BC 2C 03 00 00 */ cmpwi r3, 0
/* 000000C0 000000C0 40 82 FF 88 */ bne .L00000048
.L000000C4:
/* 000000C4 000000C4 7C 05 32 14 */ add r0, r5, r6
/* 000000C8 000000C8 38 A0 00 00 */ li r5, 0
/* 000000CC 000000CC 48 00 00 0C */ b .L000000D8
.L000000D0:
/* 000000D0 000000D0 7C A5 02 14 */ add r5, r5, r0
/* 000000D4 000000D4 38 E7 00 01 */ addi r7, r7, 1
.L000000D8:
/* 000000D8 000000D8 2C 05 00 0A */ cmpwi r5, 0xa
/* 000000DC 000000DC 40 80 00 14 */ bge .L000000F0
/* 000000E0 000000E0 2C 03 00 00 */ cmpwi r3, 0
/* 000000E4 000000E4 40 82 FF EC */ bne .L000000D0
/* 000000E8 000000E8 2C 04 00 00 */ cmpwi r4, 0
/* 000000EC 000000EC 40 82 FF E4 */ bne .L000000D0
.L000000F0:
/* 000000F0 000000F0 7C E3 3B 78 */ mr r3, r7
/* 000000F4 000000F4 4E 80 00 20 */ blr

View File

@ -1,38 +0,0 @@
/*
* Decompiler test fixture for loop condition rendering.
* NOTE: a, b, c, d are never modified, so the two while loops either do
* not run at all or never terminate -- the fixture exercises
* control-flow-graph shapes, not runtime behavior.
*/
int test(int a, int b, int c, int d) {
int i;
int ret = 0;
while (a && (b || c)) {
ret++;
}
while (a || (b && c)) {
ret++;
}
/* continue and break under mixed &&/|| guards. */
while (a) {
ret++;
if (b && (c || d)) {
continue;
}
ret++;
if (b || (c && d)) {
continue;
}
ret++;
if (b && (c || d)) {
break;
}
ret++;
if (b || (c && d)) {
break;
}
ret++;
}
/* Compound for-loop condition; terminates only when c + d > 0. */
for (i = 0; i < 10 && (a || b); i += c + d) {
ret++;
}
return ret;
}

View File

@ -1,53 +0,0 @@
/* Expected decompiler (m2c) output fixture (-O0 MIPS): eight condition
patterns that each overwrite sp4 with a distinct constant, so the
return value identifies which guards matched. Golden output; keep the
expression shapes exactly as emitted. */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 spC;
s32 sp8;
s32 sp4;
s32 var_t0;
s32 var_t0_2;
spC = arg0 + arg1;
sp8 = arg1 + arg2;
sp4 = 0;
if ((spC != 0) || ((sp8 != 0) && ((arg0 * arg1) != 0)) || ((arg3 != 0) && (arg0 != 0))) {
sp4 = 1;
}
if ((arg0 != 0) && ((arg1 != 0) || (arg2 != 0)) && ((arg3 != 0) || ((arg0 + 1) != 0))) {
sp4 = 2;
}
if (((arg0 != 0) && (arg3 != 0)) || (((arg1 != 0) || (arg2 != 0)) && ((arg0 + 1) != 0))) {
sp4 = 3;
}
if ((arg0 != 0) && (arg1 != 0) && ((arg2 != 0) || (arg3 != 0)) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
sp4 = 4;
}
if ((((arg0 != 0) || (arg1 != 0)) && (arg2 != 0)) || ((arg3 != 0) && ((arg0 + 1) != 0)) || ((arg1 + 1) != 0) || ((arg2 + 1) != 0)) {
sp4 = 5;
}
if ((((arg0 != 0) && (arg1 != 0)) || ((arg2 != 0) && (arg3 != 0))) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
sp4 = 6;
}
/* var_t0 reconstructs a ternary-style select of arg2/arg3 on arg1. */
if (arg0 != 0) {
if (arg1 != 0) {
var_t0 = arg2;
} else {
var_t0 = arg3;
}
if ((var_t0 == (arg0 + 1)) && ((arg1 + 1) != 0)) {
sp4 = 7;
}
}
/* Shared tail: both the else branch and the inner || path reach
block_53, rendered with a goto into the else. */
if (arg0 == 0) {
if (arg1 != 0) {
var_t0_2 = arg2;
} else {
var_t0_2 = arg3;
}
if ((var_t0_2 == (arg0 + 1)) || ((arg1 + 1) != 0)) {
goto block_53;
}
} else {
block_53:
sp4 = 8;
}
return sp4;
}

View File

@ -1,172 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
glabel test
/* 000090 00400090 27BDFFF0 */ addiu $sp, $sp, -0x10
/* 000094 00400094 00857021 */ addu $t6, $a0, $a1
/* 000098 00400098 AFAE000C */ sw $t6, 0xc($sp)
/* 00009C 0040009C 00A67821 */ addu $t7, $a1, $a2
/* 0000A0 004000A0 AFAF0008 */ sw $t7, 8($sp)
/* 0000A4 004000A4 AFA00004 */ sw $zero, 4($sp)
/* 0000A8 004000A8 8FB8000C */ lw $t8, 0xc($sp)
/* 0000AC 004000AC 1700000C */ bnez $t8, .L004000E0
/* 0000B0 004000B0 00000000 */ nop
/* 0000B4 004000B4 8FB90008 */ lw $t9, 8($sp)
/* 0000B8 004000B8 13200005 */ beqz $t9, .L004000D0
/* 0000BC 004000BC 00000000 */ nop
/* 0000C0 004000C0 00850019 */ multu $a0, $a1
/* 0000C4 004000C4 00004812 */ mflo $t1
/* 0000C8 004000C8 15200005 */ bnez $t1, .L004000E0
/* 0000CC 004000CC 00000000 */ nop
.L004000D0:
/* 0000D0 004000D0 10E00005 */ beqz $a3, .L004000E8
/* 0000D4 004000D4 00000000 */ nop
/* 0000D8 004000D8 10800003 */ beqz $a0, .L004000E8
/* 0000DC 004000DC 00000000 */ nop
.L004000E0:
/* 0000E0 004000E0 240A0001 */ addiu $t2, $zero, 1
/* 0000E4 004000E4 AFAA0004 */ sw $t2, 4($sp)
.L004000E8:
/* 0000E8 004000E8 1080000C */ beqz $a0, .L0040011C
/* 0000EC 004000EC 00000000 */ nop
/* 0000F0 004000F0 14A00003 */ bnez $a1, .L00400100
/* 0000F4 004000F4 00000000 */ nop
/* 0000F8 004000F8 10C00008 */ beqz $a2, .L0040011C
/* 0000FC 004000FC 00000000 */ nop
.L00400100:
/* 000100 00400100 14E00004 */ bnez $a3, .L00400114
/* 000104 00400104 00000000 */ nop
/* 000108 00400108 248B0001 */ addiu $t3, $a0, 1
/* 00010C 0040010C 11600003 */ beqz $t3, .L0040011C
/* 000110 00400110 00000000 */ nop
.L00400114:
/* 000114 00400114 240C0002 */ addiu $t4, $zero, 2
/* 000118 00400118 AFAC0004 */ sw $t4, 4($sp)
.L0040011C:
/* 00011C 0040011C 10800003 */ beqz $a0, .L0040012C
/* 000120 00400120 00000000 */ nop
/* 000124 00400124 14E00008 */ bnez $a3, .L00400148
/* 000128 00400128 00000000 */ nop
.L0040012C:
/* 00012C 0040012C 14A00003 */ bnez $a1, .L0040013C
/* 000130 00400130 00000000 */ nop
/* 000134 00400134 10C00006 */ beqz $a2, .L00400150
/* 000138 00400138 00000000 */ nop
.L0040013C:
/* 00013C 0040013C 248D0001 */ addiu $t5, $a0, 1
/* 000140 00400140 11A00003 */ beqz $t5, .L00400150
/* 000144 00400144 00000000 */ nop
.L00400148:
/* 000148 00400148 240E0003 */ addiu $t6, $zero, 3
/* 00014C 0040014C AFAE0004 */ sw $t6, 4($sp)
.L00400150:
/* 000150 00400150 1080000F */ beqz $a0, .L00400190
/* 000154 00400154 00000000 */ nop
/* 000158 00400158 10A0000D */ beqz $a1, .L00400190
/* 00015C 0040015C 00000000 */ nop
/* 000160 00400160 14C00003 */ bnez $a2, .L00400170
/* 000164 00400164 00000000 */ nop
/* 000168 00400168 10E00009 */ beqz $a3, .L00400190
/* 00016C 0040016C 00000000 */ nop
.L00400170:
/* 000170 00400170 248F0001 */ addiu $t7, $a0, 1
/* 000174 00400174 15E00004 */ bnez $t7, .L00400188
/* 000178 00400178 00000000 */ nop
/* 00017C 0040017C 24B80001 */ addiu $t8, $a1, 1
/* 000180 00400180 13000003 */ beqz $t8, .L00400190
/* 000184 00400184 00000000 */ nop
.L00400188:
/* 000188 00400188 24190004 */ addiu $t9, $zero, 4
/* 00018C 0040018C AFB90004 */ sw $t9, 4($sp)
.L00400190:
/* 000190 00400190 14800003 */ bnez $a0, .L004001A0
/* 000194 00400194 00000000 */ nop
/* 000198 00400198 10A00003 */ beqz $a1, .L004001A8
/* 00019C 0040019C 00000000 */ nop
.L004001A0:
/* 0001A0 004001A0 14C0000C */ bnez $a2, .L004001D4
/* 0001A4 004001A4 00000000 */ nop
.L004001A8:
/* 0001A8 004001A8 10E00004 */ beqz $a3, .L004001BC
/* 0001AC 004001AC 00000000 */ nop
/* 0001B0 004001B0 24890001 */ addiu $t1, $a0, 1
/* 0001B4 004001B4 15200007 */ bnez $t1, .L004001D4
/* 0001B8 004001B8 00000000 */ nop
.L004001BC:
/* 0001BC 004001BC 24AA0001 */ addiu $t2, $a1, 1
/* 0001C0 004001C0 15400004 */ bnez $t2, .L004001D4
/* 0001C4 004001C4 00000000 */ nop
/* 0001C8 004001C8 24CB0001 */ addiu $t3, $a2, 1
/* 0001CC 004001CC 11600003 */ beqz $t3, .L004001DC
/* 0001D0 004001D0 00000000 */ nop
.L004001D4:
/* 0001D4 004001D4 240C0005 */ addiu $t4, $zero, 5
/* 0001D8 004001D8 AFAC0004 */ sw $t4, 4($sp)
.L004001DC:
/* 0001DC 004001DC 10800003 */ beqz $a0, .L004001EC
/* 0001E0 004001E0 00000000 */ nop
/* 0001E4 004001E4 14A00005 */ bnez $a1, .L004001FC
/* 0001E8 004001E8 00000000 */ nop
.L004001EC:
/* 0001EC 004001EC 10C0000B */ beqz $a2, .L0040021C
/* 0001F0 004001F0 00000000 */ nop
/* 0001F4 004001F4 10E00009 */ beqz $a3, .L0040021C
/* 0001F8 004001F8 00000000 */ nop
.L004001FC:
/* 0001FC 004001FC 248D0001 */ addiu $t5, $a0, 1
/* 000200 00400200 15A00004 */ bnez $t5, .L00400214
/* 000204 00400204 00000000 */ nop
/* 000208 00400208 24AE0001 */ addiu $t6, $a1, 1
/* 00020C 0040020C 11C00003 */ beqz $t6, .L0040021C
/* 000210 00400210 00000000 */ nop
.L00400214:
/* 000214 00400214 240F0006 */ addiu $t7, $zero, 6
/* 000218 00400218 AFAF0004 */ sw $t7, 4($sp)
.L0040021C:
/* 00021C 0040021C 1080000E */ beqz $a0, .L00400258
/* 000220 00400220 00000000 */ nop
/* 000224 00400224 10A00003 */ beqz $a1, .L00400234
/* 000228 00400228 00000000 */ nop
/* 00022C 0040022C 10000002 */ b .L00400238
/* 000230 00400230 00C04025 */ move $t0, $a2
.L00400234:
/* 000234 00400234 00E04025 */ move $t0, $a3
.L00400238:
/* 000238 00400238 24980001 */ addiu $t8, $a0, 1
/* 00023C 0040023C 15180006 */ bne $t0, $t8, .L00400258
/* 000240 00400240 00000000 */ nop
/* 000244 00400244 24B90001 */ addiu $t9, $a1, 1
/* 000248 00400248 13200003 */ beqz $t9, .L00400258
/* 00024C 0040024C 00000000 */ nop
/* 000250 00400250 24090007 */ addiu $t1, $zero, 7
/* 000254 00400254 AFA90004 */ sw $t1, 4($sp)
.L00400258:
/* 000258 00400258 1480000C */ bnez $a0, .L0040028C
/* 00025C 0040025C 00000000 */ nop
/* 000260 00400260 10A00003 */ beqz $a1, .L00400270
/* 000264 00400264 00000000 */ nop
/* 000268 00400268 10000002 */ b .L00400274
/* 00026C 0040026C 00C04025 */ move $t0, $a2
.L00400270:
/* 000270 00400270 00E04025 */ move $t0, $a3
.L00400274:
/* 000274 00400274 248A0001 */ addiu $t2, $a0, 1
/* 000278 00400278 110A0004 */ beq $t0, $t2, .L0040028C
/* 00027C 0040027C 00000000 */ nop
/* 000280 00400280 24AB0001 */ addiu $t3, $a1, 1
/* 000284 00400284 11600003 */ beqz $t3, .L00400294
/* 000288 00400288 00000000 */ nop
.L0040028C:
/* 00028C 0040028C 240C0008 */ addiu $t4, $zero, 8
/* 000290 00400290 AFAC0004 */ sw $t4, 4($sp)
.L00400294:
/* 000294 00400294 10000003 */ b .L004002A4
/* 000298 00400298 8FA20004 */ lw $v0, 4($sp)
/* 00029C 0040029C 10000001 */ b .L004002A4
/* 0002A0 004002A0 00000000 */ nop
.L004002A4:
/* 0002A4 004002A4 03E00008 */ jr $ra
/* 0002A8 004002A8 27BD0010 */ addiu $sp, $sp, 0x10
/* 0002AC 004002AC 00000000 */ nop

View File

@@ -1,49 +0,0 @@
/*
 * Decompiler-style output (MIPS target): eight independent condition
 * groups are evaluated in order, each overwriting the result when it
 * holds, so the last true group determines the return value.
 * NOTE(review): variable names are machine-generated; s32 is presumably
 * the project's 32-bit signed integer typedef -- confirm against headers.
 */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 var_t0;
s32 var_t0_2;
s32 var_v1;
/* var_v1 accumulates the result; starts at 0 (returned if nothing matches). */
var_v1 = 0;
if (((arg0 + arg1) != 0) || (((arg1 + arg2) != 0) && ((arg0 * arg1) != 0)) || ((arg3 != 0) && (arg0 != 0))) {
var_v1 = 1;
}
if ((arg0 != 0) && ((arg1 != 0) || (arg2 != 0)) && ((arg3 != 0) || ((arg0 + 1) != 0))) {
var_v1 = 2;
}
if (((arg0 != 0) && (arg3 != 0)) || (((arg1 != 0) || (arg2 != 0)) && ((arg0 + 1) != 0))) {
var_v1 = 3;
}
if ((arg0 != 0) && (arg1 != 0) && ((arg2 != 0) || (arg3 != 0)) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
var_v1 = 4;
}
if ((((arg0 != 0) || (arg1 != 0)) && (arg2 != 0)) || ((arg3 != 0) && ((arg0 + 1) != 0)) || ((arg1 + 1) != 0) || ((arg2 + 1) != 0)) {
var_v1 = 5;
}
if ((((arg0 != 0) && (arg1 != 0)) || ((arg2 != 0) && (arg3 != 0))) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
var_v1 = 6;
}
/* Group 7: select arg2/arg3 by arg1, then compare against arg0 + 1. */
if (arg0 != 0) {
if (arg1 != 0) {
var_t0 = arg2;
} else {
var_t0 = arg3;
}
if ((var_t0 == (arg0 + 1)) && ((arg1 + 1) != 0)) {
var_v1 = 7;
}
}
/* Group 8: the arg0 == 0 path reaches the shared "var_v1 = 8" store by
 * jumping INTO the else branch (goto block_53) -- this irreducible shape
 * mirrors the original machine code's control flow; do not "simplify" it. */
if (arg0 == 0) {
if (arg1 != 0) {
var_t0_2 = arg2;
} else {
var_t0_2 = arg3;
}
if ((var_t0_2 == (arg0 + 1)) || ((arg1 + 1) != 0)) {
goto block_53;
}
} else {
block_53:
var_v1 = 8;
}
return var_v1;
}

View File

@@ -1,137 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
# MIPS disassembly of test(): the result lives in $v1 (zeroed in the delay
# slot of the first branch). Each labelled group below may overwrite it
# with one of the constants 1..8; the last store wins, and $v1 is copied
# to $v0 in the delay slot of the final jr.
# Branch-likely forms (bnezl) execute their delay slot only when taken.
glabel test
# group 1: may set $v1 = 1
/* 000090 00400090 00857021 */ addu $t6, $a0, $a1
/* 000094 00400094 15C0000C */ bnez $t6, .L004000C8
/* 000098 00400098 00001825 */ move $v1, $zero
/* 00009C 0040009C 00A67821 */ addu $t7, $a1, $a2
/* 0000A0 004000A0 11E00005 */ beqz $t7, .L004000B8
/* 0000A4 004000A4 00000000 */ nop
/* 0000A8 004000A8 00850019 */ multu $a0, $a1
/* 0000AC 004000AC 0000C012 */ mflo $t8
/* 0000B0 004000B0 57000006 */ bnezl $t8, .L004000CC
/* 0000B4 004000B4 24030001 */ addiu $v1, $zero, 1
.L004000B8:
/* 0000B8 004000B8 10E00004 */ beqz $a3, .L004000CC
/* 0000BC 004000BC 00000000 */ nop
/* 0000C0 004000C0 10800002 */ beqz $a0, .L004000CC
/* 0000C4 004000C4 00000000 */ nop
.L004000C8:
/* 0000C8 004000C8 24030001 */ addiu $v1, $zero, 1
.L004000CC:
# group 2: may set $v1 = 2
/* 0000CC 004000CC 1080000A */ beqz $a0, .L004000F8
/* 0000D0 004000D0 00000000 */ nop
/* 0000D4 004000D4 14A00003 */ bnez $a1, .L004000E4
/* 0000D8 004000D8 00000000 */ nop
/* 0000DC 004000DC 10C00006 */ beqz $a2, .L004000F8
/* 0000E0 004000E0 00000000 */ nop
.L004000E4:
/* 0000E4 004000E4 14E00003 */ bnez $a3, .L004000F4
/* 0000E8 004000E8 24990001 */ addiu $t9, $a0, 1
/* 0000EC 004000EC 13200002 */ beqz $t9, .L004000F8
/* 0000F0 004000F0 00000000 */ nop
.L004000F4:
/* 0000F4 004000F4 24030002 */ addiu $v1, $zero, 2
.L004000F8:
# group 3: may set $v1 = 3 (bnezl stores it in its delay slot when taken)
/* 0000F8 004000F8 10800003 */ beqz $a0, .L00400108
/* 0000FC 004000FC 00000000 */ nop
/* 000100 00400100 54E00008 */ bnezl $a3, .L00400124
/* 000104 00400104 24030003 */ addiu $v1, $zero, 3
.L00400108:
/* 000108 00400108 14A00003 */ bnez $a1, .L00400118
/* 00010C 0040010C 24890001 */ addiu $t1, $a0, 1
/* 000110 00400110 10C00004 */ beqz $a2, .L00400124
/* 000114 00400114 00000000 */ nop
.L00400118:
/* 000118 00400118 11200002 */ beqz $t1, .L00400124
/* 00011C 0040011C 00000000 */ nop
/* 000120 00400120 24030003 */ addiu $v1, $zero, 3
.L00400124:
# group 4: may set $v1 = 4
/* 000124 00400124 1080000C */ beqz $a0, .L00400158
/* 000128 00400128 00000000 */ nop
/* 00012C 0040012C 10A0000A */ beqz $a1, .L00400158
/* 000130 00400130 00000000 */ nop
/* 000134 00400134 14C00003 */ bnez $a2, .L00400144
/* 000138 00400138 248A0001 */ addiu $t2, $a0, 1
/* 00013C 0040013C 10E00006 */ beqz $a3, .L00400158
/* 000140 00400140 00000000 */ nop
.L00400144:
/* 000144 00400144 15400003 */ bnez $t2, .L00400154
/* 000148 00400148 24AB0001 */ addiu $t3, $a1, 1
/* 00014C 0040014C 11600002 */ beqz $t3, .L00400158
/* 000150 00400150 00000000 */ nop
.L00400154:
/* 000154 00400154 24030004 */ addiu $v1, $zero, 4
.L00400158:
# group 5: may set $v1 = 5
/* 000158 00400158 14800003 */ bnez $a0, .L00400168
/* 00015C 0040015C 00000000 */ nop
/* 000160 00400160 10A00003 */ beqz $a1, .L00400170
/* 000164 00400164 00000000 */ nop
.L00400168:
/* 000168 00400168 54C0000A */ bnezl $a2, .L00400194
/* 00016C 0040016C 24030005 */ addiu $v1, $zero, 5
.L00400170:
/* 000170 00400170 10E00002 */ beqz $a3, .L0040017C
/* 000174 00400174 248C0001 */ addiu $t4, $a0, 1
/* 000178 00400178 15800005 */ bnez $t4, .L00400190
.L0040017C:
/* 00017C 0040017C 24AD0001 */ addiu $t5, $a1, 1
/* 000180 00400180 15A00003 */ bnez $t5, .L00400190
/* 000184 00400184 24CE0001 */ addiu $t6, $a2, 1
/* 000188 00400188 11C00002 */ beqz $t6, .L00400194
/* 00018C 0040018C 00000000 */ nop
.L00400190:
/* 000190 00400190 24030005 */ addiu $v1, $zero, 5
.L00400194:
# group 6: may set $v1 = 6
/* 000194 00400194 10800003 */ beqz $a0, .L004001A4
/* 000198 00400198 00000000 */ nop
/* 00019C 0040019C 54A00005 */ bnezl $a1, .L004001B4
/* 0001A0 004001A0 248F0001 */ addiu $t7, $a0, 1
.L004001A4:
/* 0001A4 004001A4 10C00008 */ beqz $a2, .L004001C8
/* 0001A8 004001A8 00000000 */ nop
/* 0001AC 004001AC 10E00006 */ beqz $a3, .L004001C8
/* 0001B0 004001B0 248F0001 */ addiu $t7, $a0, 1
.L004001B4:
/* 0001B4 004001B4 15E00003 */ bnez $t7, .L004001C4
/* 0001B8 004001B8 24B80001 */ addiu $t8, $a1, 1
/* 0001BC 004001BC 13000002 */ beqz $t8, .L004001C8
/* 0001C0 004001C0 00000000 */ nop
.L004001C4:
/* 0001C4 004001C4 24030006 */ addiu $v1, $zero, 6
.L004001C8:
# group 7: selects $a2/$a3 into $t0 by $a1, compares with $a0 + 1
/* 0001C8 004001C8 1080000B */ beqz $a0, .L004001F8
/* 0001CC 004001CC 00000000 */ nop
/* 0001D0 004001D0 10A00003 */ beqz $a1, .L004001E0
/* 0001D4 004001D4 24820001 */ addiu $v0, $a0, 1
/* 0001D8 004001D8 10000002 */ b .L004001E4
/* 0001DC 004001DC 00C04025 */ move $t0, $a2
.L004001E0:
/* 0001E0 004001E0 00E04025 */ move $t0, $a3
.L004001E4:
/* 0001E4 004001E4 15020004 */ bne $t0, $v0, .L004001F8
/* 0001E8 004001E8 24B90001 */ addiu $t9, $a1, 1
/* 0001EC 004001EC 13200002 */ beqz $t9, .L004001F8
/* 0001F0 004001F0 00000000 */ nop
/* 0001F4 004001F4 24030007 */ addiu $v1, $zero, 7
.L004001F8:
# group 8: may set $v1 = 8 (bnezl writes it in the delay slot when $a0 != 0)
/* 0001F8 004001F8 5480000B */ bnezl $a0, .L00400228
/* 0001FC 004001FC 24030008 */ addiu $v1, $zero, 8
/* 000200 00400200 10A00003 */ beqz $a1, .L00400210
/* 000204 00400204 24820001 */ addiu $v0, $a0, 1
/* 000208 00400208 10000002 */ b .L00400214
/* 00020C 0040020C 00C04025 */ move $t0, $a2
.L00400210:
/* 000210 00400210 00E04025 */ move $t0, $a3
.L00400214:
/* 000214 00400214 11020003 */ beq $t0, $v0, .L00400224
/* 000218 00400218 24A90001 */ addiu $t1, $a1, 1
/* 00021C 0040021C 11200002 */ beqz $t1, .L00400228
/* 000220 00400220 00000000 */ nop
.L00400224:
/* 000224 00400224 24030008 */ addiu $v1, $zero, 8
.L00400228:
# return: result moved into $v0 in the jr delay slot
/* 000228 00400228 03E00008 */ jr $ra
/* 00022C 0040022C 00601025 */ move $v0, $v1

View File

@@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@@ -1,50 +0,0 @@
/*
 * Decompiler-style output (PowerPC target): eight condition groups
 * evaluated in order, each overwriting the result when it holds; the
 * last true group wins. var_r5 starts as a copy of arg2 but is CLOBBERED
 * with arg3 in the final group (mirroring register reuse of r5).
 * NOTE(review): names are machine-generated; s32 is presumably the
 * project's 32-bit signed integer typedef -- confirm against headers.
 */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
s32 var_r5;
s32 var_r7;
s32 var_r8;
/* var_r5 aliases arg2 until the last group overwrites it. */
var_r5 = arg2;
/* var_r8 accumulates the result; 0 if no group matches. */
var_r8 = 0;
if (((arg0 + arg1) != 0) || (((s32) (arg1 + var_r5) != 0) && ((arg0 * arg1) != 0)) || ((arg3 != 0) && (arg0 != 0))) {
var_r8 = 1;
}
if ((arg0 != 0) && ((arg1 != 0) || (var_r5 != 0)) && ((arg3 != 0) || ((arg0 + 1) != 0))) {
var_r8 = 2;
}
if (((arg0 != 0) && (arg3 != 0)) || (((arg1 != 0) || (var_r5 != 0)) && ((arg0 + 1) != 0))) {
var_r8 = 3;
}
if ((arg0 != 0) && (arg1 != 0) && ((var_r5 != 0) || (arg3 != 0)) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
var_r8 = 4;
}
if ((((arg0 != 0) || (arg1 != 0)) && (var_r5 != 0)) || ((arg3 != 0) && ((arg0 + 1) != 0)) || ((arg1 + 1) != 0) || ((var_r5 + 1) != 0)) {
var_r8 = 5;
}
if ((((arg0 != 0) && (arg1 != 0)) || ((var_r5 != 0) && (arg3 != 0))) && (((arg0 + 1) != 0) || ((arg1 + 1) != 0))) {
var_r8 = 6;
}
/* Group 7: select var_r5 (arg2) or arg3 by arg1, compare with arg0 + 1. */
if (arg0 != 0) {
if (arg1 != 0) {
var_r7 = var_r5;
} else {
var_r7 = arg3;
}
if (((s32) (arg0 + 1) == var_r7) && ((arg1 + 1) != 0)) {
var_r8 = 7;
}
}
/* Group 8: when arg1 == 0 the selection overwrites var_r5 with arg3
 * (register reuse); the arg0 == 0 path then jumps INTO the else branch
 * (goto block_53) to share the "var_r8 = 8" store. Do not "simplify". */
if (arg0 == 0) {
if (arg1 != 0) {
} else {
var_r5 = arg3;
}
if (((s32) (arg0 + 1) == var_r5) || ((arg1 + 1) != 0)) {
goto block_53;
}
} else {
block_53:
var_r8 = 8;
}
return var_r8;
}

View File

@@ -1,139 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0x19c
# PowerPC disassembly of test(): the result lives in r8 (zeroed up front
# by "li r8, 0"). Each labelled group below may overwrite it with one of
# the constants 1..8; the last store wins, and r8 is moved to r3 before blr.
# Record forms (add., mullw., addic.) set CR0, which the following
# conditional branches test.
.global test
test:
# group 1: may set r8 = 1
/* 00000000 00000000 7C 03 22 15 */ add. r0, r3, r4
/* 00000004 00000004 7C 04 2A 14 */ add r0, r4, r5
/* 00000008 00000008 39 00 00 00 */ li r8, 0
/* 0000000C 0000000C 40 82 00 24 */ bne .L00000030
/* 00000010 00000010 2C 00 00 00 */ cmpwi r0, 0
/* 00000014 00000014 41 82 00 0C */ beq .L00000020
/* 00000018 00000018 7C 03 21 D7 */ mullw. r0, r3, r4
/* 0000001C 0000001C 40 82 00 14 */ bne .L00000030
.L00000020:
/* 00000020 00000020 2C 06 00 00 */ cmpwi r6, 0
/* 00000024 00000024 41 82 00 10 */ beq .L00000034
/* 00000028 00000028 2C 03 00 00 */ cmpwi r3, 0
/* 0000002C 0000002C 41 82 00 08 */ beq .L00000034
.L00000030:
/* 00000030 00000030 39 00 00 01 */ li r8, 1
.L00000034:
# group 2: may set r8 = 2
/* 00000034 00000034 2C 03 00 00 */ cmpwi r3, 0
/* 00000038 00000038 41 82 00 28 */ beq .L00000060
/* 0000003C 0000003C 2C 04 00 00 */ cmpwi r4, 0
/* 00000040 00000040 40 82 00 0C */ bne .L0000004C
/* 00000044 00000044 2C 05 00 00 */ cmpwi r5, 0
/* 00000048 00000048 41 82 00 18 */ beq .L00000060
.L0000004C:
/* 0000004C 0000004C 2C 06 00 00 */ cmpwi r6, 0
/* 00000050 00000050 40 82 00 0C */ bne .L0000005C
/* 00000054 00000054 34 03 00 01 */ addic. r0, r3, 1
/* 00000058 00000058 41 82 00 08 */ beq .L00000060
.L0000005C:
/* 0000005C 0000005C 39 00 00 02 */ li r8, 2
.L00000060:
# group 3: may set r8 = 3
/* 00000060 00000060 2C 03 00 00 */ cmpwi r3, 0
/* 00000064 00000064 41 82 00 0C */ beq .L00000070
/* 00000068 00000068 2C 06 00 00 */ cmpwi r6, 0
/* 0000006C 0000006C 40 82 00 1C */ bne .L00000088
.L00000070:
/* 00000070 00000070 2C 04 00 00 */ cmpwi r4, 0
/* 00000074 00000074 40 82 00 0C */ bne .L00000080
/* 00000078 00000078 2C 05 00 00 */ cmpwi r5, 0
/* 0000007C 0000007C 41 82 00 10 */ beq .L0000008C
.L00000080:
/* 00000080 00000080 34 03 00 01 */ addic. r0, r3, 1
/* 00000084 00000084 41 82 00 08 */ beq .L0000008C
.L00000088:
/* 00000088 00000088 39 00 00 03 */ li r8, 3
.L0000008C:
# group 4: may set r8 = 4
/* 0000008C 0000008C 2C 03 00 00 */ cmpwi r3, 0
/* 00000090 00000090 41 82 00 30 */ beq .L000000C0
/* 00000094 00000094 2C 04 00 00 */ cmpwi r4, 0
/* 00000098 00000098 41 82 00 28 */ beq .L000000C0
/* 0000009C 0000009C 2C 05 00 00 */ cmpwi r5, 0
/* 000000A0 000000A0 40 82 00 0C */ bne .L000000AC
/* 000000A4 000000A4 2C 06 00 00 */ cmpwi r6, 0
/* 000000A8 000000A8 41 82 00 18 */ beq .L000000C0
.L000000AC:
/* 000000AC 000000AC 34 03 00 01 */ addic. r0, r3, 1
/* 000000B0 000000B0 40 82 00 0C */ bne .L000000BC
/* 000000B4 000000B4 34 04 00 01 */ addic. r0, r4, 1
/* 000000B8 000000B8 41 82 00 08 */ beq .L000000C0
.L000000BC:
/* 000000BC 000000BC 39 00 00 04 */ li r8, 4
.L000000C0:
# group 5: may set r8 = 5
/* 000000C0 000000C0 2C 03 00 00 */ cmpwi r3, 0
/* 000000C4 000000C4 40 82 00 0C */ bne .L000000D0
/* 000000C8 000000C8 2C 04 00 00 */ cmpwi r4, 0
/* 000000CC 000000CC 41 82 00 0C */ beq .L000000D8
.L000000D0:
/* 000000D0 000000D0 2C 05 00 00 */ cmpwi r5, 0
/* 000000D4 000000D4 40 82 00 24 */ bne .L000000F8
.L000000D8:
/* 000000D8 000000D8 2C 06 00 00 */ cmpwi r6, 0
/* 000000DC 000000DC 41 82 00 0C */ beq .L000000E8
/* 000000E0 000000E0 34 03 00 01 */ addic. r0, r3, 1
/* 000000E4 000000E4 40 82 00 14 */ bne .L000000F8
.L000000E8:
/* 000000E8 000000E8 34 04 00 01 */ addic. r0, r4, 1
/* 000000EC 000000EC 40 82 00 0C */ bne .L000000F8
/* 000000F0 000000F0 34 05 00 01 */ addic. r0, r5, 1
/* 000000F4 000000F4 41 82 00 08 */ beq .L000000FC
.L000000F8:
/* 000000F8 000000F8 39 00 00 05 */ li r8, 5
.L000000FC:
# group 6: may set r8 = 6
/* 000000FC 000000FC 2C 03 00 00 */ cmpwi r3, 0
/* 00000100 00000100 41 82 00 0C */ beq .L0000010C
/* 00000104 00000104 2C 04 00 00 */ cmpwi r4, 0
/* 00000108 00000108 40 82 00 14 */ bne .L0000011C
.L0000010C:
/* 0000010C 0000010C 2C 05 00 00 */ cmpwi r5, 0
/* 00000110 00000110 41 82 00 20 */ beq .L00000130
/* 00000114 00000114 2C 06 00 00 */ cmpwi r6, 0
/* 00000118 00000118 41 82 00 18 */ beq .L00000130
.L0000011C:
/* 0000011C 0000011C 34 03 00 01 */ addic. r0, r3, 1
/* 00000120 00000120 40 82 00 0C */ bne .L0000012C
/* 00000124 00000124 34 04 00 01 */ addic. r0, r4, 1
/* 00000128 00000128 41 82 00 08 */ beq .L00000130
.L0000012C:
/* 0000012C 0000012C 39 00 00 06 */ li r8, 6
.L00000130:
# group 7: selects r5/r6 into r7 by r4, compares with r3 + 1
/* 00000130 00000130 2C 03 00 00 */ cmpwi r3, 0
/* 00000134 00000134 41 82 00 30 */ beq .L00000164
/* 00000138 00000138 2C 04 00 00 */ cmpwi r4, 0
/* 0000013C 0000013C 41 82 00 0C */ beq .L00000148
/* 00000140 00000140 7C A7 2B 78 */ mr r7, r5
/* 00000144 00000144 48 00 00 08 */ b .L0000014C
.L00000148:
/* 00000148 00000148 7C C7 33 78 */ mr r7, r6
.L0000014C:
/* 0000014C 0000014C 38 03 00 01 */ addi r0, r3, 1
/* 00000150 00000150 7C 00 38 00 */ cmpw r0, r7
/* 00000154 00000154 40 82 00 10 */ bne .L00000164
/* 00000158 00000158 34 04 00 01 */ addic. r0, r4, 1
/* 0000015C 0000015C 41 82 00 08 */ beq .L00000164
/* 00000160 00000160 39 00 00 07 */ li r8, 7
.L00000164:
# group 8: may set r8 = 8 (note r5 is clobbered with r6 when r4 == 0)
/* 00000164 00000164 2C 03 00 00 */ cmpwi r3, 0
/* 00000168 00000168 40 82 00 28 */ bne .L00000190
/* 0000016C 0000016C 2C 04 00 00 */ cmpwi r4, 0
/* 00000170 00000170 41 82 00 08 */ beq .L00000178
/* 00000174 00000174 48 00 00 08 */ b .L0000017C
.L00000178:
/* 00000178 00000178 7C C5 33 78 */ mr r5, r6
.L0000017C:
/* 0000017C 0000017C 38 03 00 01 */ addi r0, r3, 1
/* 00000180 00000180 7C 00 28 00 */ cmpw r0, r5
/* 00000184 00000184 41 82 00 0C */ beq .L00000190
/* 00000188 00000188 34 04 00 01 */ addic. r0, r4, 1
/* 0000018C 0000018C 41 82 00 08 */ beq .L00000194
.L00000190:
/* 00000190 00000190 39 00 00 08 */ li r8, 8
.L00000194:
# return: result moved to r3
/* 00000194 00000194 7D 03 43 78 */ mr r3, r8
/* 00000198 00000198 4E 80 00 20 */ blr

View File

@@ -1,43 +0,0 @@
/*
 * Exercises nested short-circuit &&/|| combinations. Eight checks run in
 * order; each one that holds overwrites the result, so the last true
 * check (1..8) determines the return value, and 0 means none matched.
 */
int test(int a, int b, int c, int d) {
    int sum_ab = a + b;
    int sum_bc = b + c;
    int sel = b ? c : d; /* operand shared by checks 7 and 8 */
    int result = 0;

    if (sum_ab || (sum_bc && a * b) || (d && a)) {
        result = 1;
    }
    if (a && (b || c) && (d || a + 1)) {
        result = 2;
    }
    if ((a && d) || ((b || c) && a + 1)) {
        result = 3;
    }
    if (a && b && (c || d) && (a + 1 || b + 1)) {
        result = 4;
    }
    if (((a || b) && c) || (d && a + 1) || b + 1 || c + 1) {
        result = 5;
    }
    if (((a && b) || (c && d)) && (a + 1 || b + 1)) {
        result = 6;
    }
    if (a && sel == a + 1 && b + 1) {
        result = 7;
    }
    if (a || sel == a + 1 || b + 1) {
        result = 8;
    }
    return result;
}

View File

@@ -1,6 +0,0 @@
/*
 * Returns arg0 + arg1 when each pair -- (arg0, arg1) and (arg2, arg3) --
 * contains at least one nonzero value; otherwise returns arg2 + arg3.
 */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
    /* By De Morgan, the original compound condition fails exactly when
     * one of the two pairs is entirely zero. */
    if ((arg0 == 0) && (arg1 == 0)) {
        return arg2 + arg3;
    }
    if ((arg2 == 0) && (arg3 == 0)) {
        return arg2 + arg3;
    }
    return arg0 + arg1;
}

View File

@@ -1,27 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
# MIPS disassembly of test(): returns $a0 + $a1 when ($a0 or $a1 nonzero)
# AND ($a2 or $a3 nonzero); otherwise returns $a2 + $a3. The chosen sum is
# computed in the delay slot of the corresponding jr.
glabel test
/* 000090 00400090 14800003 */ bnez $a0, .L004000A0
/* 000094 00400094 00000000 */ nop
/* 000098 00400098 10A00007 */ beqz $a1, .L004000B8
/* 00009C 0040009C 00000000 */ nop
.L004000A0:
/* 0000A0 004000A0 14C00003 */ bnez $a2, .L004000B0
/* 0000A4 004000A4 00000000 */ nop
/* 0000A8 004000A8 10E00003 */ beqz $a3, .L004000B8
/* 0000AC 004000AC 00000000 */ nop
.L004000B0:
# both pairs nonzero: return $a0 + $a1
/* 0000B0 004000B0 03E00008 */ jr $ra
/* 0000B4 004000B4 00851021 */ addu $v0, $a0, $a1
.L004000B8:
# a pair was all-zero: return $a2 + $a3
/* 0000B8 004000B8 03E00008 */ jr $ra
/* 0000BC 004000BC 00C71021 */ addu $v0, $a2, $a3
# trailing padding returns (unreachable)
/* 0000C0 004000C0 03E00008 */ jr $ra
/* 0000C4 004000C4 00000000 */ nop
/* 0000C8 004000C8 03E00008 */ jr $ra
/* 0000CC 004000CC 00000000 */ nop

View File

@@ -1,6 +0,0 @@
/*
 * Returns arg0 + arg1 if both pairs (arg0, arg1) and (arg2, arg3) have a
 * nonzero member; otherwise returns arg2 + arg3.
 */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
    s32 first_pair_live = (arg0 != 0) || (arg1 != 0);
    s32 second_pair_live = (arg2 != 0) || (arg3 != 0);

    if (first_pair_live && second_pair_live) {
        return arg0 + arg1;
    }
    return arg2 + arg3;
}

View File

@@ -1,26 +0,0 @@
.set noat # allow manual use of $at
.set noreorder # don't insert nops after branches
# MIPS disassembly of test() (branch-likely variant): returns $a0 + $a1
# when ($a0 or $a1 nonzero) AND ($a2 or $a3 nonzero); otherwise returns
# $a2 + $a3. beql executes its delay slot only when the branch is taken,
# so "addu $v0, $a2, $a3" runs only on the early-out paths.
glabel test
/* 000090 00400090 14800003 */ bnez $a0, .L004000A0
/* 000094 00400094 00000000 */ nop
/* 000098 00400098 50A00008 */ beql $a1, $zero, .L004000BC
/* 00009C 0040009C 00C71021 */ addu $v0, $a2, $a3
.L004000A0:
/* 0000A0 004000A0 14C00003 */ bnez $a2, .L004000B0
/* 0000A4 004000A4 00000000 */ nop
/* 0000A8 004000A8 50E00004 */ beql $a3, $zero, .L004000BC
/* 0000AC 004000AC 00C71021 */ addu $v0, $a2, $a3
.L004000B0:
# both pairs nonzero: return $a0 + $a1 (sum in the jr delay slot)
/* 0000B0 004000B0 03E00008 */ jr $ra
/* 0000B4 004000B4 00851021 */ addu $v0, $a0, $a1
/* 0000B8 004000B8 00C71021 */ addu $v0, $a2, $a3
.L004000BC:
# early-out: $v0 already holds $a2 + $a3 from a beql delay slot
/* 0000BC 004000BC 03E00008 */ jr $ra
/* 0000C0 004000C0 00000000 */ nop
/* 0000C4 004000C4 00000000 */ nop
/* 0000C8 004000C8 00000000 */ nop
/* 0000CC 004000CC 00000000 */ nop

View File

@@ -1 +0,0 @@
--target ppc-mwcc-c

View File

@@ -1,6 +0,0 @@
/*
 * Picks which pair to sum: (arg0, arg1) when both pairs contain at least
 * one nonzero value, (arg2, arg3) otherwise.
 */
s32 test(s32 arg0, s32 arg1, s32 arg2, s32 arg3) {
    /* Equivalent to the positive form by De Morgan's laws: the first
     * pair's sum is returned unless some pair is entirely zero. */
    if (((arg0 == 0) && (arg1 == 0)) || ((arg2 == 0) && (arg3 == 0))) {
        return arg2 + arg3;
    }
    return arg0 + arg1;
}

View File

@@ -1,22 +0,0 @@
.include "macros.inc"
.section .text # 0x0 - 0x30
# PowerPC disassembly of test(): returns r3 + r4 when (r3 or r4 nonzero)
# AND (r5 or r6 nonzero); otherwise returns r5 + r6. Result goes in r3.
.global test
test:
/* 00000000 00000000 2C 03 00 00 */ cmpwi r3, 0
/* 00000004 00000004 40 82 00 0C */ bne .L00000010
/* 00000008 00000008 2C 04 00 00 */ cmpwi r4, 0
/* 0000000C 0000000C 41 82 00 1C */ beq .L00000028
.L00000010:
/* 00000010 00000010 2C 05 00 00 */ cmpwi r5, 0
/* 00000014 00000014 40 82 00 0C */ bne .L00000020
/* 00000018 00000018 2C 06 00 00 */ cmpwi r6, 0
/* 0000001C 0000001C 41 82 00 0C */ beq .L00000028
.L00000020:
# both pairs nonzero: return r3 + r4
/* 00000020 00000020 7C 63 22 14 */ add r3, r3, r4
/* 00000024 00000024 4E 80 00 20 */ blr
.L00000028:
# a pair was all-zero: return r5 + r6
/* 00000028 00000028 7C 65 32 14 */ add r3, r5, r6
/* 0000002C 0000002C 4E 80 00 20 */ blr

View File

@@ -1,6 +0,0 @@
/*
 * Sums the first pair (a, b) when both pairs contain a truthy value;
 * otherwise sums the second pair (c, d).
 */
int test(int a, int b, int c, int d) {
    /* Negated form of the original condition (De Morgan). */
    if (!(a || b) || !(c || d)) {
        return c + d;
    }
    return a + b;
}

Some files were not shown because too many files have changed in this diff. Show More