mirror of
https://github.com/androguard/androguard.git
synced 2024-11-23 05:00:11 +00:00
Fix a bug with invoke-super/range
Split variables to separate each definition (not exactly SSA, but sort of)
This commit is contained in:
commit
d4770c4b4b
1
.hgsubstate
Normal file
1
.hgsubstate
Normal file
@ -0,0 +1 @@
|
||||
ceadaf0d341fb0cc3c46fbafcd474a6f2c3ed320 elsim
|
27
CHANGELOG
Normal file
27
CHANGELOG
Normal file
@ -0,0 +1,27 @@
|
||||
1.9:
|
||||
- fix AXML bugs
|
||||
- add ARSC support
|
||||
- add sublimetext plugin
|
||||
- remove useless dependencies (networkx, pygments)
|
||||
- improve decompiler
|
||||
- fix various bugs for python2.6
|
||||
- add a tool (androdis) to disasm a dex file at a specific offset
|
||||
|
||||
1.6:
|
||||
- fix doc
|
||||
- add auto analysis
|
||||
- comunity
|
||||
|
||||
1.5.1:
|
||||
DAD:
|
||||
- Propagatation pass has been updated
|
||||
- Switches with fallthrough are handled
|
||||
Core:
|
||||
- fix and add the zipmodule python
|
||||
- fix bugs in search of packages
|
||||
- fix bugs in show_*
|
||||
- fix bug in ganalysis.py
|
||||
|
||||
1.5: stable release
|
||||
|
||||
|
177
LICENCE-2.0
Normal file
177
LICENCE-2.0
Normal file
@ -0,0 +1,177 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
15
Makefile
Normal file
15
Makefile
Normal file
@ -0,0 +1,15 @@
|
||||
CD = cd
|
||||
RM = rm -f
|
||||
|
||||
|
||||
.SILENT:
|
||||
|
||||
all : LIBS
|
||||
|
||||
LIBS :
|
||||
# cd androguard/core/bytecodes/libdvm && make
|
||||
cd elsim && make
|
||||
|
||||
clean :
|
||||
# cd androguard/core/bytecodes/libdvm && make clean
|
||||
cd elsim && make clean
|
113
README.txt
Normal file
113
README.txt
Normal file
@ -0,0 +1,113 @@
|
||||
##########################################################################
|
||||
################################# Androguard #############################
|
||||
##########################################################################
|
||||
################### http://code.google.com/p/androguard ##################
|
||||
######################## dev (at) androguard.re ##########################
|
||||
##########################################################################
|
||||
|
||||
1 -] About
|
||||
|
||||
Androguard (Android Guard) is primarily a tool written in full python to
|
||||
play with :
|
||||
- DEX, ODEX
|
||||
- APK
|
||||
- Android's binary xml
|
||||
|
||||
2 -] Usage
|
||||
|
||||
You need to follow the following information to install dependencies
|
||||
for androguard :
|
||||
http://code.google.com/p/androguard/wiki/Installation
|
||||
|
||||
You must go to the website to see more example :
|
||||
http://code.google.com/p/androguard/wiki/Usage
|
||||
|
||||
2.1 --] API
|
||||
|
||||
2.1.1 --] Instructions
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Instructions
|
||||
|
||||
2.2 --] Demos
|
||||
|
||||
see the source codes in the directory 'demos'
|
||||
|
||||
2.3 --] Tools
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Usage
|
||||
|
||||
2.4 --] Disassembler
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Disassembler
|
||||
|
||||
2.5 --] Analysis
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Analysis
|
||||
|
||||
2.6 --] Visualization
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Visualization
|
||||
|
||||
2.7 --] Similarities, Diffing, plagiarism/rip-off indicator
|
||||
|
||||
http://code.google.com/p/androguard/wiki/Similarity
|
||||
http://code.google.com/p/androguard/wiki/DetectingApplications
|
||||
|
||||
2.8 --] Open Source database of android malwares
|
||||
|
||||
http://code.google.com/p/androguard/wiki/DatabaseAndroidMalwares
|
||||
|
||||
2.9 --] Decompiler
|
||||
|
||||
2.10 --] Reverse
|
||||
|
||||
http://code.google.com/p/androguard/wiki/RE
|
||||
|
||||
3 -] Roadmap/Issues
|
||||
http://code.google.com/p/androguard/wiki/RoadMap
|
||||
http://code.google.com/p/androguard/issues/list
|
||||
|
||||
4 -] Authors: Androguard Team
|
||||
|
||||
Androguard + tools: Anthony Desnos <desnos at t0t0.fr>
|
||||
DAD (DAD is A Decompiler): Geoffroy Gueguen <geoffroy dot gueguen at gmail dot com>
|
||||
|
||||
5 -] Contributors
|
||||
|
||||
Craig Smith <agent dot craig at gmail dot com>: 64 bits patch + magic tricks
|
||||
|
||||
6 -] Licenses
|
||||
|
||||
6.1 --] Androguard
|
||||
|
||||
Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
6.2 -] DAD
|
||||
|
||||
Copyright (C) 2012, Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
22
ag-st/Default (Linux).sublime-keymap
Executable file
22
ag-st/Default (Linux).sublime-keymap
Executable file
@ -0,0 +1,22 @@
|
||||
[
|
||||
{
|
||||
"keys": ["ctrl+f5"],
|
||||
"command": "ag"
|
||||
},
|
||||
{
|
||||
"keys": ["f5"],
|
||||
"command": "ag_tr"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f6"],
|
||||
"command": "ag_ref_from"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f7"],
|
||||
"command": "ag_ref_to"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f8"],
|
||||
"command": "ag_reset"
|
||||
}
|
||||
]
|
22
ag-st/Default (OSX).sublime-keymap
Executable file
22
ag-st/Default (OSX).sublime-keymap
Executable file
@ -0,0 +1,22 @@
|
||||
[
|
||||
{
|
||||
"keys": ["ctrl+f5"],
|
||||
"command": "ag"
|
||||
},
|
||||
{
|
||||
"keys": ["f5"],
|
||||
"command": "ag_tr"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f6"],
|
||||
"command": "ag_ref_from"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f7"],
|
||||
"command": "ag_ref_to"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f8"],
|
||||
"command": "ag_reset"
|
||||
}
|
||||
]
|
22
ag-st/Default (Windows).sublime-keymap
Executable file
22
ag-st/Default (Windows).sublime-keymap
Executable file
@ -0,0 +1,22 @@
|
||||
[
|
||||
{
|
||||
"keys": ["ctrl+f5"],
|
||||
"command": "ag"
|
||||
},
|
||||
{
|
||||
"keys": ["f5"],
|
||||
"command": "ag_tr"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f6"],
|
||||
"command": "ag_ref_from"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f7"],
|
||||
"command": "ag_ref_to"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f8"],
|
||||
"command": "ag_reset"
|
||||
}
|
||||
]
|
22
ag-st/Default.sublime-commands
Executable file
22
ag-st/Default.sublime-commands
Executable file
@ -0,0 +1,22 @@
|
||||
[
|
||||
{
|
||||
"keys": ["ctrl+f5"],
|
||||
"command": "ag"
|
||||
},
|
||||
{
|
||||
"keys": ["f5"],
|
||||
"command": "ag_tr"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f6"],
|
||||
"command": "ag_ref_from"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f7"],
|
||||
"command": "ag_ref_to"
|
||||
},
|
||||
{
|
||||
"keys": ["ctrl+f8"],
|
||||
"command": "ag_reset"
|
||||
}
|
||||
]
|
8
ag-st/Default.sublime-mousemap
Normal file
8
ag-st/Default.sublime-mousemap
Normal file
@ -0,0 +1,8 @@
|
||||
[
|
||||
{
|
||||
"button": "button1", "count": 2,
|
||||
"press_command": "drag_select",
|
||||
"press_args": {"by": "words"},
|
||||
"command": "ag_double_click"
|
||||
}
|
||||
]
|
31
ag-st/Main.sublime-menu
Normal file
31
ag-st/Main.sublime-menu
Normal file
@ -0,0 +1,31 @@
|
||||
[
|
||||
{
|
||||
|
||||
"id": "tools",
|
||||
"caption": "Tools",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"id": "packages",
|
||||
"caption": "Packages",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"id": "androguard",
|
||||
"caption": "Androguard",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"caption": "View Strings",
|
||||
"command": "ag_strings"
|
||||
},
|
||||
{
|
||||
"caption": "-"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
32
ag-st/ag.JSON-tmLanguage
Normal file
32
ag-st/ag.JSON-tmLanguage
Normal file
@ -0,0 +1,32 @@
|
||||
{ "name": "Dalvik Classes (Androguard)",
|
||||
"scopeName": "source.ag",
|
||||
"fileTypes": [""],
|
||||
"patterns": [
|
||||
{
|
||||
"match": "^([\\w\\d\\_\/\\$]+)",
|
||||
"name": "storage.package",
|
||||
"comment": "package"
|
||||
},
|
||||
{
|
||||
"match": "^(\\s*[\\w\\d\\_\/\\$]+)(\\s*)(extends)(\\s*)(.)*",
|
||||
"name": "storage.class",
|
||||
"comment": "current class"
|
||||
},
|
||||
{
|
||||
"match": "^(\\s*(method\\:))(\\s*)((\\<init\\>)|(\\<clinit\\>))(.)+",
|
||||
"name": "support",
|
||||
"comment": "init/clinit method"
|
||||
},
|
||||
{
|
||||
"match": "^(\\s*(method\\:))(.)+",
|
||||
"name": "support.function.classicmethod",
|
||||
"comment": "method"
|
||||
},
|
||||
{
|
||||
"match": "^(\\s*(field\\:))(.)+",
|
||||
"name": "variable",
|
||||
"comment": "field"
|
||||
}
|
||||
],
|
||||
"uuid": "75ba8e42-b55b-4823-b4aa-e4e407da9c8d"
|
||||
}
|
772
ag-st/ag.py
Normal file
772
ag-st/ag.py
Normal file
@ -0,0 +1,772 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2013, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import sublime
|
||||
import sublime_plugin
|
||||
|
||||
import os
|
||||
import threading
|
||||
import hashlib
|
||||
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core.analysis import ganalysis
|
||||
from androguard.decompiler import decompiler
|
||||
from androguard.core import androconf
|
||||
|
||||
AG_DEX_VIEW = {}
|
||||
AG_APK_VIEW = {}
|
||||
|
||||
AG_DEX_VIEW_LINK = {}
|
||||
AG_REVERT_METHODS = {}
|
||||
AG_REVERT_FIELDS = {}
|
||||
AG_SC = {}
|
||||
AG_METHOD_ID = {}
|
||||
AG_FIELD_ID = {}
|
||||
AG_CLASS_ID = {}
|
||||
AG_AXML_ID = {}
|
||||
AG_ARSC_ID = {}
|
||||
|
||||
|
||||
AG_METHODS_LINE = {}
|
||||
AG_FIELDS_LINE = {}
|
||||
AG_CLASSES_LINE = {}
|
||||
|
||||
FILENAMES = {}
|
||||
|
||||
|
||||
def get_setting(key, default=None):
|
||||
return sublime.load_settings("ag.sublime-settings").get(key, default)
|
||||
|
||||
|
||||
def is_setting(key):
|
||||
return sublime.load_settings("ag.sublime-settings").has(key)
|
||||
|
||||
|
||||
def get_bytecodes_class(dex_object, ana_object, class_obj):
|
||||
i_buffer = ""
|
||||
|
||||
for i in class_obj.get_methods():
|
||||
i_buffer += dvm.get_bytecodes_method(dex_object, ana_object, i)
|
||||
|
||||
return i_buffer
|
||||
|
||||
|
||||
def get_field_info(field):
|
||||
i_buffer = ""
|
||||
|
||||
i_buffer += "# %s->%s %s [access_flags=%s]\n#\n" % (field.get_class_name(), field.get_name(), field.get_descriptor(), field.get_access_flags_string())
|
||||
|
||||
init_value = field.get_init_value()
|
||||
if init_value != None:
|
||||
i_buffer += repr(str(init_value.get_value()))
|
||||
|
||||
return i_buffer
|
||||
|
||||
|
||||
def get_axml_info(apk_object):
|
||||
i_buffer = "PERMISSIONS:\n"
|
||||
details_permissions = apk_object.get_details_permissions()
|
||||
for i in details_permissions:
|
||||
i_buffer += "\t%s %s\n" % (i, details_permissions[i])
|
||||
i_buffer += "\nMAIN ACTIVITY: %s\n" % apk_object.get_main_activity()
|
||||
|
||||
i_buffer += "\nACTIVITIES:\n"
|
||||
for i in apk_object.get_activities():
|
||||
i_buffer += "\t%s\n" % (i)
|
||||
|
||||
i_buffer += "\nSERVICES:\n"
|
||||
for i in apk_object.get_services():
|
||||
i_buffer += "\t%s\n" % (i)
|
||||
|
||||
i_buffer += "\nRECEIVERS:\n"
|
||||
for i in apk_object.get_receivers():
|
||||
i_buffer += "\t%s\n" % (i)
|
||||
|
||||
i_buffer += "\nPROVIDERS:\n"
|
||||
for i in apk_object.get_providers():
|
||||
i_buffer += "\t%s\n" % (i)
|
||||
|
||||
return i_buffer
|
||||
|
||||
|
||||
def get_sourcecode_method(dex_object, ana_object, method):
|
||||
return method.get_source()
|
||||
|
||||
|
||||
class MethodView:
|
||||
def __init__(self, orig_id, method):
|
||||
self.view = sublime.active_window().new_file()
|
||||
self.dex_object, self.ana_object = AG_DEX_VIEW[orig_id]
|
||||
AG_DEX_VIEW_LINK[self.view.id()] = orig_id
|
||||
AG_REVERT_METHODS[method] = self.view
|
||||
|
||||
self.view.set_name("%s-%s-%s.mag" % (method.get_class_name(), method.get_name(), method.get_descriptor()))
|
||||
self.view.set_syntax_file("Packages/ag-st/agbytecodes.tmLanguage")
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
|
||||
i_buffer = dvm.get_bytecodes_method(self.dex_object, self.ana_object, method)
|
||||
AG_METHOD_ID[self.view.id()] = method
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.sel().clear()
|
||||
|
||||
if self.view.id() not in AG_SC:
|
||||
AG_SC[self.view.id()] = False
|
||||
|
||||
|
||||
class FieldView:
|
||||
def __init__(self, orig_id, field):
|
||||
self.view = sublime.active_window().new_file()
|
||||
self.dex_object, self.ana_object = AG_DEX_VIEW[orig_id]
|
||||
AG_DEX_VIEW_LINK[self.view.id()] = orig_id
|
||||
AG_REVERT_FIELDS[field] = self.view
|
||||
|
||||
self.view.set_name("%s-%s-%s.fag" % (field.get_class_name(), field.get_name(), field.get_descriptor()))
|
||||
self.view.set_syntax_file("Packages/ag-st/agbytecodes.tmLanguage")
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
|
||||
i_buffer = get_field_info(field)
|
||||
AG_FIELD_ID[self.view.id()] = field
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.sel().clear()
|
||||
|
||||
|
||||
class ClassView:
|
||||
def __init__(self, orig_id, class_obj):
|
||||
self.view = sublime.active_window().new_file()
|
||||
self.dex_object, self.ana_object = AG_DEX_VIEW[orig_id]
|
||||
AG_DEX_VIEW_LINK[self.view.id()] = orig_id
|
||||
|
||||
self.view.set_name("%s.cag" % (class_obj.get_name()))
|
||||
self.view.set_syntax_file("Packages/ag-st/agbytecodes.tmLanguage")
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
|
||||
i_buffer = get_bytecodes_class(self.dex_object, self.ana_object, class_obj)
|
||||
|
||||
AG_CLASS_ID[self.view.id()] = class_obj
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.sel().clear()
|
||||
|
||||
if self.view.id() not in AG_SC:
|
||||
AG_SC[self.view.id()] = False
|
||||
|
||||
|
||||
class AgDoubleClick(sublime_plugin.TextCommand):
|
||||
def extract_bb(self, raw, position):
|
||||
raw_list = raw.split(" ")
|
||||
idx = 0
|
||||
for i in raw_list:
|
||||
begin = idx
|
||||
end = idx + len(i)
|
||||
|
||||
if position >= begin and position <= end:
|
||||
if ":" in i:
|
||||
return i.split(":")[-1]
|
||||
return i
|
||||
idx += len(i)
|
||||
return None
|
||||
|
||||
def run(self, edit):
|
||||
if self.view.id() in AG_METHOD_ID and self.view.id() in AG_SC:
|
||||
if not AG_SC[self.view.id()]:
|
||||
for sel in self.view.sel():
|
||||
if self.view.scope_name(sel.begin()) == 'source.agbt markup.list ':
|
||||
scope_region = self.view.extract_scope(sel.begin())
|
||||
|
||||
scope_value = self.view.substr(scope_region)
|
||||
|
||||
bb_selected = self.extract_bb(scope_value, sel.begin() - scope_region.begin())
|
||||
region_bb = self.view.find("^(%s)" % bb_selected, 0)
|
||||
self.view.run_command("goto_line", {"line": self.view.rowcol(region_bb.end())[0] + 1})
|
||||
|
||||
if self.view.id() in AG_DEX_VIEW:
|
||||
current_view_id = self.view.id()
|
||||
datas = []
|
||||
try:
|
||||
for sel in self.view.sel():
|
||||
x, y = self.view.rowcol(sel.begin())
|
||||
datas.append(x)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
dex_object, ana_object = AG_DEX_VIEW[self.view.id()]
|
||||
|
||||
for x in datas:
|
||||
if x in AG_METHODS_LINE[current_view_id]:
|
||||
MethodView(self.view.id(), AG_METHODS_LINE[current_view_id][x])
|
||||
elif x in AG_FIELDS_LINE[current_view_id]:
|
||||
FieldView(self.view.id(), AG_FIELDS_LINE[current_view_id][x])
|
||||
elif x in AG_CLASSES_LINE[current_view_id]:
|
||||
ClassView(self.view.id(), AG_CLASSES_LINE[current_view_id][x])
|
||||
|
||||
elif self.view.id() in AG_APK_VIEW:
|
||||
apk_object = AG_APK_VIEW[self.view.id()]
|
||||
|
||||
datas = []
|
||||
try:
|
||||
for sel in self.view.sel():
|
||||
datas.append(self.view.substr(self.view.line(sel)))
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
filename = FILENAMES[self.view.id()]
|
||||
for x in datas:
|
||||
if x == "classes.dex":
|
||||
at = AnalyseDexThread(sublime.active_window().new_file(), filename + "-classes", apk_object.get_dex())
|
||||
at.run()
|
||||
elif x == "AndroidManifest.xml":
|
||||
at = AnalyseAXMLThread(sublime.active_window().new_file(), filename + "-AndroidManifest", apk_object)
|
||||
at.run()
|
||||
elif x == "resources.arsc":
|
||||
at = AnalyseARSCThread(sublime.active_window().new_file(), filename + "-resources", apk_object.get_file(x))
|
||||
at.run()
|
||||
elif ".xml" in x:
|
||||
at = AnalyseAXMLSimpleThread(sublime.active_window().new_file(), filename + "-%s" + x, apk_object.get_file(x))
|
||||
at.run()
|
||||
else:
|
||||
new_view = sublime.active_window().new_file()
|
||||
new_view.set_name("%s-%s" % (filename, x))
|
||||
new_view.set_syntax_file("Packages/Text/Plain text.tmLanguage")
|
||||
|
||||
new_view.set_scratch(True)
|
||||
edit = new_view.begin_edit()
|
||||
new_view.sel().clear()
|
||||
|
||||
i_buffer = apk_object.get_file(x).decode('utf-8', 'replace')
|
||||
|
||||
new_view.replace(edit, sublime.Region(0, new_view.size()), i_buffer)
|
||||
new_view.end_edit(edit)
|
||||
new_view.set_read_only(True)
|
||||
|
||||
|
||||
class ThreadProgress():
|
||||
def __init__(self, thread, message, success_message):
|
||||
self.thread = thread
|
||||
self.message = message
|
||||
self.success_message = success_message
|
||||
self.addend = 1
|
||||
self.size = 8
|
||||
sublime.set_timeout(lambda: self.run(0), 100)
|
||||
|
||||
def run(self, i):
|
||||
if not self.thread.is_alive():
|
||||
if hasattr(self.thread, 'result') and not self.thread.result:
|
||||
sublime.status_message('')
|
||||
return
|
||||
sublime.status_message(self.success_message)
|
||||
return
|
||||
|
||||
before = i % self.size
|
||||
after = (self.size - 1) - before
|
||||
sublime.status_message('%s [%s=%s]' % \
|
||||
(self.message, ' ' * before, ' ' * after))
|
||||
if not after:
|
||||
self.addend = -1
|
||||
if not before:
|
||||
self.addend = 1
|
||||
i += self.addend
|
||||
sublime.set_timeout(lambda: self.run(i), 100)
|
||||
|
||||
|
||||
class AnalyseAXMLThread:
|
||||
def __init__(self, view, filename, apk_object):
|
||||
self.view = view
|
||||
self.apk_object = apk_object
|
||||
self.filename = filename
|
||||
#threading.Thread.__init__(self)
|
||||
|
||||
def run(self):
|
||||
self.view.set_name("%s.uaxml" % (self.filename))
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.sel().clear()
|
||||
#self.view.set_syntax_file("Packages/ag-st/agapk.tmLanguage")
|
||||
|
||||
i_buffer = get_axml_info(self.apk_object)
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.set_read_only(True)
|
||||
|
||||
AG_AXML_ID[self.view.id()] = self.apk_object
|
||||
|
||||
if self.view.id() not in AG_SC:
|
||||
AG_SC[self.view.id()] = False
|
||||
|
||||
|
||||
class AnalyseAXMLSimpleThread:
|
||||
def __init__(self, view, filename, raw_object):
|
||||
self.view = view
|
||||
self.raw_object = raw_object
|
||||
self.filename = filename
|
||||
#threading.Thread.__init__(self)
|
||||
|
||||
def run(self):
|
||||
self.view.set_name("%s.uaxml" % (self.filename))
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.sel().clear()
|
||||
self.view.set_syntax_file("Packages/XML/XML.tmLanguage")
|
||||
|
||||
ap = apk.AXMLPrinter(self.raw_object)
|
||||
i_buffer = ap.get_xml()
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.set_read_only(True)
|
||||
|
||||
|
||||
class AnalyseARSCThread:
|
||||
def __init__(self, view, filename, raw_object):
|
||||
self.view = view
|
||||
self.raw_object = raw_object
|
||||
self.filename = filename
|
||||
#threading.Thread.__init__(self)
|
||||
|
||||
def run(self):
|
||||
self.view.set_name("%s.uarsc" % (self.filename))
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.sel().clear()
|
||||
#self.view.set_syntax_file("Packages/ag-st/agapk.tmLanguage")
|
||||
|
||||
arscobj = apk.ARSCParser(self.raw_object)
|
||||
i_buffer = apk.get_arsc_info(arscobj)
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.set_read_only(True)
|
||||
|
||||
AG_ARSC_ID[self.view.id()] = arscobj
|
||||
|
||||
if self.view.id() not in AG_SC:
|
||||
AG_SC[self.view.id()] = False
|
||||
|
||||
|
||||
class AnalyseAPKThread:
|
||||
def __init__(self, view, filename, raw):
|
||||
self.view = view
|
||||
self.raw = raw
|
||||
self.filename = filename
|
||||
#threading.Thread.__init__(self)
|
||||
|
||||
def run(self):
|
||||
apk_object = apk.APK(self.raw, raw=True)
|
||||
self.view.set_name("%s.uapk" % (self.filename))
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.sel().clear()
|
||||
self.view.set_syntax_file("Packages/ag-st/agapk.tmLanguage")
|
||||
|
||||
i_buffer = ""
|
||||
# files_list = apk_object.get_files_types()
|
||||
# for i in files_list:
|
||||
# i_buffer += "%s: %s" % (i, files_list[i])
|
||||
|
||||
for i in sorted(apk_object.get_files()):
|
||||
i_buffer += "%s\n" % i
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.set_read_only(True)
|
||||
AG_APK_VIEW[self.view.id()] = apk_object
|
||||
FILENAMES[self.view.id()] = hashlib.sha1(apk_object.get_raw()).hexdigest()
|
||||
|
||||
|
||||
class AnalyseDexThread: # (threading.Thread):
|
||||
def __init__(self, view, filename, raw):
|
||||
self.view = view
|
||||
self.raw = raw
|
||||
self.filename = filename
|
||||
#threading.Thread.__init__(self)
|
||||
|
||||
def run(self):
|
||||
if androconf.is_android_raw(self.raw) == "DEY":
|
||||
dex_object = dvm.DalvikOdexVMFormat(self.raw)
|
||||
else:
|
||||
dex_object = dvm.DalvikVMFormat(self.raw)
|
||||
|
||||
ana_object = analysis.uVMAnalysis(dex_object)
|
||||
gvm_object = ganalysis.GVMAnalysis(ana_object, None)
|
||||
|
||||
dex_object.set_vmanalysis(ana_object)
|
||||
dex_object.set_gvmanalysis(gvm_object)
|
||||
|
||||
for i in androconf.CONF:
|
||||
if is_setting(i):
|
||||
androconf.CONF[i] = get_setting(i)
|
||||
|
||||
decompiler_option = get_setting("DEFAULT_DECOMPILER", "dad")
|
||||
|
||||
if decompiler_option == "dex2jad":
|
||||
dex_object.set_decompiler(decompiler.DecompilerDex2Jad(
|
||||
dex_object,
|
||||
androconf.CONF["PATH_DEX2JAR"],
|
||||
androconf.CONF["BIN_DEX2JAR"],
|
||||
androconf.CONF["PATH_JAD"],
|
||||
androconf.CONF["BIN_JAD"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
elif decompiler_option == "ded":
|
||||
dex_object.set_decompiler(decompiler.DecompilerDed(
|
||||
dex_object,
|
||||
androconf.CONF["PATH_DED"],
|
||||
androconf.CONF["BIN_DED"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
else:
|
||||
dex_object.set_decompiler(decompiler.DecompilerDAD(dex_object, ana_object))
|
||||
|
||||
dex_object.create_xref()
|
||||
dex_object.create_dref()
|
||||
|
||||
self.view.set_name("%s.ag" % (self.filename))
|
||||
|
||||
self.view.set_scratch(True)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.sel().clear()
|
||||
self.view.set_syntax_file("Packages/ag-st/ag.tmLanguage")
|
||||
|
||||
by_package = {}
|
||||
for current_class in dex_object.get_classes():
|
||||
name = current_class.get_name()
|
||||
|
||||
try:
|
||||
by_package[os.path.dirname(name)].append(current_class)
|
||||
except KeyError:
|
||||
by_package[os.path.dirname(name)] = []
|
||||
by_package[os.path.dirname(name)].append(current_class)
|
||||
|
||||
b_buffer = ""
|
||||
line = 0
|
||||
|
||||
AG_METHODS_LINE[self.view.id()] = {}
|
||||
AG_CLASSES_LINE[self.view.id()] = {}
|
||||
AG_FIELDS_LINE[self.view.id()] = {}
|
||||
for key in sorted(by_package.iterkeys()):
|
||||
b_buffer += "%s\n" % key
|
||||
line += 1
|
||||
|
||||
for c_class in sorted(by_package[key], key=lambda k: k.get_name()):
|
||||
b_buffer += "\t%s extends %s\n" % (c_class.get_name()[1:-1], c_class.get_superclassname()[1:-1])
|
||||
AG_CLASSES_LINE[self.view.id()][line] = c_class
|
||||
line += 1
|
||||
|
||||
for j in c_class.get_methods():
|
||||
b_buffer += "\t\tmethod: %s %s [%s] size:%d\n" % (j.get_name(), j.get_descriptor(), j.get_access_flags_string(), j.get_length())
|
||||
AG_METHODS_LINE[self.view.id()][line] = j
|
||||
line += 1
|
||||
|
||||
b_buffer += "\n"
|
||||
line += 1
|
||||
|
||||
for j in c_class.get_fields():
|
||||
b_buffer += "\t\tfield: %s %s [%s %s]" % (j.get_name(), j.get_descriptor(), j.get_access_flags_string(), dvm.get_type(j.get_descriptor()))
|
||||
|
||||
init_value = j.get_init_value()
|
||||
if init_value != None:
|
||||
b_buffer += " (%s)" % repr(str(init_value.get_value()))
|
||||
b_buffer += "\n"
|
||||
|
||||
AG_FIELDS_LINE[self.view.id()][line] = j
|
||||
line += 1
|
||||
|
||||
b_buffer += "\n"
|
||||
line += 1
|
||||
|
||||
l = dex_object.print_classes_hierarchy()
|
||||
h_buffer = ""
|
||||
for i in l:
|
||||
h_buffer += i + "\n"
|
||||
|
||||
b_buffer += h_buffer
|
||||
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), b_buffer)
|
||||
self.view.end_edit(edit)
|
||||
self.view.set_read_only(True)
|
||||
AG_DEX_VIEW[self.view.id()] = (dex_object, ana_object)
|
||||
FILENAMES[self.view.id()] = hashlib.sha1(dex_object.get_buff()).hexdigest()
|
||||
|
||||
|
||||
class AgCommand(sublime_plugin.WindowCommand):
|
||||
def run(self):
|
||||
self.view = self.window.active_view()
|
||||
|
||||
filename = self.view.file_name()
|
||||
|
||||
ret = androconf.is_android(filename)
|
||||
if ret == "APK":
|
||||
at = AnalyseAPKThread(self.window.new_file(), filename, open(filename, "rb").read())
|
||||
at.run()
|
||||
elif ret == "DEX" or ret == "DEY":
|
||||
at = AnalyseDexThread(self.window.new_file(), filename, open(filename, "rb").read())
|
||||
at.run()
|
||||
elif ret == "AXML":
|
||||
at = AnalyseAXMLSimpleThread(self.window.new_file(), filename, open(filename, "rb").read())
|
||||
at.run()
|
||||
elif ret == "ARSC":
|
||||
at = AnalyseARSCThread(self.window.new_file(), filename, open(filename, "rb").read())
|
||||
at.run()
|
||||
|
||||
#thread = AnalyseThread(self.window.new_file(), filename, open(filename, "rb").read())
|
||||
#thread.start()
|
||||
#ThreadProgress(thread,
|
||||
# "Analysing app ...",
|
||||
# "Finished !")
|
||||
|
||||
|
||||
def get_strings_info(dex_object, ana_object):
|
||||
i_buffer = ""
|
||||
|
||||
for i in dex_object.get_strings():
|
||||
i_buffer += repr(i) + "\n"
|
||||
if ana_object != None:
|
||||
ref = ana_object.tainted_variables.get_string(i)
|
||||
if ref != None:
|
||||
for path in ref.get_paths():
|
||||
access, idx = path[0]
|
||||
m_idx = path[1]
|
||||
method = dex_object.get_cm_method(m_idx)
|
||||
i_buffer += "\t\t%s %x %s->%s %s\n" % (access, idx, method[0], method[1], method[2][0] + method[2][1])
|
||||
|
||||
return i_buffer
|
||||
|
||||
|
||||
class AgStrings(sublime_plugin.WindowCommand):
|
||||
def run(self):
|
||||
self.view = self.window.active_view()
|
||||
if self.view.id() in AG_DEX_VIEW:
|
||||
dex_object, ana_object = AG_DEX_VIEW[self.view.id()]
|
||||
|
||||
view = sublime.active_window().new_file()
|
||||
|
||||
filename = FILENAMES[self.view.id()]
|
||||
view.set_name("%s.strings" % filename)
|
||||
|
||||
view.set_scratch(True)
|
||||
edit = view.begin_edit()
|
||||
|
||||
i_buffer = get_strings_info(dex_object, ana_object)
|
||||
|
||||
view.replace(edit, sublime.Region(0, view.size()), i_buffer)
|
||||
view.end_edit(edit)
|
||||
view.sel().clear()
|
||||
|
||||
|
||||
class AgTrCommand(sublime_plugin.WindowCommand):
|
||||
def run(self):
|
||||
self.view = self.window.active_view()
|
||||
|
||||
if self.view.id() in AG_METHOD_ID:
|
||||
dex_object, ana_object = AG_DEX_VIEW[AG_DEX_VIEW_LINK[self.view.id()]]
|
||||
|
||||
self.view.sel().clear()
|
||||
if not AG_SC[self.view.id()]:
|
||||
self.view.set_syntax_file("Packages/Java/Java.tmLanguage")
|
||||
i_buffer = get_sourcecode_method(dex_object, ana_object, AG_METHOD_ID[self.view.id()])
|
||||
else:
|
||||
self.view.set_syntax_file("Packages/ag-st/agbytecodes.tmLanguage")
|
||||
i_buffer = dvm.get_bytecodes_method(dex_object, ana_object, AG_METHOD_ID[self.view.id()])
|
||||
|
||||
self.view.set_read_only(False)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
AG_SC[self.view.id()] = not AG_SC[self.view.id()]
|
||||
|
||||
elif self.view.id() in AG_CLASS_ID:
|
||||
dex_object, ana_object = AG_DEX_VIEW[AG_DEX_VIEW_LINK[self.view.id()]]
|
||||
|
||||
self.view.sel().clear()
|
||||
|
||||
if not AG_SC[self.view.id()]:
|
||||
self.view.set_syntax_file("Packages/Java/Java.tmLanguage")
|
||||
i_buffer = AG_CLASS_ID[self.view.id()].get_source()
|
||||
else:
|
||||
self.view.set_syntax_file("Packages/ag-st/agbytecodes.tmLanguage")
|
||||
i_buffer = get_bytecodes_class(dex_object, ana_object, AG_CLASS_ID[self.view.id()])
|
||||
|
||||
self.view.set_read_only(False)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
|
||||
AG_SC[self.view.id()] = not AG_SC[self.view.id()]
|
||||
|
||||
elif self.view.id() in AG_AXML_ID:
|
||||
apk_object = AG_AXML_ID[self.view.id()]
|
||||
|
||||
self.view.sel().clear()
|
||||
|
||||
if not AG_SC[self.view.id()]:
|
||||
i_buffer = apk_object.get_android_manifest_xml().toprettyxml()
|
||||
self.view.set_syntax_file("Packages/XML/XML.tmLanguage")
|
||||
else:
|
||||
i_buffer = get_axml_info(apk_object)
|
||||
|
||||
self.view.set_read_only(False)
|
||||
edit = self.view.begin_edit()
|
||||
self.view.replace(edit, sublime.Region(0, self.view.size()), i_buffer)
|
||||
self.view.end_edit(edit)
|
||||
|
||||
AG_SC[self.view.id()] = not AG_SC[self.view.id()]
|
||||
|
||||
|
||||
class AgRefFromCommand(sublime_plugin.WindowCommand):
|
||||
def set_ref(self, value):
|
||||
if value == -1:
|
||||
return
|
||||
|
||||
if self.view.id() in AG_METHOD_ID:
|
||||
self.set_ref_method(value, 0)
|
||||
elif self.view.id() in AG_FIELD_ID:
|
||||
self.set_ref_method(value, 1)
|
||||
|
||||
def set_ref_method(self, value, action):
|
||||
if action == 0:
|
||||
method = AG_METHOD_ID[self.view.id()]
|
||||
x_method = method.XREFfrom.items[value][0]
|
||||
else:
|
||||
field = AG_FIELD_ID[self.view.id()]
|
||||
x_method = field.DREFr.items[value][0]
|
||||
|
||||
if x_method in AG_REVERT_METHODS:
|
||||
if self.window.get_view_index(AG_REVERT_METHODS[x_method])[0] != -1:
|
||||
self.window.focus_view(AG_REVERT_METHODS[x_method])
|
||||
else:
|
||||
del AG_REVERT_METHODS[x_method]
|
||||
MethodView(AG_DEX_VIEW_LINK[self.view.id()], x_method)
|
||||
else:
|
||||
MethodView(AG_DEX_VIEW_LINK[self.view.id()], x_method)
|
||||
|
||||
def run(self):
|
||||
self.option_list = []
|
||||
|
||||
self.view = self.window.active_view()
|
||||
if self.view.id() in AG_METHOD_ID:
|
||||
method = AG_METHOD_ID[self.view.id()]
|
||||
for i in method.XREFfrom.items:
|
||||
x_method = i[0]
|
||||
self.option_list.append("%s %s %s" % (x_method.get_class_name(), x_method.get_name(), x_method.get_descriptor()))
|
||||
elif self.view.id() in AG_FIELD_ID:
|
||||
field = AG_FIELD_ID[self.view.id()]
|
||||
for i in field.DREFr.items:
|
||||
x_method = i[0]
|
||||
self.option_list.append("%s %s %s" % (x_method.get_class_name(), x_method.get_name(), x_method.get_descriptor()))
|
||||
|
||||
self.window.show_quick_panel(self.option_list, self.set_ref)
|
||||
|
||||
|
||||
class AgRefToCommand(sublime_plugin.WindowCommand):
|
||||
def set_ref(self, value):
|
||||
if value == -1:
|
||||
return
|
||||
|
||||
if self.view.id() in AG_METHOD_ID:
|
||||
self.set_ref_method(value, 0)
|
||||
elif self.view.id() in AG_FIELD_ID:
|
||||
self.set_ref_method(value, 1)
|
||||
|
||||
def set_ref_method(self, value, action):
|
||||
if action == 0:
|
||||
method = AG_METHOD_ID[self.view.id()]
|
||||
x_method = method.XREFto.items[value][0]
|
||||
else:
|
||||
field = AG_FIELD_ID[self.view.id()]
|
||||
x_method = field.DREFw.items[value][0]
|
||||
|
||||
if x_method in AG_REVERT_METHODS:
|
||||
if self.window.get_view_index(AG_REVERT_METHODS[x_method])[0] != -1:
|
||||
self.window.focus_view(AG_REVERT_METHODS[x_method])
|
||||
else:
|
||||
del AG_REVERT_METHODS[x_method]
|
||||
MethodView(AG_DEX_VIEW_LINK[self.view.id()], x_method)
|
||||
else:
|
||||
MethodView(AG_DEX_VIEW_LINK[self.view.id()], x_method)
|
||||
|
||||
def run(self):
|
||||
self.option_list = []
|
||||
|
||||
self.view = self.window.active_view()
|
||||
if self.view.id() in AG_METHOD_ID:
|
||||
method = AG_METHOD_ID[self.view.id()]
|
||||
for i in method.XREFto.items:
|
||||
x_method = i[0]
|
||||
self.option_list.append("%s %s %s" % (x_method.get_class_name(), x_method.get_name(), x_method.get_descriptor()))
|
||||
elif self.view.id() in AG_FIELD_ID:
|
||||
field = AG_FIELD_ID[self.view.id()]
|
||||
for i in field.DREFw.items:
|
||||
x_method = i[0]
|
||||
self.option_list.append("%s %s %s" % (x_method.get_class_name(), x_method.get_name(), x_method.get_descriptor()))
|
||||
|
||||
self.window.show_quick_panel(self.option_list, self.set_ref)
|
||||
|
||||
|
||||
class AgReset(sublime_plugin.WindowCommand):
|
||||
def run(self):
|
||||
self.view = self.window.active_view()
|
||||
|
||||
global AG_DEX_VIEW
|
||||
global AG_APK_VIEW
|
||||
global AG_DEX_VIEW_LINK
|
||||
global AG_REVERT_METHODS
|
||||
global AG_REVERT_FIELDS
|
||||
global AG_SC
|
||||
global AG_METHOD_ID
|
||||
global AG_FIELD_ID
|
||||
global AG_CLASS_ID
|
||||
global AG_METHODS_LINE
|
||||
global AG_FIELDS_LINE
|
||||
global AG_CLASSES_LINE
|
||||
global AG_AXML_ID
|
||||
global AG_ARSC_ID
|
||||
|
||||
AG_DEX_VIEW = {}
|
||||
AG_APK_VIEW = {}
|
||||
|
||||
AG_DEX_VIEW_LINK = {}
|
||||
AG_REVERT_METHODS = {}
|
||||
AG_REVERT_FIELDS = {}
|
||||
AG_SC = {}
|
||||
AG_METHOD_ID = {}
|
||||
AG_FIELD_ID = {}
|
||||
AG_CLASS_ID = {}
|
||||
AG_AXML_ID = {}
|
||||
AG_ARSC_ID = {}
|
||||
|
||||
AG_METHODS_LINE = {}
|
||||
AG_FIELDS_LINE = {}
|
||||
AG_CLASSES_LINE = {}
|
||||
|
||||
print "Reset Androguard Plugin"
|
59
ag-st/ag.tmLanguage
Normal file
59
ag-st/ag.tmLanguage
Normal file
@ -0,0 +1,59 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>fileTypes</key>
|
||||
<array>
|
||||
<string></string>
|
||||
</array>
|
||||
<key>name</key>
|
||||
<string>Dalvik Classes (Androguard)</string>
|
||||
<key>patterns</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>package</string>
|
||||
<key>match</key>
|
||||
<string>^([\w\d\_/\$]+)</string>
|
||||
<key>name</key>
|
||||
<string>storage.package</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>current class</string>
|
||||
<key>match</key>
|
||||
<string>^(\s*[\w\d\_/\$]+)(\s*)(extends)(\s*)(.)*</string>
|
||||
<key>name</key>
|
||||
<string>storage.class</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>init/clinit method</string>
|
||||
<key>match</key>
|
||||
<string>^(\s*(method\:))(\s*)((\<init\>)|(\<clinit\>))(.)+</string>
|
||||
<key>name</key>
|
||||
<string>support</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>method</string>
|
||||
<key>match</key>
|
||||
<string>^(\s*(method\:))(.)+</string>
|
||||
<key>name</key>
|
||||
<string>support.function.classicmethod</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>field</string>
|
||||
<key>match</key>
|
||||
<string>^(\s*(field\:))(.)+</string>
|
||||
<key>name</key>
|
||||
<string>variable</string>
|
||||
</dict>
|
||||
</array>
|
||||
<key>scopeName</key>
|
||||
<string>source.ag</string>
|
||||
<key>uuid</key>
|
||||
<string>75ba8e42-b55b-4823-b4aa-e4e407da9c8d</string>
|
||||
</dict>
|
||||
</plist>
|
17
ag-st/agapk.JSON-tmLanguage
Normal file
17
ag-st/agapk.JSON-tmLanguage
Normal file
@ -0,0 +1,17 @@
|
||||
{ "name": "APK (Androguard)",
|
||||
"scopeName": "source.uapk",
|
||||
"fileTypes": [""],
|
||||
"patterns": [
|
||||
{
|
||||
"match": "^((classes\\.dex)|(AndroidManifest\\.xml)|(resources\\.arsc))",
|
||||
"name": "keyword.resources",
|
||||
"captures": {
|
||||
"1": { "name": "keyword.classes" },
|
||||
"2": { "name": "keyword.androidmanifest" },
|
||||
"3": { "name": "keyword.resources" }
|
||||
},
|
||||
"comment": "resources.arsc"
|
||||
}
|
||||
],
|
||||
"uuid": "75ba8e42-b55b-4823-b4aa-e4e407da9d8c"
|
||||
}
|
45
ag-st/agapk.tmLanguage
Normal file
45
ag-st/agapk.tmLanguage
Normal file
@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>fileTypes</key>
|
||||
<array>
|
||||
<string></string>
|
||||
</array>
|
||||
<key>name</key>
|
||||
<string>APK (Androguard)</string>
|
||||
<key>patterns</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>captures</key>
|
||||
<dict>
|
||||
<key>1</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>keyword.classes</string>
|
||||
</dict>
|
||||
<key>2</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>keyword.androidmanifest</string>
|
||||
</dict>
|
||||
<key>3</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>keyword.resources</string>
|
||||
</dict>
|
||||
</dict>
|
||||
<key>comment</key>
|
||||
<string>resources.arsc</string>
|
||||
<key>match</key>
|
||||
<string>^((classes\.dex)|(AndroidManifest\.xml)|(resources\.arsc))</string>
|
||||
<key>name</key>
|
||||
<string>keyword.resources</string>
|
||||
</dict>
|
||||
</array>
|
||||
<key>scopeName</key>
|
||||
<string>source.uapk</string>
|
||||
<key>uuid</key>
|
||||
<string>75ba8e42-b55b-4823-b4aa-e4e407da9d8c</string>
|
||||
</dict>
|
||||
</plist>
|
62
ag-st/agbytecodes.JSON-tmLanguage
Normal file
62
ag-st/agbytecodes.JSON-tmLanguage
Normal file
@ -0,0 +1,62 @@
|
||||
{ "name": "Dalvik Bytecodes (Androguard)",
|
||||
"scopeName": "source.agbt",
|
||||
"fileTypes": [""],
|
||||
"patterns": [
|
||||
{
|
||||
"match": "\\s*(#).*$\\n?",
|
||||
"name": "comment",
|
||||
"comment": "comment"
|
||||
},
|
||||
{
|
||||
"match": "^([\\w\\d\\-\\<\\>\\_]+)(@)([\\w\\d]+)(\\s*)(:)",
|
||||
"name": "markup.bold",
|
||||
"comment": "bb name"
|
||||
},
|
||||
{
|
||||
"match": "(\\-|\\+)([\\d+a-z]+)",
|
||||
"name": "storage.int",
|
||||
"comment": "hexa value"
|
||||
},
|
||||
{
|
||||
"match": ",\\s(\\d*)",
|
||||
"name": "storage.int",
|
||||
"captures": {
|
||||
"1": { "name": "storage.int" }
|
||||
},
|
||||
"comment": "int value"
|
||||
},
|
||||
{
|
||||
"match": "^(\\s+\\d*\\s+)",
|
||||
"name": "markup.italic",
|
||||
"comment": "decimal value"
|
||||
},
|
||||
{
|
||||
"match": "(\\[)([\\w\\d\\-\\s@\\<\\>\\_\\:])+(\\])",
|
||||
"name": "markup.list",
|
||||
"comment": "next bbs list"
|
||||
},
|
||||
{
|
||||
"match": "(\\()([\\d\\-a-z]+)(\\))(\\s+)([a-z\\-\/0-9]+)([0-9]*)(\\s+)",
|
||||
"name": "keyword.control",
|
||||
"captures": {
|
||||
"2": { "name": "support.constant" },
|
||||
"3": { "name": "keyword.control.instruction" }
|
||||
},
|
||||
"comment": "hexa + instruction"
|
||||
},
|
||||
{
|
||||
"match": "(v\\d+)",
|
||||
"name": "variable",
|
||||
"comment": "register"
|
||||
},
|
||||
{ "name": "string.quoted.single",
|
||||
"begin": "(\\')",
|
||||
"end": "(\\')"
|
||||
},
|
||||
{ "name": "string.quoted.double",
|
||||
"begin": "(\")",
|
||||
"end": "(\")"
|
||||
}
|
||||
],
|
||||
"uuid": "75ba8e42-b55b-4823-b4aa-e4e407da9c8e"
|
||||
}
|
120
ag-st/agbytecodes.tmLanguage
Normal file
120
ag-st/agbytecodes.tmLanguage
Normal file
@ -0,0 +1,120 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>fileTypes</key>
|
||||
<array>
|
||||
<string></string>
|
||||
</array>
|
||||
<key>name</key>
|
||||
<string>Dalvik Bytecodes (Androguard)</string>
|
||||
<key>patterns</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>comment</string>
|
||||
<key>match</key>
|
||||
<string>\s*(#).*$\n?</string>
|
||||
<key>name</key>
|
||||
<string>comment</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>bb name</string>
|
||||
<key>match</key>
|
||||
<string>^([\w\d\-\<\>\_]+)(@)([\w\d]+)(\s*)(:)</string>
|
||||
<key>name</key>
|
||||
<string>markup.bold</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>hexa value</string>
|
||||
<key>match</key>
|
||||
<string>(\-|\+)([\d+a-z]+)</string>
|
||||
<key>name</key>
|
||||
<string>storage.int</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>captures</key>
|
||||
<dict>
|
||||
<key>1</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>storage.int</string>
|
||||
</dict>
|
||||
</dict>
|
||||
<key>comment</key>
|
||||
<string>int value</string>
|
||||
<key>match</key>
|
||||
<string>,\s(\d*)</string>
|
||||
<key>name</key>
|
||||
<string>storage.int</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>decimal value</string>
|
||||
<key>match</key>
|
||||
<string>^(\s+\d*\s+)</string>
|
||||
<key>name</key>
|
||||
<string>markup.italic</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>next bbs list</string>
|
||||
<key>match</key>
|
||||
<string>(\[)([\w\d\-\s@\<\>\_\:])+(\])</string>
|
||||
<key>name</key>
|
||||
<string>markup.list</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>captures</key>
|
||||
<dict>
|
||||
<key>2</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>support.constant</string>
|
||||
</dict>
|
||||
<key>3</key>
|
||||
<dict>
|
||||
<key>name</key>
|
||||
<string>keyword.control.instruction</string>
|
||||
</dict>
|
||||
</dict>
|
||||
<key>comment</key>
|
||||
<string>hexa + instruction</string>
|
||||
<key>match</key>
|
||||
<string>(\()([\d\-a-z]+)(\))(\s+)([a-z\-/0-9]+)([0-9]*)(\s+)</string>
|
||||
<key>name</key>
|
||||
<string>keyword.control</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>comment</key>
|
||||
<string>register</string>
|
||||
<key>match</key>
|
||||
<string>(v\d+)</string>
|
||||
<key>name</key>
|
||||
<string>variable.smali</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>begin</key>
|
||||
<string>(\')</string>
|
||||
<key>end</key>
|
||||
<string>(\')</string>
|
||||
<key>name</key>
|
||||
<string>string.quoted.single</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>begin</key>
|
||||
<string>(")</string>
|
||||
<key>end</key>
|
||||
<string>(")</string>
|
||||
<key>name</key>
|
||||
<string>string.quoted.double</string>
|
||||
</dict>
|
||||
</array>
|
||||
<key>scopeName</key>
|
||||
<string>source.agbt</string>
|
||||
<key>uuid</key>
|
||||
<string>75ba8e42-b55b-4823-b4aa-e4e407da9c8e</string>
|
||||
</dict>
|
||||
</plist>
|
105
androapkinfo.py
Executable file
105
androapkinfo.py
Executable file
@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.analysis import analysis
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename (APK)', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-d', '--directory'), 'help' : 'directory : use this directory', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-t', '--tag'), 'help' : 'display tags', 'action' : 'count' }
|
||||
option_3 = { 'name' : ('-v', '--version'), 'help' : 'version', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3]
|
||||
|
||||
def display_dvm_info(apk):
|
||||
vm = dvm.DalvikVMFormat(apk.get_dex())
|
||||
vmx = analysis.uVMAnalysis(vm)
|
||||
|
||||
print "Native code:", analysis.is_native_code(vmx)
|
||||
print "Dynamic code:", analysis.is_dyn_code(vmx)
|
||||
print "Reflection code:", analysis.is_reflection_code(vmx)
|
||||
print "Ascii Obfuscation:", analysis.is_ascii_obfuscation(vm)
|
||||
|
||||
for i in vmx.get_methods():
|
||||
i.create_tags()
|
||||
if not i.tags.empty():
|
||||
print i.method.get_class_name(), i.method.get_name(), i.tags
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None :
|
||||
ret_type = androconf.is_android( options.input )
|
||||
|
||||
print os.path.basename(options.input), ":"
|
||||
if ret_type == "APK" :
|
||||
try :
|
||||
a = apk.APK(options.input, zipmodule=2)
|
||||
if a.is_valid_APK() :
|
||||
a.show()
|
||||
display_dvm_info( a )
|
||||
else :
|
||||
print "INVALID"
|
||||
except Exception, e :
|
||||
print "ERROR", e
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
elif options.directory != None :
|
||||
for root, dirs, files in os.walk( options.directory, followlinks=True ) :
|
||||
if files != [] :
|
||||
for f in files :
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/" :
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
ret_type = androconf.is_android( real_filename )
|
||||
if ret_type == "APK" :
|
||||
print os.path.basename( real_filename ), ":"
|
||||
try :
|
||||
a = apk.APK( real_filename )
|
||||
if a.is_valid_APK() :
|
||||
a.show()
|
||||
display_dvm_info( a )
|
||||
else :
|
||||
print "INVALID APK"
|
||||
raise("ooos")
|
||||
except Exception, e :
|
||||
print "ERROR", e
|
||||
raise("ooos")
|
||||
|
||||
elif options.version != None :
|
||||
print "Androapkinfo version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
89
androarsc.py
Executable file
@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from xml.dom import minidom
|
||||
import codecs
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk
|
||||
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (APK or android resources(arsc))', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-p', '--package'), 'help' : 'select the package (optional)', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-l', '--locale'), 'help' : 'select the locale (optional)', 'nargs' : 1 }
|
||||
option_3 = { 'name' : ('-t', '--type'), 'help' : 'select the type (string, integer, public, ...)', 'nargs' : 1 }
|
||||
option_4 = { 'name' : ('-o', '--output'), 'help' : 'filename output', 'nargs' : 1 }
|
||||
option_5 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
options = [option_0, option_1, option_2, option_3, option_4, option_5]
|
||||
|
||||
|
||||
def main(options, arguments):
|
||||
if options.input != None:
|
||||
buff = ""
|
||||
|
||||
arscobj = None
|
||||
ret_type = androconf.is_android(options.input)
|
||||
if ret_type == "APK":
|
||||
a = apk.APK(options.input)
|
||||
arscobj = a.get_android_resources()
|
||||
elif ret_type == "ARSC":
|
||||
arscobj = apk.ARSCParser(open(options.input, "rb").read())
|
||||
else:
|
||||
print "Unknown file type"
|
||||
return
|
||||
|
||||
if not options.package and not options.type and not options.locale:
|
||||
buff = ""
|
||||
for package in arscobj.get_packages_names():
|
||||
buff += package + "\n"
|
||||
for locale in arscobj.get_locales(package):
|
||||
buff += "\t" + repr(locale) + "\n"
|
||||
for ttype in arscobj.get_types(package, locale):
|
||||
buff += "\t\t" + ttype + "\n"
|
||||
|
||||
else:
|
||||
package = options.package or arscobj.get_packages_names()[0]
|
||||
ttype = options.type or "public"
|
||||
locale = options.locale or '\x00\x00'
|
||||
|
||||
buff = minidom.parseString(getattr(arscobj, "get_" + ttype + "_resources")(package, locale)).toprettyxml()
|
||||
|
||||
if options.output != None:
|
||||
fd = codecs.open(options.output, "w", "utf-8")
|
||||
fd.write(buff)
|
||||
fd.close()
|
||||
else:
|
||||
print buff
|
||||
|
||||
elif options.version != None:
|
||||
print "Androarsc version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = OptionParser()
|
||||
for option in options:
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
60
androauto.py
Executable file
@ -0,0 +1,60 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
|
||||
from optparse import OptionParser
|
||||
from androguard.core.analysis import auto
|
||||
from androguard.core.androconf import set_debug
|
||||
|
||||
option_0 = {'name': ('-d', '--directory'), 'help': 'directory input', 'nargs': 1}
|
||||
option_1 = {'name': ('-v', '--verbose'), 'help': 'add debug', 'action': 'count'}
|
||||
options = [option_0, option_1]
|
||||
|
||||
|
||||
class AndroLog:
|
||||
def __init__(self, id_file, filename):
|
||||
self.id_file = id_file
|
||||
|
||||
|
||||
def main(options, arguments):
|
||||
if options.verbose:
|
||||
set_debug()
|
||||
|
||||
if options.directory:
|
||||
settings = {
|
||||
"my": auto.DirectoryAndroAnalysis(options.directory),
|
||||
"log": AndroLog,
|
||||
"max_fetcher": 3,
|
||||
}
|
||||
|
||||
aa = auto.AndroAuto(settings)
|
||||
aa.go()
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = OptionParser()
|
||||
for option in options:
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
70
androaxml.py
Executable file
@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from xml.dom import minidom
|
||||
import codecs
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk
|
||||
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (APK or android\'s binary xml)', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--output'), 'help' : 'filename output of the xml', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
options = [option_0, option_1, option_2]
|
||||
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None :
|
||||
buff = ""
|
||||
|
||||
ret_type = androconf.is_android(options.input)
|
||||
if ret_type == "APK":
|
||||
a = apk.APK(options.input)
|
||||
buff = a.get_android_manifest_xml().toprettyxml(encoding="utf-8")
|
||||
elif ".xml" in options.input:
|
||||
ap = apk.AXMLPrinter(open(options.input, "rb").read())
|
||||
buff = minidom.parseString(ap.get_buff()).toprettyxml(encoding="utf-8")
|
||||
else:
|
||||
print "Unknown file type"
|
||||
return
|
||||
|
||||
if options.output != None :
|
||||
fd = codecs.open(options.output, "w", "utf-8")
|
||||
fd.write( buff )
|
||||
fd.close()
|
||||
else :
|
||||
print buff
|
||||
|
||||
elif options.version != None :
|
||||
print "Androaxml version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
67
androcsign.py
Executable file
@ -0,0 +1,67 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
|
||||
from androguard.core import androconf
|
||||
|
||||
sys.path.append("./elsim/")
|
||||
from elsim.elsign import dalvik_elsign
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-r', '--remove'), 'help' : 'remove the signature', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-o', '--output'), 'help' : 'output database', 'nargs' : 1 }
|
||||
option_3 = { 'name' : ('-l', '--list'), 'help' : 'list signatures in database', 'nargs' : 1 }
|
||||
option_4 = { 'name' : ('-c', '--check'), 'help' : 'check signatures in database', 'nargs' : 1 }
|
||||
option_5 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3, option_4, option_5]
|
||||
|
||||
def main(options, arguments) :
|
||||
s = dalvik_elsign.CSignature(pcs=dalvik_elsign.PublicCSignature)
|
||||
if options.input != None :
|
||||
ret = s.add_file( open( options.input, "rb" ).read() )
|
||||
if ret != None and options.output != None :
|
||||
s.add_indb( ret, options.output )
|
||||
|
||||
elif options.list != None :
|
||||
s.list_indb( options.list )
|
||||
|
||||
elif options.remove != None :
|
||||
s.remove_indb( options.remove, options.output )
|
||||
|
||||
elif options.check != None :
|
||||
s.check_db( options.check )
|
||||
|
||||
elif options.version != None :
|
||||
print "Androcsign version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
222
androdd.py
Executable file
@ -0,0 +1,222 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012/2013, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import shutil
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core.androgen import Androguard
|
||||
from androguard.core import androconf
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.bytecode import method2dot, method2format
|
||||
from androguard.decompiler import decompiler
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--output'), 'help' : 'base directory to output all files', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-d', '--decompiler'), 'help' : 'choose a decompiler', 'nargs' : 1 }
|
||||
option_3 = { 'name' : ('-j', '--jar'), 'help' : 'output jar file', 'action' : 'count' }
|
||||
|
||||
option_4 = { 'name' : ('-f', '--format'), 'help' : 'write the method in specific format (png, ...)', 'nargs' : 1 }
|
||||
|
||||
option_5 = { 'name' : ('-l', '--limit'), 'help' : 'limit analysis to specific methods/classes by using a regexp', 'nargs' : 1}
|
||||
option_6 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3, option_4, option_5, option_6]
|
||||
|
||||
|
||||
def valid_class_name(class_name):
|
||||
if class_name[-1] == ";":
|
||||
return class_name[1:-1]
|
||||
return class_name
|
||||
|
||||
|
||||
def create_directory(class_name, output):
|
||||
output_name = output
|
||||
if output_name[-1] != "/":
|
||||
output_name = output_name + "/"
|
||||
|
||||
pathdir = output_name + class_name
|
||||
try:
|
||||
if not os.path.exists(pathdir):
|
||||
os.makedirs(pathdir)
|
||||
except OSError:
|
||||
# FIXME
|
||||
pass
|
||||
|
||||
|
||||
def export_apps_to_format(filename, a, output, methods_filter=None, jar=None, decompiler_type=None, format=None):
|
||||
print "Dump information %s in %s" % (filename, output)
|
||||
|
||||
if not os.path.exists(output):
|
||||
print "Create directory %s" % output
|
||||
os.makedirs(output)
|
||||
else:
|
||||
print "Clean directory %s" % output
|
||||
androconf.rrmdir(output)
|
||||
os.makedirs(output)
|
||||
|
||||
methods_filter_expr = None
|
||||
if methods_filter:
|
||||
methods_filter_expr = re.compile(methods_filter)
|
||||
|
||||
output_name = output
|
||||
if output_name[-1] != "/":
|
||||
output_name = output_name + "/"
|
||||
|
||||
dump_classes = []
|
||||
for vm in a.get_vms():
|
||||
print "Analysis ...",
|
||||
sys.stdout.flush()
|
||||
vmx = analysis.VMAnalysis(vm)
|
||||
print "End"
|
||||
|
||||
print "Decompilation ...",
|
||||
sys.stdout.flush()
|
||||
|
||||
if not decompiler_type:
|
||||
vm.set_decompiler(decompiler.DecompilerDAD(vm, vmx))
|
||||
elif decompiler_type == "dex2jad":
|
||||
vm.set_decompiler(decompiler.DecompilerDex2Jad(vm,
|
||||
androconf.CONF["PATH_DEX2JAR"],
|
||||
androconf.CONF["BIN_DEX2JAR"],
|
||||
androconf.CONF["PATH_JAD"],
|
||||
androconf.CONF["BIN_JAD"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
elif decompiler_type == "dex2winejad":
|
||||
vm.set_decompiler(decompiler.DecompilerDex2WineJad(vm,
|
||||
androconf.CONF["PATH_DEX2JAR"],
|
||||
androconf.CONF["BIN_DEX2JAR"],
|
||||
androconf.CONF["PATH_JAD"],
|
||||
androconf.CONF["BIN_WINEJAD"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
elif decompiler_type == "ded":
|
||||
vm.set_decompiler(decompiler.DecompilerDed(vm,
|
||||
androconf.CONF["PATH_DED"],
|
||||
androconf.CONF["BIN_DED"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
elif decompiler_type == "dex2fernflower":
|
||||
vm.set_decompiler(decompiler.DecompilerDex2Fernflower(vm,
|
||||
androconf.CONF["PATH_DEX2JAR"],
|
||||
androconf.CONF["BIN_DEX2JAR"],
|
||||
androconf.CONF["PATH_FERNFLOWER"],
|
||||
androconf.CONF["BIN_FERNFLOWER"],
|
||||
androconf.CONF["OPTIONS_FERNFLOWER"],
|
||||
androconf.CONF["TMP_DIRECTORY"]))
|
||||
else:
|
||||
raise("invalid decompiler !")
|
||||
print "End"
|
||||
|
||||
if jar:
|
||||
print "jar ...",
|
||||
filenamejar = decompiler.Dex2Jar(vm,
|
||||
androconf.CONF["PATH_DEX2JAR"],
|
||||
androconf.CONF["BIN_DEX2JAR"],
|
||||
androconf.CONF["TMP_DIRECTORY"]).get_jar()
|
||||
shutil.move(filenamejar, output + "classes.jar")
|
||||
print "End"
|
||||
|
||||
for method in vm.get_methods():
|
||||
if methods_filter_expr:
|
||||
msig = "%s%s%s" % (method.get_class_name(),
|
||||
method.get_name(),
|
||||
method.get_descriptor())
|
||||
if not methods_filter_expr.search(msig):
|
||||
continue
|
||||
|
||||
filename_class = valid_class_name(method.get_class_name())
|
||||
create_directory(filename_class, output)
|
||||
|
||||
print "Dump %s %s %s ..." % (method.get_class_name(),
|
||||
method.get_name(),
|
||||
method.get_descriptor()),
|
||||
|
||||
filename_class = output_name + filename_class
|
||||
if filename_class[-1] != "/":
|
||||
filename_class = filename_class + "/"
|
||||
|
||||
descriptor = method.get_descriptor()
|
||||
descriptor = descriptor.replace(";", "")
|
||||
descriptor = descriptor.replace(" ", "")
|
||||
descriptor = descriptor.replace("(", "-")
|
||||
descriptor = descriptor.replace(")", "-")
|
||||
descriptor = descriptor.replace("/", "_")
|
||||
|
||||
filename = filename_class + method.get_name() + descriptor
|
||||
if len(method.get_name() + descriptor) > 250:
|
||||
all_identical_name_methods = vm.get_methods_descriptor(method.get_class_name(), method.get_name())
|
||||
pos = 0
|
||||
for i in all_identical_name_methods:
|
||||
if i.get_descriptor() == method.get_descriptor():
|
||||
break
|
||||
pos += 1
|
||||
|
||||
filename = filename_class + method.get_name() + "_%d" % pos
|
||||
|
||||
buff = method2dot(vmx.get_method(method))
|
||||
|
||||
if format:
|
||||
print "%s ..." % format,
|
||||
method2format(filename + "." + format, format, None, buff)
|
||||
|
||||
if method.get_class_name() not in dump_classes:
|
||||
print "source codes ...",
|
||||
current_class = vm.get_class(method.get_class_name())
|
||||
current_filename_class = valid_class_name(current_class.get_name())
|
||||
create_directory(filename_class, output)
|
||||
|
||||
current_filename_class = output_name + current_filename_class + ".java"
|
||||
fd = open(current_filename_class, "w")
|
||||
fd.write(current_class.get_source())
|
||||
fd.close()
|
||||
|
||||
dump_classes.append(method.get_class_name())
|
||||
|
||||
print "bytecodes ...",
|
||||
bytecode_buff = dvm.get_bytecodes_method(vm, vmx, method)
|
||||
fd = open(filename + ".ag", "w")
|
||||
fd.write(bytecode_buff)
|
||||
fd.close()
|
||||
|
||||
print
|
||||
|
||||
|
||||
def main(options, arguments):
|
||||
if options.input != None and options.output != None:
|
||||
a = Androguard([options.input])
|
||||
export_apps_to_format(options.input, a, options.output, options.limit, options.jar, options.decompiler, options.format)
|
||||
elif options.version != None:
|
||||
print "Androdd version %s" % androconf.ANDROGUARD_VERSION
|
||||
else:
|
||||
print "Please, specify an input file and an output directory"
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = OptionParser()
|
||||
for option in options:
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
118
androdiff.py
Executable file
@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core import androconf
|
||||
|
||||
sys.path.append("./elsim")
|
||||
from elsim import elsim
|
||||
from elsim.elsim_dalvik import ProxyDalvik, FILTERS_DALVIK_SIM, ProxyDalvikMethod, FILTERS_DALVIK_BB
|
||||
from elsim.elsim_dalvik import ProxyDalvikBasicBlock, FILTERS_DALVIK_DIFF_BB
|
||||
from elsim.elsim_dalvik import DiffDalvikMethod
|
||||
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 2 }
|
||||
option_1 = { 'name' : ('-t', '--threshold'), 'help' : 'define the threshold', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-c', '--compressor'), 'help' : 'define the compressor', 'nargs' : 1 }
|
||||
option_3 = { 'name' : ('-d', '--display'), 'help' : 'display the file in human readable format', 'action' : 'count' }
|
||||
#option_4 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific blocks (0 : orig, 1 : diff, 2 : new)', 'nargs' : 1 }
|
||||
option_5 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific class name (python regexp)', 'nargs' : 1 }
|
||||
option_6 = { 'name' : ('-s', '--size'), 'help' : 'exclude specific method below the specific size', 'nargs' : 1 }
|
||||
option_7 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3, option_5, option_6, option_7]
|
||||
|
||||
def main(options, arguments) :
|
||||
details = False
|
||||
if options.display != None :
|
||||
details = True
|
||||
|
||||
if options.input != None :
|
||||
ret_type = androconf.is_android( options.input[0] )
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( options.input[0] )
|
||||
d1 = dvm.DalvikVMFormat( a.get_dex() )
|
||||
elif ret_type == "DEX" :
|
||||
d1 = dvm.DalvikVMFormat( open(options.input[0], "rb").read() )
|
||||
|
||||
dx1 = analysis.VMAnalysis( d1 )
|
||||
|
||||
ret_type = androconf.is_android( options.input[1] )
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( options.input[1] )
|
||||
d2 = dvm.DalvikVMFormat( a.get_dex() )
|
||||
elif ret_type == "DEX" :
|
||||
d2 = dvm.DalvikVMFormat( open(options.input[1], "rb").read() )
|
||||
|
||||
dx2 = analysis.VMAnalysis( d2 )
|
||||
|
||||
print d1, dx1, d2, dx2
|
||||
sys.stdout.flush()
|
||||
|
||||
threshold = None
|
||||
if options.threshold != None :
|
||||
threshold = float(options.threshold)
|
||||
|
||||
FS = FILTERS_DALVIK_SIM
|
||||
FS[elsim.FILTER_SKIPPED_METH].set_regexp( options.exclude )
|
||||
FS[elsim.FILTER_SKIPPED_METH].set_size( options.size )
|
||||
el = elsim.Elsim( ProxyDalvik(d1, dx1), ProxyDalvik(d2, dx2), FS, threshold, options.compressor )
|
||||
el.show()
|
||||
|
||||
e1 = elsim.split_elements( el, el.get_similar_elements() )
|
||||
for i in e1 :
|
||||
j = e1[ i ]
|
||||
elb = elsim.Elsim( ProxyDalvikMethod(i), ProxyDalvikMethod(j), FILTERS_DALVIK_BB, threshold, options.compressor )
|
||||
#elb.show()
|
||||
|
||||
eld = elsim.Eldiff( ProxyDalvikBasicBlock(elb), FILTERS_DALVIK_DIFF_BB )
|
||||
#eld.show()
|
||||
|
||||
ddm = DiffDalvikMethod( i, j, elb, eld )
|
||||
ddm.show()
|
||||
|
||||
print "NEW METHODS"
|
||||
enew = el.get_new_elements()
|
||||
for i in enew :
|
||||
el.show_element( i, False )
|
||||
|
||||
print "DELETED METHODS"
|
||||
edel = el.get_deleted_elements()
|
||||
for i in edel :
|
||||
el.show_element( i )
|
||||
|
||||
elif options.version != None :
|
||||
print "Androdiff version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
66
androdis.py
Executable file
@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Axelle Apvrille <aafortinet at gmail.com>
|
||||
# Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import os
|
||||
from optparse import OptionParser
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.bytecodes.apk import *
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename (DEX/ODEX)', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--offset'), 'help' : 'offset to disassemble', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-s', '--size'), 'help' : 'size', 'nargs' : 1 }
|
||||
|
||||
options = [option_0, option_1, option_2]
|
||||
|
||||
|
||||
def disassemble(dex, offset, size):
|
||||
d = dvm.auto(dex)
|
||||
if d != None:
|
||||
nb = 0
|
||||
idx = offset
|
||||
for i in d.disassemble(offset, size):
|
||||
print "%-8d(%08x)" % (nb, idx),
|
||||
i.show(idx)
|
||||
print
|
||||
|
||||
idx += i.get_length()
|
||||
nb += 1
|
||||
|
||||
|
||||
def main(options, arguments):
|
||||
if options.input and options.offset and options.size:
|
||||
offset = int(options.offset, 0)
|
||||
size = int(options.size, 0)
|
||||
disassemble(options.input, offset, size)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = OptionParser()
|
||||
for option in options:
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
149
androdump.py
Executable file
@ -0,0 +1,149 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os, cmd, threading, re, atexit
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
import androguard, androconf, jvm
|
||||
|
||||
# External Libraries
|
||||
|
||||
# python-ptrace : http://bitbucket.org/haypo/python-ptrace/
|
||||
from ptrace import PtraceError
|
||||
from ptrace.tools import locateProgram
|
||||
from ptrace.debugger import ProcessExit, DebuggerError, PtraceDebugger, ProcessSignal, NewProcessEvent, ProcessExecution
|
||||
from ptrace.debugger.memory_mapping import readProcessMappings
|
||||
####################
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'pid', 'nargs' : 1 }
|
||||
|
||||
option_1 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1]
|
||||
|
||||
MAGIC_PATTERN = "\xca\xfe\xba\xbe"
|
||||
|
||||
class AndroPreDump :
|
||||
def __init__(self, input) :
|
||||
self.data = []
|
||||
|
||||
self.pid = int(input)
|
||||
self.debugger = PtraceDebugger()
|
||||
self.process = self.debugger.addProcess(self.pid, is_attached=False)
|
||||
atexit.register(self.debugger.quit)
|
||||
|
||||
Header = False
|
||||
Code = False
|
||||
|
||||
self.procmaps = readProcessMappings(self.process)
|
||||
for pm in self.procmaps:
|
||||
if pm.permissions.find("w") != -1 and pm.pathname == None :
|
||||
|
||||
# if Code == False and Header == True :
|
||||
# data = self.process.readBytes(pm.start, pm.end-pm.start)
|
||||
# idx = data.find("SourceFile")
|
||||
# if idx != -1 :
|
||||
# print "CODE", pm
|
||||
# self.data.append( (pm, data, idx) )
|
||||
# Code = True
|
||||
|
||||
if Header == False :
|
||||
data = self.process.readBytes(pm.start, pm.end-pm.start)
|
||||
idx = data.find(MAGIC_PATTERN)
|
||||
if idx != -1 :
|
||||
print "HEADER", pm
|
||||
self.data.append( (pm, data) )
|
||||
Header = True
|
||||
|
||||
self.dumpMemory( "java_dump_memory" )
|
||||
# self.dumpFiles( "java_files" )
|
||||
|
||||
def write(self, idx, buff) :
|
||||
self.process.writeBytes( idx, buff )
|
||||
|
||||
def getFilesBuffer(self) :
|
||||
for i in self.data :
|
||||
d = i[1]
|
||||
x = d.find(MAGIC_PATTERN)
|
||||
idx = x
|
||||
while x != -1 :
|
||||
yield i[0].start + idx, d[x:]
|
||||
d = d[x+len(MAGIC_PATTERN):]
|
||||
|
||||
idx += len(MAGIC_PATTERN)
|
||||
x = d.find(MAGIC_PATTERN)
|
||||
idx += x
|
||||
|
||||
def dumpMemory(self, base_filename) :
|
||||
for i in self.data :
|
||||
fd = open(base_filename + "-" + "0x%x-0x%x" % (i[0].start, i[0].end), "w")
|
||||
fd.write( i[1] )
|
||||
fd.close()
|
||||
|
||||
def dumpFiles(self, base_filename) :
|
||||
for i in self.data :
|
||||
fd = open(base_filename + "-" + "0x%x-0x%x" % (i[0].start + i[2], i[0].end), "w")
|
||||
fd.write( i[1][i[2]:] )
|
||||
fd.close()
|
||||
|
||||
class AndroDump :
|
||||
def __init__(self, adp) :
|
||||
self.__adp = adp
|
||||
|
||||
for i in self.__adp.getFilesBuffer() :
|
||||
try :
|
||||
print "0x%x :" % (i[0])
|
||||
j = jvm.JVMFormat( i[1] )
|
||||
|
||||
for method in j.get_methods() :
|
||||
print "\t -->", method.get_class_name(), method.get_name(), method.get_descriptor()
|
||||
|
||||
# if (method.get_class_name() == "Test2" and method.get_name() == "main") :
|
||||
# print "patch"
|
||||
|
||||
# code = method.get_code()
|
||||
#code.remplace_at( 51, [ "bipush", 20 ] )
|
||||
# code.show()
|
||||
|
||||
# print "\t\t-> %x" % (len(j.save()))
|
||||
|
||||
# self.__adp.write( i[0], j.save() )
|
||||
except Exception, e :
|
||||
print e
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None :
|
||||
apd = AndroPreDump( options.input )
|
||||
AndroDump( apd )
|
||||
|
||||
elif options.version != None :
|
||||
print "Androdump version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
68
androgexf.py
Executable file
@ -0,0 +1,68 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from xml.sax.saxutils import escape, unescape
|
||||
import sys, hashlib, os
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis import analysis, ganalysis
|
||||
from androguard.core import androconf
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (dex, apk)', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--output'), 'help' : 'filename output of the gexf', 'nargs' : 1 }
|
||||
|
||||
options = [option_0, option_1]
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None and options.output != None :
|
||||
ret_type = androconf.is_android( options.input )
|
||||
|
||||
vm = None
|
||||
a = None
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( options.input )
|
||||
if a.is_valid_APK() :
|
||||
vm = dvm.DalvikVMFormat( a.get_dex() )
|
||||
else :
|
||||
print "INVALID APK"
|
||||
elif ret_type == "DEX" :
|
||||
try :
|
||||
vm = dvm.DalvikVMFormat( open(options.input, "rb").read() )
|
||||
except Exception, e :
|
||||
print "INVALID DEX", e
|
||||
|
||||
vmx = analysis.VMAnalysis( vm )
|
||||
gvmx = ganalysis.GVMAnalysis( vmx, a )
|
||||
|
||||
b = gvmx.export_to_gexf()
|
||||
androconf.save_to_disk( b, options.output )
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
0
androguard/__init__.py
Normal file
0
androguard/core/__init__.py
Normal file
0
androguard/core/analysis/__init__.py
Normal file
2550
androguard/core/analysis/analysis.py
Normal file
File diff suppressed because it is too large
358
androguard/core/analysis/auto.py
Normal file
@ -0,0 +1,358 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import Queue
|
||||
import threading
|
||||
import time
|
||||
import zlib
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core.androconf import debug
|
||||
|
||||
|
||||
class AndroAuto(object):
|
||||
"""
|
||||
The main class which automatically analyses Android apps by calling methods
|
||||
from a specific object
|
||||
:param settings: the settings of the analysis
|
||||
:type settings: dict
|
||||
"""
|
||||
def __init__(self, settings):
|
||||
self.settings = settings
|
||||
|
||||
def dump(self):
|
||||
"""
|
||||
Dump the analysis
|
||||
"""
|
||||
self.settings["my"].dump()
|
||||
|
||||
def dump_file(self, filename):
|
||||
"""
|
||||
Dump the analysis to a file
|
||||
"""
|
||||
self.settings["my"].dump_file(filename)
|
||||
|
||||
def go(self):
|
||||
"""
|
||||
Launch the analysis
|
||||
"""
|
||||
myandro = self.settings["my"]
|
||||
|
||||
def worker(idx, q):
|
||||
debug("Running worker-%d" % idx)
|
||||
|
||||
while True:
|
||||
a, d, dx, axmlobj, arscobj = None, None, None, None, None
|
||||
try:
|
||||
filename, fileraw = q.get()
|
||||
id_file = zlib.adler32(fileraw)
|
||||
|
||||
debug("(worker-%d) get %s %d" % (idx, filename, id_file))
|
||||
|
||||
log = self.settings["log"](id_file, filename)
|
||||
|
||||
is_analysis_dex, is_analysis_adex = True, True
|
||||
debug("(worker-%d) filtering file %d" % (idx, id_file))
|
||||
filter_file_ret, filter_file_type = myandro.filter_file(log, fileraw)
|
||||
if filter_file_ret:
|
||||
debug("(worker-%d) analysis %s" % (id_file, filter_file_type))
|
||||
|
||||
if filter_file_type == "APK":
|
||||
a = myandro.create_apk(log, fileraw)
|
||||
is_analysis_dex = myandro.analysis_apk(log, a)
|
||||
fileraw = a.get_dex()
|
||||
filter_file_type = androconf.is_android_raw(fileraw)
|
||||
|
||||
elif filter_file_type == "AXML":
|
||||
axmlobj = myandro.create_axml(log, fileraw)
|
||||
myandro.analysis_axml(log, axmlobj)
|
||||
|
||||
elif filter_file_type == "ARSC":
|
||||
arscobj = myandro.create_arsc(log, fileraw)
|
||||
myandro.analysis_arsc(log, arscobj)
|
||||
|
||||
if is_analysis_dex and filter_file_type == "DEX":
|
||||
d = myandro.create_dex(log, fileraw)
|
||||
is_analysis_adex = myandro.analysis_dex(log, d)
|
||||
|
||||
elif is_analysis_dex and filter_file_type == "DEY":
|
||||
d = myandro.create_dey(log, fileraw)
|
||||
is_analysis_adex = myandro.analysis_dey(log, d)
|
||||
|
||||
if is_analysis_adex and d:
|
||||
dx = myandro.create_adex(log, d)
|
||||
myandro.analysis_adex(log, dx)
|
||||
|
||||
myandro.analysis_app(log, a, d, dx)
|
||||
|
||||
myandro.finish(log)
|
||||
except Exception, why:
|
||||
myandro.crash(log, why)
|
||||
myandro.finish(log)
|
||||
|
||||
del a, d, dx, axmlobj, arscobj
|
||||
q.task_done()
|
||||
|
||||
q = Queue.Queue(self.settings["max_fetcher"])
|
||||
for i in range(self.settings["max_fetcher"]):
|
||||
t = threading.Thread(target=worker, args=[i, q])
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
terminated = True
|
||||
while terminated:
|
||||
terminated = myandro.fetcher(q)
|
||||
|
||||
try:
|
||||
if terminated:
|
||||
time.sleep(10)
|
||||
except KeyboardInterrupt:
|
||||
terminated = False
|
||||
|
||||
q.join()
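
As a side note for readers of this diff: go() expects the settings dict to provide the "my", "log" and "max_fetcher" keys used by the worker above. A minimal, hypothetical driver sketch follows; it simply condenses the androauto.py script added earlier in this commit, and the "apks/" directory and the MyLog class name are placeholders, not part of the commit:

# hypothetical driver sketch, not part of the commit
from androguard.core.analysis import auto

class MyLog:
    # one instance is created per analysed file (see settings["log"] in worker())
    def __init__(self, id_file, filename):
        self.id_file = id_file
        self.filename = filename

settings = {
    "my": auto.DirectoryAndroAnalysis("apks/"),  # provides fetcher() and the analysis_* hooks
    "log": MyLog,                                # log factory, called as log(id_file, filename)
    "max_fetcher": 3,                            # number of worker threads
}

aa = auto.AndroAuto(settings)
aa.go()
aa.dump()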
|
||||
|
||||
|
||||
class DefaultAndroAnalysis(object):
|
||||
"""
|
||||
This class can be used as a template in order to analyse apps
|
||||
"""
|
||||
def fetcher(self, q):
|
||||
"""
|
||||
This method is called to fetch a new app in order to analyse it. The queue
|
||||
must be filled with tuples of the following format: (filename, raw)
|
||||
|
||||
:param q: the Queue to put new app
|
||||
"""
|
||||
pass
|
||||
|
||||
def filter_file(self, log, fileraw):
|
||||
"""
|
||||
This method is called in order to filter a specific app
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param fileraw: the raw app (a string)
|
||||
|
||||
:rtype: a tuple of 2 elements: a boolean telling whether the analysis must
|
||||
continue, and the file type
|
||||
"""
|
||||
file_type = androconf.is_android_raw(fileraw)
|
||||
if file_type == "APK" or file_type == "DEX" or file_type == "DEY" or file_type == "AXML" or file_type == "ARSC":
|
||||
if file_type == "APK":
|
||||
if androconf.is_valid_android_raw(fileraw):
|
||||
return (True, "APK")
|
||||
else:
|
||||
return (True, file_type)
|
||||
return (False, None)
|
||||
|
||||
def create_axml(self, log, fileraw):
|
||||
"""
|
||||
This method is called in order to create a new AXML object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param fileraw: the raw axml (a string)
|
||||
|
||||
:rtype: an :class:`AXMLPrinter` object
|
||||
"""
|
||||
return apk.AXMLPrinter(fileraw)
|
||||
|
||||
def create_arsc(self, log, fileraw):
|
||||
"""
|
||||
This method is called in order to create a new ARSC object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param fileraw: the raw arsc (a string)
|
||||
|
||||
:rtype: an :class:`ARSCParser` object
|
||||
"""
|
||||
return apk.ARSCParser(fileraw)
|
||||
|
||||
def create_apk(self, log, fileraw):
|
||||
"""
|
||||
This method is called in order to create a new APK object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param fileraw: the raw apk (a string)
|
||||
|
||||
:rtype: an :class:`APK` object
|
||||
"""
|
||||
return apk.APK(fileraw, raw=True, zipmodule=2)
|
||||
|
||||
def create_dex(self, log, dexraw):
|
||||
"""
|
||||
This method is called in order to create a DalvikVMFormat object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param dexraw: the raw classes.dex (a string)
|
||||
|
||||
:rtype: a :class:`DalvikVMFormat` object
|
||||
"""
|
||||
return dvm.DalvikVMFormat(dexraw)
|
||||
|
||||
def create_dey(self, log, deyraw):
|
||||
"""
|
||||
This method is called in order to create a DalvikOdexVMFormat object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param deyraw: the raw odex file (a string)
|
||||
|
||||
:rtype: a :class:`DalvikOdexVMFormat` object
|
||||
"""
|
||||
return dvm.DalvikOdexVMFormat(deyraw)
|
||||
|
||||
def create_adex(self, log, dexobj):
|
||||
"""
|
||||
This method is called in order to create a VMAnalysis object
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param dexobj: a :class:`DalvikVMFormat` object
|
||||
|
||||
:rtype: a :class:`VMAnalysis` object
|
||||
"""
|
||||
return analysis.uVMAnalysis(dexobj)
|
||||
|
||||
def analysis_axml(self, log, axmlobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param axmlobj: a :class:`AXMLPrinter` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_arsc(self, log, arscobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param arscobj: a :class:`ARSCParser` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_apk(self, log, apkobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param apkobj: a :class:`APK` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_dex(self, log, dexobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param dexobj: a :class:`DalvikVMFormat` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_dey(self, log, deyobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param deyobj: a :class:`DalvikOdexVMFormat` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_adex(self, log, adexobj):
|
||||
"""
|
||||
This method is called in order to know if the analysis must continue
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param adexobj: a :class:`VMAnalysis` object
|
||||
|
||||
:rtype: a boolean
|
||||
"""
|
||||
return True
|
||||
|
||||
def analysis_app(self, log, apkobj, dexobj, adexobj):
|
||||
"""
|
||||
This method is called if you wish to analyse the final app
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param apkobj: a :class:`APK` object
|
||||
:param dexobj: a :class:`DalvikVMFormat` object
|
||||
:param adexobj: a :class:`VMAnalysis` object
|
||||
"""
|
||||
pass
|
||||
|
||||
def finish(self, log):
|
||||
"""
|
||||
This method is called before the end of the analysis
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
"""
|
||||
pass
|
||||
|
||||
def crash(self, log, why):
|
||||
"""
|
||||
This method is called if a crash happens
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param why: the string exception
|
||||
"""
|
||||
pass
|
||||
|
||||
def dump(self):
|
||||
"""
|
||||
This method is called to dump the result
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
"""
|
||||
pass
|
||||
|
||||
def dump_file(self, filename):
|
||||
"""
|
||||
This method is called to dump the result in a file
|
||||
|
||||
:param log: an object which corresponds to a unique app
|
||||
:param filename: the filename to dump the result
|
||||
"""
|
||||
pass
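
Since DefaultAndroAnalysis is explicitly meant as a template, a subclass only needs to override the hooks it cares about. A hypothetical sketch is shown below; the SingleFileAnalysis name is illustrative, and it only relies on methods visible in this file and in the scripts above:

# hypothetical subclass sketch, not part of the commit
from androguard.core.analysis import auto

class SingleFileAnalysis(auto.DefaultAndroAnalysis):
    def __init__(self, filename):
        self.filename = filename

    def fetcher(self, q):
        # the queue expects (filename, raw) tuples; returning False ends the fetch loop
        q.put((self.filename, open(self.filename, "rb").read()))
        return False

    def analysis_app(self, log, apkobj, dexobj, adexobj):
        # called once the APK, the DalvikVMFormat and the VMAnalysis objects exist
        if dexobj != None:
            print "methods:", len(dexobj.get_methods())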
|
||||
|
||||
|
||||
class DirectoryAndroAnalysis(DefaultAndroAnalysis):
|
||||
"""
|
||||
A simple example class to analyse a directory
|
||||
"""
|
||||
def __init__(self, directory):
|
||||
self.directory = directory
|
||||
|
||||
def fetcher(self, q):
|
||||
for root, dirs, files in os.walk(self.directory, followlinks=True):
|
||||
if files != []:
|
||||
for f in files:
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/":
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
q.put((real_filename, open(real_filename, "rb").read()))
|
||||
return False
|
3488
androguard/core/analysis/ganalysis.py
Normal file
File diff suppressed because it is too large
1047
androguard/core/analysis/risk.py
Normal file
File diff suppressed because it is too large
376
androguard/core/analysis/sign.py
Normal file
@ -0,0 +1,376 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from androguard.core.analysis.analysis import TAINTED_PACKAGE_CREATE, TAINTED_PACKAGE_CALL
|
||||
from androguard.core.bytecodes import dvm
|
||||
|
||||
TAINTED_PACKAGE_INTERNAL_CALL = 2
|
||||
FIELD_ACCESS = { "R" : 0, "W" : 1 }
|
||||
PACKAGE_ACCESS = { TAINTED_PACKAGE_CREATE : 0, TAINTED_PACKAGE_CALL : 1, TAINTED_PACKAGE_INTERNAL_CALL : 2 }
|
||||
|
||||
class Sign :
|
||||
def __init__(self) :
|
||||
self.levels = {}
|
||||
self.hlevels = []
|
||||
|
||||
def add(self, level, value) :
|
||||
self.levels[ level ] = value
|
||||
self.hlevels.append( level )
|
||||
|
||||
def get_level(self, l) :
|
||||
return self.levels[ "L%d" % l ]
|
||||
|
||||
def get_string(self) :
|
||||
buff = ""
|
||||
for i in self.hlevels :
|
||||
buff += self.levels[ i ]
|
||||
return buff
|
||||
|
||||
def get_list(self) :
|
||||
return self.levels[ "sequencebb" ]
|
||||
|
||||
class Signature :
|
||||
def __init__(self, vmx) :
|
||||
self.vmx = vmx
|
||||
self.tainted_packages = self.vmx.get_tainted_packages()
|
||||
self.tainted_variables = self.vmx.get_tainted_variables()
|
||||
|
||||
self._cached_signatures = {}
|
||||
self._cached_fields = {}
|
||||
self._cached_packages = {}
|
||||
self._global_cached = {}
|
||||
|
||||
self.levels = {
|
||||
# Classical method signature with basic blocks, strings, fields, packages
|
||||
"L0" : {
|
||||
0 : ( "_get_strings_a", "_get_fields_a", "_get_packages_a" ),
|
||||
1 : ( "_get_strings_pa", "_get_fields_a", "_get_packages_a" ),
|
||||
2 : ( "_get_strings_a", "_get_fields_a", "_get_packages_pa_1" ),
|
||||
3 : ( "_get_strings_a", "_get_fields_a", "_get_packages_pa_2" ),
|
||||
},
|
||||
|
||||
# strings
|
||||
"L1" : [ "_get_strings_a1" ],
|
||||
|
||||
# exceptions
|
||||
"L2" : [ "_get_exceptions" ],
|
||||
|
||||
# fill array data
|
||||
"L3" : [ "_get_fill_array_data" ],
|
||||
}
|
||||
|
||||
self.classes_names = None
|
||||
self._init_caches()
|
||||
|
||||
def _get_method_info(self, m) :
|
||||
m1 = m.get_method()
|
||||
return "%s-%s-%s" % (m1.get_class_name(), m1.get_name(), m1.get_descriptor())
|
||||
|
||||
|
||||
def _get_sequence_bb(self, analysis_method) :
|
||||
l = []
|
||||
|
||||
for i in analysis_method.basic_blocks.get() :
|
||||
buff = ""
|
||||
instructions = [j for j in i.get_instructions()]
|
||||
if len(instructions) > 5 :
|
||||
for ins in instructions :
|
||||
buff += ins.get_name()
|
||||
if buff != "" :
|
||||
l.append( buff )
|
||||
|
||||
return l
|
||||
|
||||
def _get_hex(self, analysis_method) :
|
||||
code = analysis_method.get_method().get_code()
|
||||
if code == None :
|
||||
return ""
|
||||
|
||||
buff = ""
|
||||
for i in code.get_bc().get_instructions() :
|
||||
buff += dvm.clean_name_instruction( i )
|
||||
buff += dvm.static_operand_instruction( i )
|
||||
|
||||
return buff
|
||||
|
||||
def _get_bb(self, analysis_method, functions, options) :
|
||||
bbs = []
|
||||
for b in analysis_method.basic_blocks.get() :
|
||||
l = []
|
||||
l.append( (b.start, "B") )
|
||||
l.append( (b.start, "[") )
|
||||
|
||||
internal = []
|
||||
|
||||
op_value = b.get_last().get_op_value()
|
||||
|
||||
# return
|
||||
if op_value >= 0x0e and op_value <= 0x11 :
|
||||
internal.append( (b.end-1, "R") )
|
||||
|
||||
# if
|
||||
elif op_value >= 0x32 and op_value <= 0x3d :
|
||||
internal.append( (b.end-1, "I") )
|
||||
|
||||
# goto
|
||||
elif op_value >= 0x28 and op_value <= 0x2a :
|
||||
internal.append( (b.end-1, "G") )
|
||||
|
||||
# sparse or packed switch
|
||||
elif op_value >= 0x2b and op_value <= 0x2c :
|
||||
internal.append( (b.end-1, "G") )
|
||||
|
||||
|
||||
for f in functions :
|
||||
try :
|
||||
internal.extend( getattr( self, f )( analysis_method, options ) )
|
||||
except TypeError :
|
||||
internal.extend( getattr( self, f )( analysis_method ) )
|
||||
|
||||
internal.sort()
|
||||
|
||||
for i in internal :
|
||||
if i[0] >= b.start and i[0] < b.end :
|
||||
l.append( i )
|
||||
|
||||
del internal
|
||||
|
||||
l.append( (b.end, "]") )
|
||||
|
||||
bbs.append( ''.join(i[1] for i in l) )
|
||||
return bbs
|
||||
|
||||
def _init_caches(self) :
|
||||
if self._cached_fields == {} :
|
||||
for f_t, f in self.tainted_variables.get_fields() :
|
||||
self._cached_fields[ f ] = f_t.get_paths_length()
|
||||
n = 0
|
||||
for f in sorted( self._cached_fields ) :
|
||||
self._cached_fields[ f ] = n
|
||||
n += 1
|
||||
|
||||
if self._cached_packages == {} :
|
||||
for m_t, m in self.tainted_packages.get_packages() :
|
||||
self._cached_packages[ m ] = m_t.get_paths_length()
|
||||
n = 0
|
||||
for m in sorted( self._cached_packages ) :
|
||||
self._cached_packages[ m ] = n
|
||||
n += 1
|
||||
|
||||
def _get_fill_array_data(self, analysis_method) :
|
||||
buff = ""
|
||||
for b in analysis_method.basic_blocks.get() :
|
||||
for i in b.get_instructions() :
|
||||
if i.get_name() == "FILL-ARRAY-DATA" :
|
||||
buff_tmp = i.get_operands()
|
||||
for j in range(0, len(buff_tmp)) :
|
||||
buff += "\\x%02x" % ord( buff_tmp[j] )
|
||||
return buff
|
||||
|
||||
def _get_exceptions(self, analysis_method) :
|
||||
buff = ""
|
||||
|
||||
method = analysis_method.get_method()
|
||||
code = method.get_code()
|
||||
if code == None or code.get_tries_size() <= 0 :
|
||||
return buff
|
||||
|
||||
handler_catch_list = code.get_handlers()
|
||||
|
||||
for handler_catch in handler_catch_list.get_list() :
|
||||
for handler in handler_catch.get_handlers() :
|
||||
buff += analysis_method.get_vm().get_cm_type( handler.get_type_idx() )
|
||||
return buff
|
||||
|
||||
def _get_strings_a1(self, analysis_method) :
|
||||
buff = ""
|
||||
|
||||
strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() )
|
||||
for s in strings_method :
|
||||
for path in strings_method[s] :
|
||||
buff += s.replace('\n', ' ')
|
||||
return buff
|
||||
|
||||
def _get_strings_pa(self, analysis_method) :
|
||||
l = []
|
||||
|
||||
strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() )
|
||||
for s in strings_method :
|
||||
for path in strings_method[s] :
|
||||
l.append( ( path[1], "S%d" % len(s) ) )
|
||||
return l
|
||||
|
||||
|
||||
def _get_strings_a(self, analysis_method) :
|
||||
key = "SA-%s" % self._get_method_info(analysis_method)
|
||||
if key in self._global_cached :
|
||||
return self._global_cached[ key ]
|
||||
|
||||
l = []
|
||||
|
||||
strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() )
|
||||
for s in strings_method :
|
||||
for path in strings_method[s] :
|
||||
l.append( ( path[1], "S") )
|
||||
|
||||
self._global_cached[ key ] = l
|
||||
return l
|
||||
|
||||
def _get_fields_a(self, analysis_method) :
|
||||
key = "FA-%s" % self._get_method_info(analysis_method)
|
||||
if key in self._global_cached :
|
||||
return self._global_cached[ key ]
|
||||
|
||||
fields_method = self.tainted_variables.get_fields_by_method( analysis_method.get_method() )
|
||||
l = []
|
||||
|
||||
for f in fields_method :
|
||||
for path in fields_method[ f ] :
|
||||
l.append( (path[1], "F%d" % FIELD_ACCESS[ path[0] ]) )
|
||||
|
||||
self._global_cached[ key ] = l
|
||||
return l
|
||||
|
||||
def _get_packages_a(self, analysis_method) :
|
||||
packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() )
|
||||
l = []
|
||||
|
||||
for m in packages_method :
|
||||
for path in packages_method[ m ] :
|
||||
l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) )
|
||||
return l
|
||||
|
||||
def _get_packages(self, analysis_method, include_packages) :
|
||||
l = self._get_packages_pa_1( analysis_method, include_packages )
|
||||
return "".join([ i[1] for i in l ])
|
||||
|
||||
def _get_packages_pa_1(self, analysis_method, include_packages) :
|
||||
key = "PA1-%s-%s" % (self._get_method_info(analysis_method), include_packages)
|
||||
if key in self._global_cached :
|
||||
return self._global_cached[ key ]
|
||||
|
||||
packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() )
|
||||
if self.classes_names == None :
|
||||
self.classes_names = analysis_method.get_vm().get_classes_names()
|
||||
|
||||
l = []
|
||||
|
||||
|
||||
for m in packages_method :
|
||||
for path in packages_method[ m ] :
|
||||
present = False
|
||||
for i in include_packages :
|
||||
if m.find(i) == 0 :
|
||||
present = True
|
||||
break
|
||||
|
||||
if path.get_access_flag() == 1 :
|
||||
dst_class_name, dst_method_name, dst_descriptor = path.get_dst( analysis_method.get_vm().get_class_manager() )
|
||||
|
||||
if dst_class_name in self.classes_names :
|
||||
l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ 2 ]) ) )
|
||||
else :
|
||||
if present == True :
|
||||
l.append( (path.get_idx(), "P%s{%s%s%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], dst_class_name, dst_method_name, dst_descriptor ) ) )
|
||||
else :
|
||||
l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) )
|
||||
else :
|
||||
if present == True :
|
||||
l.append( (path.get_idx(), "P%s{%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], m) ) )
|
||||
else :
|
||||
l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) )
|
||||
|
||||
self._global_cached[ key ] = l
|
||||
return l
|
||||
|
||||
def _get_packages_pa_2(self, analysis_method, include_packages) :
|
||||
packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() )
|
||||
|
||||
l = []
|
||||
|
||||
for m in packages_method :
|
||||
for path in packages_method[ m ] :
|
||||
present = False
|
||||
for i in include_packages :
|
||||
if m.find(i) == 0 :
|
||||
present = True
|
||||
break
|
||||
|
||||
if present == True :
|
||||
l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) )
|
||||
continue
|
||||
|
||||
if path.get_access_flag() == 1 :
|
||||
dst_class_name, dst_method_name, dst_descriptor = path.get_dst( analysis_method.get_vm().get_class_manager() )
|
||||
l.append( (path.get_idx(), "P%s{%s%s%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], dst_class_name, dst_method_name, dst_descriptor ) ) )
|
||||
else :
|
||||
l.append( (path.get_idx(), "P%s{%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], m) ) )
|
||||
|
||||
return l
|
||||
|
||||
def get_method(self, analysis_method, signature_type, signature_arguments={}) :
|
||||
key = "%s-%s-%s" % (self._get_method_info(analysis_method), signature_type, signature_arguments)
|
||||
|
||||
if key in self._cached_signatures :
|
||||
return self._cached_signatures[ key ]
|
||||
|
||||
s = Sign()
|
||||
|
||||
#print signature_type, signature_arguments
|
||||
for i in signature_type.split(":") :
|
||||
# print i, signature_arguments[ i ]
|
||||
if i == "L0" :
|
||||
_type = self.levels[ i ][ signature_arguments[ i ][ "type" ] ]
|
||||
try :
|
||||
_arguments = signature_arguments[ i ][ "arguments" ]
|
||||
except KeyError :
|
||||
_arguments = []
|
||||
|
||||
value = self._get_bb( analysis_method, _type, _arguments )
|
||||
s.add( i, ''.join(z for z in value) )
|
||||
|
||||
elif i == "L4" :
|
||||
try :
|
||||
_arguments = signature_arguments[ i ][ "arguments" ]
|
||||
except KeyError :
|
||||
_arguments = []
|
||||
|
||||
value = self._get_packages( analysis_method, _arguments )
|
||||
s.add( i , value )
|
||||
|
||||
elif i == "hex" :
|
||||
value = self._get_hex( analysis_method )
|
||||
s.add( i, value )
|
||||
|
||||
elif i == "sequencebb" :
|
||||
_type = ('_get_strings_a', '_get_fields_a', '_get_packages_pa_1')
|
||||
_arguments = ['Landroid', 'Ljava']
|
||||
|
||||
#value = self._get_bb( analysis_method, _type, _arguments )
|
||||
#s.add( i, value )
|
||||
|
||||
value = self._get_sequence_bb( analysis_method )
|
||||
s.add( i, value )
|
||||
|
||||
else :
|
||||
for f in self.levels[ i ] :
|
||||
value = getattr( self, f )( analysis_method )
|
||||
s.add( i, value )
|
||||
|
||||
self._cached_signatures[ key ] = s
|
||||
return s
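# Hedged usage sketch (added for illustration, not part of this commit): the
# signature type is a ":"-separated list of level names, and each level looks
# up its own entry in signature_arguments. The names `sig` and `m` and the
# "type"/"arguments" values below are assumptions, not values taken from the
# original code.
#
#   predef = {
#       "L0": {"type": 0, "arguments": ["Landroid"]},
#       "L4": {"arguments": ["Landroid", "Ljava"]},
#   }
#   s = sig.get_method(m, "L0:L4", predef)   # -> Sign object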
|
359
androguard/core/androconf.py
Normal file
@@ -0,0 +1,359 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
import types
|
||||
import random
|
||||
import string
|
||||
|
||||
ANDROGUARD_VERSION = "2.0"
|
||||
|
||||
|
||||
def is_ascii_problem(s):
|
||||
try:
|
||||
s.decode("ascii")
|
||||
return False
|
||||
except UnicodeDecodeError:
|
||||
return True
|
||||
|
||||
|
||||
class Color:
|
||||
Normal = "\033[0m"
|
||||
Black = "\033[30m"
|
||||
Red = "\033[31m"
|
||||
Green = "\033[32m"
|
||||
Yellow = "\033[33m"
|
||||
Blue = "\033[34m"
|
||||
Purple = "\033[35m"
|
||||
Cyan = "\033[36m"
|
||||
Grey = "\033[37m"
|
||||
Bold = "\033[1m"
|
||||
|
||||
CONF = {
|
||||
"BIN_DED": "ded.sh",
|
||||
"PATH_DED": "./decompiler/ded/",
|
||||
"PATH_DEX2JAR": "./decompiler/dex2jar/",
|
||||
"BIN_DEX2JAR": "dex2jar.sh",
|
||||
"PATH_JAD": "./decompiler/jad/",
|
||||
"BIN_JAD": "jad",
|
||||
"BIN_WINEJAD": "jad.exe",
|
||||
"PATH_FERNFLOWER": "./decompiler/fernflower/",
|
||||
"BIN_FERNFLOWER": "fernflower.jar",
|
||||
"OPTIONS_FERNFLOWER": {"dgs": '1', "asc": '1'},
|
||||
"PRETTY_SHOW": 1,
|
||||
|
||||
"TMP_DIRECTORY": "/tmp/",
|
||||
|
||||
# Full python or mix python/c++ (native)
|
||||
#"ENGINE" : "automatic",
|
||||
"ENGINE": "python",
|
||||
|
||||
"RECODE_ASCII_STRING": False,
|
||||
"RECODE_ASCII_STRING_METH": None,
|
||||
|
||||
"DEOBFUSCATED_STRING": True,
|
||||
# "DEOBFUSCATED_STRING_METH" : get_deobfuscated_string,
|
||||
|
||||
"PATH_JARSIGNER": "jarsigner",
|
||||
|
||||
"COLORS": {
|
||||
"OFFSET": Color.Yellow,
|
||||
"OFFSET_ADDR": Color.Green,
|
||||
"INSTRUCTION_NAME": Color.Yellow,
|
||||
"BRANCH_FALSE": Color.Red,
|
||||
"BRANCH_TRUE": Color.Green,
|
||||
"BRANCH": Color.Blue,
|
||||
"EXCEPTION": Color.Cyan,
|
||||
"BB": Color.Purple,
|
||||
"NOTE": Color.Red,
|
||||
"NORMAL": Color.Normal,
|
||||
|
||||
"OUTPUT": {
|
||||
"normal": Color.Normal,
|
||||
"registers": Color.Normal,
|
||||
"literal": Color.Green,
|
||||
"offset": Color.Purple,
|
||||
"raw": Color.Red,
|
||||
"string": Color.Red,
|
||||
"meth": Color.Cyan,
|
||||
"type": Color.Blue,
|
||||
"field": Color.Green,
|
||||
}
|
||||
},
|
||||
|
||||
"PRINT_FCT": sys.stdout.write,
|
||||
"LAZY_ANALYSIS": False,
|
||||
"MAGIC_PATH_FILE": None,
|
||||
}
|
||||
|
||||
|
||||
def default_colors(obj):
|
||||
CONF["COLORS"]["OFFSET"] = obj.Yellow
|
||||
CONF["COLORS"]["OFFSET_ADDR"] = obj.Green
|
||||
CONF["COLORS"]["INSTRUCTION_NAME"] = obj.Yellow
|
||||
CONF["COLORS"]["BRANCH_FALSE"] = obj.Red
|
||||
CONF["COLORS"]["BRANCH_TRUE"] = obj.Green
|
||||
CONF["COLORS"]["BRANCH"] = obj.Blue
|
||||
CONF["COLORS"]["EXCEPTION"] = obj.Cyan
|
||||
CONF["COLORS"]["BB"] = obj.Purple
|
||||
CONF["COLORS"]["NOTE"] = obj.Red
|
||||
CONF["COLORS"]["NORMAL"] = obj.Normal
|
||||
|
||||
CONF["COLORS"]["OUTPUT"]["normal"] = obj.Normal
|
||||
CONF["COLORS"]["OUTPUT"]["registers"] = obj.Normal
|
||||
CONF["COLORS"]["OUTPUT"]["literal"] = obj.Green
|
||||
CONF["COLORS"]["OUTPUT"]["offset"] = obj.Purple
|
||||
CONF["COLORS"]["OUTPUT"]["raw"] = obj.Red
|
||||
CONF["COLORS"]["OUTPUT"]["string"] = obj.Red
|
||||
CONF["COLORS"]["OUTPUT"]["meth"] = obj.Cyan
|
||||
CONF["COLORS"]["OUTPUT"]["type"] = obj.Blue
|
||||
CONF["COLORS"]["OUTPUT"]["field"] = obj.Green
|
||||
|
||||
|
||||
def disable_colors():
|
||||
""" Disable colors from the output (color = normal)"""
|
||||
for i in CONF["COLORS"]:
|
||||
if isinstance(CONF["COLORS"][i], dict):
|
||||
for j in CONF["COLORS"][i]:
|
||||
CONF["COLORS"][i][j] = Color.normal
|
||||
else:
|
||||
CONF["COLORS"][i] = Color.normal
|
||||
|
||||
|
||||
def remove_colors():
|
||||
""" Remove colors from the output (no escape sequences)"""
|
||||
for i in CONF["COLORS"]:
|
||||
if isinstance(CONF["COLORS"][i], dict):
|
||||
for j in CONF["COLORS"][i]:
|
||||
CONF["COLORS"][i][j] = ""
|
||||
else:
|
||||
CONF["COLORS"][i] = ""
|
||||
|
||||
|
||||
def enable_colors(colors):
|
||||
for i in colors:
|
||||
CONF["COLORS"][i] = colors[i]
|
||||
|
||||
|
||||
def save_colors():
|
||||
c = {}
|
||||
for i in CONF["COLORS"]:
|
||||
if isinstance(CONF["COLORS"][i], dict):
|
||||
c[i] = {}
|
||||
for j in CONF["COLORS"][i]:
|
||||
c[i][j] = CONF["COLORS"][i][j]
|
||||
else:
|
||||
c[i] = CONF["COLORS"][i]
|
||||
return c
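# Hedged usage sketch (added for illustration): the helpers above can bracket a
# block of plain output, mirroring disable_print_colors()/enable_print_colors()
# in bytecode.py.
saved = save_colors()      # snapshot the current palette
remove_colors()            # blank every escape sequence in CONF["COLORS"]
# ... run code that writes through CONF["PRINT_FCT"] ...
enable_colors(saved)       # restore the saved palette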
|
||||
|
||||
|
||||
def long2int(l):
|
||||
if l > 0x7fffffff:
|
||||
l = (0x7fffffff & l) - 0x80000000
|
||||
return l
|
||||
|
||||
|
||||
def long2str(l):
|
||||
"""Convert an integer to a string."""
|
||||
if type(l) not in (types.IntType, types.LongType):
|
||||
raise ValueError, 'the input must be an integer'
|
||||
|
||||
if l < 0:
|
||||
raise ValueError, 'the input must be greater than 0'
|
||||
s = ''
|
||||
while l:
|
||||
s = s + chr(l & 255L)
|
||||
l >>= 8
|
||||
|
||||
return s
|
||||
|
||||
def str2long(s):
|
||||
"""Convert a string to a long integer."""
|
||||
if type(s) not in (types.StringType, types.UnicodeType):
|
||||
raise ValueError, 'the input must be a string'
|
||||
|
||||
l = 0L
|
||||
for i in s:
|
||||
l <<= 8
|
||||
l |= ord(i)
|
||||
|
||||
return l
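# Clarifying sketch (added, not in the original commit): the two helpers are
# not inverses -- long2str() emits the least significant byte first, while
# str2long() reads its input as big-endian.
long2str(0x41424344)    # -> 'DCBA'
str2long('ABCD')        # -> 1094861636L, i.e. 0x41424344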
|
||||
|
||||
def random_string() :
|
||||
return random.choice( string.letters ) + ''.join([ random.choice(string.letters + string.digits) for i in range(10 - 1) ] )
|
||||
|
||||
def is_android(filename) :
|
||||
"""Return the type of the file
|
||||
|
||||
@param filename : the filename
|
||||
@rtype : "APK", "DEX", "DEY", "ELF", "AXML", "ARSC" or None
|
||||
"""
|
||||
if not filename:
|
||||
return None
|
||||
|
||||
fd = open( filename, "rb")
|
||||
val = None
|
||||
|
||||
f_bytes = fd.read(7)
|
||||
|
||||
val = is_android_raw( f_bytes )
|
||||
|
||||
fd.close()
|
||||
return val
|
||||
|
||||
def is_android_raw(raw):
|
||||
val = None
|
||||
f_bytes = raw[:7]
|
||||
|
||||
if f_bytes[0:2] == "PK":
|
||||
val = "APK"
|
||||
elif f_bytes[0:3] == "dex":
|
||||
val = "DEX"
|
||||
elif f_bytes[0:3] == "dey":
|
||||
val = "DEY"
|
||||
elif f_bytes[0:7] == "\x7fELF\x01\x01\x01":
|
||||
val = "ELF"
|
||||
elif f_bytes[0:4] == "\x03\x00\x08\x00":
|
||||
val = "AXML"
|
||||
elif f_bytes[0:4] == "\x02\x00\x0C\x00":
|
||||
val = "ARSC"
|
||||
|
||||
return val
|
||||
|
||||
def is_valid_android_raw(raw) :
|
||||
return raw.find("classes.dex") != -1
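# Hedged usage sketch (added for illustration): detection only inspects the
# leading magic bytes, so short fabricated buffers behave like real files.
is_android_raw("PK\x03\x04")             # -> "APK"
is_android_raw("dex\n035\x00")           # -> "DEX"
is_android_raw("\x7fELF\x01\x01\x01")    # -> "ELF"
is_android_raw("\x00" * 7)               # -> None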
|
||||
|
||||
# from scapy
|
||||
log_andro = logging.getLogger("andro")
|
||||
console_handler = logging.StreamHandler()
|
||||
console_handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
|
||||
log_andro.addHandler(console_handler)
|
||||
log_runtime = logging.getLogger("andro.runtime") # logs at runtime
|
||||
log_interactive = logging.getLogger("andro.interactive") # logs in interactive functions
|
||||
log_loading = logging.getLogger("andro.loading") # logs when loading andro
|
||||
|
||||
def set_lazy() :
|
||||
CONF["LAZY_ANALYSIS"] = True
|
||||
|
||||
def set_debug() :
|
||||
log_andro.setLevel( logging.DEBUG )
|
||||
|
||||
def set_info() :
|
||||
log_andro.setLevel(logging.INFO)
|
||||
|
||||
def get_debug() :
|
||||
return log_andro.getEffectiveLevel() == logging.DEBUG
|
||||
|
||||
def warning(x):
|
||||
log_runtime.warning(x)
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
def error(x) :
|
||||
log_runtime.error(x)
|
||||
raise Exception(x)
|
||||
|
||||
def debug(x):
|
||||
log_runtime.debug(x)
|
||||
|
||||
def info(x):
|
||||
log_runtime.info(x)
|
||||
|
||||
def set_options(key, value) :
|
||||
CONF[ key ] = value
|
||||
|
||||
def save_to_disk(buff, output) :
|
||||
fd = open(output, "w")
|
||||
fd.write(buff)
|
||||
fd.close()
|
||||
|
||||
def rrmdir( directory ):
|
||||
for root, dirs, files in os.walk(directory, topdown=False):
|
||||
for name in files:
|
||||
os.remove(os.path.join(root, name))
|
||||
for name in dirs:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir( directory )
|
||||
|
||||
|
||||
def make_color_tuple( color ):
|
||||
"""
|
||||
turn something like "#000000" into 0,0,0
|
||||
or "#FFFFFF into "255,255,255"
|
||||
"""
|
||||
R = color[1:3]
|
||||
G = color[3:5]
|
||||
B = color[5:7]
|
||||
|
||||
R = int(R, 16)
|
||||
G = int(G, 16)
|
||||
B = int(B, 16)
|
||||
|
||||
return R,G,B
|
||||
|
||||
def interpolate_tuple( startcolor, goalcolor, steps ):
|
||||
"""
|
||||
Take two RGB color sets and mix them over a specified number of steps. Return the list
|
||||
"""
|
||||
# white
|
||||
|
||||
R = startcolor[0]
|
||||
G = startcolor[1]
|
||||
B = startcolor[2]
|
||||
|
||||
targetR = goalcolor[0]
|
||||
targetG = goalcolor[1]
|
||||
targetB = goalcolor[2]
|
||||
|
||||
DiffR = targetR - R
|
||||
DiffG = targetG - G
|
||||
DiffB = targetB - B
|
||||
|
||||
buffer = []
|
||||
|
||||
for i in range(0, steps +1):
|
||||
iR = R + (DiffR * i / steps)
|
||||
iG = G + (DiffG * i / steps)
|
||||
iB = B + (DiffB * i / steps)
|
||||
|
||||
hR = string.replace(hex(iR), "0x", "")
|
||||
hG = string.replace(hex(iG), "0x", "")
|
||||
hB = string.replace(hex(iB), "0x", "")
|
||||
|
||||
if len(hR) == 1:
|
||||
hR = "0" + hR
|
||||
if len(hB) == 1:
|
||||
hB = "0" + hB
|
||||
|
||||
if len(hG) == 1:
|
||||
hG = "0" + hG
|
||||
|
||||
color = string.upper("#"+hR+hG+hB)
|
||||
buffer.append(color)
|
||||
|
||||
return buffer
|
||||
|
||||
def color_range( startcolor, goalcolor, steps ):
|
||||
"""
|
||||
wrapper for interpolate_tuple that accepts colors as html ("#CCCCC" and such)
|
||||
"""
|
||||
start_tuple = make_color_tuple(startcolor)
|
||||
goal_tuple = make_color_tuple(goalcolor)
|
||||
|
||||
return interpolate_tuple(start_tuple, goal_tuple, steps)
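# Hedged usage sketch (added for illustration): a 4-step gradient between two
# HTML colors; interpolate_tuple() always returns steps + 1 entries.
color_range("#000000", "#FFFFFF", 4)
# -> ['#000000', '#3F3F3F', '#7F7F7F', '#BFBFBF', '#FFFFFF']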
|
264
androguard/core/androgen.py
Normal file
@@ -0,0 +1,264 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import jvm
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.bytecodes import apk
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core.analysis import ganalysis
|
||||
|
||||
class BC :
|
||||
def __init__(self, bc) :
|
||||
self.__bc = bc
|
||||
|
||||
def get_vm(self) :
|
||||
return self.__bc
|
||||
|
||||
def get_analysis(self) :
|
||||
return self.__a
|
||||
|
||||
def analyze(self) :
|
||||
self.__a = analysis.uVMAnalysis( self.__bc )
|
||||
self.__bc.set_vmanalysis( self.__a )
|
||||
|
||||
self.__g = ganalysis.GVMAnalysis( self.__a, None )
|
||||
|
||||
self.__bc.set_gvmanalysis( self.__g )
|
||||
|
||||
self.__bc.create_xref()
|
||||
self.__bc.create_dref()
|
||||
|
||||
def _get(self, val, name) :
|
||||
l = []
|
||||
r = getattr(self.__bc, val)(name)
|
||||
for i in r :
|
||||
l.append( i )
|
||||
return l
|
||||
|
||||
def _gets(self, val) :
|
||||
l = []
|
||||
r = getattr(self.__bc, val)()
|
||||
for i in r :
|
||||
l.append( i )
|
||||
return l
|
||||
|
||||
def gets(self, name) :
|
||||
return self._gets("get_" + name)
|
||||
|
||||
def get(self, val, name) :
|
||||
return self._get("get_" + val, name)
|
||||
|
||||
def insert_direct_method(self, name, method) :
|
||||
return self.__bc.insert_direct_method(name, method)
|
||||
|
||||
def insert_craft_method(self, name, proto, codes) :
|
||||
return self.__bc.insert_craft_method( name, proto, codes)
|
||||
|
||||
def show(self) :
|
||||
self.__bc.show()
|
||||
|
||||
def pretty_show(self) :
|
||||
self.__bc.pretty_show()
|
||||
|
||||
def save(self) :
|
||||
return self.__bc.save()
|
||||
|
||||
def __getattr__(self, value) :
|
||||
return getattr(self.__bc, value)
|
||||
|
||||
class Androguard:
|
||||
"""Androguard is the main object to abstract and manage differents formats
|
||||
|
||||
@param files : a list of filenames (filename must be terminated by .class or .dex)
|
||||
@param raw : specify if the filename is in fact a raw buffer (default : False) #FIXME
|
||||
"""
|
||||
def __init__(self, files, raw=False) :
|
||||
self.__files = files
|
||||
|
||||
self.__orig_raw = {}
|
||||
for i in self.__files :
|
||||
self.__orig_raw[ i ] = open(i, "rb").read()
|
||||
|
||||
self.__bc = []
|
||||
self._analyze()
|
||||
|
||||
def _iterFlatten(self, root):
|
||||
if isinstance(root, (list, tuple)):
|
||||
for element in root :
|
||||
for e in self._iterFlatten(element) :
|
||||
yield e
|
||||
else:
|
||||
yield root
|
||||
|
||||
def _analyze(self) :
|
||||
for i in self.__files :
|
||||
ret_type = androconf.is_android( i )
|
||||
if ret_type == "APK" :
|
||||
x = apk.APK( i )
|
||||
bc = dvm.DalvikVMFormat( x.get_dex() )
|
||||
elif ret_type == "DEX" :
|
||||
bc = dvm.DalvikVMFormat( open(i, "rb").read() )
|
||||
elif ret_type == "DEY" :
|
||||
bc = dvm.DalvikOdexVMFormat( open(i, "rb").read() )
|
||||
elif ret_type == "ELF" :
|
||||
from androguard.core.binaries import elf
|
||||
bc = elf.ELF( open(i, "rb").read() )
|
||||
else :
|
||||
raise( "Unknown format" )
|
||||
|
||||
if isinstance(bc, list) :
|
||||
for j in bc :
|
||||
self.__bc.append( (j[0], BC( jvm.JVMFormat(j[1]) ) ) )
|
||||
else :
|
||||
self.__bc.append( (i, BC( bc )) )
|
||||
|
||||
def ianalyze(self) :
|
||||
for i in self.get_bc() :
|
||||
i[1].analyze()
|
||||
|
||||
def get_class(self, class_name) :
|
||||
for _, bc in self.__bc :
|
||||
if bc.get_class(class_name) == True :
|
||||
return bc
|
||||
return None
|
||||
|
||||
def get_raw(self) :
|
||||
"""Return raw format of all file"""
|
||||
l = []
|
||||
for _, bc in self.__bc :
|
||||
l.append( bc._get_raw() )
|
||||
return l
|
||||
|
||||
def get_orig_raw(self) :
|
||||
return self.__orig_raw
|
||||
|
||||
def get_method_descriptor(self, class_name, method_name, descriptor) :
|
||||
"""
|
||||
Return the specific method
|
||||
|
||||
@param class_name : the class name of the method
|
||||
@param method_name : the name of the method
|
||||
@param descriptor : the descriptor of the method
|
||||
"""
|
||||
for file_name, bc in self.__bc :
|
||||
x = bc.get_method_descriptor( class_name, method_name, descriptor )
|
||||
if x != None :
|
||||
return x, bc
|
||||
return None, None
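# Hedged usage sketch (added for illustration; `a` is an Androguard instance
# and the class/method/descriptor strings are hypothetical):
method, bc = a.get_method_descriptor("Lcom/example/Foo;", "onCreate",
                                     "(Landroid/os/Bundle;)V")
if method != None:
    method.show()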
|
||||
|
||||
def get_field_descriptor(self, class_name, field_name, descriptor) :
|
||||
"""
|
||||
Return the specific field
|
||||
|
||||
@param class_name : the class name of the field
|
||||
@param field_name : the name of the field
|
||||
@param descriptor : the descriptor of the field
|
||||
"""
|
||||
for file_name, bc in self.__bc :
|
||||
x = bc.get_field_descriptor( class_name, field_name, descriptor )
|
||||
if x != None :
|
||||
return x, bc
|
||||
return None, None
|
||||
|
||||
def get(self, name, val) :
|
||||
"""
|
||||
Return the specific value for all files
|
||||
|
||||
@param name :
|
||||
@param val :
|
||||
"""
|
||||
if name == "file" :
|
||||
for file_name, bc in self.__bc :
|
||||
if file_name == val :
|
||||
return bc
|
||||
|
||||
return None
|
||||
else :
|
||||
l = []
|
||||
for file_name, bc in self.__bc :
|
||||
l.append( bc.get( name, val ) )
|
||||
|
||||
return list( self._iterFlatten(l) )
|
||||
|
||||
def gets(self, name) :
|
||||
"""
|
||||
Return the specific value for all files
|
||||
|
||||
@param name :
|
||||
"""
|
||||
l = []
|
||||
for file_name, bc in self.__bc :
|
||||
l.append( bc.gets( name ) )
|
||||
|
||||
return list( self._iterFlatten(l) )
|
||||
|
||||
def get_vms(self) :
|
||||
return [ i[1].get_vm() for i in self.__bc ]
|
||||
|
||||
def get_bc(self) :
|
||||
return self.__bc
|
||||
|
||||
def show(self) :
|
||||
"""
|
||||
Display all files
|
||||
"""
|
||||
for _, bc in self.__bc :
|
||||
bc.show()
|
||||
|
||||
def pretty_show(self) :
|
||||
"""
|
||||
Display all files
|
||||
"""
|
||||
for _, bc in self.__bc :
|
||||
bc.pretty_show()
|
||||
|
||||
class AndroguardS :
|
||||
"""AndroguardS is the main object to abstract and manage differents formats but only per filename. In fact this class is just a wrapper to the main class Androguard
|
||||
|
||||
@param filename : the filename to use (filename must be terminated by .class or .dex)
|
||||
@param raw : specify if the filename is a raw buffer (default : False)
|
||||
"""
|
||||
def __init__(self, filename, raw=False) :
|
||||
self.__filename = filename
|
||||
self.__orig_a = Androguard( [ filename ], raw )
|
||||
self.__a = self.__orig_a.get( "file", filename )
|
||||
|
||||
def get_orig_raw(self) :
|
||||
return self.__orig_a.get_orig_raw()[ self.__filename ]
|
||||
|
||||
def get_vm(self) :
|
||||
"""
|
||||
This method returns the VMFormat which correspond to the file
|
||||
|
||||
@rtype: L{jvm.JVMFormat} or L{dvm.DalvikVMFormat}
|
||||
"""
|
||||
return self.__a.get_vm()
|
||||
|
||||
def save(self) :
|
||||
"""
|
||||
Return the original format (with the modifications) into raw format
|
||||
|
||||
@rtype: string
|
||||
"""
|
||||
return self.__a.save()
|
||||
|
||||
def __getattr__(self, value) :
|
||||
try :
|
||||
return getattr(self.__orig_a, value)
|
||||
except AttributeError :
|
||||
return getattr(self.__a, value)
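# Hedged usage sketch (added for illustration; the filename is hypothetical):
a = AndroguardS("classes.dex")
a.ianalyze()                          # delegated to the wrapped Androguard object
for method in a.get_vm().get_methods():
    print method.get_name()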
|
0
androguard/core/binaries/__init__.py
Normal file
101
androguard/core/binaries/elf.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from elfesteem import *
|
||||
from miasm.tools.pe_helper import *
|
||||
from miasm.core import asmbloc
|
||||
from miasm.arch import arm_arch
|
||||
from miasm.core import bin_stream
|
||||
|
||||
|
||||
from androguard.core import bytecode
|
||||
from androguard.core.androconf import CONF, debug
|
||||
|
||||
def disasm_at_addr(in_str, ad_to_dis, symbol_pool) :
|
||||
kargs = {}
|
||||
all_bloc = asmbloc.dis_bloc_all(arm_arch.arm_mn, in_str, ad_to_dis, set(),
|
||||
symbol_pool=symbol_pool,
|
||||
dontdis_retcall = False,
|
||||
follow_call = False,
|
||||
**kargs)
|
||||
for i in all_bloc :
|
||||
bytecode._PrintDefault("%s\n" % i.label)
|
||||
for j in i.lines :
|
||||
bytecode._PrintDefault("\t %s\n" % j)
|
||||
bytecode._PrintDefault("\n")
|
||||
|
||||
class Function :
|
||||
def __init__(self, cm, name, info) :
|
||||
self.cm = cm
|
||||
self.name = name
|
||||
self.info = info
|
||||
|
||||
def show(self) :
|
||||
bytecode._PrintSubBanner("Function")
|
||||
bytecode._PrintDefault("name=%s addr=0x%x\n" % (self.name, self.info.value))
|
||||
|
||||
self.cm.disasm_at_addr( self.info.value )
|
||||
|
||||
class ClassManager :
|
||||
def __init__(self, in_str, symbol_pool) :
|
||||
self.in_str = in_str
|
||||
self.symbol_pool = symbol_pool
|
||||
|
||||
def disasm_at_addr(self, ad_to_dis) :
|
||||
disasm_at_addr( self.in_str, ad_to_dis, self.symbol_pool )
|
||||
|
||||
class ELF :
|
||||
def __init__(self, buff) :
|
||||
self.E = elf_init.ELF( buff )
|
||||
|
||||
self.in_str = bin_stream.bin_stream(self.E.virt)
|
||||
self.symbol_pool = None
|
||||
self.functions = []
|
||||
|
||||
self.create_symbol_pool()
|
||||
|
||||
self.CM = ClassManager( self.in_str, self.symbol_pool )
|
||||
|
||||
self.create_functions()
|
||||
|
||||
def create_symbol_pool(self) :
|
||||
dll_dyn_funcs = get_import_address_elf(self.E)
|
||||
self.symbol_pool = asmbloc.asm_symbol_pool()
|
||||
for (n,f), ads in dll_dyn_funcs.items() :
|
||||
for ad in ads :
|
||||
l = self.symbol_pool.getby_name_create("%s_%s"%(n, f))
|
||||
l.offset = ad
|
||||
self.symbol_pool.s_offset[l.offset] = l
|
||||
|
||||
def show(self) :
|
||||
for i in self.get_functions():
|
||||
i.show()
|
||||
|
||||
def get_functions(self) :
|
||||
return self.functions
|
||||
|
||||
def create_functions(self) :
|
||||
try :
|
||||
for k, v in self.E.sh.symtab.symbols.items():
|
||||
if v.size != 0 :
|
||||
self.functions.append( Function(self.CM, k, v) )
|
||||
except AttributeError :
|
||||
pass
|
||||
|
||||
for k, v in self.E.sh.dynsym.symbols.items() :
|
||||
if v.size != 0 :
|
||||
self.functions.append( Function(self.CM, k, v) )
|
211
androguard/core/binaries/idapipe.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
|
||||
import os, sys
|
||||
import xmlrpclib
|
||||
|
||||
import cPickle
|
||||
|
||||
class _Method :
|
||||
def __init__(self, proxy, name) :
|
||||
self.proxy = proxy
|
||||
self.name = name
|
||||
|
||||
def __call__(self, *args):
|
||||
#print "CALL", self.name, args
|
||||
z = getattr( self.proxy, self.name, None )
|
||||
#print "SEND", repr(cPickle.dumps( args ) )
|
||||
|
||||
try :
|
||||
if len(args) == 1 :
|
||||
ret = z( cPickle.dumps( args[0] ) )
|
||||
else :
|
||||
ret = z( cPickle.dumps( args ) )
|
||||
#print "RECEIVE", repr(ret)
|
||||
return cPickle.loads( ret )
|
||||
except xmlrpclib.ProtocolError :
|
||||
return []
|
||||
|
||||
class MyXMLRPC :
|
||||
def __init__(self, proxy) :
|
||||
self.proxy = proxy
|
||||
|
||||
def __getattr__(self, name) :
|
||||
return _Method(self.proxy, name)
|
||||
|
||||
class BasicBlock :
|
||||
def __init__(self, ins) :
|
||||
self.ins = ins
|
||||
|
||||
def show(self) :
|
||||
for i in self.ins :
|
||||
print i
|
||||
|
||||
class Function :
|
||||
def __init__(self, name, start_ea, instructions, information) :
|
||||
#print name, start_ea
|
||||
|
||||
self.name = name
|
||||
self.start_ea = start_ea
|
||||
self.information = information
|
||||
self.basic_blocks = []
|
||||
self.instructions = instructions
|
||||
|
||||
r = {}
|
||||
idx = 0
|
||||
for i in instructions :
|
||||
r[ i[0] ] = idx
|
||||
idx += 1
|
||||
|
||||
for i in information[0] :
|
||||
try :
|
||||
start = r[i[0]]
|
||||
end = r[i[1]] + 1
|
||||
self.basic_blocks.append( BasicBlock( instructions[start:end] ) )
|
||||
except KeyError :
|
||||
pass
|
||||
|
||||
def get_instructions(self) :
|
||||
return [ i for i in self.instructions ]
|
||||
|
||||
def run_ida(idapath, wrapper_init_path, binpath) :
|
||||
os.environ["TVHEADLESS"] = "1"
|
||||
pid = os.fork()
|
||||
if pid == 0:
|
||||
wrapper_path = "-S" + wrapper_init_path
|
||||
l = [ idapath, "-A", wrapper_path, binpath ]
|
||||
print l
|
||||
compile = Popen(l, stdout=open('/dev/null', 'w'), stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
# print stdout, stderr
|
||||
sys.exit(0)
|
||||
|
||||
class IDAPipe :
|
||||
def __init__(self, idapath, binpath, wrapper_init_path) :
|
||||
self.idapath = idapath
|
||||
self.binpath = binpath
|
||||
|
||||
self.proxy = None
|
||||
|
||||
run_ida(self.idapath, wrapper_init_path, self.binpath)
|
||||
|
||||
while 1 :
|
||||
try :
|
||||
self.proxy = xmlrpclib.ServerProxy("http://localhost:9000/")
|
||||
self.proxy.is_connected()
|
||||
break
|
||||
except :
|
||||
pass
|
||||
|
||||
#print self.proxy
|
||||
self.proxy = MyXMLRPC( self.proxy )
|
||||
|
||||
def quit(self) :
|
||||
try :
|
||||
self.proxy.quit()
|
||||
except :
|
||||
pass
|
||||
|
||||
def _build_functions(self, functions) :
|
||||
F = {}
|
||||
|
||||
for i in functions :
|
||||
F[ i ] = Function( functions[i][0], i, functions[i][1:-1], functions[i][-1] )
|
||||
|
||||
return F
|
||||
|
||||
def get_quick_functions(self) :
|
||||
functions = self.get_raw()
|
||||
return self._build_functions( functions )
|
||||
|
||||
def get_raw(self) :
|
||||
return self.proxy.get_raw()
|
||||
|
||||
def get_nb_functions(self) :
|
||||
return len(self.proxy.Functions())
|
||||
|
||||
def get_functions(self) :
|
||||
for function_ea in self.proxy.Functions() :
|
||||
self.get_function_addr( function_ea )
|
||||
|
||||
def get_function_name(self, name) :
|
||||
function_ea = self.proxy.get_function( name )
|
||||
self.get_function_addr( function_ea )
|
||||
|
||||
def get_function_addr(self, function_ea) :
|
||||
if function_ea == -1 :
|
||||
return
|
||||
|
||||
f_start = function_ea
|
||||
f_end = self.proxy.GetFunctionAttr(function_ea, 4) # FUNCATTR_END
|
||||
|
||||
edges = set()
|
||||
boundaries = set((f_start,))
|
||||
|
||||
for head in self.proxy.Heads(f_start, f_end) :
|
||||
if self.proxy.isCode( self.proxy.GetFlags( head ) ) :
|
||||
refs = self.proxy.CodeRefsFrom(head, 0)
|
||||
refs = set(filter(lambda x: x>=f_start and x<=f_end, refs))
|
||||
|
||||
#print head, f_end, refs, self.proxy.GetMnem(head), self.proxy.GetOpnd(head, 0), self.proxy.GetOpnd(head, 1)
|
||||
|
||||
if refs :
|
||||
next_head = self.proxy.NextHead(head, f_end)
|
||||
if self.proxy.isFlow(self.proxy.GetFlags(next_head)):
|
||||
refs.add(next_head)
|
||||
|
||||
# Update the boundaries found so far.
|
||||
boundaries.update(refs)
|
||||
|
||||
# For each of the references found, an edge is
|
||||
# created.
|
||||
for r in refs:
|
||||
# If the flow could also come from the address
|
||||
# previous to the destination of the branching
|
||||
# an edge is created.
|
||||
if self.proxy.isFlow(self.proxy.GetFlags(r)):
|
||||
edges.add((self.proxy.PrevHead(r, f_start), r))
|
||||
edges.add((head, r))
|
||||
|
||||
|
||||
#print edges, boundaries
|
||||
# Let's build the list of (startEA, endEA) couples
|
||||
# for each basic block
|
||||
sorted_boundaries = sorted(boundaries, reverse = True)
|
||||
end_addr = self.proxy.PrevHead(f_end, f_start)
|
||||
bb_addr = []
|
||||
for begin_addr in sorted_boundaries:
|
||||
bb_addr.append((begin_addr, end_addr))
|
||||
# search the next end_addr which could be
|
||||
# farther than just the previous head
|
||||
# if data are interlaced in the code
|
||||
# WARNING: it assumes it won't epically fail ;)
|
||||
end_addr = self.proxy.PrevHead(begin_addr, f_start)
|
||||
while not self.proxy.isCode(self.proxy.GetFlags(end_addr)):
|
||||
end_addr = self.proxy.PrevHead(end_addr, f_start)
|
||||
# And finally return the result
|
||||
bb_addr.reverse()
|
||||
#print bb_addr, sorted(edges)
|
||||
|
||||
def display_function(f) :
|
||||
print f, f.name, f.information
|
||||
|
||||
for i in f.basic_blocks :
|
||||
print i
|
||||
i.show()
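# Hedged usage sketch (added for illustration; every path below is
# hypothetical): drive a headless IDA instance through the XML-RPC wrapper
# and dump the recovered functions.
pipe = IDAPipe("/opt/ida/idal", "/tmp/libnative.so",
               "/path/to/androguard/core/binaries/idawrapper.py")
for addr, func in pipe.get_quick_functions().items():
    display_function(func)
pipe.quit()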
|
161
androguard/core/binaries/idawrapper.py
Normal file
@@ -0,0 +1,161 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from idaapi import *
|
||||
from idautils import *
|
||||
from idc import *
|
||||
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer
|
||||
import cPickle
|
||||
|
||||
def is_connected() :
|
||||
return True
|
||||
|
||||
def wrapper_get_raw(oops) :
|
||||
F = {}
|
||||
for function_ea in Functions() :
|
||||
|
||||
F[ function_ea ] = []
|
||||
|
||||
f_start = function_ea
|
||||
f_end = GetFunctionAttr(function_ea, FUNCATTR_END)
|
||||
|
||||
edges = set()
|
||||
boundaries = set((f_start,))
|
||||
|
||||
F[ function_ea ].append( GetFunctionName(function_ea) )
|
||||
|
||||
for head in Heads(f_start, f_end) :
|
||||
if isCode( GetFlags( head ) ) :
|
||||
F[ function_ea ].append( (head, GetMnem(head), GetOpnd(head, 0), GetOpnd(head, 1), GetOpnd(head, 2)) )
|
||||
|
||||
refs = CodeRefsFrom(head, 0)
|
||||
refs = set(filter(lambda x: x>=f_start and x<=f_end, refs))
|
||||
|
||||
if refs :
|
||||
next_head = NextHead(head, f_end)
|
||||
if isFlow(GetFlags(next_head)):
|
||||
refs.add(next_head)
|
||||
|
||||
# Update the boundaries found so far.
|
||||
boundaries.update(refs)
|
||||
|
||||
# For each of the references found, an edge is
|
||||
# created.
|
||||
for r in refs:
|
||||
# If the flow could also come from the address
|
||||
# previous to the destination of the branching
|
||||
# an edge is created.
|
||||
if isFlow(GetFlags(r)):
|
||||
edges.add((PrevHead(r, f_start), r))
|
||||
edges.add((head, r))
|
||||
|
||||
#print edges, boundaries
|
||||
# Let's build the list of (startEA, endEA) couples
|
||||
# for each basic block
|
||||
sorted_boundaries = sorted(boundaries, reverse = True)
|
||||
end_addr = PrevHead(f_end, f_start)
|
||||
bb_addr = []
|
||||
for begin_addr in sorted_boundaries:
|
||||
bb_addr.append((begin_addr, end_addr))
|
||||
# search the next end_addr which could be
|
||||
# farther than just the previous head
|
||||
# if data are interlaced in the code
|
||||
# WARNING: it assumes it won't epically fail ;)
|
||||
end_addr = PrevHead(begin_addr, f_start)
|
||||
while not isCode(GetFlags(end_addr)):
|
||||
end_addr = PrevHead(end_addr, f_start)
|
||||
# And finally return the result
|
||||
bb_addr.reverse()
|
||||
F[ function_ea ].append( (bb_addr, sorted(edges)) )
|
||||
|
||||
return cPickle.dumps( F )
|
||||
|
||||
def wrapper_Heads(oops) :
|
||||
start, end = cPickle.loads(oops)
|
||||
return cPickle.dumps( [ x for x in Heads( start, end ) ] )
|
||||
|
||||
def wrapper_Functions(oops) :
|
||||
return cPickle.dumps( [ x for x in Functions() ] )
|
||||
|
||||
def wrapper_get_function(oops) :
|
||||
name = cPickle.loads(oops)
|
||||
for function_ea in Functions() :
|
||||
if GetFunctionName(function_ea) == name :
|
||||
return cPickle.dumps( function_ea )
|
||||
return cPickle.dumps( -1 )
|
||||
|
||||
def wrapper_quit(oops) :
|
||||
qexit(0)
|
||||
|
||||
class IDAWrapper :
|
||||
def _dispatch(self, x, params) :
|
||||
#fd = open("toto.txt", "w")
|
||||
#fd.write( x + "\n" )
|
||||
#fd.write( str(type(params[0])) + "\n" )
|
||||
#fd.close()
|
||||
|
||||
params = cPickle.loads( *params )
|
||||
if isinstance(params, tuple) == False :
|
||||
params = (params,)
|
||||
|
||||
import types
|
||||
import idautils
|
||||
import idc
|
||||
|
||||
#[getattr(idautils, a, None) for a in dir(idautils) if isinstance(getattr(idautils, a, None) , types.FunctionType)]
|
||||
for a in dir(idautils) :
|
||||
#fd.write( "\t" + a + "\n" )
|
||||
if a == x :
|
||||
z = getattr(idautils, a, None)
|
||||
ret = z( *params )
|
||||
if type(ret).__name__=='generator' :
|
||||
return cPickle.dumps( [ i for i in ret ] )
|
||||
return cPickle.dumps( ret )
|
||||
|
||||
for a in dir(idc) :
|
||||
#fd.write( "\t" + a + "\n" )
|
||||
if a == x :
|
||||
z = getattr(idc, a, None)
|
||||
ret = z( *params )
|
||||
if type(ret).__name__=='generator' :
|
||||
return cPickle.dumps( [ i for i in ret ] )
|
||||
return cPickle.dumps( ret )
|
||||
|
||||
return cPickle.dumps( [] )
|
||||
|
||||
def main() :
|
||||
autoWait()
|
||||
ea = ScreenEA()
|
||||
|
||||
server = SimpleXMLRPCServer(("localhost", 9000))
|
||||
server.register_function(is_connected, "is_connected")
|
||||
|
||||
server.register_function(wrapper_get_raw, "get_raw")
|
||||
server.register_function(wrapper_get_function, "get_function")
|
||||
server.register_function(wrapper_Heads, "Heads")
|
||||
server.register_function(wrapper_Functions, "Functions")
|
||||
|
||||
server.register_instance(IDAWrapper())
|
||||
|
||||
server.register_function(wrapper_quit, "quit")
|
||||
server.serve_forever()
|
||||
|
||||
qexit(0)
|
||||
|
||||
main()
|
765
androguard/core/bytecode.py
Normal file
@@ -0,0 +1,765 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012/2013, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
from xml.sax.saxutils import escape
|
||||
from struct import unpack, pack
|
||||
import textwrap
|
||||
|
||||
import json
|
||||
from androconf import warning, error, CONF, enable_colors, remove_colors, save_colors, color_range
|
||||
|
||||
|
||||
def disable_print_colors():
|
||||
colors = save_colors()
|
||||
remove_colors()
|
||||
return colors
|
||||
|
||||
|
||||
def enable_print_colors(colors):
|
||||
enable_colors(colors)
|
||||
|
||||
|
||||
# Handle exit message
|
||||
def Exit( msg ):
|
||||
warning("Error : " + msg)
|
||||
raise("oops")
|
||||
|
||||
def Warning( msg ):
|
||||
warning(msg)
|
||||
|
||||
def _PrintBanner() :
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
print_fct("*" * 75 + "\n")
|
||||
|
||||
def _PrintSubBanner(title=None) :
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
if title == None :
|
||||
print_fct("#" * 20 + "\n")
|
||||
else :
|
||||
print_fct("#" * 10 + " " + title + "\n")
|
||||
|
||||
def _PrintNote(note, tab=0) :
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
note_color = CONF["COLORS"]["NOTE"]
|
||||
normal_color = CONF["COLORS"]["NORMAL"]
|
||||
print_fct("\t" * tab + "%s# %s%s" % (note_color, note, normal_color) + "\n")
|
||||
|
||||
# Print arg into a correct format
|
||||
def _Print(name, arg) :
|
||||
buff = name + " "
|
||||
|
||||
if type(arg).__name__ == 'int' :
|
||||
buff += "0x%x" % arg
|
||||
elif type(arg).__name__ == 'long' :
|
||||
buff += "0x%x" % arg
|
||||
elif type(arg).__name__ == 'str' :
|
||||
buff += "%s" % arg
|
||||
elif isinstance(arg, SV) :
|
||||
buff += "0x%x" % arg.get_value()
|
||||
elif isinstance(arg, SVs) :
|
||||
buff += arg.get_value().__str__()
|
||||
|
||||
print buff
|
||||
|
||||
|
||||
def PrettyShowEx(exceptions):
|
||||
if len(exceptions) > 0:
|
||||
CONF["PRINT_FCT"]("Exceptions:\n")
|
||||
for i in exceptions:
|
||||
CONF["PRINT_FCT"]("\t%s%s%s\n" % (CONF["COLORS"]["EXCEPTION"], i.show_buff(), CONF["COLORS"]["NORMAL"]))
|
||||
|
||||
|
||||
def _PrintXRef(tag, items):
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
for i in items:
|
||||
print_fct("%s: %s %s %s %s\n" % (tag, i[0].get_class_name(), i[0].get_name(), i[0].get_descriptor(), ' '.join("%x" % j.get_idx() for j in i[1])))
|
||||
|
||||
|
||||
def _PrintDRef(tag, items):
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
for i in items:
|
||||
print_fct("%s: %s %s %s %s\n" % (tag, i[0].get_class_name(), i[0].get_name(), i[0].get_descriptor(), ' '.join("%x" % j for j in i[1])))
|
||||
|
||||
|
||||
def _PrintDefault(msg):
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
print_fct(msg)
|
||||
|
||||
|
||||
def PrettyShow(m_a, basic_blocks, notes={}):
|
||||
idx = 0
|
||||
nb = 0
|
||||
|
||||
offset_color = CONF["COLORS"]["OFFSET"]
|
||||
offset_addr_color = CONF["COLORS"]["OFFSET_ADDR"]
|
||||
instruction_name_color = CONF["COLORS"]["INSTRUCTION_NAME"]
|
||||
branch_false_color = CONF["COLORS"]["BRANCH_FALSE"]
|
||||
branch_true_color = CONF["COLORS"]["BRANCH_TRUE"]
|
||||
branch_color = CONF["COLORS"]["BRANCH"]
|
||||
exception_color = CONF["COLORS"]["EXCEPTION"]
|
||||
bb_color = CONF["COLORS"]["BB"]
|
||||
normal_color = CONF["COLORS"]["NORMAL"]
|
||||
print_fct = CONF["PRINT_FCT"]
|
||||
|
||||
colors = CONF["COLORS"]["OUTPUT"]
|
||||
|
||||
for i in basic_blocks:
|
||||
print_fct("%s%s%s : \n" % (bb_color, i.get_name(), normal_color))
|
||||
instructions = i.get_instructions()
|
||||
for ins in instructions:
|
||||
if nb in notes:
|
||||
for note in notes[nb]:
|
||||
_PrintNote(note, 1)
|
||||
|
||||
print_fct("\t%s%-3d%s(%s%08x%s) " % (offset_color, nb, normal_color, offset_addr_color, idx, normal_color))
|
||||
print_fct("%s%-20s%s" % (instruction_name_color, ins.get_name(), normal_color))
|
||||
|
||||
operands = ins.get_operands()
|
||||
print_fct("%s" % ", ".join(m_a.get_vm().colorize_operands(operands, colors)))
|
||||
|
||||
op_value = ins.get_op_value()
|
||||
if ins == instructions[-1] and i.childs:
|
||||
print_fct(" ")
|
||||
|
||||
# packed/sparse-switch
|
||||
if (op_value == 0x2b or op_value == 0x2c) and len(i.childs) > 1:
|
||||
values = i.get_special_ins(idx).get_values()
|
||||
print_fct("%s[ D:%s%s " % (branch_false_color, i.childs[0][2].get_name(), branch_color))
|
||||
print_fct(' '.join("%d:%s" % (values[j], i.childs[j + 1][2].get_name()) for j in range(0, len(i.childs) - 1)) + " ]%s" % normal_color)
|
||||
else:
|
||||
if len(i.childs) == 2:
|
||||
print_fct("%s[ %s%s " % (branch_false_color, i.childs[0][2].get_name(), branch_true_color))
|
||||
print_fct(' '.join("%s" % c[2].get_name() for c in i.childs[1:]) + " ]%s" % normal_color)
|
||||
else:
|
||||
print_fct("%s[ " % branch_color + ' '.join("%s" % c[2].get_name() for c in i.childs) + " ]%s" % normal_color)
|
||||
|
||||
idx += ins.get_length()
|
||||
nb += 1
|
||||
|
||||
print_fct("\n")
|
||||
|
||||
if i.get_exception_analysis():
|
||||
print_fct("\t%s%s%s\n" % (exception_color, i.exception_analysis.show_buff(), normal_color))
|
||||
|
||||
print_fct("\n")
|
||||
|
||||
|
||||
def method2dot(mx, colors={}):
|
||||
"""
|
||||
Export analysis method to dot format
|
||||
|
||||
@param mx : MethodAnalysis object
|
||||
@param colors : MethodAnalysis object
|
||||
|
||||
@rtype : dot format buffer (it is a subgraph (dict))
|
||||
"""
|
||||
|
||||
colors = colors or {"true_branch": "green",
|
||||
"false_branch": "red",
|
||||
"default_branch": "purple",
|
||||
"jump_branch": "blue",
|
||||
"bg_idx": "lightgray",
|
||||
"idx": "blue",
|
||||
"bg_start_idx": "yellow",
|
||||
"bg_instruction": "lightgray",
|
||||
"instruction_name": "black",
|
||||
"instructions_operands": "yellow",
|
||||
|
||||
"raw": "red",
|
||||
"string": "red",
|
||||
"literal": "green",
|
||||
"offset": "#4000FF",
|
||||
"method": "#DF3A01",
|
||||
"field": "#088A08",
|
||||
"type": "#0000FF",
|
||||
|
||||
"registers_range": ("#999933", "#6666FF")
|
||||
}
|
||||
|
||||
node_tpl = "\nstruct_%s [label=<\n<TABLE BORDER=\"0\" CELLBORDER=\"0\" CELLSPACING=\"3\">\n%s</TABLE>>];\n"
|
||||
label_tpl = "<TR><TD ALIGN=\"LEFT\" BGCOLOR=\"%s\"> <FONT FACE=\"Times-Bold\" color=\"%s\">%x</FONT> </TD><TD ALIGN=\"LEFT\" BGCOLOR=\"%s\"> <FONT FACE=\"Times-Bold\" color=\"%s\">%s </FONT> %s </TD></TR>\n"
|
||||
link_tpl = "<TR><TD PORT=\"%s\"></TD></TR>\n"
|
||||
|
||||
edges_html = ""
|
||||
blocks_html = ""
|
||||
|
||||
method = mx.get_method()
|
||||
sha256 = hashlib.sha256("%s%s%s" % (mx.get_method().get_class_name(), mx.get_method().get_name(), mx.get_method().get_descriptor())).hexdigest()
|
||||
|
||||
registers = {}
|
||||
if method.get_code():
|
||||
for DVMBasicMethodBlock in mx.basic_blocks.gets():
|
||||
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions():
|
||||
operands = DVMBasicMethodBlockInstruction.get_operands(0)
|
||||
for register in operands:
|
||||
if register[0] == 0:
|
||||
if register[1] not in registers:
|
||||
registers[register[1]] = 0
|
||||
registers[register[1]] += 1
|
||||
# for i in range(method.get_code().get_registers_size()):
|
||||
# registers[i] = 0
|
||||
|
||||
if registers:
|
||||
registers_colors = color_range(colors["registers_range"][0],
|
||||
colors["registers_range"][1],
|
||||
len(registers))
|
||||
for i in registers:
|
||||
registers[i] = registers_colors.pop(0)
|
||||
|
||||
new_links = []
|
||||
|
||||
for DVMBasicMethodBlock in mx.basic_blocks.gets():
|
||||
ins_idx = DVMBasicMethodBlock.start
|
||||
block_id = hashlib.md5(sha256 + DVMBasicMethodBlock.get_name()).hexdigest()
|
||||
|
||||
content = link_tpl % 'header'
|
||||
|
||||
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions():
|
||||
if DVMBasicMethodBlockInstruction.get_op_value() == 0x2b or DVMBasicMethodBlockInstruction.get_op_value() == 0x2c:
|
||||
new_links.append((DVMBasicMethodBlock, ins_idx, DVMBasicMethodBlockInstruction.get_ref_off() * 2 + ins_idx))
|
||||
elif DVMBasicMethodBlockInstruction.get_op_value() == 0x26:
|
||||
new_links.append((DVMBasicMethodBlock, ins_idx, DVMBasicMethodBlockInstruction.get_ref_off() * 2 + ins_idx))
|
||||
|
||||
operands = DVMBasicMethodBlockInstruction.get_operands(ins_idx)
|
||||
output = ", ".join(mx.get_vm().get_operand_html(i, registers, colors, escape, textwrap.wrap) for i in operands)
|
||||
|
||||
formatted_operands = DVMBasicMethodBlockInstruction.get_formatted_operands()
|
||||
if formatted_operands:
|
||||
output += " ; %s" % str(formatted_operands)
|
||||
|
||||
bg_idx = colors["bg_idx"]
|
||||
if ins_idx == 0 and "bg_start_idx" in colors:
|
||||
bg_idx = colors["bg_start_idx"]
|
||||
|
||||
content += label_tpl % (bg_idx,
|
||||
colors["idx"],
|
||||
ins_idx,
|
||||
colors["bg_instruction"],
|
||||
colors["instruction_name"],
|
||||
DVMBasicMethodBlockInstruction.get_name(),
|
||||
output)
|
||||
|
||||
ins_idx += DVMBasicMethodBlockInstruction.get_length()
|
||||
last_instru = DVMBasicMethodBlockInstruction
|
||||
|
||||
# all blocks from one method parsed
|
||||
# updating dot HTML content
|
||||
content += link_tpl % 'tail'
|
||||
blocks_html += node_tpl % (block_id, content)
|
||||
|
||||
# Block edges color treatment (conditional branch colors)
|
||||
val = colors["true_branch"]
|
||||
if len(DVMBasicMethodBlock.childs) > 1:
|
||||
val = colors["false_branch"]
|
||||
elif len(DVMBasicMethodBlock.childs) == 1:
|
||||
val = colors["jump_branch"]
|
||||
|
||||
values = None
|
||||
if (last_instru.get_op_value() == 0x2b or last_instru.get_op_value() == 0x2c) and len(DVMBasicMethodBlock.childs) > 1:
|
||||
val = colors["default_branch"]
|
||||
values = ["default"]
|
||||
values.extend(DVMBasicMethodBlock.get_special_ins(ins_idx - last_instru.get_length()).get_values())
|
||||
|
||||
# updating dot edges
|
||||
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
|
||||
label_edge = ""
|
||||
|
||||
if values:
|
||||
label_edge = values.pop(0)
|
||||
|
||||
child_id = hashlib.md5(sha256 + DVMBasicMethodBlockChild[-1].get_name()).hexdigest()
|
||||
edges_html += "struct_%s:tail -> struct_%s:header [color=\"%s\", label=\"%s\"];\n" % (block_id, child_id, val, label_edge)
|
||||
# color switch
|
||||
if val == colors["false_branch"]:
|
||||
val = colors["true_branch"]
|
||||
elif val == colors["default_branch"]:
|
||||
val = colors["true_branch"]
|
||||
|
||||
exception_analysis = DVMBasicMethodBlock.get_exception_analysis()
|
||||
if exception_analysis:
|
||||
for exception_elem in exception_analysis.exceptions:
|
||||
exception_block = exception_elem[-1]
|
||||
if exception_block:
|
||||
exception_id = hashlib.md5(sha256 + exception_block.get_name()).hexdigest()
|
||||
edges_html += "struct_%s:tail -> struct_%s:header [color=\"%s\", label=\"%s\"];\n" % (block_id, exception_id, "black", exception_elem[0])
|
||||
|
||||
for link in new_links:
|
||||
DVMBasicMethodBlock = link[0]
|
||||
DVMBasicMethodBlockChild = mx.basic_blocks.get_basic_block(link[2])
|
||||
|
||||
if DVMBasicMethodBlockChild:
|
||||
block_id = hashlib.md5(sha256 + DVMBasicMethodBlock.get_name()).hexdigest()
|
||||
child_id = hashlib.md5(sha256 + DVMBasicMethodBlockChild.get_name()).hexdigest()
|
||||
|
||||
edges_html += "struct_%s:tail -> struct_%s:header [color=\"%s\", label=\"data(0x%x) to @0x%x\", style=\"dashed\"];\n" % (block_id, child_id, "yellow", link[1], link[2])
|
||||
|
||||
method_label = method.get_class_name() + "." + method.get_name() + "->" + method.get_descriptor()
|
||||
|
||||
method_information = method.get_information()
|
||||
if method_information:
|
||||
method_label += "\\nLocal registers v%d ... v%d" % (method_information["registers"][0], method_information["registers"][1])
|
||||
if "params" in method_information:
|
||||
for register, rtype in method_information["params"]:
|
||||
method_label += "\\nparam v%d = %s" % (register, rtype)
|
||||
method_label += "\\nreturn = %s" % (method_information["return"])
|
||||
|
||||
return {'name': method_label,
|
||||
'nodes': blocks_html,
|
||||
'edges': edges_html}
|
||||
|
||||
|
||||
def method2format(output, _format="png", mx=None, raw=None):
|
||||
"""
|
||||
Export method to a specific file format
|
||||
|
||||
@param output : output filename
|
||||
@param _format : format type (png, jpg ...) (default : png)
|
||||
@param mx : specify the MethodAnalysis object
|
||||
@param raw : use directly a dot raw buffer if None
|
||||
"""
|
||||
try:
|
||||
import pydot
|
||||
except ImportError:
|
||||
error("module pydot not found")
|
||||
|
||||
buff = "digraph {\n"
|
||||
buff += "graph [rankdir=TB]\n"
|
||||
buff += "node [shape=plaintext]\n"
|
||||
|
||||
if raw:
|
||||
data = raw
|
||||
else:
|
||||
data = method2dot(mx)
|
||||
|
||||
# subgraphs cluster
|
||||
buff += "subgraph cluster_" + hashlib.md5(output).hexdigest() + " {\nlabel=\"%s\"\n" % data['name']
|
||||
buff += data['nodes']
|
||||
buff += "}\n"
|
||||
|
||||
# subgraphs edges
|
||||
buff += data['edges']
|
||||
buff += "}\n"
|
||||
|
||||
d = pydot.graph_from_dot_data(buff)
|
||||
if d:
|
||||
getattr(d, "write_" + _format.lower())(output)
|
||||
|
||||
|
||||
def method2png(output, mx, raw=False):
|
||||
"""
|
||||
Export method to a png file format
|
||||
|
||||
:param output: output filename
|
||||
:type output: string
|
||||
:param mx: specify the MethodAnalysis object
|
||||
:type mx: :class:`MethodAnalysis` object
|
||||
:param raw: use directly a dot raw buffer
|
||||
:type raw: string
|
||||
"""
|
||||
buff = raw
|
||||
if raw == False:
|
||||
buff = method2dot(mx)
|
||||
|
||||
method2format(output, "png", mx, buff)
|
||||
|
||||
|
||||
def method2jpg(output, mx, raw=False):
|
||||
"""
|
||||
Export method to a jpg file format
|
||||
|
||||
:param output: output filename
|
||||
:type output: string
|
||||
:param mx: specify the MethodAnalysis object
|
||||
:type mx: :class:`MethodAnalysis` object
|
||||
:param raw: use directly a dot raw buffer (optional)
|
||||
:type raw: string
|
||||
"""
|
||||
buff = raw
|
||||
if raw == False:
|
||||
buff = method2dot(mx)
|
||||
|
||||
method2format(output, "jpg", mx, buff)
|
||||
|
||||
|
||||
def vm2json(vm):
|
||||
d = {}
|
||||
d["name"] = "root"
|
||||
d["children"] = []
|
||||
|
||||
for _class in vm.get_classes():
|
||||
c_class = {}
|
||||
c_class["name"] = _class.get_name()
|
||||
c_class["children"] = []
|
||||
|
||||
for method in _class.get_methods():
|
||||
c_method = {}
|
||||
c_method["name"] = method.get_name()
|
||||
c_method["children"] = []
|
||||
|
||||
c_class["children"].append(c_method)
|
||||
|
||||
d["children"].append(c_class)
|
||||
|
||||
return json.dumps(d)
|
||||
|
||||
|
||||
class TmpBlock:
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def get_name(self):
|
||||
return self.name
|
||||
|
||||
|
||||
def method2json(mx, directed_graph=False):
|
||||
if directed_graph:
|
||||
return method2json_direct(mx)
|
||||
return method2json_undirect(mx)
|
||||
|
||||
|
||||
def method2json_undirect(mx):
|
||||
d = {}
|
||||
reports = []
|
||||
d["reports"] = reports
|
||||
|
||||
for DVMBasicMethodBlock in mx.basic_blocks.gets():
|
||||
cblock = {}
|
||||
|
||||
cblock["BasicBlockId"] = DVMBasicMethodBlock.get_name()
|
||||
cblock["registers"] = mx.get_method().get_code().get_registers_size()
|
||||
cblock["instructions"] = []
|
||||
|
||||
ins_idx = DVMBasicMethodBlock.start
|
||||
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions():
|
||||
c_ins = {}
|
||||
c_ins["idx"] = ins_idx
|
||||
c_ins["name"] = DVMBasicMethodBlockInstruction.get_name()
|
||||
c_ins["operands"] = DVMBasicMethodBlockInstruction.get_operands(ins_idx)
|
||||
|
||||
cblock["instructions"].append(c_ins)
|
||||
ins_idx += DVMBasicMethodBlockInstruction.get_length()
|
||||
|
||||
cblock["Edge"] = []
|
||||
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
|
||||
cblock["Edge"].append(DVMBasicMethodBlockChild[-1].get_name())
|
||||
|
||||
reports.append(cblock)
|
||||
|
||||
return json.dumps(d)
|
||||
|
||||
|
||||
def method2json_direct(mx):
|
||||
d = {}
|
||||
reports = []
|
||||
d["reports"] = reports
|
||||
|
||||
hooks = {}
|
||||
|
||||
l = []
|
||||
for DVMBasicMethodBlock in mx.basic_blocks.gets():
|
||||
for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock.childs):
|
||||
if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1].get_name():
|
||||
|
||||
preblock = TmpBlock(DVMBasicMethodBlock.get_name() + "-pre")
|
||||
|
||||
cnblock = {}
|
||||
cnblock["BasicBlockId"] = DVMBasicMethodBlock.get_name() + "-pre"
|
||||
cnblock["start"] = DVMBasicMethodBlock.start
|
||||
cnblock["notes"] = []
|
||||
|
||||
cnblock["Edge"] = [DVMBasicMethodBlock.get_name()]
|
||||
cnblock["registers"] = 0
|
||||
cnblock["instructions"] = []
|
||||
cnblock["info_bb"] = 0
|
||||
|
||||
l.append(cnblock)
|
||||
|
||||
for parent in DVMBasicMethodBlock.fathers:
|
||||
hooks[parent[-1].get_name()] = []
|
||||
hooks[parent[-1].get_name()].append(preblock)
|
||||
|
||||
for idx, child in enumerate(parent[-1].childs):
|
||||
if child[-1].get_name() == DVMBasicMethodBlock.get_name():
|
||||
hooks[parent[-1].get_name()].append(child[-1])
|
||||
|
||||
for DVMBasicMethodBlock in mx.basic_blocks.gets():
|
||||
cblock = {}
|
||||
|
||||
cblock["BasicBlockId"] = DVMBasicMethodBlock.get_name()
|
||||
cblock["start"] = DVMBasicMethodBlock.start
|
||||
cblock["notes"] = DVMBasicMethodBlock.get_notes()
|
||||
|
||||
cblock["registers"] = mx.get_method().get_code().get_registers_size()
|
||||
cblock["instructions"] = []
|
||||
|
||||
ins_idx = DVMBasicMethodBlock.start
|
||||
last_instru = None
|
||||
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions():
|
||||
c_ins = {}
|
||||
c_ins["idx"] = ins_idx
|
||||
c_ins["name"] = DVMBasicMethodBlockInstruction.get_name()
|
||||
c_ins["operands"] = DVMBasicMethodBlockInstruction.get_operands(ins_idx)
|
||||
|
||||
c_ins["formatted_operands"] = DVMBasicMethodBlockInstruction.get_formatted_operands()
|
||||
|
||||
cblock["instructions"].append(c_ins)
|
||||
|
||||
if (DVMBasicMethodBlockInstruction.get_op_value() == 0x2b or DVMBasicMethodBlockInstruction.get_op_value() == 0x2c):
|
||||
values = DVMBasicMethodBlock.get_special_ins(ins_idx)
|
||||
cblock["info_next"] = values.get_values()
|
||||
|
||||
ins_idx += DVMBasicMethodBlockInstruction.get_length()
|
||||
last_instru = DVMBasicMethodBlockInstruction
|
||||
|
||||
cblock["info_bb"] = 0
|
||||
if DVMBasicMethodBlock.childs:
|
||||
if len(DVMBasicMethodBlock.childs) > 1:
|
||||
cblock["info_bb"] = 1
|
||||
|
||||
if (last_instru.get_op_value() == 0x2b or last_instru.get_op_value() == 0x2c):
|
||||
cblock["info_bb"] = 2
|
||||
|
||||
cblock["Edge"] = []
|
||||
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
|
||||
ok = False
|
||||
if DVMBasicMethodBlock.get_name() in hooks:
|
||||
if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock.get_name()]:
|
||||
ok = True
|
||||
cblock["Edge"].append(hooks[DVMBasicMethodBlock.get_name()][0].get_name())
|
||||
|
||||
if not ok:
|
||||
cblock["Edge"].append(DVMBasicMethodBlockChild[-1].get_name())
|
||||
|
||||
exception_analysis = DVMBasicMethodBlock.get_exception_analysis()
|
||||
if exception_analysis:
|
||||
cblock["Exceptions"] = exception_analysis.get()
|
||||
|
||||
reports.append(cblock)
|
||||
|
||||
reports.extend(l)
|
||||
|
||||
return json.dumps(d)
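# Summary of what method2json_direct() adds on top of the undirected variant
# (derived from the code above): each report also carries "start", "notes",
# an "info_bb" flag (0 = plain block, 1 = more than one successor,
# 2 = ends with a packed-/sparse-switch, opcodes 0x2b/0x2c), an "info_next"
# list for switch payloads, and an optional "Exceptions" entry. When a block
# loops back onto itself, a synthetic "<name>-pre" block is appended and the
# parents' edges are rerouted through it via the `hooks` table.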
|
||||
|
||||
|
||||
class SV:
|
||||
def __init__(self, size, buff):
|
||||
self.__size = size
|
||||
self.__value = unpack(self.__size, buff)[0]
|
||||
|
||||
def _get(self):
|
||||
return pack(self.__size, self.__value)
|
||||
|
||||
def __str__(self) :
|
||||
return "0x%x" % self.__value
|
||||
|
||||
def __int__(self) :
|
||||
return self.__value
|
||||
|
||||
def get_value_buff(self) :
|
||||
return self._get()
|
||||
|
||||
def get_value(self) :
|
||||
return self.__value
|
||||
|
||||
def set_value(self, attr) :
|
||||
self.__value = attr
|
||||
|
||||
class SVs :
|
||||
def __init__(self, size, ntuple, buff) :
|
||||
self.__size = size
|
||||
|
||||
self.__value = ntuple._make( unpack( self.__size, buff ) )
|
||||
|
||||
def _get(self) :
|
||||
l = []
|
||||
for i in self.__value._fields :
|
||||
l.append( getattr( self.__value, i ) )
|
||||
return pack( self.__size, *l)
|
||||
|
||||
def _export(self) :
|
||||
return [ x for x in self.__value._fields ]
|
||||
|
||||
def get_value_buff(self) :
|
||||
return self._get()
|
||||
|
||||
def get_value(self) :
|
||||
return self.__value
|
||||
|
||||
def set_value(self, attr) :
|
||||
self.__value = self.__value._replace( **attr )
|
||||
|
||||
def __str__(self) :
|
||||
return self.__value.__str__()
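# Hedged usage sketch for SV and SVs: `size` is a struct format string and
# `buff` the raw bytes to decode. The format strings and values below are
# illustrative only.
#
#   from collections import namedtuple
#
#   v = SV("<L", "\x2a\x00\x00\x00")        # one little-endian unsigned int
#   int(v)                                   # -> 42
#   v.set_value(7)
#   v.get_value_buff()                       # -> "\x07\x00\x00\x00"
#
#   Header = namedtuple("Header", "magic count")
#   hs = SVs("<2L", Header, "\x01\x00\x00\x00\x02\x00\x00\x00")
#   hs.get_value().count                     # -> 2
#   hs.set_value({"count": 5})               # namedtuple._replace() under the hood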
|
||||
|
||||
def object_to_str(obj) :
|
||||
if isinstance(obj, str) :
|
||||
return obj
|
||||
elif isinstance(obj, bool) :
|
||||
return ""
|
||||
elif isinstance(obj, int) :
|
||||
return pack("<L", obj)
|
||||
elif obj is None :
|
||||
return ""
|
||||
else :
|
||||
#print type(obj), obj
|
||||
return obj.get_raw()
|
||||
|
||||
class MethodBC(object) :
|
||||
def show(self, value) :
|
||||
getattr(self, "show_" + value)()
|
||||
|
||||
|
||||
class BuffHandle:
|
||||
def __init__(self, buff):
|
||||
self.__buff = buff
|
||||
self.__idx = 0
|
||||
|
||||
def size(self):
|
||||
return len(self.__buff)
|
||||
|
||||
def set_idx(self, idx):
|
||||
self.__idx = idx
|
||||
|
||||
def get_idx(self):
|
||||
return self.__idx
|
||||
|
||||
def readNullString(self, size):
|
||||
data = self.read(size)
|
||||
return data
|
||||
|
||||
def read_b(self, size) :
|
||||
return self.__buff[ self.__idx : self.__idx + size ]
|
||||
|
||||
def read_at(self, offset, size):
|
||||
return self.__buff[ offset : offset + size ]
|
||||
|
||||
def read(self, size) :
|
||||
if isinstance(size, SV) :
|
||||
size = size.value
|
||||
|
||||
buff = self.__buff[ self.__idx : self.__idx + size ]
|
||||
self.__idx += size
|
||||
|
||||
return buff
|
||||
|
||||
def end(self) :
|
||||
return self.__idx == len(self.__buff)
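# Hedged usage sketch for BuffHandle, a small cursor over an in-memory buffer
# (the byte string below is illustrative only):
#
#   bh = BuffHandle("dex\n035\x00")
#   bh.read(4)         # -> "dex\n"   (cursor advances to 4)
#   bh.read_b(4)       # -> "035\x00" (peek: cursor is left unchanged)
#   bh.read_at(0, 3)   # -> "dex"     (absolute read, cursor unchanged)
#   bh.end()           # -> False until the cursor reaches size()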
|
||||
|
||||
class Buff :
|
||||
def __init__(self, offset, buff) :
|
||||
self.offset = offset
|
||||
self.buff = buff
|
||||
|
||||
self.size = len(buff)
|
||||
|
||||
|
||||
class _Bytecode(object):
|
||||
def __init__(self, buff):
|
||||
try :
|
||||
import psyco
|
||||
psyco.full()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
self.__buff = buff
|
||||
self.__idx = 0
|
||||
|
||||
def read(self, size) :
|
||||
if isinstance(size, SV) :
|
||||
size = size.value
|
||||
|
||||
buff = self.__buff[ self.__idx : self.__idx + size ]
|
||||
self.__idx += size
|
||||
|
||||
return buff
|
||||
|
||||
def readat(self, off) :
|
||||
if isinstance(off, SV) :
|
||||
off = off.value
|
||||
|
||||
return self.__buff[ off : ]
|
||||
|
||||
def read_b(self, size) :
|
||||
return self.__buff[ self.__idx : self.__idx + size ]
|
||||
|
||||
def set_idx(self, idx) :
|
||||
self.__idx = idx
|
||||
|
||||
def get_idx(self) :
|
||||
return self.__idx
|
||||
|
||||
def add_idx(self, idx) :
|
||||
self.__idx += idx
|
||||
|
||||
def register(self, type_register, fct) :
|
||||
self.__registers[ type_register ].append( fct )
|
||||
|
||||
def get_buff(self) :
|
||||
return self.__buff
|
||||
|
||||
def length_buff(self) :
|
||||
return len( self.__buff )
|
||||
|
||||
def set_buff(self, buff) :
|
||||
self.__buff = buff
|
||||
|
||||
def save(self, filename) :
|
||||
fd = open(filename, "w")
|
||||
buff = self._save()
|
||||
fd.write( buff )
|
||||
fd.close()
|
||||
|
||||
def FormatClassToJava(input) :
|
||||
"""
|
||||
Transform a typical XML format class name into the Java (bytecode) format
|
||||
|
||||
:param input: the input class name
|
||||
:rtype: string
|
||||
"""
|
||||
return "L" + input.replace(".", "/") + ";"
|
||||
|
||||
def FormatClassToPython(input) :
|
||||
i = input[:-1]
|
||||
i = i.replace("/", "_")
|
||||
i = i.replace("$", "_")
|
||||
|
||||
return i
|
||||
|
||||
def FormatNameToPython(input) :
|
||||
i = input.replace("<", "")
|
||||
i = i.replace(">", "")
|
||||
i = i.replace("$", "_")
|
||||
|
||||
return i
|
||||
|
||||
def FormatDescriptorToPython(input) :
|
||||
i = input.replace("/", "_")
|
||||
i = i.replace(";", "")
|
||||
i = i.replace("[", "")
|
||||
i = i.replace("(", "")
|
||||
i = i.replace(")", "")
|
||||
i = i.replace(" ", "")
|
||||
i = i.replace("$", "")
|
||||
|
||||
return i
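# Hedged examples for the three helpers above (inputs are illustrative):
#
#   FormatClassToPython("Landroid/app/Activity;")       # -> "Landroid_app_Activity"
#   FormatNameToPython("<init>")                        # -> "init"
#   FormatDescriptorToPython("(ILjava/lang/String;)V")  # -> "ILjava_lang_StringV"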
|
||||
|
||||
class Node:
|
||||
def __init__(self, n, s):
|
||||
self.id = n
|
||||
self.title = s
|
||||
self.children = []
|
0
androguard/core/bytecodes/__init__.py
Normal file
4379
androguard/core/bytecodes/api_permissions.py
Normal file
File diff suppressed because it is too large
1799
androguard/core/bytecodes/apk.py
Normal file
File diff suppressed because it is too large
67
androguard/core/bytecodes/arm.py
Normal file
@ -0,0 +1,67 @@
|
||||
# Radare !
|
||||
|
||||
from r2 import r_bin
|
||||
from r2 import r_asm
|
||||
from r2 import r_anal
|
||||
from r2 import r_core
|
||||
|
||||
from miasm.arch.arm_arch import arm_mn
|
||||
from miasm.core.bin_stream import bin_stream
|
||||
from miasm.core import asmbloc
|
||||
|
||||
|
||||
class ARM2 :
|
||||
def __init__(self) :
|
||||
b = r_bin.RBin ()
|
||||
b.load("./apks/exploits/617efb2d51ad5c4aed50b76119ad880c6adcd4d2e386b3170930193525b0563d", None)
|
||||
baddr= b.get_baddr()
|
||||
print '-> Sections'
|
||||
for i in b.get_sections ():
|
||||
print 'offset=0x%08x va=0x%08x size=%05i %s' % (i.offset, baddr+i.rva, i.size, i.name)
|
||||
|
||||
core = r_core.RCore()
|
||||
core.config.set_i("io.va", 1)
|
||||
core.config.set_i("anal.split", 1)
|
||||
|
||||
core.file_open("./apks/exploits/617efb2d51ad5c4aed50b76119ad880c6adcd4d2e386b3170930193525b0563d", 0, 0)
|
||||
core.bin_load( None )
|
||||
|
||||
core.anal_all()
|
||||
|
||||
for fcn in core.anal.get_fcns() :
|
||||
print type(fcn), fcn.type, "%x" % fcn.addr, fcn.ninstr, fcn.name
|
||||
# if (fcn.type == FcnType_FCN or fcn.type == FcnType_SYM):
|
||||
|
||||
for s in core.bin.get_entries() :
|
||||
print s, type(s), s.rva, "%x" % s.offset
|
||||
|
||||
|
||||
#a = r_asm.RAsm()
|
||||
for s in core.bin.get_symbols() :
|
||||
print s, s.name, s.rva, s.offset, s.size
|
||||
if s.name == "rootshell" :
|
||||
#print core.disassemble_bytes( 0x8000 + s.offset, s.size )
|
||||
|
||||
#core.assembler.mdisassemble( 0x8000 + s.offset, s.size )
|
||||
z = core.op_anal( 0x8000 + s.offset )
|
||||
print z.mnemonic
|
||||
|
||||
raise("oo")
|
||||
|
||||
print core.bin.bins, core.bin.user
|
||||
d = core.bin.read_at( 0x8000 + s.offset, x, s.size )
|
||||
print d
|
||||
raise("ooo")
|
||||
j = 0
|
||||
while j < s.size :
|
||||
v = core.disassemble( 0x8000 + s.offset + j )
|
||||
v1 = core.op_str( 0x8000 + s.offset + j )
|
||||
|
||||
print v1
|
||||
# print 0x8000 + s.offset + j, j, v.inst_len, v.buf_asm
|
||||
j += v.inst_len
|
||||
|
||||
#for i in core.asm_bwdisassemble(s.rva, 4, s.size/4) :
|
||||
# print "la", i
|
||||
# print a.mdisassemble( 20, 0x90 ) #"main", "main" ) #s.name )
|
||||
|
8372
androguard/core/bytecodes/dvm.py
Normal file
File diff suppressed because it is too large
341
androguard/core/bytecodes/dvm_permissions.py
Normal file
@ -0,0 +1,341 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# frameworks/base/core/res/AndroidManifest.xml
|
||||
########################################## PERMISSIONS ########################################################
|
||||
DVM_PERMISSIONS = {
|
||||
"MANIFEST_PERMISSION": {
|
||||
|
||||
# MESSAGES
|
||||
"SEND_SMS": ["dangerous", "send SMS messages", "Allows application to send SMS messages. Malicious applications may cost you money by sending messages without your confirmation."],
|
||||
"SEND_SMS_NO_CONFIRMATION": ["signatureOrSystem", "send SMS messages", "send SMS messages via the Messaging app with no user input or confirmation"],
|
||||
"RECEIVE_SMS": ["dangerous", "receive SMS", "Allows application to receive and process SMS messages. Malicious applications may monitor your messages or delete them without showing them to you."],
|
||||
"RECEIVE_MMS": ["dangerous", "receive MMS", "Allows application to receive and process MMS messages. Malicious applications may monitor your messages or delete them without showing them to you."],
|
||||
"RECEIVE_EMERGENCY_BROADCAST": [ "signatureOrSystem", "", "Allows an application to receive emergency cell broadcast messages, to record or display them to the user. Reserved for system apps." ],
|
||||
"READ_CELL_BROADCASTS" : [ "dangerous", "received cell broadcast messages", "Allows an application to read previously received cell broadcast "\
|
||||
"messages and to register a content observer to get notifications when "\
|
||||
"a cell broadcast has been received and added to the database. For "\
|
||||
"emergency alerts, the database is updated immediately after the "\
|
||||
"alert dialog and notification sound/vibration/speech are presented."\
|
||||
"The \"read\" column is then updated after the user dismisses the alert."\
|
||||
"This enables supplementary emergency assistance apps to start loading "\
|
||||
"additional emergency information (if Internet access is available) "\
|
||||
"when the alert is first received, and to delay presenting the info "\
|
||||
"to the user until after the initial alert dialog is dismissed." ],
|
||||
"READ_SMS" : [ "dangerous" , "read SMS or MMS" , "Allows application to read SMS messages stored on your phone or SIM card. Malicious applications may read your confidential messages." ],
|
||||
"WRITE_SMS" : [ "dangerous" , "edit SMS or MMS" , "Allows application to write to SMS messages stored on your phone or SIM card. Malicious applications may delete your messages." ],
|
||||
"RECEIVE_WAP_PUSH" : [ "dangerous" , "receive WAP" , "Allows application to receive and process WAP messages. Malicious applications may monitor your messages or delete them without showing them to you." ],
|
||||
"BROADCAST_SMS" : [ "signature" , "send SMS-received broadcast" , "Allows an application to broadcast a notification that an SMS message has been received. Malicious applications may use this to forge incoming SMS messages." ],
|
||||
"BROADCAST_WAP_PUSH" : [ "signature" , "send WAP-PUSH-received broadcast" , "Allows an application to broadcast a notification that a WAP-PUSH message has been received. Malicious applications may use this to forge MMS message receipt or to replace the content of any web page silently with malicious variants." ],
|
||||
|
||||
# SOCIAL_INFO
|
||||
"READ_CONTACTS" : [ "dangerous" , "read contact data" , "Allows an application to read all of the contact (address) data stored on your phone. Malicious applications can use this to send your data to other people." ],
|
||||
"WRITE_CONTACTS" : [ "dangerous" , "write contact data" , "Allows an application to modify the contact (address) data stored on your phone. Malicious applications can use this to erase or modify your contact data." ],
|
||||
"BIND_DIRECTORY_SEARCH" : [ "signatureOrSystem", "execute contacts directory search", "Allows an application to execute contacts directory search. This should only be used by ContactsProvider." ],
|
||||
"READ_CALL_LOG": [ "dangerous", "read the user's call log.", "Allows an application to read the user's call log." ],
|
||||
"WRITE_CALL_LOG": [ "dangerous", "write (but not read) the user's contacts data.", "Allows an application to write (but not read) the user's contacts data." ],
|
||||
"READ_SOCIAL_STREAM" : [ "dangerous", "read from the user's social stream", "Allows an application to read from the user's social stream." ],
|
||||
"WRITE_SOCIAL_STREAM" : [ "dangerous", "write the user's social stream", "Allows an application to write (but not read) the user's social stream data." ],
|
||||
|
||||
# PERSONAL_INFO
|
||||
"READ_PROFILE" : [ "dangerous", "read the user's personal profile data", "Allows an application to read the user's personal profile data."],
|
||||
"WRITE_PROFILE" : [ "dangerous", "write the user's personal profile data", "Allows an application to write (but not read) the user's personal profile data."],
|
||||
"RETRIEVE_WINDOW_CONTENT": [ "signatureOrSystem", "", "Allows an application to retrieve the content of the active window An active window is the window that has fired an accessibility event. " ],
|
||||
"BIND_APPWIDGET" : [ "signatureOrSystem" , "choose widgets" , "Allows the application to tell the system which widgets can be used by which application. With this permission, applications can give access to personal data to other applications. Not for use by normal applications." ],
|
||||
"BIND_KEYGUARD_APPWIDGET" : [ "signatureOrSystem", "", "Private permission, to restrict who can bring up a dialog to add a new keyguard widget" ],
|
||||
|
||||
# CALENDAR
|
||||
"READ_CALENDAR" : [ "dangerous" , "read calendar events" , "Allows an application to read all of the calendar events stored on your phone. Malicious applications can use this to send your calendar events to other people." ],
|
||||
"WRITE_CALENDAR": [ "dangerous" , "add or modify calendar events and send emails to guests" , "Allows an application to add or change the events on your calendar, which may send emails to guests. Malicious applications can use this to erase or modify your calendar events or to send emails to guests." ],
|
||||
|
||||
|
||||
# USER_DICTIONARY
|
||||
"READ_USER_DICTIONARY" : [ "dangerous" , "read user-defined dictionary" , "Allows an application to read any private words, names and phrases that the user may have stored in the user dictionary." ],
|
||||
|
||||
# WRITE_USER_DICTIONARY
|
||||
"WRITE_USER_DICTIONARY" : [ "normal" , "write to user-defined dictionary" , "Allows an application to write new words into the user dictionary." ],
|
||||
|
||||
# BOOKMARKS
|
||||
"READ_HISTORY_BOOKMARKS" : [ "dangerous" , "read Browser\'s history and bookmarks" , "Allows the application to read all the URLs that the browser has visited and all of the browser\'s bookmarks." ],
|
||||
"WRITE_HISTORY_BOOKMARKS" : [ "dangerous" , "write Browser\'s history and bookmarks" , "Allows an application to modify the browser\'s history or bookmarks stored on your phone. Malicious applications can use this to erase or modify your browser\'s data." ],
|
||||
|
||||
# DEVICE_ALARMS
|
||||
"SET_ALARM" : [ "normal" , "set alarm in alarm clock" , "Allows the application to set an alarm in an installed alarm clock application. Some alarm clock applications may not implement this feature." ],
|
||||
|
||||
# VOICEMAIL
|
||||
"ADD_VOICEMAIL" : [ "dangerous", "add voicemails into the system", "Allows an application to add voicemails into the system." ],
|
||||
|
||||
# LOCATION
|
||||
"ACCESS_FINE_LOCATION" : [ "dangerous" , "fine (GPS) location" , "Access fine location sources, such as the Global Positioning System on the phone, where available. Malicious applications can use this to determine where you are and may consume additional battery power." ],
|
||||
"ACCESS_COARSE_LOCATION" : [ "dangerous" , "coarse (network-based) location" , "Access coarse location sources, such as the mobile network database, to determine an approximate phone location, where available. Malicious applications can use this to determine approximately where you are." ],
|
||||
"ACCESS_MOCK_LOCATION" : [ "dangerous" , "mock location sources for testing" , "Create mock location sources for testing. Malicious applications can use this to override the location and/or status returned by real-location sources such as GPS or Network providers." ],
|
||||
"ACCESS_LOCATION_EXTRA_COMMANDS" : [ "normal" , "access extra location provider commands" , "Access extra location provider commands. Malicious applications could use this to interfere with the operation of the GPS or other location sources." ],
|
||||
"INSTALL_LOCATION_PROVIDER" : [ "signatureOrSystem" , "permission to install a location provider" , "Create mock location sources for testing. Malicious applications can use this to override the location and/or status returned by real-location sources such as GPS or Network providers, or monitor and report your location to an external source." ],
|
||||
|
||||
|
||||
# NETWORK
|
||||
"INTERNET" : [ "dangerous" , "full Internet access" , "Allows an application to create network sockets." ],
|
||||
"ACCESS_NETWORK_STATE" : [ "normal" , "view network status" , "Allows an application to view the status of all networks." ],
|
||||
"ACCESS_WIFI_STATE" : [ "normal" , "view Wi-Fi status" , "Allows an application to view the information about the status of Wi-Fi." ],
|
||||
"CHANGE_WIFI_STATE" : [ "dangerous" , "change Wi-Fi status" , "Allows an application to connect to and disconnect from Wi-Fi access points and to make changes to configured Wi-Fi networks." ],
|
||||
"CHANGE_NETWORK_STATE" : [ "normal" , "change network connectivity" , "Allows an application to change the state of network connectivity." ],
|
||||
"ACCESS_WIMAX_STATE": [ "normal", "", "" ],
|
||||
"CHANGE_WIMAX_STATE": [ "dangerous", "", "" ],
|
||||
"NFC" : [ "dangerous" , "control Near-Field Communication" , "Allows an application to communicate with Near-Field Communication (NFC) tags, cards and readers." ],
|
||||
"CONNECTIVITY_INTERNAL": [ "signatureOrSystem", "use privileged ConnectivityManager API", "Allows an internal user to use privileged ConnectivityManager API" ],
|
||||
"RECEIVE_DATA_ACTIVITY_CHANGE": [ "signatureOrSystem", "", "" ],
|
||||
|
||||
|
||||
# BLUETOOTH_NETWORK
|
||||
"BLUETOOTH" : [ "dangerous" , "create Bluetooth connections" , "Allows an application to view configuration of the local Bluetooth phone and to make and accept connections with paired devices." ],
|
||||
"BLUETOOTH_ADMIN" : [ "dangerous" , "bluetooth administration" , "Allows an application to configure the local Bluetooth phone and to discover and pair with remote devices." ],
|
||||
|
||||
|
||||
# SYSTEM TOOLS
|
||||
"BLUETOOTH_STACK": [ "signature", "", "" ],
|
||||
"NET_ADMIN": [ "signature", "configure network interfaces, configure/use IPSec, etc", "Allows access to configure network interfaces, configure/use IPSec, etc." ],
|
||||
"REMOTE_AUDIO_PLAYBACK": [ "signature", "remote audio playback", "Allows registration for remote audio playback" ],
|
||||
"READ_EXTERNAL_STORAGE" : [ "normal", "read from external storage", "Allows an application to read from external storage" ],
|
||||
"INTERACT_ACROSS_USERS": [ "signatureOrSystemOrDevelopment", "", "Allows an application to call APIs that allow it to do interactions across the users on the device, using singleton services and user-targeted broadcasts. This permission is not available to third party applications." ],
|
||||
"INTERACT_ACROSS_USERS_FULL": [ "signature", "", "Fuller form of INTERACT_ACROSS_USERS that removes restrictions on where broadcasts can be sent and allows other types of interactions." ],
|
||||
"MANAGE_USERS": [ "signatureOrSystem", "", "Allows an application to call APIs that allow it to query and manage users on the device. This permission is not available to third party applications." ],
|
||||
"GET_DETAILED_TASKS": [ "signature", "", "Allows an application to get full detailed information about recently running tasks, with full fidelity to the real state." ],
|
||||
"START_ANY_ACTIVITY": [ "signature", "", "Allows an application to start any activity, regardless of permission protection or exported state." ],
|
||||
"SET_SCREEN_COMPATIBILITY": [ "signature", "", "Change the screen compatibility mode of applications" ],
|
||||
"CHANGE_CONFIGURATION" : [ "signatureOrSystemOrDevelopment" , "change your UI settings" , "Allows an application to change the current configuration, such as the locale or overall font size." ],
|
||||
"FORCE_STOP_PACKAGES" : [ "signature" , "force-stop other applications" , "Allows an application to stop other applications forcibly." ],
|
||||
"SET_ANIMATION_SCALE" : [ "signatureOrSystemOrDevelopment" , "modify global animation speed" , "Allows an application to change the global animation speed (faster or slower animations) at any time." ],
|
||||
"GET_PACKAGE_SIZE" : [ "normal" , "measure application storage space" , "Allows an application to retrieve its code, data and cache sizes" ],
|
||||
"SET_PREFERRED_APPLICATIONS" : [ "signature" , "set preferred applications" , "Allows an application to modify your preferred applications. This can allow malicious applications to silently change the applications that are run, spoofing your existing applications to collect private data from you." ],
|
||||
"BROADCAST_STICKY" : [ "normal" , "send sticky broadcast" , "Allows an application to send sticky broadcasts, which remain after the broadcast ends. Malicious applications can make the phone slow or unstable by causing it to use too much memory." ],
|
||||
"MOUNT_UNMOUNT_FILESYSTEMS" : [ "signatureOrSystem" , "mount and unmount file systems" , "Allows the application to mount and unmount file systems for removable storage." ],
|
||||
"MOUNT_FORMAT_FILESYSTEMS" : [ "signatureOrSystem" , "format external storage" , "Allows the application to format removable storage." ],
|
||||
"ASEC_ACCESS" : [ "signature" , "get information on internal storage" , "Allows the application to get information on internal storage." ],
|
||||
"ASEC_CREATE" : [ "signature" , "create internal storage" , "Allows the application to create internal storage." ],
|
||||
"ASEC_DESTROY" : [ "signature" , "destroy internal storage" , "Allows the application to destroy internal storage." ],
|
||||
"ASEC_MOUNT_UNMOUNT" : [ "signature" , "mount/unmount internal storage" , "Allows the application to mount/unmount internal storage." ],
|
||||
"ASEC_RENAME" : [ "signature" , "rename internal storage" , "Allows the application to rename internal storage." ],
|
||||
"WRITE_APN_SETTINGS" : [ "signatureOrSystem" , "write Access Point Name settings" , "Allows an application to modify the APN settings, such as Proxy and Port of any APN." ],
|
||||
"SUBSCRIBED_FEEDS_READ" : [ "normal" , "read subscribed feeds" , "Allows an application to receive details about the currently synced feeds." ],
|
||||
"SUBSCRIBED_FEEDS_WRITE" : [ "dangerous" , "write subscribed feeds" , "Allows an application to modify your currently synced feeds. This could allow a malicious application to change your synced feeds." ],
|
||||
"CLEAR_APP_CACHE" : [ "dangerous" , "delete all application cache data" , "Allows an application to free phone storage by deleting files in application cache directory. Access is usually very restricted to system process." ],
|
||||
"DIAGNOSTIC" : [ "signature" , "read/write to resources owned by diag" , "Allows an application to read and write to any resource owned by the diag group; for example, files in /dev. This could potentially affect system stability and security. This should ONLY be used for hardware-specific diagnostics by the manufacturer or operator." ],
|
||||
"BROADCAST_PACKAGE_REMOVED" : [ "signature" , "send package removed broadcast" , "Allows an application to broadcast a notification that an application package has been removed. Malicious applications may use this to kill any other application running." ],
|
||||
"BATTERY_STATS" : [ "dangerous" , "modify battery statistics" , "Allows the modification of collected battery statistics. Not for use by normal applications." ],
|
||||
"MODIFY_APPWIDGET_BIND_PERMISSIONS" : [ "signatureOrSystem", "query/set which applications can bind AppWidgets.", "Internal permission allowing an application to query/set which applications can bind AppWidgets." ],
|
||||
"CHANGE_BACKGROUND_DATA_SETTING" : [ "signature" , "change background data usage setting" , "Allows an application to change the background data usage setting." ],
|
||||
"GLOBAL_SEARCH" : [ "signatureOrSystem" , "" , "This permission can be used on content providers to allow the global search " \
|
||||
"system to access their data. Typically it used when the provider has some " \
|
||||
"permissions protecting it (which global search would not be expected to hold)," \
|
||||
"and added as a read-only permission to the path in the provider where global "\
|
||||
"search queries are performed. This permission can not be held by regular applications; "\
|
||||
"it is used by applications to protect themselves from everyone else besides global search" ],
|
||||
"GLOBAL_SEARCH_CONTROL" : [ "signature" , "" , "Internal permission protecting access to the global search " \
|
||||
"system: ensures that only the system can access the provider " \
|
||||
"to perform queries (since this otherwise provides unrestricted " \
|
||||
"access to a variety of content providers), and to write the " \
|
||||
"search statistics (to keep applications from gaming the source " \
|
||||
"ranking)." ],
|
||||
"SET_WALLPAPER_COMPONENT" : [ "signatureOrSystem" , "set a live wallpaper" , "Allows applications to set a live wallpaper." ],
|
||||
"READ_DREAM_STATE" : [ "signature", "", "Allows applications to read dream settings and dream state." ],
|
||||
"WRITE_DREAM_STATE" : [ "signature", "", "Allows applications to write dream settings, and start or stop dreaming." ],
|
||||
"WRITE_SETTINGS" : [ "normal" , "modify global system settings" , "Allows an application to modify the system\'s settings data. Malicious applications can corrupt your system\'s configuration." ],
|
||||
|
||||
# ACCOUNTS
|
||||
"GET_ACCOUNTS" : [ "normal" , "discover known accounts" , "Allows an application to access the list of accounts known by the phone." ],
|
||||
"AUTHENTICATE_ACCOUNTS" : [ "dangerous" , "act as an account authenticator" , "Allows an application to use the account authenticator capabilities of the Account Manager, including creating accounts as well as obtaining and setting their passwords." ],
|
||||
"USE_CREDENTIALS" : [ "dangerous" , "use the authentication credentials of an account" , "Allows an application to request authentication tokens." ],
|
||||
"MANAGE_ACCOUNTS" : [ "dangerous" , "manage the accounts list" , "Allows an application to perform operations like adding and removing accounts and deleting their password." ],
|
||||
"ACCOUNT_MANAGER" : [ "signature" , "act as the Account Manager Service" , "Allows an application to make calls to Account Authenticators" ],
|
||||
|
||||
# AFFECTS_BATTERY
|
||||
"CHANGE_WIFI_MULTICAST_STATE" : [ "dangerous" , "allow Wi-Fi Multicast reception" , "Allows an application to receive packets not directly addressed to your device. This can be useful when discovering services offered nearby. It uses more power than the non-multicast mode." ],
|
||||
"VIBRATE" : [ "normal" , "control vibrator" , "Allows the application to control the vibrator." ],
|
||||
"FLASHLIGHT" : [ "normal" , "control flashlight" , "Allows the application to control the flashlight." ],
|
||||
"WAKE_LOCK" : [ "normal" , "prevent phone from sleeping" , "Allows an application to prevent the phone from going to sleep." ],
|
||||
|
||||
# AUDIO_SETTINGS
|
||||
"MODIFY_AUDIO_SETTINGS" : [ "normal" , "change your audio settings" , "Allows application to modify global audio settings, such as volume and routing." ],
|
||||
|
||||
# HARDWARE_CONTROLS
|
||||
"MANAGE_USB": [ "signatureOrSystem", "manage preferences and permissions for USB devices", "Allows an application to manage preferences and permissions for USB devices" ],
|
||||
"ACCESS_MTP": [ "signatureOrSystem", "access the MTP USB kernel driver", "Allows an application to access the MTP USB kernel driver. For use only by the device side MTP implementation." ],
|
||||
"HARDWARE_TEST" : [ "signature" , "test hardware" , "Allows the application to control various peripherals for the purpose of hardware testing." ],
|
||||
|
||||
# MICROPHONE
|
||||
"RECORD_AUDIO" : [ "dangerous" , "record audio" , "Allows application to access the audio record path." ],
|
||||
|
||||
# CAMERA
|
||||
"CAMERA" : [ "dangerous" , "take pictures and videos" , "Allows application to take pictures and videos with the camera. This allows the application to collect images that the camera is seeing at any time." ],
|
||||
|
||||
# PHONE_CALLS
|
||||
"PROCESS_OUTGOING_CALLS" : [ "dangerous" , "intercept outgoing calls" , "Allows application to process outgoing calls and change the number to be dialled. Malicious applications may monitor, redirect or prevent outgoing calls." ],
|
||||
"MODIFY_PHONE_STATE" : [ "signatureOrSystem" , "modify phone status" , "Allows modification of the telephony state - power on, mmi, etc. Does not include placing calls." ],
|
||||
"READ_PHONE_STATE" : [ "dangerous" , "read phone state and identity" , "Allows the application to access the phone features of the device. An application with this permission can determine the phone number and serial number of this phone, whether a call is active, the number that call is connected to and so on." ],
|
||||
"READ_PRIVILEGED_PHONE_STATE": [ "signatureOrSystem", "read access to privileged phone state", "Allows read access to privileged phone state." ],
|
||||
"CALL_PHONE" : [ "dangerous" , "directly call phone numbers" , "Allows an application to initiate a phone call without going through the Dialer user interface for the user to confirm the call being placed. " ],
|
||||
"USE_SIP" : [ "dangerous" , "make/receive Internet calls" , "Allows an application to use the SIP service to make/receive Internet calls." ],
|
||||
|
||||
# STORAGE
|
||||
"WRITE_EXTERNAL_STORAGE" : [ "dangerous" , "modify/delete SD card contents" , "Allows an application to write to the SD card." ],
|
||||
"WRITE_MEDIA_STORAGE": [ "signatureOrSystem", "write to internal media storage", "Allows an application to write to internal media storage" ],
|
||||
|
||||
# SCREENLOCK
|
||||
"DISABLE_KEYGUARD" : [ "dangerous" , "disable key lock" , "Allows an application to disable the key lock and any associated password security. A legitimate example of this is the phone disabling the key lock when receiving an incoming phone call, then re-enabling the key lock when the call is finished." ],
|
||||
|
||||
# APP_INFO
|
||||
"GET_TASKS" : [ "dangerous" , "retrieve running applications" , "Allows application to retrieve information about currently and recently running tasks. May allow malicious applications to discover private information about other applications." ],
|
||||
"REORDER_TASKS" : [ "normal" , "reorder applications running" , "Allows an application to move tasks to the foreground and background. Malicious applications can force themselves to the front without your control." ],
|
||||
"REMOVE_TASKS": [ "signature", "", "Allows an application to change to remove/kill tasks" ],
|
||||
"RESTART_PACKAGES" : [ "normal" , "kill background processes" , "Allows an application to kill background processes of other applications, even if memory is not low." ],
|
||||
"KILL_BACKGROUND_PROCESSES" : [ "normal" , "kill background processes" , "Allows an application to kill background processes of other applications, even if memory is not low." ],
|
||||
"PERSISTENT_ACTIVITY" : [ "normal" , "make application always run" , "Allows an application to make parts of itself persistent, so that the system can\'t use it for other applications." ],
|
||||
"RECEIVE_BOOT_COMPLETED" : [ "normal" , "automatically start at boot" , "Allows an application to start itself as soon as the system has finished booting. This can make it take longer to start the phone and allow the application to slow down the overall phone by always running." ],
|
||||
|
||||
# DISPLAY
|
||||
"SYSTEM_ALERT_WINDOW" : [ "dangerous" , "display system-level alerts" , "Allows an application to show system-alert windows. Malicious applications can take over the entire screen of the phone." ],
|
||||
|
||||
# WALLPAPER
|
||||
"SET_WALLPAPER" : [ "normal" , "set wallpaper" , "Allows the application to set the system wallpaper." ],
|
||||
"SET_WALLPAPER_HINTS" : [ "normal" , "set wallpaper size hints" , "Allows the application to set the system wallpaper size hints." ],
|
||||
|
||||
# SYSTEM_CLOCK
|
||||
"SET_TIME_ZONE" : [ "normal" , "set time zone" , "Allows an application to change the phone\'s time zone." ],
|
||||
|
||||
# STATUS_BAR
|
||||
"EXPAND_STATUS_BAR" : [ "normal" , "expand/collapse status bar" , "Allows application to expand or collapse the status bar." ],
|
||||
|
||||
# SYNC_SETTINGS
|
||||
"READ_SYNC_SETTINGS" : [ "normal" , "read sync settings" , "Allows an application to read the sync settings, such as whether sync is enabled for Contacts." ],
|
||||
"WRITE_SYNC_SETTINGS" : [ "normal" , "write sync settings" , "Allows an application to modify the sync settings, such as whether sync is enabled for Contacts." ],
|
||||
"READ_SYNC_STATS" : [ "normal" , "read sync statistics" , "Allows an application to read the sync stats; e.g. the history of syncs that have occurred." ],
|
||||
|
||||
# DEVELOPMENT_TOOLS
|
||||
"WRITE_SECURE_SETTINGS" : [ "signatureOrSystemOrDevelopment" , "modify secure system settings" , "Allows an application to modify the system\'s secure settings data. Not for use by normal applications." ],
|
||||
"DUMP" : [ "signatureOrSystemOrDevelopment" , "retrieve system internal status" , "Allows application to retrieve internal status of the system. Malicious applications may retrieve a wide variety of private and secure information that they should never normally need." ],
|
||||
"READ_LOGS" : [ "signatureOrSystemOrDevelopment" , "read sensitive log data" , "Allows an application to read from the system\'s various log files. This allows it to discover general information about what you are doing with the phone, potentially including personal or private information." ],
|
||||
"SET_DEBUG_APP" : [ "signatureOrSystemOrDevelopment" , "enable application debugging" , "Allows an application to turn on debugging for another application. Malicious applications can use this to kill other applications." ],
|
||||
"SET_PROCESS_LIMIT" : [ "signatureOrSystemOrDevelopment" , "limit number of running processes" , "Allows an application to control the maximum number of processes that will run. Never needed for normal applications." ],
|
||||
"SET_ALWAYS_FINISH" : [ "signatureOrSystemOrDevelopment" , "make all background applications close" , "Allows an application to control whether activities are always finished as soon as they go to the background. Never needed for normal applications." ],
|
||||
"SIGNAL_PERSISTENT_PROCESSES" : [ "signatureOrSystemOrDevelopment" , "send Linux signals to applications" , "Allows application to request that the supplied signal be sent to all persistent processes." ],
|
||||
"ACCESS_ALL_EXTERNAL_STORAGE" : [ "signature", "", "Allows an application to access all multi-user external storage" ],
|
||||
|
||||
# No groups ...
|
||||
"SET_TIME": [ "signatureOrSystem" , "set time" , "Allows an application to change the phone\'s clock time." ],
|
||||
"ALLOW_ANY_CODEC_FOR_PLAYBACK": [ "signatureOrSystem", "", "Allows an application to use any media decoder when decoding for playback." ],
|
||||
"STATUS_BAR" : [ "signatureOrSystem" , "disable or modify status bar" , "Allows application to disable the status bar or add and remove system icons." ],
|
||||
"STATUS_BAR_SERVICE" : [ "signature" , "status bar" , "Allows the application to be the status bar." ],
|
||||
"FORCE_BACK" : [ "signature" , "force application to close" , "Allows an application to force any activity that is in the foreground to close and go back. Should never be needed for normal applications." ],
|
||||
"UPDATE_DEVICE_STATS" : [ "signatureOrSystem" , "modify battery statistics" , "Allows the modification of collected battery statistics. Not for use by normal applications." ],
|
||||
"INTERNAL_SYSTEM_WINDOW" : [ "signature" , "display unauthorised windows" , "Allows the creation of windows that are intended to be used by the internal system user interface. Not for use by normal applications." ],
|
||||
"MANAGE_APP_TOKENS" : [ "signature" , "manage application tokens" , "Allows applications to create and manage their own tokens, bypassing their normal Z-ordering. Should never be needed for normal applications." ],
|
||||
"FREEZE_SCREEN": [ "signature", "", "Allows the application to temporarily freeze the screen for a full-screen transition." ],
|
||||
"INJECT_EVENTS" : [ "signature" , "inject user events" , "Allows an application to inject user events (keys, touch, trackball) into the event stream and deliver them to ANY window. Without this permission, you can only deliver events to windows in your own process. Very few applications should need to use this permission" ],
|
||||
"FILTER_EVENTS": [ "signature", "", "Allows an application to register an input filter which filters the stream of user events (keys, touch, trackball) before they are dispatched to any window" ],
|
||||
"RETRIEVE_WINDOW_INFO" : [ "signature", "", "Allows an application to retrieve info for a window from the window manager." ],
|
||||
"TEMPORARY_ENABLE_ACCESSIBILITY": [ "signature", "", "Allows an application to temporary enable accessibility on the device." ],
|
||||
"MAGNIFY_DISPLAY": [ "signature", "", "Allows an application to magnify the content of a display." ],
|
||||
"SET_ACTIVITY_WATCHER" : [ "signature" , "monitor and control all application launching" , "Allows an application to monitor and control how the system launches activities. Malicious applications may compromise the system completely. This permission is needed only for development, never for normal phone usage." ],
|
||||
"SHUTDOWN" : [ "signatureOrSystem" , "partial shutdown" , "Puts the activity manager into a shut-down state. Does not perform a complete shut down." ],
|
||||
"STOP_APP_SWITCHES" : [ "signatureOrSystem" , "prevent app switches" , "Prevents the user from switching to another application." ],
|
||||
"READ_INPUT_STATE" : [ "signature" , "record what you type and actions that you take" , "Allows applications to watch the keys that you press even when interacting with another application (such as entering a password). Should never be needed for normal applications." ],
|
||||
"BIND_INPUT_METHOD" : [ "signature" , "bind to an input method" , "Allows the holder to bind to the top-level interface of an input method. Should never be needed for normal applications." ],
|
||||
"BIND_ACCESSIBILITY_SERVICE" : [ "signature", "", "Must be required by an android.accessibilityservice.AccessibilityService to ensure that only the system can bind to it. " ],
|
||||
"BIND_TEXT_SERVICE" : [ "signature", "", "Must be required by a TextService (e.g. SpellCheckerService) to ensure that only the system can bind to it." ],
|
||||
"BIND_VPN_SERVICE" : [ "signature", "", "Must be required by an {@link android.net.VpnService}, to ensure that only the system can bind to it." ],
|
||||
"BIND_WALLPAPER" : [ "signatureOrSystem" , "bind to wallpaper" , "Allows the holder to bind to the top-level interface of wallpaper. Should never be needed for normal applications." ],
|
||||
"BIND_DEVICE_ADMIN" : [ "signature" , "interact with device admin" , "Allows the holder to send intents to a device administrator. Should never be needed for normal applications." ],
|
||||
"SET_ORIENTATION" : [ "signature" , "change screen orientation" , "Allows an application to change the rotation of the screen at any time. Should never be needed for normal applications." ],
|
||||
"SET_POINTER_SPEED" : [ "signature", "", "Allows low-level access to setting the pointer speed. Not for use by normal applications. " ],
|
||||
"SET_KEYBOARD_LAYOUT" : [ "signature", "", "Allows low-level access to setting the keyboard layout. Not for use by normal applications." ],
|
||||
"INSTALL_PACKAGES" : [ "signatureOrSystem" , "directly install applications" , "Allows an application to install new or updated Android packages. Malicious applications can use this to add new applications with arbitrarily powerful permissions." ],
|
||||
"CLEAR_APP_USER_DATA" : [ "signature" , "delete other applications\' data" , "Allows an application to clear user data." ],
|
||||
"DELETE_CACHE_FILES" : [ "signatureOrSystem" , "delete other applications\' caches" , "Allows an application to delete cache files." ],
|
||||
"DELETE_PACKAGES" : [ "signatureOrSystem" , "delete applications" , "Allows an application to delete Android packages. Malicious applications can use this to delete important applications." ],
|
||||
"MOVE_PACKAGE" : [ "signatureOrSystem" , "Move application resources" , "Allows an application to move application resources from internal to external media and vice versa." ],
|
||||
"CHANGE_COMPONENT_ENABLED_STATE" : [ "signatureOrSystem" , "enable or disable application components" , "Allows an application to change whether or not a component of another application is enabled. Malicious applications can use this to disable important phone capabilities. It is important to be careful with permission, as it is possible to bring application components into an unusable, inconsistent or unstable state." ],
|
||||
"GRANT_REVOKE_PERMISSIONS" : [ "signature", "", "Allows an application to grant or revoke specific permissions." ],
|
||||
"ACCESS_SURFACE_FLINGER" : [ "signature" , "access SurfaceFlinger" , "Allows application to use SurfaceFlinger low-level features." ],
|
||||
"READ_FRAME_BUFFER" : [ "signatureOrSystem" , "read frame buffer" , "Allows application to read the content of the frame buffer." ],
|
||||
"CONFIGURE_WIFI_DISPLAY" : [ "signature", "", "Allows an application to configure and connect to Wifi displays" ],
|
||||
"CONTROL_WIFI_DISPLAY" : [ "signature", "", "Allows an application to control low-level features of Wifi displays such as opening an RTSP socket. This permission should only be used by the display manager." ],
|
||||
"BRICK" : [ "signature" , "permanently disable phone" , "Allows the application to disable the entire phone permanently. This is very dangerous." ],
|
||||
"REBOOT" : [ "signatureOrSystem" , "force phone reboot" , "Allows the application to force the phone to reboot." ],
|
||||
"DEVICE_POWER" : [ "signature" , "turn phone on or off" , "Allows the application to turn the phone on or off." ],
|
||||
"NET_TUNNELING" : [ "signature", "", "Allows low-level access to tun tap driver " ],
|
||||
"FACTORY_TEST" : [ "signature" , "run in factory test mode" , "Run as a low-level manufacturer test, allowing complete access to the phone hardware. Only available when a phone is running in manufacturer test mode." ],
|
||||
"MASTER_CLEAR" : [ "signatureOrSystem" , "reset system to factory defaults" , "Allows an application to completely reset the system to its factory settings, erasing all data, configuration and installed applications." ],
|
||||
"CALL_PRIVILEGED" : [ "signatureOrSystem" , "directly call any phone numbers" , "Allows the application to call any phone number, including emergency numbers, without your intervention. Malicious applications may place unnecessary and illegal calls to emergency services." ],
|
||||
"PERFORM_CDMA_PROVISIONING" : [ "signatureOrSystem" , "directly start CDMA phone setup" , "Allows the application to start CDMA provisioning. Malicious applications may start CDMA provisioning unnecessarily" ],
|
||||
"CONTROL_LOCATION_UPDATES" : [ "signatureOrSystem" , "control location update notifications" , "Allows enabling/disabling location update notifications from the radio. Not for use by normal applications." ],
|
||||
"ACCESS_CHECKIN_PROPERTIES" : [ "signatureOrSystem" , "access check-in properties" , "Allows read/write access to properties uploaded by the check-in service. Not for use by normal applications." ],
|
||||
"PACKAGE_USAGE_STATS" : [ "signatureOrSystem" , "update component usage statistics" , "Allows the modification of collected component usage statistics. Not for use by normal applications." ],
|
||||
"BACKUP" : [ "signatureOrSystem" , "control system back up and restore" , "Allows the application to control the system\'s back-up and restore mechanism. Not for use by normal applications." ],
|
||||
"CONFIRM_FULL_BACKUP" : [ "signature", "", "Allows a package to launch the secure full-backup confirmation UI. ONLY the system process may hold this permission." ],
|
||||
"BIND_REMOTEVIEWS" : [ "signatureOrSystem", "", "Must be required by a {@link android.widget.RemoteViewsService}, to ensure that only the system can bind to it." ],
|
||||
"ACCESS_CACHE_FILESYSTEM" : [ "signatureOrSystem" , "access the cache file system" , "Allows an application to read and write the cache file system." ],
|
||||
"COPY_PROTECTED_DATA" : [ "signature" , "Allows to invoke default container service to copy content. Not for use by normal applications." , "Allows to invoke default container service to copy content. Not for use by normal applications." ],
|
||||
"CRYPT_KEEPER" : [ "signatureOrSystem", "access to the encryption methods", "Internal permission protecting access to the encryption methods" ],
|
||||
"READ_NETWORK_USAGE_HISTORY" : [ "signatureOrSystem", "read historical network usage for specific networks and applications.", "Allows an application to read historical network usage for specific networks and applications."],
|
||||
"MANAGE_NETWORK_POLICY": [ "signature", "manage network policies and to define application-specific rules.", "Allows an application to manage network policies and to define application-specific rules."],
|
||||
"MODIFY_NETWORK_ACCOUNTING" : [ "signatureOrSystem", "account its network traffic against other UIDs.", "Allows an application to account its network traffic against other UIDs."],
|
||||
"C2D_MESSAGE" : [ "signature" , "C2DM permission." , "C2DM permission." ],
|
||||
"PACKAGE_VERIFICATION_AGENT" : [ "signatureOrSystem", "Package verifier needs to have this permission before the PackageManager will trust it to verify packages.", "Package verifier needs to have this permission before the PackageManager will trust it to verify packages."],
|
||||
"BIND_PACKAGE_VERIFIER" : [ "signature", "", "Must be required by package verifier receiver, to ensure that only the system can interact with it.."],
|
||||
"SERIAL_PORT" : [ "signature", "", "Allows applications to access serial ports via the SerialManager." ],
|
||||
"ACCESS_CONTENT_PROVIDERS_EXTERNALLY": [ "signature", "", "Allows the holder to access content providers from outside an ApplicationThread. This permission is enforced by the ActivityManagerService on the corresponding APIs,in particular ActivityManagerService#getContentProviderExternal(String) and ActivityManagerService#removeContentProviderExternal(String)."],
|
||||
"UPDATE_LOCK" : [ "signatureOrSystem", "", "Allows an application to hold an UpdateLock, recommending that a headless OTA reboot "\
|
||||
"*not* occur while the lock is held"],
|
||||
"WRITE_GSERVICES" : [ "signatureOrSystem" , "modify the Google services map" , "Allows an application to modify the Google services map. Not for use by normal applications." ],
|
||||
|
||||
"ACCESS_USB" : [ "signatureOrSystem" , "access USB devices" , "Allows the application to access USB devices." ],
|
||||
},
|
||||
|
||||
"MANIFEST_PERMISSION_GROUP":
|
||||
{
|
||||
"ACCOUNTS": "Permissions for direct access to the accounts managed by the Account Manager.",
|
||||
"AFFECTS_BATTERY": "Used for permissions that provide direct access to the hardware on the device that has an effect on battery life. This includes vibrator, flashlight, etc.",
|
||||
"APP_INFO": "Group of permissions that are related to the other applications installed on the system.",
|
||||
"AUDIO_SETTINGS": "Used for permissions that provide direct access to speaker settings the device.",
|
||||
"BLUETOOTH_NETWORK": "Used for permissions that provide access to other devices through Bluetooth.",
|
||||
"BOOKMARKS": "Used for permissions that provide access to the user bookmarks and browser history.",
|
||||
"CALENDAR": "Used for permissions that provide access to the device calendar to create / view events",
|
||||
"CAMERA": "Used for permissions that are associated with accessing camera or capturing images/video from the device.",
|
||||
"COST_MONEY": "Used for permissions that can be used to make the user spend money without their direct involvement.",
|
||||
"DEVICE_ALARMS": "Used for permissions that provide access to the user voicemail box.",
|
||||
"DEVELOPMENT_TOOLS": "Group of permissions that are related to development features.",
|
||||
"DISPLAY": "Group of permissions that allow manipulation of how another application displays UI to the user.",
|
||||
"HARDWARE_CONTROLS": "Used for permissions that provide direct access to the hardware on the device.",
|
||||
"LOCATION": "Used for permissions that allow access to the user's current location.",
|
||||
"MESSAGES": "Used for permissions that allow an application to send messages on behalf of the user or intercept messages being received by the user.",
|
||||
"MICROPHONE": "Used for permissions that are associated with accessing microphone audio from the device. Note that phone calls also capture audio but are in a separate (more visible) permission group.",
|
||||
"NETWORK": "Used for permissions that provide access to networking services.",
|
||||
"PERSONAL_INFO": "Used for permissions that provide access to the user's private data, such as contacts, calendar events, e-mail messages, etc.",
|
||||
"PHONE_CALLS": "Used for permissions that are associated with accessing and modifyign telephony state: intercepting outgoing calls, reading and modifying the phone state.",
|
||||
"STORAGE": "Group of permissions that are related to SD card access.",
|
||||
"SOCIAL_INFO": "Used for permissions that provide access to the user's social connections, such as contacts, call logs, social stream, etc. This includes both reading and writing of this data (which should generally be expressed as two distinct permissions)",
|
||||
"SCREENLOCK": "Group of permissions that are related to the screenlock.",
|
||||
"STATUS_BAR": "Used for permissions that change the status bar.",
|
||||
"SYSTEM_CLOCK": "Group of permissions that are related to system clock.",
|
||||
"SYSTEM_TOOLS": "Group of permissions that are related to system APIs.",
|
||||
"SYNC_SETTINGS": "Used for permissions that access the sync settings or sync related information.",
|
||||
"USER_DICTIONARY": "Used for permissions that provide access to the user calendar to create / view events.",
|
||||
"VOICEMAIL": "Used for permissions that provide access to the user voicemail box.",
|
||||
"WALLPAPER": "Group of permissions that allow manipulation of how another application displays UI to the user.",
|
||||
"WRITE_USER_DICTIONARY": "Used for permissions that provide access to the user calendar to create / view events.",
|
||||
},
|
||||
}
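# Hedged usage sketch for the table above (the permission name is illustrative):
#
#   level, label, details = DVM_PERMISSIONS["MANIFEST_PERMISSION"]["SEND_SMS"]
#   # level   -> "dangerous"
#   # label   -> "send SMS messages"
#   # details -> the long human-readable description
#
#   DVM_PERMISSIONS["MANIFEST_PERMISSION_GROUP"]["MESSAGES"]
#   # -> one-line description of the permission group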
|
3445
androguard/core/bytecodes/jvm.py
Normal file
File diff suppressed because it is too large
137
androguard/core/bytecodes/jvm_generate.py
Normal file
@ -0,0 +1,137 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012 Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import random
|
||||
|
||||
from androconf import error
|
||||
import jvm
|
||||
|
||||
class Automaton :
|
||||
def __init__(self, _analysis) :
|
||||
self.__analysis = _analysis
|
||||
|
||||
try :
|
||||
from networkx import DiGraph
|
||||
from networkx import draw_graphviz, write_dot
|
||||
except ImportError :
|
||||
error("module networkx not found")
|
||||
|
||||
self.__G = DiGraph()
|
||||
|
||||
for m in self.__analysis.get_methods() :
|
||||
for bb in m.basic_blocks.get() :
|
||||
for trace in bb.stack_traces.get() :
|
||||
for mre in jvm.MATH_JVM_RE :
|
||||
if mre[0].match( trace[2].get_name() ) :
|
||||
for i in trace[3].gets() :
|
||||
self._add( str(i) )
|
||||
|
||||
def _add(self, elem) :
|
||||
l = []
|
||||
x = ""
|
||||
for i in elem :
|
||||
if i not in jvm.MATH_JVM_OPCODES.values() :
|
||||
x += i
|
||||
else :
|
||||
l.append( x )
|
||||
l.append( i )
|
||||
x = ""
|
||||
|
||||
if len(l) > 1 :
|
||||
l.append( x )
|
||||
|
||||
self._add_expr( l )
|
||||
|
||||
def _add_expr(self, l) :
|
||||
if l == [] :
|
||||
return
|
||||
i = 0
|
||||
while i < (len(l)-1) :
|
||||
self.__G.add_edge( self._transform(l[i]), self._transform(l[i+1]) )
|
||||
|
||||
i += 1
|
||||
|
||||
def _transform(self, i) :
|
||||
if "VARIABLE" in i :
|
||||
return "V"
|
||||
return i
|
||||
|
||||
def new(self, loop) :
|
||||
expr = []
|
||||
|
||||
l = list( self.__G.node )
|
||||
|
||||
init = l[ random.randint(0, len(l) - 1) ]
|
||||
while init in jvm.MATH_JVM_OPCODES.values() :
|
||||
init = l[ random.randint(0, len(l) - 1) ]
|
||||
|
||||
expr.append( init )
|
||||
|
||||
i = 0
|
||||
while i <= loop :
|
||||
l = list( self.__G.edge[ init ] )
|
||||
if l == [] :
|
||||
break
|
||||
|
||||
init = l[ random.randint(0, len(l) - 1) ]
|
||||
expr.append( init )
|
||||
|
||||
i += 1
|
||||
|
||||
return expr
|
||||
|
||||
def show(self) :
|
||||
print self.__G.node
|
||||
print self.__G.edge
|
||||
|
||||
#draw_graphviz(self.__G)
|
||||
#write_dot(self.__G,'file.dot')
|
||||
|
||||
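# JVMGenerate: drives the Automaton above to produce JVM instruction sequences;
# create_affectation() builds a simple field assignment (aload_0 / bipush / putfield).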
class JVMGenerate :
|
||||
def __init__(self, _vm, _analysis) :
|
||||
self.__vm = _vm
|
||||
self.__analysis = _analysis
|
||||
|
||||
self.__automaton = Automaton( self.__analysis )
|
||||
self.__automaton.show()
|
||||
|
||||
def create_affectation(self, method_name, desc) :
|
||||
l = []
|
||||
|
||||
if desc[0] == 0 :
|
||||
l.append( [ "aload_0" ] )
|
||||
l.append( [ "bipush", desc[2] ] )
|
||||
l.append( [ "putfield", desc[1].get_name(), desc[1].get_descriptor() ] )
|
||||
|
||||
return l
|
||||
|
||||
def write(self, method, offset, field) :
|
||||
print method, offset, field
|
||||
expr = self.__automaton.new( 5 )
|
||||
|
||||
print field.get_name(), "EXPR ->", expr
|
||||
|
||||
self._transform( expr )
|
||||
|
||||
|
||||
def _transform(self, expr) :
|
||||
if len(expr) == 1 :
|
||||
return
|
||||
|
||||
x = [ expr.pop(0), expr.pop(1), expr.pop(0) ]
|
||||
|
||||
# while expr != [] :
33
androguard/core/bytecodes/libdvm/Makefile
Normal file
@ -0,0 +1,33 @@
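# Builds the native Dalvik bytecode parser (dvmnative.so) from dvm.cc and buff.cc.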
SRC = dvm.cc buff.cc

CUROS = $(shell uname -s)
ifeq ($(CUROS),Darwin)
LDFLAGS = -lpython
else
LDFLAGS =
endif

CFLAGS += -g -fPIC -I/usr/include/python2.7/
mkdir = mkdir -p
CD = cd
RM = rm -f

CCP = g++

LIBNAME = dvmnative

OBJ = $(SRC:.cc=.o)

.SILENT:

all : $(OBJ) LIBDVM

LIBDVM :
	$(CCP) -o $(LIBNAME).so $(OBJ) -shared $(LDFLAGS)

clean :
	$(RM) *.o $(LIBNAME).so

%.o : %.cc
	echo " CCP $@"
	$(CCP) $(CFLAGS) -c -o $@ $<
0
androguard/core/bytecodes/libdvm/__init__.py
Normal file
99
androguard/core/bytecodes/libdvm/buff.cc
Normal file
@ -0,0 +1,99 @@
/*
|
||||
This file is part of Androguard.
|
||||
|
||||
Copyright (C) 2011, Anthony Desnos <desnos at t0t0.fr>
|
||||
All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
*/
|
||||
|
||||
#include "buff.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
Buff::Buff() {
|
||||
|
||||
}
|
||||
|
||||
Buff::Buff(const char *data, size_t data_len) {
|
||||
bdata = data;
|
||||
bdata_len = data_len;
|
||||
bcurrent_idx = 0;
|
||||
}
|
||||
|
||||
Buff:: Buff(const char *data, size_t data_len, size_t current_idx) {
|
||||
bdata = data;
|
||||
bdata_len = data_len;
|
||||
bcurrent_idx = current_idx;
|
||||
}
|
||||
|
||||
void Buff::setup(const char *data, size_t data_len, size_t current_idx) {
|
||||
bdata = data;
|
||||
bdata_len = data_len;
|
||||
bcurrent_idx = current_idx;
|
||||
}
|
||||
|
||||
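// read() returns a pointer at the current index and advances it by len;
// readat() and read_false() return a pointer without moving the index.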
const char *Buff::read(size_t len) {
|
||||
//cout << "read add " << bcurrent_idx << " " << len << "\n";
|
||||
bcurrent_idx += len;
|
||||
return (bdata + (bcurrent_idx - len));
|
||||
}
|
||||
|
||||
const char *Buff::readat(size_t pos, size_t len) {
|
||||
return (bdata + (pos));
|
||||
}
|
||||
|
||||
const char *Buff::read_false(size_t len) {
|
||||
return (bdata + (bcurrent_idx));
|
||||
}
|
||||
|
||||
size_t Buff::get_current_idx() {
|
||||
return bcurrent_idx;
|
||||
}
|
||||
|
||||
size_t Buff::get_end() {
|
||||
return bdata_len;
|
||||
}
|
||||
|
||||
bool Buff::empty() {
|
||||
return bcurrent_idx == bdata_len;
|
||||
}
|
||||
|
||||
int Buff::register_dynamic_offset(unsigned int *addr) {
    DynamicOffsets.push_back( addr );
    return 0;
}

int Buff::set_idx(unsigned int idx) {
    bcurrent_idx = idx;
    return 0;
}
|
||||
|
||||
unsigned char Buff::read_uc() {
|
||||
return *( reinterpret_cast<unsigned char *>( const_cast<char *>(this->read(1))) );
|
||||
}
|
||||
|
||||
char Buff::read_c() {
|
||||
return *( reinterpret_cast<char *>( const_cast<char *>(this->read(1))) );
|
||||
}
|
||||
|
||||
unsigned long Buff::read_ul() {
|
||||
return *( reinterpret_cast<unsigned long *>( const_cast<char *>(this->read(4))) );
|
||||
}
|
||||
|
||||
unsigned int Buff::read_ui() {
|
||||
return *( reinterpret_cast<unsigned int *>( const_cast<char *>(this->read(4))) );
|
||||
}
|
||||
|
||||
unsigned short Buff::read_us() {
|
||||
return *( reinterpret_cast<unsigned short *>( const_cast<char *>(this->read(2))) );
|
||||
}
69
androguard/core/bytecodes/libdvm/buff.h
Normal file
@ -0,0 +1,69 @@
/*
|
||||
This file is part of Androguard.
|
||||
|
||||
Copyright (C) 2011, Anthony Desnos <desnos at t0t0.fr>
|
||||
All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef BUFF_H
|
||||
#define BUFF_H
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
||||
#if defined __GNUC__ || defined __APPLE__
|
||||
#include <ext/hash_map>
|
||||
#else
|
||||
#include <hash_map>
|
||||
#endif
|
||||
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
using namespace __gnu_cxx;
|
||||
using namespace std;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
|
||||
class Buff {
|
||||
public :
|
||||
const char *bdata;
|
||||
size_t bdata_len;
|
||||
size_t bcurrent_idx;
|
||||
|
||||
vector<unsigned int *> DynamicOffsets;
|
||||
public :
|
||||
Buff();
|
||||
Buff(const char *data, size_t data_len);
|
||||
Buff(const char *data, size_t data_len, size_t current_idx);
|
||||
void setup(const char *data, size_t data_len, size_t current_idx);
|
||||
const char *read(size_t len);
|
||||
const char *readat(size_t pos, size_t len);
|
||||
const char *read_false(size_t len);
|
||||
size_t get_current_idx();
|
||||
size_t get_end();
|
||||
bool empty();
|
||||
int register_dynamic_offset(unsigned int *addr);
|
||||
int set_idx(unsigned int);
|
||||
unsigned char read_uc();
|
||||
char read_c();
|
||||
unsigned long read_ul();
|
||||
unsigned int read_ui();
|
||||
unsigned short read_us();
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
#endif
1886
androguard/core/bytecodes/libdvm/dvm.cc
Normal file
File diff suppressed because it is too large
302
androguard/core/bytecodes/libdvm/dvm.h
Normal file
@ -0,0 +1,302 @@
/*
|
||||
This file is part of Androguard.
|
||||
|
||||
Copyright (C) 2011, Anthony Desnos <desnos at t0t0.fr>
|
||||
All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef DVM_H
|
||||
#define DVM_H
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#if defined __GNUC__ || defined __APPLE__
|
||||
#include <ext/hash_map>
|
||||
#else
|
||||
#include <hash_map>
|
||||
#endif
|
||||
|
||||
#include "buff.h"
|
||||
|
||||
#define OPVALUE 0
|
||||
#define REGISTER 1
|
||||
#define FIELD 2
|
||||
#define METHOD 3
|
||||
#define TYPE 4
|
||||
#define INTEGER 5
|
||||
#define STRING 6
|
||||
#define INTEGER_BRANCH 7
|
||||
|
||||
|
||||
//#define DEBUG_DESTRUCTOR
|
||||
#undef DEBUG_DESTRUCTOR
|
||||
|
||||
using namespace __gnu_cxx;
|
||||
using namespace std;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
|
||||
typedef struct fillarraydata {
|
||||
unsigned short ident;
|
||||
unsigned short element_width;
|
||||
unsigned int size;
|
||||
} fillarraydata_t;
|
||||
|
||||
typedef struct sparseswitch {
|
||||
unsigned short ident;
|
||||
unsigned short size;
|
||||
} sparseswitch_t;
|
||||
|
||||
typedef struct packedswitch {
|
||||
unsigned short ident;
|
||||
unsigned short size;
|
||||
unsigned int first_key;
|
||||
} packedswitch_t;
|
||||
|
||||
class DBC {
|
||||
public :
|
||||
unsigned char op_value;
|
||||
const char *op_name;
|
||||
size_t op_length;
|
||||
vector<int> *voperands;
|
||||
vector<int> *vdescoperands;
|
||||
vector<string> *vstrings;
|
||||
|
||||
public :
|
||||
DBC(unsigned char value, const char *name, vector<int> *v, vector<int> *vdesc, size_t length);
|
||||
~DBC();
|
||||
int get_opvalue();
|
||||
const char *get_opname();
|
||||
size_t get_length();
|
||||
};
|
||||
|
||||
class DBCSpe {
|
||||
public :
|
||||
virtual const char *get_opname()=0;
|
||||
virtual size_t get_length()=0;
|
||||
virtual size_t get_type()=0;
|
||||
};
|
||||
|
||||
class FillArrayData : public DBCSpe {
|
||||
public :
|
||||
fillarraydata_t fadt;
|
||||
char *data;
|
||||
size_t data_size;
|
||||
public :
|
||||
FillArrayData(Buff *b, unsigned int off);
|
||||
~FillArrayData();
|
||||
const char *get_opname();
|
||||
size_t get_length();
|
||||
size_t get_type();
|
||||
};
|
||||
|
||||
class SparseSwitch : public DBCSpe {
|
||||
public :
|
||||
sparseswitch_t sst;
|
||||
vector<int> keys;
|
||||
vector<int> targets;
|
||||
|
||||
public :
|
||||
SparseSwitch(Buff *b, unsigned int off);
|
||||
~SparseSwitch();
|
||||
const char *get_opname();
|
||||
size_t get_length();
|
||||
size_t get_type();
|
||||
};
|
||||
|
||||
class PackedSwitch : public DBCSpe {
|
||||
public :
|
||||
packedswitch_t pst;
|
||||
vector<int> targets;
|
||||
|
||||
public :
|
||||
PackedSwitch(Buff *b, unsigned int off);
|
||||
~PackedSwitch();
|
||||
const char *get_opname();
|
||||
size_t get_length();
|
||||
size_t get_type();
|
||||
};
|
||||
|
||||
class DCode {
|
||||
public :
|
||||
vector<DBC *> bytecodes;
|
||||
vector<DBCSpe *> bytecodes_spe;
|
||||
|
||||
public :
|
||||
DCode();
|
||||
~DCode();
|
||||
DCode(vector<unsigned int(*)(Buff *, vector<int>*, vector<int>*)> *parsebytecodes,
|
||||
vector<void (*)(Buff *, vector<int> *, vector<int> *, vector<int> *, unsigned int *)> *postbytecodes,
|
||||
vector<const char *> *bytecodes_names,
|
||||
Buff *b);
|
||||
int size();
|
||||
DBC *get_bytecode_at(int i);
|
||||
};
|
||||
|
||||
class DalvikBytecode {
|
||||
public :
|
||||
vector<unsigned int(*)(Buff *, vector<int>*, vector<int>*)> bytecodes;
|
||||
vector<void (*)(Buff *, vector<int> *, vector<int> *, vector<int> *, unsigned int *)> postbytecodes;
|
||||
|
||||
vector<const char *> bytecodes_names;
|
||||
|
||||
public :
|
||||
DalvikBytecode();
|
||||
DCode *new_code(const char *data, size_t data_len);
|
||||
};
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD;
|
||||
DBC *d;
|
||||
PyObject *operands;
|
||||
} dvm_DBCObject;
|
||||
|
||||
PyObject *DBC_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
|
||||
void DBC_dealloc(dvm_DBCObject* self);
|
||||
PyObject *DBC_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
|
||||
int DBC_init(dvm_DBCObject *self, PyObject *args, PyObject *kwds);
|
||||
PyObject *DBC_get_opvalue(dvm_DBCObject *self, PyObject* args);
|
||||
PyObject *DBC_get_length(dvm_DBCObject *self, PyObject* args);
|
||||
PyObject *DBC_get_name(dvm_DBCObject *self, PyObject* args);
|
||||
PyObject *DBC_get_operands(dvm_DBCObject *self, PyObject* args);
|
||||
PyObject *DBC_get_type_ins(dvm_DBCObject *self, PyObject* args);
|
||||
|
||||
static PyMethodDef DBC_methods[] = {
|
||||
{"get_op_value", (PyCFunction)DBC_get_opvalue, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_length", (PyCFunction)DBC_get_length, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_name", (PyCFunction)DBC_get_name, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_operands", (PyCFunction)DBC_get_operands, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_type_ins", (PyCFunction)DBC_get_type_ins, METH_NOARGS, "get type ins" },
|
||||
{NULL, NULL, 0, NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyTypeObject dvm_DBCType = {
|
||||
PyObject_HEAD_INIT(NULL)
|
||||
0, /*ob_size*/
|
||||
"dvm.DBC", /*tp_name*/
|
||||
sizeof(dvm_DBCObject), /*tp_basicsize*/
|
||||
0, /*tp_itemsize*/
|
||||
(destructor)DBC_dealloc, /*tp_dealloc*/
|
||||
0, /*tp_print*/
|
||||
0, /*tp_getattr*/
|
||||
0, /*tp_setattr*/
|
||||
0, /*tp_compare*/
|
||||
0, /*tp_repr*/
|
||||
0, /*tp_as_number*/
|
||||
0, /*tp_as_sequence*/
|
||||
0, /*tp_as_mapping*/
|
||||
0, /*tp_hash */
|
||||
0, /*tp_call*/
|
||||
0, /*tp_str*/
|
||||
0, /*tp_getattro*/
|
||||
0, /*tp_setattro*/
|
||||
0, /*tp_as_buffer*/
|
||||
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
|
||||
"DBC objects", /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
DBC_methods, /* tp_methods */
|
||||
NULL, /* tp_members */
|
||||
NULL, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)DBC_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
DBC_new, /* tp_new */
|
||||
};
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD;
|
||||
DBCSpe *d;
|
||||
} dvm_DBCSpeObject;
|
||||
|
||||
void DBCSpe_dealloc(dvm_DBCSpeObject* self);
|
||||
PyObject *DBCSpe_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
|
||||
int DBCSpe_init(dvm_DBCSpeObject *self, PyObject *args, PyObject *kwds);
|
||||
PyObject *DBCSpe_get_opvalue(dvm_DBCSpeObject *self, PyObject* args);
|
||||
PyObject *DBCSpe_get_name(dvm_DBCSpeObject *self, PyObject* args);
|
||||
PyObject *DBCSpe_get_operands(dvm_DBCSpeObject *self, PyObject* args);
|
||||
PyObject *DBCSpe_get_targets(dvm_DBCSpeObject *self, PyObject* args);
|
||||
PyObject *DBCSpe_get_length(dvm_DBCSpeObject *self, PyObject* args);
|
||||
PyObject *DBCSpe_get_type_ins(dvm_DBCSpeObject *self, PyObject* args);
|
||||
|
||||
static PyMethodDef DBCSpe_methods[] = {
|
||||
{"get_name", (PyCFunction)DBCSpe_get_name, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_op_value", (PyCFunction)DBCSpe_get_opvalue, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_operands", (PyCFunction)DBCSpe_get_operands, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_targets", (PyCFunction)DBCSpe_get_targets, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_length", (PyCFunction)DBCSpe_get_length, METH_NOARGS, "get nb bytecodes" },
|
||||
{"get_type_ins", (PyCFunction)DBCSpe_get_type_ins, METH_NOARGS, "get type ins" },
|
||||
{NULL, NULL, 0, NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyTypeObject dvm_DBCSpeType = {
|
||||
PyObject_HEAD_INIT(NULL)
|
||||
0, /*ob_size*/
|
||||
"dvm.DBCSpe", /*tp_name*/
|
||||
sizeof(dvm_DBCSpeObject), /*tp_basicsize*/
|
||||
0, /*tp_itemsize*/
|
||||
(destructor)DBCSpe_dealloc, /*tp_dealloc*/
|
||||
0, /*tp_print*/
|
||||
0, /*tp_getattr*/
|
||||
0, /*tp_setattr*/
|
||||
0, /*tp_compare*/
|
||||
0, /*tp_repr*/
|
||||
0, /*tp_as_number*/
|
||||
0, /*tp_as_sequence*/
|
||||
0, /*tp_as_mapping*/
|
||||
0, /*tp_hash */
|
||||
0, /*tp_call*/
|
||||
0, /*tp_str*/
|
||||
0, /*tp_getattro*/
|
||||
0, /*tp_setattro*/
|
||||
0, /*tp_as_buffer*/
|
||||
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
|
||||
"DBC objects", /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
DBCSpe_methods, /* tp_methods */
|
||||
NULL, /* tp_members */
|
||||
NULL, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)DBCSpe_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
DBCSpe_new, /* tp_new */
|
||||
};
|
||||
|
||||
|
||||
#endif
|
||||
#endif
68
androguard/core/bytecodes/libdvm/test_dvm.py
Executable file
@ -0,0 +1,68 @@
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2011, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys, itertools, time, os, random
|
||||
from ctypes import cdll, c_float, c_int, c_uint, c_void_p, Structure, addressof, create_string_buffer, cast, POINTER, pointer
|
||||
from struct import pack, unpack, calcsize
|
||||
|
||||
PATH_INSTALL = "../../../"
|
||||
sys.path.append(PATH_INSTALL + "./")
|
||||
sys.path.append(PATH_INSTALL + "./core")
|
||||
sys.path.append(PATH_INSTALL + "./core/bytecodes")
|
||||
sys.path.append(PATH_INSTALL + "./core/analysis")
|
||||
|
||||
import apk, dvm, analysis, msign
|
||||
|
||||
if __name__ == "__main__" :
|
||||
# a = apk.APK( PATH_INSTALL + "examples/android/TestsAndroguard/bin/TestsAndroguard.apk" )
|
||||
# a = apk.APK( PATH_INSTALL + "apks/drweb-600-android-beta.apk" )
|
||||
# a = apk.APK( PATH_INSTALL + "debug/062d5e38dc4618a8b1c6bf3587dc2016a3a3db146aea0d82cc227a18ca21ad13")
|
||||
a = apk.APK( PATH_INSTALL + "apks/malwares/kungfu/sample2.apk" )
|
||||
|
||||
t1 = time.time()
|
||||
|
||||
|
||||
if len(sys.argv) > 1 :
|
||||
d = dvm.DalvikVMFormat( a.get_dex(), engine=["python"] )
|
||||
else :
|
||||
d = dvm.DalvikVMFormat( a.get_dex() )
|
||||
|
||||
t2 = time.time()
|
||||
x = analysis.VMAnalysis( d )
|
||||
|
||||
t3 = time.time()
|
||||
print '-> %0.8f %0.8f %0.8f' % ((t2-t1, t3-t2, t3-t1))
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
for method in d.get_methods() :
|
||||
print method.get_class_name(), method.get_name(), method.get_descriptor()
|
||||
|
||||
code = method.get_code()
|
||||
if code == None :
|
||||
continue
|
||||
|
||||
bc = code.get_bc()
|
||||
|
||||
idx = 0
|
||||
for i in bc.get() :
|
||||
print "\t", "%x" % idx, i.get_op_value(), i.get_name(), i.get_operands()#, i.get_formatted_operands()
|
||||
idx += i.get_length()
|
||||
|
||||
sys.exit(0)
0
androguard/core/data/__init__.py
Normal file
395
androguard/core/data/data.py
Normal file
@ -0,0 +1,395 @@
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from networkx import DiGraph
|
||||
import os
|
||||
from xml.sax.saxutils import escape
|
||||
|
||||
|
||||
from androguard.core.analysis import analysis
|
||||
|
||||
try :
|
||||
from androguard.core.analysis.libsign.libsign import entropy
|
||||
except ImportError :
|
||||
import math
|
||||
def entropy(data):
|
||||
entropy = 0
|
||||
|
||||
if len(data) == 0 :
|
||||
return entropy
|
||||
|
||||
for x in range(256):
|
||||
p_x = float(data.count(chr(x)))/len(data)
|
||||
if p_x > 0:
|
||||
entropy += - p_x*math.log(p_x, 2)
|
||||
return entropy
|
||||
|
||||
DEFAULT_SIGNATURE = analysis.SIGNATURE_L0_4
|
||||
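# Returns [signature string, entropy of the Android-API signature, entropy of the
# Java-API signature, entropy of the hex signature, entropy of the L2 signature]
# for a method; falls back to ["", 0.0, 0.0, 0.0, 0.0] on KeyError.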
def create_entropies(vmx, m) :
|
||||
try :
|
||||
default_signature = vmx.get_method_signature(m, predef_sign = DEFAULT_SIGNATURE).get_string()
|
||||
l = [ default_signature,
|
||||
entropy( vmx.get_method_signature(m, "L4", { "L4" : { "arguments" : ["Landroid"] } } ).get_string() ),
|
||||
entropy( vmx.get_method_signature(m, "L4", { "L4" : { "arguments" : ["Ljava"] } } ).get_string() ),
|
||||
entropy( vmx.get_method_signature(m, "hex" ).get_string() ),
|
||||
entropy( vmx.get_method_signature(m, "L2" ).get_string() ),
|
||||
]
|
||||
return l
|
||||
except KeyError :
|
||||
return [ "", 0.0, 0.0, 0.0, 0.0 ]
|
||||
|
||||
def create_info(vmx, m) :
|
||||
E = create_entropies(vmx, m)
|
||||
|
||||
H = {}
|
||||
H["signature"] = E[0]
|
||||
H["signature_entropy"] = entropy( E[0] )
|
||||
H["android_api_entropy"] = E[1]
|
||||
H["java_api_entropy"] = E[2]
|
||||
H["hex_entropy"] = E[3]
|
||||
H["exceptions_entropy"] = E[4]
|
||||
|
||||
return H
|
||||
|
||||
class Data :
|
||||
def __init__(self, vm, vmx, gvmx, a=None) :
|
||||
self.vm = vm
|
||||
self.vmx = vmx
|
||||
self.gvmx = gvmx
|
||||
self.a = a
|
||||
|
||||
self.apk_data = None
|
||||
self.dex_data = None
|
||||
|
||||
if self.a != None :
|
||||
self.apk_data = ApkViewer( self.a )
|
||||
|
||||
self.dex_data = DexViewer( vm, vmx, gvmx )
|
||||
|
||||
self.gvmx.set_new_attributes( create_info )
|
||||
self.export_methods_to_gml()
|
||||
|
||||
def export_methodcalls_to_gml(self) :
|
||||
return self.gvmx.export_to_gml()
|
||||
|
||||
def export_methods_to_gml(self) :
|
||||
print self.gvmx.G
|
||||
|
||||
for node in self.gvmx.G.nodes() :
|
||||
print self.gvmx.nodes_id[ node ].method_name, self.gvmx.nodes_id[ node ].get_attributes()
|
||||
|
||||
def export_apk_to_gml(self) :
|
||||
if self.apk_data != None :
|
||||
return self.apk_data.export_to_gml()
|
||||
|
||||
def export_dex_to_gml(self) :
|
||||
if self.dex_data != None :
|
||||
return self.dex_data.export_to_gml()
|
||||
|
||||
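# DexViewer: exports, for each class, a control-flow view of its methods
# (method nodes, basic blocks, exception handlers) as yEd-flavoured GraphML.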
class DexViewer :
|
||||
def __init__(self, vm, vmx, gvmx) :
|
||||
self.vm = vm
|
||||
self.vmx = vmx
|
||||
self.gvmx = gvmx
|
||||
|
||||
|
||||
def _create_node(self, id, height, width, color, label) :
|
||||
buff = "<node id=\"%d\">\n" % id
|
||||
buff += "<data key=\"d6\">\n"
|
||||
buff += "<y:ShapeNode>\n"
|
||||
|
||||
buff += "<y:Geometry height=\"%f\" width=\"%f\"/>\n" % (16 * height, 7.5 * width)
|
||||
buff += "<y:Fill color=\"#%s\" transparent=\"false\"/>\n" % color
|
||||
|
||||
buff += "<y:NodeLabel alignment=\"left\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"13\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" modelName=\"internal\" modelPosition=\"c\" textColor=\"#000000\" visible=\"true\">\n"
|
||||
|
||||
buff += escape(label)
|
||||
|
||||
buff += "</y:NodeLabel>\n"
|
||||
buff += "</y:ShapeNode>\n"
|
||||
buff += "</data>\n"
|
||||
|
||||
buff += "</node>\n"
|
||||
|
||||
return buff
|
||||
|
||||
def add_exception_node(self, exception, id_i) :
|
||||
buff = ""
|
||||
# 9933FF
|
||||
height = 2
|
||||
width = 0
|
||||
label = ""
|
||||
|
||||
label += "%x:%x\n" % (exception.start, exception.end)
|
||||
for i in exception.exceptions :
|
||||
c_label = "\t(%s -> %x %s)\n" % (i[0], i[1], i[2].get_name())
|
||||
label += c_label
|
||||
|
||||
width = max(len(c_label), width)
|
||||
height += 1
|
||||
|
||||
return self._create_node( id_i, height, width, "9333FF", label )
|
||||
|
||||
def add_method_node(self, i, id_i) :
|
||||
height = 0
|
||||
width = 0
|
||||
label = ""
|
||||
|
||||
label += i.get_name() + "\n"
|
||||
label += i.get_descriptor()
|
||||
|
||||
height = 3
|
||||
width = len(label)
|
||||
|
||||
return self._create_node( id_i, height, width, "FF0000", label )
|
||||
|
||||
def add_node(self, i, id_i) :
|
||||
height = 0
|
||||
width = 0
|
||||
idx = i.start
|
||||
label = ""
|
||||
for ins in i.get_instructions() :
|
||||
c_label = "%x %s\n" % (idx, self.vm.dotbuff(ins, idx))
|
||||
idx += ins.get_length()
|
||||
label += c_label
|
||||
width = max(width, len(c_label))
|
||||
height += 1
|
||||
|
||||
if height < 10 :
|
||||
height += 3
|
||||
|
||||
return self._create_node( id_i, height, width, "FFCC00", label )
|
||||
|
||||
def add_edge(self, i, id_i, j, id_j, l_eid, val) :
|
||||
buff = "<edge id=\"%d\" source=\"%d\" target=\"%d\">\n" % (len(l_eid), id_i, id_j)
|
||||
|
||||
buff += "<data key=\"d9\">\n"
|
||||
buff += "<y:PolyLineEdge>\n"
|
||||
buff += "<y:Arrows source=\"none\" target=\"standard\"/>\n"
|
||||
|
||||
if val == 0 :
|
||||
buff += "<y:LineStyle color=\"#00FF00\" type=\"line\" width=\"1.0\"/>\n"
|
||||
elif val == 1 :
|
||||
buff += "<y:LineStyle color=\"#FF0000\" type=\"line\" width=\"1.0\"/>\n"
|
||||
else :
|
||||
buff += "<y:LineStyle color=\"#0000FF\" type=\"line\" width=\"1.0\"/>\n"
|
||||
|
||||
buff += "</y:PolyLineEdge>\n"
|
||||
buff += "</data>\n"
|
||||
|
||||
buff += "</edge>\n"
|
||||
|
||||
l_eid[ "%d+%d" % (id_i, id_j) ] = len(l_eid)
|
||||
return buff
|
||||
|
||||
def new_id(self, i, l) :
|
||||
try :
|
||||
return l[i]
|
||||
except KeyError :
|
||||
l[i] = len(l)
|
||||
return l[i]
|
||||
|
||||
def export_to_gml(self) :
|
||||
H = {}
|
||||
|
||||
for _class in self.vm.get_classes() :
|
||||
name = _class.get_name()
|
||||
name = name[1:-1]
|
||||
|
||||
buff = ""
|
||||
|
||||
buff += "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
|
||||
buff += "<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:y=\"http://www.yworks.com/xml/graphml\" xmlns:yed=\"http://www.yworks.com/xml/yed/3\" xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd\">\n"
|
||||
|
||||
buff += "<key attr.name=\"description\" attr.type=\"string\" for=\"node\" id=\"d5\"/>\n"
|
||||
buff += "<key for=\"node\" id=\"d6\" yfiles.type=\"nodegraphics\"/>\n"
|
||||
buff += "<key for=\"edge\" id=\"d9\" yfiles.type=\"edgegraphics\"/>\n"
|
||||
|
||||
buff += "<graph edgedefault=\"directed\" id=\"G\">\n"
|
||||
|
||||
print name
|
||||
|
||||
buff_nodes = ""
|
||||
buff_edges = ""
|
||||
l_id = {}
|
||||
l_eid = {}
|
||||
|
||||
for method in _class.get_methods() :
|
||||
mx = self.vmx.get_method( method )
|
||||
exceptions = mx.exceptions
|
||||
|
||||
id_method = self.new_id(method, l_id)
|
||||
buff_nodes += self.add_method_node(method, id_method)
|
||||
|
||||
for i in mx.basic_blocks.get() :
|
||||
|
||||
id_i = self.new_id(i, l_id)
|
||||
print i, id_i, i.exception_analysis
|
||||
|
||||
buff_nodes += self.add_node( i, id_i )
|
||||
|
||||
# add childs nodes
|
||||
val = 0
|
||||
if len(i.childs) > 1 :
|
||||
val = 1
|
||||
elif len(i.childs) == 1 :
|
||||
val = 2
|
||||
|
||||
for j in i.childs :
|
||||
print "\t", j
|
||||
|
||||
id_j = self.new_id(j[-1], l_id)
|
||||
buff_edges += self.add_edge(i, id_i, j[-1], id_j, l_eid, val)
|
||||
if val == 1 :
|
||||
val = 0
|
||||
|
||||
# add exceptions node
|
||||
if i.exception_analysis != None :
|
||||
id_exceptions = self.new_id(i.exception_analysis, l_id)
|
||||
buff_nodes += self.add_exception_node(i.exception_analysis, id_exceptions)
|
||||
buff_edges += self.add_edge(None, id_exceptions, None, id_i, l_eid, 2)
|
||||
|
||||
buff_edges += self.add_edge(None, id_method, None, id_method+1, l_eid, 2)
|
||||
|
||||
buff += buff_nodes
|
||||
buff += buff_edges
|
||||
|
||||
|
||||
buff += "</graph>\n"
|
||||
buff += "</graphml>\n"
|
||||
|
||||
H[ name ] = buff
|
||||
return H
|
||||
|
||||
class Directory :
|
||||
def __init__(self, name) :
|
||||
self.name = name
|
||||
self.basename = os.path.basename(name)
|
||||
self.color = "FF0000"
|
||||
|
||||
self.width = len(self.name)
|
||||
|
||||
def set_color(self, color) :
|
||||
self.color = color
|
||||
|
||||
class File :
|
||||
def __init__(self, name, file_type, file_crc) :
|
||||
self.name = name
|
||||
self.basename = os.path.basename(name)
|
||||
self.file_type = file_type
|
||||
self.file_crc = file_crc
|
||||
|
||||
self.color = "FFCC00"
|
||||
|
||||
self.width = max(len(self.name), len(self.file_type))
|
||||
|
||||
def splitall(path, z) :
|
||||
if len(path) == 0 :
|
||||
return
|
||||
|
||||
l = os.path.split( path )
|
||||
z.append(l[0])
|
||||
|
||||
for i in l :
|
||||
return splitall( i, z )
|
||||
|
||||
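# ApkViewer: builds a directory/file tree of the APK contents as a DiGraph
# and exports it as yEd-flavoured GraphML (one node per directory or file).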
class ApkViewer :
|
||||
def __init__(self, a) :
|
||||
self.a = a
|
||||
|
||||
self.G = DiGraph()
|
||||
self.all_files = {}
|
||||
self.ids = {}
|
||||
|
||||
root = Directory( "APK" )
|
||||
root.set_color( "00FF00" )
|
||||
|
||||
self.ids[ root ] = len(self.ids)
|
||||
self.G.add_node( root )
|
||||
|
||||
for x, y, z in self.a.get_files_information() :
|
||||
print x, y, z, os.path.basename(x)
|
||||
|
||||
l = []
|
||||
splitall( x, l )
|
||||
l.reverse()
|
||||
l.pop(0)
|
||||
|
||||
|
||||
last = root
|
||||
for i in l :
|
||||
if i not in self.all_files :
|
||||
tmp = Directory( i )
|
||||
self.ids[ tmp ] = len(self.ids)
|
||||
self.all_files[ i ] = tmp
|
||||
else :
|
||||
tmp = self.all_files[ i ]
|
||||
|
||||
self.G.add_edge(last, tmp)
|
||||
last = tmp
|
||||
|
||||
n1 = last
|
||||
n2 = File( x, y, z )
|
||||
self.G.add_edge(n1, n2)
|
||||
|
||||
self.ids[ n2 ] = len(self.ids)
|
||||
|
||||
def export_to_gml(self) :
|
||||
buff = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
|
||||
buff += "<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:y=\"http://www.yworks.com/xml/graphml\" xmlns:yed=\"http://www.yworks.com/xml/yed/3\" xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd\">\n"
|
||||
|
||||
buff += "<key attr.name=\"description\" attr.type=\"string\" for=\"node\" id=\"d5\"/>\n"
|
||||
buff += "<key for=\"node\" id=\"d6\" yfiles.type=\"nodegraphics\"/>\n"
|
||||
|
||||
|
||||
buff += "<graph edgedefault=\"directed\" id=\"G\">\n"
|
||||
|
||||
|
||||
for node in self.G.nodes() :
|
||||
print node
|
||||
|
||||
buff += "<node id=\"%d\">\n" % self.ids[node]
|
||||
buff += "<data key=\"d6\">\n"
|
||||
buff += "<y:ShapeNode>\n"
|
||||
|
||||
buff += "<y:Geometry height=\"%f\" width=\"%f\"/>\n" % (60.0, 7 * node.width)
|
||||
buff += "<y:Fill color=\"#%s\" transparent=\"false\"/>\n" % node.color
|
||||
|
||||
buff += "<y:NodeLabel>\n"
|
||||
buff += "%s\n" % node.basename
|
||||
|
||||
if isinstance(node, File) :
|
||||
buff += "%s\n" % node.file_type
|
||||
buff += "%s\n" % hex(node.file_crc)
|
||||
|
||||
buff += "</y:NodeLabel>\n"
|
||||
|
||||
buff += "</y:ShapeNode>\n"
|
||||
buff += "</data>\n"
|
||||
|
||||
buff += "</node>\n"
|
||||
|
||||
nb = 0
|
||||
for edge in self.G.edges() :
|
||||
buff += "<edge id=\"%d\" source=\"%d\" target=\"%d\">\n" % (nb, self.ids[edge[0]], self.ids[edge[1]])
|
||||
buff += "</edge>\n"
|
||||
nb += 1
|
||||
|
||||
buff += "</graph>\n"
|
||||
buff += "</graphml>\n"
|
||||
|
||||
return buff
0
androguard/core/debugger/__init__.py
Normal file
0
androguard/decompiler/__init__.py
Normal file
0
androguard/decompiler/dad/README.txt
Normal file
0
androguard/decompiler/dad/__init__.py
Normal file
349
androguard/decompiler/dad/basic_blocks.py
Normal file
@ -0,0 +1,349 @@
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from androguard.decompiler.dad.opcode_ins import INSTRUCTION_SET
|
||||
from androguard.decompiler.dad.node import Node
|
||||
|
||||
|
||||
logger = logging.getLogger('dad.basic_blocks')
|
||||
|
||||
|
||||
class BasicBlock(Node):
|
||||
def __init__(self, name, block_ins):
|
||||
super(BasicBlock, self).__init__(name)
|
||||
self.ins = block_ins
|
||||
self.ins_range = None
|
||||
self.loc_ins = None
|
||||
self.var_to_declare = set()
|
||||
|
||||
def get_ins(self):
|
||||
return self.ins
|
||||
|
||||
def get_loc_with_ins(self):
|
||||
if self.loc_ins is None:
|
||||
self.loc_ins = zip(range(*self.ins_range), self.ins)
|
||||
return self.loc_ins
|
||||
|
||||
def remove_ins(self, loc, ins):
|
||||
self.ins.remove(ins)
|
||||
self.loc_ins.remove((loc, ins))
|
||||
|
||||
def add_ins(self, new_ins_list):
|
||||
for new_ins in new_ins_list:
|
||||
self.ins.append(new_ins)
|
||||
|
||||
def add_variable_declaration(self, variable):
|
||||
self.var_to_declare.add(variable)
|
||||
|
||||
def number_ins(self, num):
|
||||
last_ins_num = num + len(self.ins)
|
||||
self.ins_range = [num, last_ins_num]
|
||||
self.loc_ins = None
|
||||
return last_ins_num
|
||||
|
||||
|
||||
class StatementBlock(BasicBlock):
|
||||
def __init__(self, name, block_ins):
|
||||
super(StatementBlock, self).__init__(name, block_ins)
|
||||
self.type.is_stmt = True
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_statement_node(self)
|
||||
|
||||
def __str__(self):
|
||||
return '%d-Statement(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class ReturnBlock(BasicBlock):
|
||||
def __init__(self, name, block_ins):
|
||||
super(ReturnBlock, self).__init__(name, block_ins)
|
||||
self.type.is_return = True
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_return_node(self)
|
||||
|
||||
def __str__(self):
|
||||
return '%d-Return(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class ThrowBlock(BasicBlock):
|
||||
def __init__(self, name, block_ins):
|
||||
super(ThrowBlock, self).__init__(name, block_ins)
|
||||
self.type.is_throw = True
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_throw_node(self)
|
||||
|
||||
def __str__(self):
|
||||
return '%d-Throw(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class SwitchBlock(BasicBlock):
|
||||
def __init__(self, name, switch, block_ins):
|
||||
super(SwitchBlock, self).__init__(name, block_ins)
|
||||
self.switch = switch
|
||||
self.cases = []
|
||||
self.default = None
|
||||
self.node_to_case = defaultdict(list)
|
||||
self.type.is_switch = True
|
||||
|
||||
def add_case(self, case):
|
||||
self.cases.append(case)
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_switch_node(self)
|
||||
|
||||
def copy_from(self, node):
|
||||
super(SwitchBlock, self).copy_from(node)
|
||||
self.cases = node.cases[:]
|
||||
self.switch = node.switch[:]
|
||||
|
||||
def update_attribute_with(self, n_map):
|
||||
super(SwitchBlock, self).update_attribute_with(n_map)
|
||||
self.cases = [n_map.get(n, n) for n in self.cases]
|
||||
for node1, node2 in n_map.iteritems():
|
||||
if node1 in self.node_to_case:
|
||||
self.node_to_case[node2] = self.node_to_case.pop(node1)
|
||||
|
||||
def order_cases(self):
|
||||
values = self.switch.get_values()
|
||||
if len(values) < len(self.cases):
|
||||
self.default = self.cases.pop(0)
|
||||
for case, node in zip(values, self.cases):
|
||||
self.node_to_case[node].append(case)
|
||||
|
||||
def __str__(self):
|
||||
return '%d-Switch(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class CondBlock(BasicBlock):
|
||||
def __init__(self, name, block_ins):
|
||||
super(CondBlock, self).__init__(name, block_ins)
|
||||
self.true = None
|
||||
self.false = None
|
||||
self.type.is_cond = True
|
||||
|
||||
def update_attribute_with(self, n_map):
|
||||
super(CondBlock, self).update_attribute_with(n_map)
|
||||
self.true = n_map.get(self.true, self.true)
|
||||
self.false = n_map.get(self.false, self.false)
|
||||
|
||||
def neg(self):
|
||||
if len(self.ins) != 1:
|
||||
raise RuntimeWarning('Condition should have only 1 instruction !')
|
||||
self.ins[-1].neg()
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_cond_node(self)
|
||||
|
||||
def visit_cond(self, visitor):
|
||||
if len(self.ins) != 1:
|
||||
raise RuntimeWarning('Condition should have only 1 instruction !')
|
||||
return visitor.visit_ins(self.ins[-1])
|
||||
|
||||
def __str__(self):
|
||||
return '%d-If(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class Condition(object):
|
||||
def __init__(self, cond1, cond2, isand, isnot):
|
||||
self.cond1 = cond1
|
||||
self.cond2 = cond2
|
||||
self.isand = isand
|
||||
self.isnot = isnot
|
||||
|
||||
def neg(self):
|
||||
self.isand = not self.isand
|
||||
self.cond1.neg()
|
||||
self.cond2.neg()
|
||||
|
||||
def get_ins(self):
|
||||
lins = []
|
||||
lins.extend(self.cond1.get_ins())
|
||||
lins.extend(self.cond2.get_ins())
|
||||
return lins
|
||||
|
||||
def get_loc_with_ins(self):
|
||||
loc_ins = []
|
||||
loc_ins.extend(self.cond1.get_loc_with_ins())
|
||||
loc_ins.extend(self.cond2.get_loc_with_ins())
|
||||
return loc_ins
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_short_circuit_condition(self.isnot, self.isand,
|
||||
self.cond1, self.cond2)
|
||||
|
||||
def __str__(self):
|
||||
if self.isnot:
|
||||
ret = '!%s %s %s'
|
||||
else:
|
||||
ret = '%s %s %s'
|
||||
return ret % (self.cond1, ['||', '&&'][self.isand], self.cond2)
|
||||
|
||||
|
||||
class ShortCircuitBlock(CondBlock):
|
||||
def __init__(self, name, cond):
|
||||
super(ShortCircuitBlock, self).__init__(name, None)
|
||||
self.cond = cond
|
||||
|
||||
def get_ins(self):
|
||||
return self.cond.get_ins()
|
||||
|
||||
def get_loc_with_ins(self):
|
||||
return self.cond.get_loc_with_ins()
|
||||
|
||||
def neg(self):
|
||||
self.cond.neg()
|
||||
|
||||
def visit_cond(self, visitor):
|
||||
return self.cond.visit(visitor)
|
||||
|
||||
def __str__(self):
|
||||
return '%d-SC(%s)' % (self.num, self.cond)
|
||||
|
||||
|
||||
class LoopBlock(CondBlock):
|
||||
def __init__(self, name, cond):
|
||||
super(LoopBlock, self).__init__(name, None)
|
||||
self.cond = cond
|
||||
|
||||
def get_ins(self):
|
||||
return self.cond.get_ins()
|
||||
|
||||
def neg(self):
|
||||
self.cond.neg()
|
||||
|
||||
def get_loc_with_ins(self):
|
||||
return self.cond.get_loc_with_ins()
|
||||
|
||||
def visit(self, visitor):
|
||||
return visitor.visit_loop_node(self)
|
||||
|
||||
def visit_cond(self, visitor):
|
||||
return self.cond.visit_cond(visitor)
|
||||
|
||||
def update_attribute_with(self, n_map):
|
||||
super(LoopBlock, self).update_attribute_with(n_map)
|
||||
self.cond.update_attribute_with(n_map)
|
||||
|
||||
def __str__(self):
|
||||
if self.looptype.is_pretest:
|
||||
if self.false in self.loop_nodes:
|
||||
return '%d-While(!%s)[%s]' % (self.num, self.name, self.cond)
|
||||
return '%d-While(%s)[%s]' % (self.num, self.name, self.cond)
|
||||
elif self.looptype.is_posttest:
|
||||
return '%d-DoWhile(%s)[%s]' % (self.num, self.name, self.cond)
|
||||
elif self.looptype.is_endless:
|
||||
return '%d-WhileTrue(%s)[%s]' % (self.num, self.name, self.cond)
|
||||
return '%d-WhileNoType(%s)' % (self.num, self.name)
|
||||
|
||||
|
||||
class TryBlock(BasicBlock):
|
||||
def __init__(self, node):
|
||||
super(TryBlock, self).__init__('Try-%s' % node.name, None)
|
||||
self.try_start = node
|
||||
self.catch = []
|
||||
|
||||
def add_catch_node(self, node):
|
||||
self.catch.append(node)
|
||||
|
||||
def visit(self, visitor):
|
||||
visitor.visit_try_node(self)
|
||||
|
||||
def __str__(self):
|
||||
return 'Try(%s)[%s]' % (self.name, self.catch)
|
||||
|
||||
|
||||
class CatchBlock(BasicBlock):
|
||||
def __init__(self, node):
|
||||
self.exception = node.ins[0]
|
||||
node.ins.pop(0)
|
||||
super(CatchBlock, self).__init__('Catch-%s' % node.name, node.ins)
|
||||
self.catch_start = node
|
||||
|
||||
def visit(self, visitor):
|
||||
visitor.visit_catch_node(self)
|
||||
|
||||
def visit_exception(self, visitor):
|
||||
visitor.visit_ins(self.exception)
|
||||
|
||||
def __str__(self):
|
||||
return 'Catch(%s)' % self.name
|
||||
|
||||
|
||||
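# Translate a dvm basic block into a decompiler node (Return/Switch/Cond/Throw/
# Statement block), mapping each Dalvik instruction through INSTRUCTION_SET.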
def build_node_from_block(block, vmap, gen_ret, exception_type=None):
|
||||
ins, lins = None, []
|
||||
idx = block.get_start()
|
||||
for ins in block.get_instructions():
|
||||
opcode = ins.get_op_value()
|
||||
# check-cast
|
||||
if opcode in (0x1f, -1): # FIXME? or opcode in (0x0300, 0x0200, 0x0100):
|
||||
idx += ins.get_length()
|
||||
continue
|
||||
try:
|
||||
_ins = INSTRUCTION_SET[opcode]
|
||||
except IndexError:
|
||||
logger.error('Unknown instruction : %s.', ins.get_name().lower())
|
||||
raise
|
||||
# fill-array-data
|
||||
if opcode == 0x26:
|
||||
fillaray = block.get_special_ins(idx)
|
||||
lins.append(_ins(ins, vmap, fillaray))
|
||||
# invoke-kind[/range]
|
||||
elif (0x6e <= opcode <= 0x72 or 0x74 <= opcode <= 0x78):
|
||||
lins.append(_ins(ins, vmap, gen_ret))
|
||||
# filled-new-array[/range]
|
||||
elif 0x24 <= opcode <= 0x25:
|
||||
lins.append(_ins(ins, vmap, gen_ret.new()))
|
||||
# move-result*
|
||||
elif 0xa <= opcode <= 0xc:
|
||||
lins.append(_ins(ins, vmap, gen_ret.last()))
|
||||
# move-exception
|
||||
elif opcode == 0xd:
|
||||
lins.append(_ins(ins, vmap, exception_type))
|
||||
# monitor-{enter,exit}
|
||||
elif 0x1d <= opcode <= 0x1e:
|
||||
idx += ins.get_length()
|
||||
continue
|
||||
else:
|
||||
lins.append(_ins(ins, vmap))
|
||||
idx += ins.get_length()
|
||||
name = block.get_name()
|
||||
# return*
|
||||
if 0xe <= opcode <= 0x11:
|
||||
node = ReturnBlock(name, lins)
|
||||
# {packed,sparse}-switch
|
||||
elif 0x2b <= opcode <= 0x2c:
|
||||
idx -= ins.get_length()
|
||||
values = block.get_special_ins(idx)
|
||||
node = SwitchBlock(name, values, lins)
|
||||
# if-test[z]
|
||||
elif 0x32 <= opcode <= 0x3d:
|
||||
node = CondBlock(name, lins)
|
||||
node.off_last_ins = ins.get_ref_off()
|
||||
# throw
|
||||
elif opcode == 0x27:
|
||||
node = ThrowBlock(name, lins)
|
||||
else:
|
||||
# goto*
|
||||
if 0x28 <= opcode <= 0x2a:
|
||||
lins.pop()
|
||||
node = StatementBlock(name, lins)
|
||||
return node
|
||||
432
androguard/decompiler/dad/control_flow.py
Normal file
@ -0,0 +1,432 @@
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from androguard.decompiler.dad.basic_blocks import (CatchBlock,
|
||||
Condition,
|
||||
LoopBlock,
|
||||
ShortCircuitBlock,
|
||||
TryBlock)
|
||||
from androguard.decompiler.dad.graph import Graph
|
||||
from androguard.decompiler.dad.node import Interval
|
||||
from androguard.decompiler.dad.util import common_dom
|
||||
|
||||
|
||||
logger = logging.getLogger('dad.control_flow')
|
||||
|
||||
|
||||
def intervals(graph):
|
||||
'''
|
||||
Compute the intervals of the graph
|
||||
Returns
|
||||
interval_graph: a graph of the intervals of G
|
||||
interv_heads: a dict of (header node, interval)
|
||||
'''
|
||||
interval_graph = Graph() # graph of intervals
|
||||
heads = [graph.entry] # list of header nodes
|
||||
interv_heads = {} # interv_heads[i] = interval of header i
|
||||
processed = dict([(i, False) for i in graph])
|
||||
edges = defaultdict(list)
|
||||
|
||||
while heads:
|
||||
head = heads.pop(0)
|
||||
|
||||
if not processed[head]:
|
||||
processed[head] = True
|
||||
interv_heads[head] = Interval(head)
|
||||
|
||||
# Check if there is a node which has all its predecessor in the
|
||||
# current interval. If there is, add that node to the interval and
|
||||
# repeat until all the possible nodes have been added.
|
||||
change = True
|
||||
while change:
|
||||
change = False
|
||||
for node in graph.rpo[1:]:
|
||||
if all(p in interv_heads[head] for p in graph.all_preds(node)):
|
||||
change |= interv_heads[head].add_node(node)
|
||||
|
||||
# At this stage, a node which is not in the interval, but has one
|
||||
# of its predecessor in it, is the header of another interval. So
|
||||
# we add all such nodes to the header list.
|
||||
for node in graph:
|
||||
if node not in interv_heads[head] and node not in heads:
|
||||
if any(p in interv_heads[head] for p in graph.all_preds(node)):
|
||||
edges[interv_heads[head]].append(node)
|
||||
assert(node not in heads)
|
||||
heads.append(node)
|
||||
|
||||
interval_graph.add_node(interv_heads[head])
|
||||
interv_heads[head].compute_end(graph)
|
||||
|
||||
# Edges is a mapping of 'Interval -> [header nodes of interval successors]'
|
||||
for interval, heads in edges.items():
|
||||
for head in heads:
|
||||
interval_graph.add_edge(interval, interv_heads[head])
|
||||
|
||||
interval_graph.entry = graph.entry.interval
|
||||
if graph.exit:
|
||||
interval_graph.exit = graph.exit.interval
|
||||
|
||||
return interval_graph, interv_heads
|
||||
|
||||
|
||||
def derived_sequence(graph):
|
||||
'''
|
||||
Compute the derived sequence of the graph G
|
||||
The intervals of G are collapsed into nodes, intervals of these nodes are
|
||||
built, and the process is repeated iteratively until we obtain a single
|
||||
node (if the graph is not irreducible)
|
||||
'''
|
||||
deriv_seq = [graph]
|
||||
deriv_interv = []
|
||||
single_node = False
|
||||
|
||||
while not single_node:
|
||||
|
||||
interv_graph, interv_heads = intervals(graph)
|
||||
deriv_interv.append(interv_heads)
|
||||
|
||||
single_node = len(interv_graph) == 1
|
||||
if not single_node:
|
||||
deriv_seq.append(interv_graph)
|
||||
|
||||
graph = interv_graph
|
||||
graph.compute_rpo()
|
||||
|
||||
return deriv_seq, deriv_interv
|
||||
|
||||
|
||||
def mark_loop_rec(graph, node, s_num, e_num, interval, nodes_in_loop):
|
||||
if node in nodes_in_loop:
|
||||
return
|
||||
nodes_in_loop.append(node)
|
||||
for pred in graph.all_preds(node):
|
||||
if s_num < pred.num <= e_num and pred in interval:
|
||||
mark_loop_rec(graph, pred, s_num, e_num, interval, nodes_in_loop)
|
||||
|
||||
|
||||
def mark_loop(graph, start, end, interval):
|
||||
logger.debug('MARKLOOP : %s END : %s', start, end)
|
||||
head = start.get_head()
|
||||
latch = end.get_end()
|
||||
nodes_in_loop = [head]
|
||||
mark_loop_rec(graph, latch, head.num, latch.num, interval, nodes_in_loop)
|
||||
head.startloop = True
|
||||
head.latch = latch
|
||||
return nodes_in_loop
|
||||
|
||||
|
||||
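# Classify the loop headed at 'start' (with latch 'end') as pre-tested (while),
# post-tested (do-while) or endless, depending on which of the two ends is conditional.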
def loop_type(start, end, nodes_in_loop):
|
||||
if end.type.is_cond:
|
||||
if start.type.is_cond:
|
||||
if start.true in nodes_in_loop and start.false in nodes_in_loop:
|
||||
start.looptype.is_posttest = True
|
||||
else:
|
||||
start.looptype.is_pretest = True
|
||||
else:
|
||||
start.looptype.is_posttest = True
|
||||
else:
|
||||
if start.type.is_cond:
|
||||
if start.true in nodes_in_loop and start.false in nodes_in_loop:
|
||||
start.looptype.is_endless = True
|
||||
else:
|
||||
start.looptype.is_pretest = True
|
||||
else:
|
||||
start.looptype.is_endless = True
|
||||
|
||||
|
||||
def loop_follow(start, end, nodes_in_loop):
|
||||
follow = None
|
||||
if start.looptype.is_pretest:
|
||||
if start.true in nodes_in_loop:
|
||||
follow = start.false
|
||||
else:
|
||||
follow = start.true
|
||||
elif start.looptype.is_posttest:
|
||||
if end.true in nodes_in_loop:
|
||||
follow = end.false
|
||||
else:
|
||||
follow = end.true
|
||||
else:
|
||||
num_next = float('inf')
|
||||
for node in nodes_in_loop:
|
||||
if node.type.is_cond:
|
||||
if (node.true.num < num_next
|
||||
and node.true not in nodes_in_loop):
|
||||
follow = node.true
|
||||
num_next = follow.num
|
||||
elif (node.false.num < num_next
|
||||
and node.false not in nodes_in_loop):
|
||||
follow = node.false
|
||||
num_next = follow.num
|
||||
start.follow['loop'] = follow
|
||||
for node in nodes_in_loop:
|
||||
node.follow['loop'] = follow
|
||||
logger.debug('Start of loop %s', start)
|
||||
logger.debug('Follow of loop: %s', start.follow['loop'])
|
||||
|
||||
|
||||
def loop_struct(graphs_list, intervals_list):
|
||||
first_graph = graphs_list[0]
|
||||
for i, graph in enumerate(graphs_list):
|
||||
interval = intervals_list[i]
|
||||
for head in sorted(interval.keys(), key=lambda x: x.num):
|
||||
loop_nodes = []
|
||||
for node in graph.all_preds(head):
|
||||
if node.interval is head.interval:
|
||||
lnodes = mark_loop(first_graph, head, node, head.interval)
|
||||
for lnode in lnodes:
|
||||
if lnode not in loop_nodes:
|
||||
loop_nodes.append(lnode)
|
||||
head.get_head().loop_nodes = loop_nodes
|
||||
|
||||
|
||||
def if_struct(graph, idoms):
|
||||
unresolved = set()
|
||||
for node in graph.post_order():
|
||||
if node.type.is_cond:
|
||||
ldominates = []
|
||||
for n, idom in idoms.iteritems():
|
||||
if node is idom and len(graph.reverse_edges.get(n, [])) > 1:
|
||||
ldominates.append(n)
|
||||
if len(ldominates) > 0:
|
||||
n = max(ldominates, key=lambda x: x.num)
|
||||
node.follow['if'] = n
|
||||
for x in unresolved.copy():
|
||||
if node.num < x.num < n.num:
|
||||
x.follow['if'] = n
|
||||
unresolved.remove(x)
|
||||
else:
|
||||
unresolved.add(node)
|
||||
return unresolved
|
||||
|
||||
|
||||
def switch_struct(graph, idoms):
|
||||
unresolved = set()
|
||||
for node in graph.post_order():
|
||||
if node.type.is_switch:
|
||||
m = node
|
||||
for suc in graph.sucs(node):
|
||||
if idoms[suc] is not node:
|
||||
m = common_dom(idoms, node, suc)
|
||||
ldominates = []
|
||||
for n, dom in idoms.iteritems():
|
||||
if m is dom and len(graph.all_preds(n)) > 1:
|
||||
ldominates.append(n)
|
||||
if len(ldominates) > 0:
|
||||
n = max(ldominates, key=lambda x: x.num)
|
||||
node.follow['switch'] = n
|
||||
for x in unresolved:
|
||||
x.follow['switch'] = n
|
||||
unresolved = set()
|
||||
else:
|
||||
unresolved.add(node)
|
||||
node.order_cases()
|
||||
|
||||
|
||||
# TODO: deal with preds which are in catch
|
||||
def short_circuit_struct(graph, idom, node_map):
|
||||
def MergeNodes(node1, node2, is_and, is_not):
|
||||
lpreds = set()
|
||||
ldests = set()
|
||||
for node in (node1, node2):
|
||||
lpreds.update(graph.preds(node))
|
||||
ldests.update(graph.sucs(node))
|
||||
graph.remove_node(node)
|
||||
done.add(node)
|
||||
lpreds.difference_update((node1, node2))
|
||||
ldests.difference_update((node1, node2))
|
||||
|
||||
entry = graph.entry in (node1, node2)
|
||||
|
||||
new_name = '%s+%s' % (node1.name, node2.name)
|
||||
condition = Condition(node1, node2, is_and, is_not)
|
||||
|
||||
new_node = ShortCircuitBlock(new_name, condition)
|
||||
for old_n, new_n in node_map.iteritems():
|
||||
if new_n in (node1, node2):
|
||||
node_map[old_n] = new_node
|
||||
node_map[node1] = new_node
|
||||
node_map[node2] = new_node
|
||||
idom[new_node] = idom[node1]
|
||||
idom.pop(node1)
|
||||
idom.pop(node2)
|
||||
new_node.copy_from(node1)
|
||||
|
||||
graph.add_node(new_node)
|
||||
|
||||
for pred in lpreds:
|
||||
pred.update_attribute_with(node_map)
|
||||
graph.add_edge(node_map.get(pred, pred), new_node)
|
||||
for dest in ldests:
|
||||
graph.add_edge(new_node, node_map.get(dest, dest))
|
||||
if entry:
|
||||
graph.entry = new_node
|
||||
return new_node
|
||||
|
||||
change = True
|
||||
while change:
|
||||
change = False
|
||||
done = set()
|
||||
for node in graph.post_order():
|
||||
if node.type.is_cond and node not in done:
|
||||
then = node.true
|
||||
els = node.false
|
||||
if node in (then, els):
|
||||
continue
|
||||
if then.type.is_cond and len(graph.preds(then)) == 1:
|
||||
if then.false is els: # node && t
|
||||
change = True
|
||||
merged_node = MergeNodes(node, then, True, False)
|
||||
merged_node.true = then.true
|
||||
merged_node.false = els
|
||||
elif then.true is els: # !node || t
|
||||
change = True
|
||||
merged_node = MergeNodes(node, then, False, True)
|
||||
merged_node.true = els
|
||||
merged_node.false = then.false
|
||||
elif els.type.is_cond and len(graph.preds(els)) == 1:
|
||||
if els.false is then: # !node && e
|
||||
change = True
|
||||
merged_node = MergeNodes(node, els, True, True)
|
||||
merged_node.true = els.true
|
||||
merged_node.false = then
|
||||
elif els.true is then: # node || e
|
||||
change = True
|
||||
merged_node = MergeNodes(node, els, False, False)
|
||||
merged_node.true = then
|
||||
merged_node.false = els.false
|
||||
done.add(node)
|
||||
if change:
|
||||
graph.reset_rpo()
|
||||
|
||||
|
||||
def while_block_struct(graph, node_map):
|
||||
change = False
|
||||
for node in graph.rpo[:]:
|
||||
if node.startloop:
|
||||
change = True
|
||||
new_node = LoopBlock(node.name, node)
|
||||
node_map[node] = new_node
|
||||
new_node.copy_from(node)
|
||||
|
||||
entry = node is graph.entry
|
||||
lpreds = graph.preds(node)
|
||||
lsuccs = graph.sucs(node)
|
||||
|
||||
for pred in lpreds:
|
||||
graph.add_edge(node_map.get(pred, pred), new_node)
|
||||
|
||||
for suc in lsuccs:
|
||||
graph.add_edge(new_node, node_map.get(suc, suc))
|
||||
if entry:
|
||||
graph.entry = new_node
|
||||
|
||||
if node.type.is_cond:
|
||||
new_node.true = node.true
|
||||
new_node.false = node.false
|
||||
|
||||
graph.add_node(new_node)
|
||||
graph.remove_node(node)
|
||||
|
||||
if change:
|
||||
graph.reset_rpo()
|
||||
|
||||
|
||||
def catch_struct(graph, idoms):
|
||||
block_try_nodes = {}
|
||||
node_map = {}
|
||||
for catch_block in graph.reverse_catch_edges:
|
||||
if catch_block in graph.catch_edges:
|
||||
continue
|
||||
catch_node = CatchBlock(catch_block)
|
||||
|
||||
try_block = idoms[catch_block]
|
||||
try_node = block_try_nodes.get(try_block)
|
||||
if try_node is None:
|
||||
block_try_nodes[try_block] = TryBlock(try_block)
|
||||
try_node = block_try_nodes[try_block]
|
||||
|
||||
node_map[try_block] = try_node
|
||||
for pred in graph.all_preds(try_block):
|
||||
pred.update_attribute_with(node_map)
|
||||
if try_block in graph.sucs(pred):
|
||||
graph.edges[pred].remove(try_block)
|
||||
graph.add_edge(pred, try_node)
|
||||
|
||||
if try_block.type.is_stmt:
|
||||
follow = graph.sucs(try_block)
|
||||
if follow:
|
||||
try_node.follow = graph.sucs(try_block)[0]
|
||||
else:
|
||||
try_node.follow = None
|
||||
elif try_block.type.is_cond:
|
||||
loop_follow = try_block.follow['loop']
|
||||
if loop_follow:
|
||||
try_node.follow = loop_follow
|
||||
else:
|
||||
try_node.follow = try_block.follow['if']
|
||||
elif try_block.type.is_switch:
|
||||
try_node.follow = try_block.follow['switch']
|
||||
else: # return or throw
|
||||
try_node.follow = None
|
||||
|
||||
try_node.add_catch_node(catch_node)
|
||||
for node in graph.nodes:
|
||||
node.update_attribute_with(node_map)
|
||||
if graph.entry in node_map:
|
||||
graph.entry = node_map[graph.entry]
|
||||
|
||||
|
||||
def update_dom(idoms, node_map):
|
||||
for n, dom in idoms.iteritems():
|
||||
idoms[n] = node_map.get(dom, dom)
|
||||
|
||||
|
||||
def identify_structures(graph, idoms):
|
||||
Gi, Li = derived_sequence(graph)
|
||||
switch_struct(graph, idoms)
|
||||
loop_struct(Gi, Li)
|
||||
node_map = {}
|
||||
|
||||
short_circuit_struct(graph, idoms, node_map)
|
||||
update_dom(idoms, node_map)
|
||||
|
||||
if_unresolved = if_struct(graph, idoms)
|
||||
|
||||
while_block_struct(graph, node_map)
|
||||
update_dom(idoms, node_map)
|
||||
|
||||
loop_starts = []
|
||||
for node in graph.rpo:
|
||||
node.update_attribute_with(node_map)
|
||||
if node.startloop:
|
||||
loop_starts.append(node)
|
||||
for node in loop_starts:
|
||||
loop_type(node, node.latch, node.loop_nodes)
|
||||
loop_follow(node, node.latch, node.loop_nodes)
|
||||
|
||||
for node in if_unresolved:
|
||||
follows = [n for n in (node.follow['loop'],
|
||||
node.follow['switch']) if n]
|
||||
if len(follows) >= 1:
|
||||
follow = min(follows, key=lambda x: x.num)
|
||||
node.follow['if'] = follow
|
||||
|
||||
catch_struct(graph, idoms)
|
||||
|
562
androguard/decompiler/dad/dataflow.py
Normal file
@ -0,0 +1,562 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from androguard.decompiler.dad.graph import Graph
|
||||
from androguard.decompiler.dad.instruction import Variable, Param
|
||||
from androguard.decompiler.dad.util import build_path, common_dom
|
||||
from androguard.decompiler.dad.node import Node
|
||||
|
||||
|
||||
logger = logging.getLogger('dad.control_flow')
|
||||
|
||||
|
||||
def dominance_frontier(graph, idoms):
|
||||
'''
|
||||
Create the dominance frontier of each node of the graph.
|
||||
The dominance frontier of a node n is the set of all nodes m such that
|
||||
n dominates an immediate predecessor of m but does not strictly dominate m.
|
||||
'''
|
||||
DF = {}
|
||||
for node in graph:
|
||||
DF[node] = set()
|
||||
for node in graph:
|
||||
# Nodes in a DF set must be join points in the graph
|
||||
preds = graph.preds(node)
|
||||
if len(preds) > 1:
|
||||
# We found a join point. Now for each of its predecessors we walk up
|
||||
# the dominator tree to find a node that dominates it.
|
||||
# The join point belongs to the DF of all the nodes which are on the
|
||||
# dominator tree walk.
|
||||
for pred in preds:
|
||||
runner = pred
|
||||
while runner != idoms[node]:
|
||||
DF[runner].add(node)
|
||||
runner = idoms[runner]
|
||||
return DF
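# --- Illustrative sketch (not part of the decompiler source) ---
# A minimal, self-contained version of the same dominance-frontier walk
# on a hand-written diamond CFG (0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3), using
# plain dicts instead of the Graph class. Node numbers and idoms below
# are made up for illustration only.
def _dominance_frontier_sketch():
    preds = {0: [], 1: [0], 2: [0], 3: [1, 2]}
    idoms = {0: None, 1: 0, 2: 0, 3: 0}
    DF = dict((n, set()) for n in preds)
    for node, node_preds in preds.items():
        if len(node_preds) > 1:  # join point
            for pred in node_preds:
                runner = pred
                while runner != idoms[node]:
                    DF[runner].add(node)
                    runner = idoms[runner]
    return DF  # expected: {0: set(), 1: set([3]), 2: set([3]), 3: set()}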
|
||||
|
||||
|
||||
def dom_tree(idoms):
|
||||
g = Graph()
|
||||
for node in idoms:
|
||||
g.add_node(node)
|
||||
for node, idom in idoms.iteritems():
|
||||
if node:
|
||||
g.add_edge(idom, node)
|
||||
return g
|
||||
|
||||
|
||||
def dom_frontier(graph, idoms):
|
||||
dtree = dom_tree(idoms)
|
||||
dtree.entry = graph.entry
|
||||
DF = {}
|
||||
for node in dtree.post_order():
|
||||
DF[node] = set()
|
||||
for suc in graph.sucs(node):
|
||||
if idoms[suc] != node:
|
||||
DF[node].add(suc)
|
||||
for child in dtree.sucs(node):
|
||||
for p in DF[child]:
|
||||
if idoms[p] != node:
|
||||
DF[node].add(p)
|
||||
return DF
|
||||
|
||||
|
||||
def phi_placement(graph, DF, lvars, UD, DU):
|
||||
worklist = []
|
||||
inserted = {}
|
||||
in_worklist = {}
|
||||
for node in graph:
|
||||
inserted[node] = None
|
||||
in_worklist[node] = None
|
||||
|
||||
var_to_loc = defaultdict(list)
|
||||
for var, loc in UD:
|
||||
if var in lvars:
|
||||
var_to_loc[var].append(loc)
|
||||
|
||||
for var, locs in var_to_loc.iteritems():
|
||||
for node in set([graph.get_node_from_loc(loc) for loc in locs]):
|
||||
in_worklist[node] = var
|
||||
worklist.append(node)
|
||||
|
||||
while worklist:
|
||||
node = worklist.pop()
|
||||
for m in DF[node]:
|
||||
if inserted[m] != var:
|
||||
pass  # phi insertion is not implemented here; the call to phi_placement in decompile.py is commented out
|
||||
|
||||
|
||||
|
||||
class BasicReachDef(object):
|
||||
def __init__(self, graph, params):
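# Attribute summary (as used by run() below), added for readability:
#   self.A[node]  : definition locations reaching the *exit* of node
#   self.R[node]  : definition locations reaching the *entry* of node
#   self.DB[node] : downward-exposed definitions (the last definition
#                   of each variable inside the node)
#   self.defs[node][var] : locations in node where var is defined
#   self.def_to_loc[var] : every location where var is defined
#   (location -1 stands for method parameters defined at entry)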
|
||||
self.g = graph
|
||||
self.A = defaultdict(set)
|
||||
self.R = defaultdict(set)
|
||||
self.DB = defaultdict(set)
|
||||
self.defs = defaultdict(lambda: defaultdict(set))
|
||||
self.def_to_loc = defaultdict(set)
|
||||
# Deal with special entry node
|
||||
entry = graph.entry
|
||||
self.A[entry] = set([-1])
|
||||
for param in params:
|
||||
self.defs[entry][param].add(-1)
|
||||
self.def_to_loc[param].add(-1)
|
||||
# Deal with the other nodes
|
||||
for node in graph.rpo:
|
||||
for i, ins in node.get_loc_with_ins():
|
||||
kill = ins.get_lhs()
|
||||
if kill is not None:
|
||||
self.defs[node][kill].add(i)
|
||||
self.def_to_loc[kill].add(i)
|
||||
for defs, values in self.defs[node].items():
|
||||
self.DB[node].add(max(values))
|
||||
|
||||
def run(self):
|
||||
nodes = self.g.rpo[:]
|
||||
while nodes:
|
||||
node = nodes.pop(0)
|
||||
newR = set()
|
||||
for pred in self.g.all_preds(node):
|
||||
newR.update(self.A[pred])
|
||||
if newR and newR != self.R[node]:
|
||||
self.R[node] = newR
|
||||
for suc in self.g.all_sucs(node):
|
||||
if suc not in nodes:
|
||||
nodes.append(suc)
|
||||
|
||||
killed_locs = set()
|
||||
for reg in self.defs[node]:
|
||||
for loc in self.def_to_loc[reg]:
|
||||
killed_locs.add(loc)
|
||||
|
||||
A = set()
|
||||
for loc in self.R[node]:
|
||||
if loc not in killed_locs:
|
||||
A.add(loc)
|
||||
newA = A.union(self.DB[node])
|
||||
if newA != self.A[node]:
|
||||
self.A[node] = newA
|
||||
for suc in self.g.all_sucs(node):
|
||||
if suc not in nodes:
|
||||
nodes.append(suc)
|
||||
|
||||
|
||||
def update_chain(graph, loc, du, ud):
|
||||
'''
|
||||
Updates the DU chain of the instruction located at loc such that there is
|
||||
no more reference to it so that we can remove it.
|
||||
When an instruction is found to be dead (i.e. it has no side effect, and the
|
||||
register defined is not used) we have to update the DU chain of all the
|
||||
variables that may be used by the dead instruction.
|
||||
'''
|
||||
ins = graph.get_ins_from_loc(loc)
|
||||
for var in ins.get_used_vars():
|
||||
# We get the definition points of the current variable
|
||||
for def_loc in set(ud[(var, loc)]):
|
||||
# We remove the use of the variable at loc from the DU chain of
|
||||
# the variable definition located at def_loc
|
||||
du[(var, def_loc)].remove(loc)
|
||||
ud[(var, loc)].remove(def_loc)
|
||||
if not ud.get((var, loc)):
|
||||
ud.pop((var, loc))
|
||||
# If the DU chain of the defined variable is now empty, this means
|
||||
# that we may have created a new dead instruction, so we check that
|
||||
# the instruction has no side effect and we update the DU chain of
|
||||
# the new dead instruction, and we delete it.
|
||||
# We also make sure that def_loc is not -1. This is the case when
|
||||
# the current variable is a method parameter.
|
||||
if def_loc >= 0 and not du[(var, def_loc)]:
|
||||
du.pop((var, def_loc))
|
||||
def_ins = graph.get_ins_from_loc(def_loc)
|
||||
if def_ins.is_call():
|
||||
def_ins.remove_defined_var()
|
||||
elif def_ins.has_side_effect():
|
||||
continue
|
||||
else:
|
||||
update_chain(graph, def_loc, du, ud)
|
||||
graph.remove_ins(def_loc)
|
||||
|
||||
|
||||
def dead_code_elimination(graph, du, ud):
|
||||
'''
|
||||
Run a dead code elimination pass.
|
||||
Each instruction is checked to see whether it is dead. If so, we remove it and
|
||||
we update the DU & UD chains of its variables to check for further dead
|
||||
instructions.
|
||||
'''
|
||||
for node in graph.rpo:
|
||||
for i, ins in node.get_loc_with_ins()[:]:
|
||||
reg = ins.get_lhs()
|
||||
if reg is not None:
|
||||
# If the definition is not used, we check that the instruction
|
||||
# has no side effect. If there is one and this is a call, we
|
||||
# remove only the unused defined variable. Else, this is
|
||||
# something like an array access, so we do nothing.
|
||||
# Otherwise (no side effect) we can remove the instruction from
|
||||
# the node.
|
||||
if (reg, i) not in du:
|
||||
if ins.is_call():
|
||||
ins.remove_defined_var()
|
||||
elif ins.has_side_effect():
|
||||
continue
|
||||
else:
|
||||
# We can delete the instruction. First update the DU
|
||||
# chain of the variables used by the instruction to
|
||||
# `let them know` that they are not used anymore by the
|
||||
# deleted instruction.
|
||||
# Then remove the instruction.
|
||||
update_chain(graph, i, du, ud)
|
||||
graph.remove_ins(i)
|
||||
|
||||
|
||||
def clear_path_node(graph, reg, loc1, loc2):
|
||||
for loc in xrange(loc1, loc2):
|
||||
ins = graph.get_ins_from_loc(loc)
|
||||
logger.debug(' treat loc: %d, ins: %s', loc, ins)
|
||||
if ins is None:
|
||||
continue
|
||||
logger.debug(' LHS: %s, side_effect: %s', ins.get_lhs(),
|
||||
ins.has_side_effect())
|
||||
if ins.get_lhs() == reg or ins.has_side_effect():
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def clear_path(graph, reg, loc1, loc2):
|
||||
'''
|
||||
Check that the path from loc1 to loc2 is clear.
|
||||
We have to check that there is no side effect between the two location
|
||||
points. We also have to check that the variable `reg` is not redefined
|
||||
along one of the possible paths from loc1 to loc2.
|
||||
'''
|
||||
logger.debug('clear_path: reg(%s), loc1(%s), loc2(%s)', reg, loc1, loc2)
|
||||
node1 = graph.get_node_from_loc(loc1)
|
||||
node2 = graph.get_node_from_loc(loc2)
|
||||
# If both instructions are in the same node, we only have to check that the
|
||||
# path is clear inside the node
|
||||
if node1 is node2:
|
||||
return clear_path_node(graph, reg, loc1 + 1, loc2)
|
||||
|
||||
# If instructions are in different nodes, we also have to check the nodes
|
||||
# in the path between the two locations.
|
||||
if not clear_path_node(graph, reg, loc1 + 1, node1.ins_range[1]):
|
||||
return False
|
||||
path = build_path(graph, node1, node2)
|
||||
for node in path:
|
||||
locs = node.ins_range
|
||||
end_loc = loc2 if (locs[0] <= loc2 <= locs[1]) else locs[1]
|
||||
if not clear_path_node(graph, reg, locs[0], end_loc):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def register_propagation(graph, du, ud):
|
||||
'''
|
||||
Propagate the temporary registers between instructions and remove them if
|
||||
necessary.
|
||||
We process the nodes of the graph in reverse post order. For each
|
||||
instruction in the node, we look at the variables that it uses. For each of
|
||||
these variables we look where it is defined and if we can replace it with
|
||||
its definition.
|
||||
We have to be careful about the side effects some instructions may have.
|
||||
To do the propagation, we use the computed DU and UD chains.
|
||||
'''
|
||||
change = True
|
||||
while change:
|
||||
change = False
|
||||
for node in graph.rpo:
|
||||
for i, ins in node.get_loc_with_ins()[:]:
|
||||
logger.debug('Treating instruction %d: %s', i, ins)
|
||||
# We make sure the ins has not been deleted since the start of
|
||||
# the iteration
|
||||
if ins not in node.get_ins():
|
||||
logger.debug(' => skip instruction (deleted)')
|
||||
continue
|
||||
logger.debug(' Used vars: %s', ins.get_used_vars())
|
||||
for var in ins.get_used_vars():
|
||||
# Get the list of locations this variable is defined at.
|
||||
locs = ud[(var, i)]
|
||||
logger.debug(' var %s defined in lines %s', var, locs)
|
||||
# If the variable is uniquely defined for this instruction
|
||||
# it may be eligible for propagation.
|
||||
if len(locs) != 1:
|
||||
continue
|
||||
|
||||
loc = locs[0]
|
||||
# Methods parameters are defined with a location of -1.
|
||||
if loc == -1:
|
||||
continue
|
||||
orig_ins = graph.get_ins_from_loc(loc)
|
||||
logger.debug(' -> %s', orig_ins)
|
||||
|
||||
logger.debug(' -> DU(%s, %s) = %s', var, loc,
|
||||
du[(var, loc)])
|
||||
|
||||
# We defined some instructions as not propagable.
|
||||
# Actually this is the case only for array creation
|
||||
# (new foo[x])
|
||||
if not orig_ins.is_propagable():
|
||||
logger.debug(' %s not propagable...', orig_ins)
|
||||
continue
|
||||
|
||||
# We only try to propagate constants and definition
|
||||
# points which are used at only one location.
|
||||
if len(du[(var, loc)]) > 1:
|
||||
if not orig_ins.get_rhs().is_const():
|
||||
logger.debug(' => variable has multiple uses'
|
||||
' and is not const => skip')
|
||||
continue
|
||||
|
||||
# We check that the propagation is safe for all the
|
||||
# variables that are used in the instruction.
|
||||
# The propagation is not safe if there is a side effect
|
||||
# along the path from the definition of the variable
|
||||
# to its use in the instruction, or if the variable may
|
||||
# be redefined along this path.
|
||||
safe = True
|
||||
orig_ins_used_vars = orig_ins.get_used_vars()
|
||||
logger.debug(' variables used by the original '
|
||||
'instruction: %s', orig_ins_used_vars)
|
||||
for var2 in orig_ins_used_vars:
|
||||
# loc is the location of the defined variable
|
||||
# i is the location of the current instruction
|
||||
if not clear_path(graph, var2, loc, i):
|
||||
safe = False
|
||||
break
|
||||
if not safe:
|
||||
logger.debug('Propagation NOT SAFE')
|
||||
continue
|
||||
|
||||
# We also check that the instruction itself is
|
||||
# propagable. If the instruction has a side effect it
|
||||
# cannot be propagated if there is another side effect
|
||||
# along the path
|
||||
if orig_ins.has_side_effect():
|
||||
if not clear_path(graph, None, loc, i):
|
||||
logger.debug(' %s has side effect and the '
|
||||
'path is not clear !', orig_ins)
|
||||
continue
|
||||
|
||||
logger.debug(' => Modification of the instruction!')
|
||||
logger.debug(' - BEFORE: %s', ins)
|
||||
ins.replace(var, orig_ins.get_rhs())
|
||||
logger.debug(' -> AFTER: %s', ins)
|
||||
logger.debug('\t UD(%s, %s) : %s', var, i, ud[(var, i)])
|
||||
ud[(var, i)].remove(loc)
|
||||
logger.debug('\t -> %s', ud[(var, i)])
|
||||
if len(ud[(var, i)]) == 0:
|
||||
ud.pop((var, i))
|
||||
for var2 in orig_ins.get_used_vars():
|
||||
# We update the UD chain of the variables we
|
||||
# propagate. We also have to take the
|
||||
# definition points of all the variables used
|
||||
# by the instruction and update the DU chain
|
||||
# with this information.
|
||||
old_ud = ud.get((var2, loc))
|
||||
logger.debug('\t ud(%s, %s) = %s', var2, loc, old_ud)
|
||||
# If the instruction uses the same variable
|
||||
# multiple times, the UD chain for the second and later uses
|
||||
# will be None because it has already been treated.
|
||||
if old_ud is None:
|
||||
continue
|
||||
ud[(var2, i)].extend(old_ud)
|
||||
logger.debug('\t - ud(%s, %s) = %s', var2, i,
|
||||
ud[(var2, i)])
|
||||
ud.pop((var2, loc))
|
||||
|
||||
for def_loc in old_ud:
|
||||
du[(var2, def_loc)].remove(loc)
|
||||
du[(var2, def_loc)].append(i)
|
||||
|
||||
new_du = du[(var, loc)]
|
||||
logger.debug('\t new_du(%s, %s): %s', var, loc, new_du)
|
||||
new_du.remove(i)
|
||||
logger.debug('\t -> %s', new_du)
|
||||
if not new_du:
|
||||
logger.debug('\t REMOVING INS %d', loc)
|
||||
du.pop((var, loc))
|
||||
graph.remove_ins(loc)
|
||||
change = True
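# --- Illustrative example (made-up registers, not decompiler output) ---
# With a unique definition, a unique use, and a clear path between them,
# the pass rewrites the use with the definition's right-hand side and
# then removes the now-dead definition:
#     before:   loc 0: v0 = 5          after:   loc 1: v1 = 5 + v2
#               loc 1: v1 = v0 + v2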
|
||||
|
||||
|
||||
class DummyNode(Node):
|
||||
def __init__(self, name):
|
||||
super(DummyNode, self).__init__(name)
|
||||
|
||||
def get_loc_with_ins(self):
|
||||
return []
|
||||
|
||||
def __repr__(self):
|
||||
return '%s-dummynode' % self.name
|
||||
|
||||
def __str__(self):
|
||||
return '%s-dummynode' % self.name
|
||||
|
||||
|
||||
def split_variables(graph, lvars, DU, UD):
|
||||
treated = defaultdict(list)
|
||||
variables = defaultdict(list)
|
||||
for var, loc in sorted(DU):
|
||||
if var not in lvars:
|
||||
continue
|
||||
if loc in treated[var]:
|
||||
continue
|
||||
defs = [loc]
|
||||
uses = set(DU[(var, loc)])
|
||||
change = True
|
||||
while change:
|
||||
change = False
|
||||
for use in uses:
|
||||
ldefs = UD[(var, use)]
|
||||
for ldef in ldefs:
|
||||
if ldef not in defs:
|
||||
defs.append(ldef)
|
||||
change = True
|
||||
for ldef in defs[1:]:
|
||||
luses = set(DU[(var, ldef)])
|
||||
for use in luses:
|
||||
if use not in uses:
|
||||
uses.add(use)
|
||||
change = True
|
||||
treated[var].extend(defs)
|
||||
variables[var].append((defs, list(uses)))
|
||||
|
||||
if lvars:
|
||||
nb_vars = max(lvars) + 1
|
||||
else:
|
||||
nb_vars = 0
|
||||
for var, versions in variables.iteritems():
|
||||
nversions = len(versions)
|
||||
if nversions == 1:
|
||||
continue
|
||||
orig_var = lvars.pop(var)
|
||||
for i, (defs, uses) in enumerate(versions):
|
||||
if -1 in defs: # Param
|
||||
new_version = Param(var, orig_var.type)
|
||||
lvars[var] = new_version
|
||||
else:
|
||||
new_version = Variable(nb_vars)
|
||||
new_version.type = orig_var.type
|
||||
lvars[nb_vars] = new_version # add new version to variables
|
||||
nb_vars += 1
|
||||
new_version.name = '%d_%d' % (var, i)
|
||||
|
||||
for loc in defs:
|
||||
if loc == -1:
|
||||
continue
|
||||
ins = graph.get_ins_from_loc(loc)
|
||||
ins.replace_lhs(new_version)
|
||||
for loc in uses:
|
||||
ins = graph.get_ins_from_loc(loc)
|
||||
ins.replace_var(var, new_version)
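# --- Illustrative sketch (not part of the decompiler source) ---
# The closure step above groups the definitions and uses of one register
# into independent "webs"; each web then becomes its own variable. Below
# is a minimal stand-alone version of that closure on made-up DU/UD
# chains where register 0 has two unrelated lifetimes (def@1/use@2 and
# def@5/use@6):
def _webs_sketch():
    DU = {(0, 1): [2], (0, 5): [6]}
    UD = {(0, 2): [1], (0, 6): [5]}
    webs = []
    seen = set()
    for (var, loc) in sorted(DU):
        if loc in seen:
            continue
        defs, uses = [loc], set(DU[(var, loc)])
        change = True
        while change:
            change = False
            for use in list(uses):
                for d in UD[(var, use)]:
                    if d not in defs:
                        defs.append(d)
                        change = True
            for d in defs[1:]:
                for use in DU[(var, d)]:
                    if use not in uses:
                        uses.add(use)
                        change = True
        seen.update(defs)
        webs.append((defs, sorted(uses)))
    return webs  # expected: [([1], [2]), ([5], [6])]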
|
||||
|
||||
|
||||
def build_def_use(graph, lparams):
|
||||
'''
|
||||
Builds the Def-Use and Use-Def (DU/UD) chains of the variables of the
|
||||
method.
|
||||
'''
|
||||
# We insert two special nodes, entry & exit, into the graph.
|
||||
# This is done to simplify the reaching definition analysis.
|
||||
old_entry = graph.entry
|
||||
old_exit = graph.exit
|
||||
new_entry = DummyNode('entry')
|
||||
graph.add_node(new_entry)
|
||||
graph.add_edge(new_entry, old_entry)
|
||||
graph.entry = new_entry
|
||||
if old_exit:
|
||||
new_exit = DummyNode('exit')
|
||||
graph.add_node(new_exit)
|
||||
graph.add_edge(old_exit, new_exit)
|
||||
graph.rpo.append(new_exit)
|
||||
|
||||
analysis = BasicReachDef(graph, set(lparams))
|
||||
analysis.run()
|
||||
|
||||
# The analysis is done, we can now remove the two special nodes.
|
||||
graph.remove_node(new_entry)
|
||||
if old_exit:
|
||||
graph.remove_node(new_exit)
|
||||
graph.entry = old_entry
|
||||
|
||||
UD = defaultdict(list)
|
||||
for node in graph.rpo:
|
||||
for i, ins in node.get_loc_with_ins():
|
||||
for var in ins.get_used_vars():
|
||||
# The 'var not in analysis.def_to_loc' test checks that the register
|
||||
# exists. It may not exist when a
|
||||
# variable has a type which is stored on multiple registers,
|
||||
# e.g. a 'double' stored in v3 is also present in v4, so a call
|
||||
# to foo(v3) will in fact call foo(v3, v4).
|
||||
if var not in analysis.def_to_loc:
|
||||
continue
|
||||
ldefs = analysis.defs[node]
|
||||
prior_def = -1
|
||||
for v in ldefs.get(var, set()):
|
||||
if prior_def < v < i:
|
||||
prior_def = v
|
||||
if prior_def >= 0:
|
||||
UD[(var, i)].append(prior_def)
|
||||
else:
|
||||
intersect = analysis.def_to_loc[var].intersection(
|
||||
analysis.R[node])
|
||||
UD[(var, i)].extend(intersect)
|
||||
DU = defaultdict(list)
|
||||
for var_loc, defs_loc in UD.items():
|
||||
var, loc = var_loc
|
||||
# FIXME: should not have to add this
|
||||
if not defs_loc:
|
||||
DU[(var, -1)].append(loc)
|
||||
for def_loc in defs_loc:
|
||||
DU[(var, def_loc)].append(loc)
|
||||
|
||||
return UD, DU
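# --- Illustrative example of the returned chains (made-up method) ---
# For a three-instruction body
#     loc 0:  v0 = 1
#     loc 1:  v1 = v0 + 2
#     loc 2:  return v1
# the chains are keyed by (register, location):
#     UD = {(0, 1): [0], (1, 2): [1]}    # use -> locations of its defs
#     DU = {(0, 0): [1], (1, 1): [2]}    # def -> locations of its uses
# Method parameters show up with the special definition location -1.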
|
||||
|
||||
|
||||
def place_declarations(graph, dvars, du, ud):
|
||||
idom = graph.immediate_dominators()
|
||||
for node in graph.rpo:
|
||||
for loc, ins in node.get_loc_with_ins():
|
||||
used_vars = ins.get_used_vars()
|
||||
for var in used_vars:
|
||||
if (not isinstance(dvars[var], Variable)
|
||||
or isinstance(dvars[var], Param)):
|
||||
continue
|
||||
var_defs_locs = ud[(var, loc)]
|
||||
# FIXME: this should not happen.
|
||||
if var_defs_locs is None:
|
||||
continue
|
||||
def_nodes = set()
|
||||
for def_loc in var_defs_locs:
|
||||
def_node = graph.get_node_from_loc(def_loc)
|
||||
# TODO: place declarations in catch if needed
|
||||
if def_node.in_catch:
|
||||
continue
|
||||
def_nodes.add(def_node)
|
||||
if not def_nodes:
|
||||
continue
|
||||
common_dominator = def_nodes.pop()
|
||||
for def_node in def_nodes:
|
||||
common_dominator = common_dom(
|
||||
idom, common_dominator, def_node)
|
||||
if any(var in range(*common_dominator.ins_range)
|
||||
for var in ud[(var, loc)]):
|
||||
continue
|
||||
common_dominator.add_variable_declaration(dvars[var])
|
||||
|
346
androguard/decompiler/dad/decompile.py
Normal file
@ -0,0 +1,346 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
sys.path.append('./')
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
import androguard.core.androconf as androconf
|
||||
import androguard.decompiler.dad.util as util
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.decompiler.dad.control_flow import identify_structures
|
||||
from androguard.decompiler.dad.dataflow import (build_def_use,
|
||||
place_declarations,
|
||||
dead_code_elimination,
|
||||
register_propagation,
|
||||
phi_placement,
|
||||
split_variables)
|
||||
from androguard.decompiler.dad.graph import construct
|
||||
from androguard.decompiler.dad.instruction import Param, ThisParam
|
||||
from androguard.decompiler.dad.writer import Writer
|
||||
|
||||
|
||||
def auto_vm(filename):
|
||||
ret = androconf.is_android(filename)
|
||||
if ret == 'APK':
|
||||
return dvm.DalvikVMFormat(apk.APK(filename).get_dex())
|
||||
elif ret == 'DEX':
|
||||
return dvm.DalvikVMFormat(open(filename, 'rb').read())
|
||||
elif ret == 'ODEX':
|
||||
return dvm.DalvikOdexVMFormat(open(filename, 'rb').read())
|
||||
return None
|
||||
|
||||
|
||||
class DvMethod():
|
||||
def __init__(self, methanalysis):
|
||||
method = methanalysis.get_method()
|
||||
self.start_block = next(methanalysis.get_basic_blocks().get(), None)
|
||||
self.cls_name = method.get_class_name()
|
||||
self.name = method.get_name()
|
||||
self.lparams = []
|
||||
self.var_to_name = defaultdict()
|
||||
self.writer = None
|
||||
self.graph = None
|
||||
|
||||
access = method.get_access_flags()
|
||||
self.access = [name for flag, name in
|
||||
util.ACCESS_FLAGS_METHODS.iteritems() if flag & access]
|
||||
desc = method.get_descriptor()
|
||||
self.type = desc.split(')')[-1]
|
||||
self.params_type = util.get_params_type(desc)
|
||||
|
||||
self.exceptions = methanalysis.exceptions.exceptions
|
||||
|
||||
code = method.get_code()
|
||||
if code is None:
|
||||
logger.debug('No code : %s %s', self.name, self.cls_name)
|
||||
else:
|
||||
start = code.registers_size - code.ins_size
|
||||
if 'static' not in self.access:
|
||||
self.var_to_name[start] = ThisParam(start, self.name)
|
||||
self.lparams.append(start)
|
||||
start += 1
|
||||
num_param = 0
|
||||
for ptype in self.params_type:
|
||||
param = start + num_param
|
||||
self.lparams.append(param)
|
||||
self.var_to_name[param] = Param(param, ptype)
|
||||
num_param += util.get_type_size(ptype)
|
||||
if not __debug__:
|
||||
from androguard.core import bytecode
|
||||
bytecode.method2png('/tmp/dad/graphs/%s#%s.png' % \
|
||||
(self.cls_name.split('/')[-1][:-1], self.name), methanalysis)
|
||||
|
||||
def process(self):
|
||||
logger.debug('METHOD : %s', self.name)
|
||||
|
||||
# Native methods... no blocks.
|
||||
if self.start_block is None:
|
||||
logger.debug('Native Method.')
|
||||
self.writer = Writer(None, self)
|
||||
self.writer.write_method()
|
||||
return
|
||||
|
||||
graph = construct(self.start_block, self.var_to_name, self.exceptions)
|
||||
self.graph = graph
|
||||
|
||||
if not __debug__:
|
||||
util.create_png(self.cls_name, self.name, graph, '/tmp/dad/blocks')
|
||||
|
||||
#idoms = graph.immediate_dominators()
|
||||
#DF = dominance_frontier(graph, idoms)
|
||||
use_defs, def_uses = build_def_use(graph, self.lparams)
|
||||
#phi_placement(graph, DF, self.var_to_name, use_defs, def_uses)
|
||||
split_variables(graph, self.var_to_name, def_uses, use_defs)
|
||||
# TODO: split_variables should update DU/UD
|
||||
use_defs, def_uses = build_def_use(graph, self.lparams)
|
||||
|
||||
dead_code_elimination(graph, def_uses, use_defs)
|
||||
register_propagation(graph, def_uses, use_defs)
|
||||
|
||||
place_declarations(graph, self.var_to_name, def_uses, use_defs)
|
||||
del def_uses, use_defs
|
||||
# After the DCE pass, some nodes may be empty, so we can simplify the
|
||||
# graph to delete these nodes.
|
||||
# We start by restructuring the graph by splitting the conditional nodes
|
||||
# into a pre-header and a header part.
|
||||
graph.split_if_nodes()
|
||||
# We then simplify the graph by merging multiple statement nodes into
|
||||
# a single statement node when possible. This also deletes empty nodes.
|
||||
|
||||
graph.simplify()
|
||||
graph.reset_rpo()
|
||||
|
||||
identify_structures(graph, graph.immediate_dominators())
|
||||
|
||||
if not __debug__:
|
||||
util.create_png(self.cls_name, self.name, graph,
|
||||
'/tmp/dad/structured')
|
||||
|
||||
self.writer = Writer(graph, self)
|
||||
self.writer.write_method()
|
||||
del graph
|
||||
|
||||
def show_source(self):
|
||||
print self.get_source()
|
||||
|
||||
def get_source(self):
|
||||
if self.writer:
|
||||
return '%s' % self.writer
|
||||
return ''
|
||||
|
||||
def __repr__(self):
|
||||
return 'Method %s' % self.name
|
||||
|
||||
|
||||
class DvClass():
|
||||
def __init__(self, dvclass, vma):
|
||||
name = dvclass.get_name()
|
||||
if name.find('/') > 0:
|
||||
pckg, name = name.rsplit('/', 1)
|
||||
else:
|
||||
pckg, name = '', name
|
||||
self.package = pckg[1:].replace('/', '.')
|
||||
self.name = name[:-1]
|
||||
|
||||
self.vma = vma
|
||||
self.methods = dict((meth.get_method_idx(), meth)
|
||||
for meth in dvclass.get_methods())
|
||||
self.fields = dict((field.get_name(), field)
|
||||
for field in dvclass.get_fields())
|
||||
self.subclasses = {}
|
||||
self.code = []
|
||||
self.inner = False
|
||||
|
||||
access = dvclass.get_access_flags()
|
||||
self.access = [util.ACCESS_FLAGS_CLASSES[flag] for flag in
|
||||
util.ACCESS_FLAGS_CLASSES if flag & access]
|
||||
self.prototype = '%s class %s' % (' '.join(self.access), self.name)
|
||||
|
||||
self.interfaces = dvclass.interfaces
|
||||
self.superclass = dvclass.get_superclassname()
|
||||
|
||||
logger.info('Class : %s', self.name)
|
||||
logger.info('Methods added :')
|
||||
for index, meth in self.methods.iteritems():
|
||||
logger.info('%s (%s, %s)', index, self.name, meth.name)
|
||||
logger.info('')
|
||||
|
||||
def add_subclass(self, innername, dvclass):
|
||||
self.subclasses[innername] = dvclass
|
||||
dvclass.inner = True
|
||||
|
||||
def get_methods(self):
|
||||
return self.methods
|
||||
|
||||
def process_method(self, num):
|
||||
methods = self.methods
|
||||
if num in methods:
|
||||
method = methods[num]
|
||||
if not isinstance(method, DvMethod):
|
||||
method.set_instructions([i for i in method.get_instructions()])
|
||||
meth = methods[num] = DvMethod(self.vma.get_method(method))
|
||||
meth.process()
|
||||
method.set_instructions([])
|
||||
else:
|
||||
method.process()
|
||||
else:
|
||||
logger.error('Method %s not found.', num)
|
||||
|
||||
def process(self):
|
||||
for klass in self.subclasses.values():
|
||||
klass.process()
|
||||
for meth in self.methods:
|
||||
self.process_method(meth)
|
||||
|
||||
def get_source(self):
|
||||
source = []
|
||||
if not self.inner and self.package:
|
||||
source.append('package %s;\n' % self.package)
|
||||
|
||||
if self.superclass is not None:
|
||||
self.superclass = self.superclass[1:-1].replace('/', '.')
|
||||
if self.superclass.split('.')[-1] == 'Object':
|
||||
self.superclass = None
|
||||
if self.superclass is not None:
|
||||
self.prototype += ' extends %s' % self.superclass
|
||||
if self.interfaces is not None:
|
||||
interfaces = self.interfaces[1:-1].split(' ')
|
||||
self.prototype += ' implements %s' % ', '.join(
|
||||
[n[1:-1].replace('/', '.') for n in interfaces])
|
||||
|
||||
source.append('%s {\n' % self.prototype)
|
||||
for field in self.fields.values():
|
||||
field_access_flags = field.get_access_flags()
|
||||
access = [util.ACCESS_FLAGS_FIELDS[flag] for flag in
|
||||
util.ACCESS_FLAGS_FIELDS if flag & field_access_flags]
|
||||
f_type = util.get_type(field.get_descriptor())
|
||||
name = field.get_name()
|
||||
source.append(' %s %s %s;\n' % (' '.join(access), f_type, name))
|
||||
|
||||
for klass in self.subclasses.values():
|
||||
source.append(klass.get_source())
|
||||
|
||||
for _, method in self.methods.iteritems():
|
||||
if isinstance(method, DvMethod):
|
||||
source.append(method.get_source())
|
||||
source.append('}\n')
|
||||
return ''.join(source)
|
||||
|
||||
def show_source(self):
|
||||
print self.get_source()
|
||||
|
||||
def __repr__(self):
|
||||
if not self.subclasses:
|
||||
return 'Class(%s)' % self.name
|
||||
return 'Class(%s) -- Subclasses(%s)' % (self.name, self.subclasses)
|
||||
|
||||
|
||||
class DvMachine():
|
||||
def __init__(self, name):
|
||||
vm = auto_vm(name)
|
||||
if vm is None:
|
||||
raise ValueError('Format not recognised: %s' % name)
|
||||
self.vma = analysis.uVMAnalysis(vm)
|
||||
self.classes = dict((dvclass.get_name(), dvclass)
|
||||
for dvclass in vm.get_classes())
|
||||
#util.merge_inner(self.classes)
|
||||
|
||||
def get_classes(self):
|
||||
return self.classes.keys()
|
||||
|
||||
def get_class(self, class_name):
|
||||
for name, klass in self.classes.iteritems():
|
||||
if class_name in name:
|
||||
if isinstance(klass, DvClass):
|
||||
return klass
|
||||
dvclass = self.classes[name] = DvClass(klass, self.vma)
|
||||
return dvclass
|
||||
|
||||
def process(self):
|
||||
for name, klass in self.classes.iteritems():
|
||||
logger.info('Processing class: %s', name)
|
||||
if isinstance(klass, DvClass):
|
||||
klass.process()
|
||||
else:
|
||||
dvclass = self.classes[name] = DvClass(klass, self.vma)
|
||||
dvclass.process()
|
||||
|
||||
def show_source(self):
|
||||
for klass in self.classes.values():
|
||||
klass.show_source()
|
||||
|
||||
def process_and_show(self):
|
||||
for name, klass in sorted(self.classes.iteritems()):
|
||||
logger.info('Processing class: %s', name)
|
||||
if not isinstance(klass, DvClass):
|
||||
klass = DvClass(klass, self.vma)
|
||||
klass.process()
|
||||
klass.show_source()
|
||||
|
||||
|
||||
logger = logging.getLogger('dad')
|
||||
sys.setrecursionlimit(5000)
|
||||
|
||||
|
||||
def main():
|
||||
# Use logger.setLevel(logging.DEBUG) for debugging output;
|
||||
# comment out the following line to disable logging.
|
||||
logger.setLevel(logging.INFO)
|
||||
console_hdlr = logging.StreamHandler(sys.stdout)
|
||||
console_hdlr.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
|
||||
logger.addHandler(console_hdlr)
|
||||
|
||||
default_file = 'examples/android/TestsAndroguard/bin/TestActivity.apk'
|
||||
if len(sys.argv) > 1:
|
||||
machine = DvMachine(sys.argv[1])
|
||||
else:
|
||||
machine = DvMachine(default_file)
|
||||
|
||||
logger.info('========================')
|
||||
logger.info('Classes:')
|
||||
for class_name in sorted(machine.get_classes()):
|
||||
logger.info(' %s', class_name)
|
||||
logger.info('========================')
|
||||
|
||||
cls_name = raw_input('Choose a class: ')
|
||||
if cls_name == '*':
|
||||
machine.process_and_show()
|
||||
else:
|
||||
cls = machine.get_class(cls_name)
|
||||
if cls is None:
|
||||
logger.error('%s not found.', cls_name)
|
||||
else:
|
||||
logger.info('======================')
|
||||
for method_id, method in cls.get_methods().items():
|
||||
logger.info('%d: %s', method_id, method.name)
|
||||
logger.info('======================')
|
||||
meth = raw_input('Method: ')
|
||||
if meth == '*':
|
||||
logger.info('CLASS = %s', cls)
|
||||
cls.process()
|
||||
else:
|
||||
cls.process_method(int(meth))
|
||||
logger.info('Source:')
|
||||
logger.info('===========================')
|
||||
cls.show_source()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
453
androguard/decompiler/dad/graph.py
Normal file
@ -0,0 +1,453 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from androguard.decompiler.dad.basic_blocks import (build_node_from_block,
|
||||
StatementBlock, CondBlock)
|
||||
from androguard.decompiler.dad.instruction import Variable
|
||||
from androguard.decompiler.dad.util import common_dom
|
||||
|
||||
|
||||
logger = logging.getLogger('dad.graph')
|
||||
|
||||
|
||||
class Graph():
|
||||
def __init__(self):
|
||||
self.entry = None
|
||||
self.exit = None
|
||||
self.nodes = list()
|
||||
self.rpo = []
|
||||
self.edges = defaultdict(list)
|
||||
self.catch_edges = defaultdict(list)
|
||||
self.reverse_edges = defaultdict(list)
|
||||
self.reverse_catch_edges = defaultdict(list)
|
||||
self.loc_to_ins = None
|
||||
self.loc_to_node = None
|
||||
|
||||
def sucs(self, node):
|
||||
return self.edges.get(node, [])
|
||||
|
||||
def all_sucs(self, node):
|
||||
return self.edges.get(node, []) + self.catch_edges.get(node, [])
|
||||
|
||||
def preds(self, node):
|
||||
return [n for n in self.reverse_edges.get(node, [])
|
||||
if not n.in_catch]
|
||||
|
||||
def all_preds(self, node):
|
||||
return (self.reverse_edges.get(node, []) +
|
||||
self.reverse_catch_edges.get(node, []))
|
||||
|
||||
def add_node(self, node):
|
||||
self.nodes.append(node)
|
||||
|
||||
def add_edge(self, e1, e2):
|
||||
lsucs = self.edges[e1]
|
||||
if e2 not in lsucs:
|
||||
lsucs.append(e2)
|
||||
lpreds = self.reverse_edges[e2]
|
||||
if e1 not in lpreds:
|
||||
lpreds.append(e1)
|
||||
|
||||
def add_catch_edge(self, e1, e2):
|
||||
lsucs = self.catch_edges[e1]
|
||||
if e2 not in lsucs:
|
||||
lsucs.append(e2)
|
||||
lpreds = self.reverse_catch_edges[e2]
|
||||
if e1 not in lpreds:
|
||||
lpreds.append(e1)
|
||||
|
||||
def remove_node(self, node):
|
||||
preds = self.reverse_edges.pop(node, [])
|
||||
for pred in preds:
|
||||
self.edges[pred].remove(node)
|
||||
|
||||
succs = self.edges.pop(node, [])
|
||||
for suc in succs:
|
||||
self.reverse_edges[suc].remove(node)
|
||||
|
||||
exc_preds = self.reverse_catch_edges.pop(node, [])
|
||||
for pred in exc_preds:
|
||||
self.catch_edges[pred].remove(node)
|
||||
|
||||
exc_succs = self.catch_edges.pop(node, [])
|
||||
for suc in exc_succs:
|
||||
self.reverse_catch_edges[suc].remove(node)
|
||||
|
||||
self.nodes.remove(node)
|
||||
if node in self.rpo:
|
||||
self.rpo.remove(node)
|
||||
del node
|
||||
|
||||
def number_ins(self):
|
||||
self.loc_to_ins = {}
|
||||
self.loc_to_node = {}
|
||||
num = 0
|
||||
for node in self.rpo:
|
||||
start_node = num
|
||||
num = node.number_ins(num)
|
||||
end_node = num - 1
|
||||
self.loc_to_ins.update(node.get_loc_with_ins())
|
||||
self.loc_to_node[(start_node, end_node)] = node
|
||||
|
||||
def get_ins_from_loc(self, loc):
|
||||
return self.loc_to_ins.get(loc)
|
||||
|
||||
def get_node_from_loc(self, loc):
|
||||
for (start, end), node in self.loc_to_node.iteritems():
|
||||
if start <= loc <= end:
|
||||
return node
|
||||
|
||||
def remove_ins(self, loc):
|
||||
ins = self.get_ins_from_loc(loc)
|
||||
self.get_node_from_loc(loc).remove_ins(loc, ins)
|
||||
self.loc_to_ins.pop(loc)
|
||||
|
||||
def split_if_nodes(self):
|
||||
'''
|
||||
Split IfNodes into two nodes: the first node is the header node, the
|
||||
second one is composed only of the jump condition.
|
||||
'''
|
||||
node_map = {}
|
||||
to_update = set()
|
||||
for node in self.nodes[:]:
|
||||
if node.type.is_cond:
|
||||
if len(node.get_ins()) > 1:
|
||||
pre_ins = node.get_ins()[:-1]
|
||||
last_ins = node.get_ins()[-1]
|
||||
pre_node = StatementBlock('%s-pre' % node.name, pre_ins)
|
||||
cond_node = CondBlock('%s-cond' % node.name, [last_ins])
|
||||
node_map[node] = pre_node
|
||||
|
||||
pre_node.copy_from(node)
|
||||
cond_node.copy_from(node)
|
||||
for var in node.var_to_declare:
|
||||
pre_node.add_variable_declaration(var)
|
||||
pre_node.type.is_stmt = True
|
||||
cond_node.true = node.true
|
||||
cond_node.false = node.false
|
||||
|
||||
for pred in self.all_preds(node):
|
||||
pred_node = node_map.get(pred, pred)
|
||||
# Verify that the link is not an exception link
|
||||
if node not in self.sucs(pred):
|
||||
self.add_catch_edge(pred_node, pre_node)
|
||||
continue
|
||||
if pred is node:
|
||||
pred_node = cond_node
|
||||
if pred.type.is_cond: # and not (pred is node):
|
||||
if pred.true is node:
|
||||
pred_node.true = pre_node
|
||||
if pred.false is node:
|
||||
pred_node.false = pre_node
|
||||
self.add_edge(pred_node, pre_node)
|
||||
for suc in self.sucs(node):
|
||||
self.add_edge(cond_node, node_map.get(suc, suc))
|
||||
|
||||
# We link all the exceptions to the pre node instead of the
|
||||
# condition node, which should not trigger any of them.
|
||||
for suc in self.catch_edges.get(node, []):
|
||||
self.add_catch_edge(pre_node, node_map.get(suc, suc))
|
||||
|
||||
if node is self.entry:
|
||||
self.entry = pre_node
|
||||
|
||||
self.add_node(pre_node)
|
||||
self.add_node(cond_node)
|
||||
self.add_edge(pre_node, cond_node)
|
||||
pre_node.update_attribute_with(node_map)
|
||||
cond_node.update_attribute_with(node_map)
|
||||
self.remove_node(node)
|
||||
else:
|
||||
to_update.add(node)
|
||||
for node in to_update:
|
||||
node.update_attribute_with(node_map)
|
||||
|
||||
def simplify(self):
|
||||
'''
|
||||
Simplify the CFG by merging/deleting statement nodes when possible:
|
||||
If statement B follows statement A and if B has no other predecessor
|
||||
besides A, then we can merge A and B into a new statement node.
|
||||
We also remove nodes which do nothing except redirecting the control
|
||||
flow (nodes which only contain a goto).
|
||||
'''
|
||||
redo = True
|
||||
while redo:
|
||||
redo = False
|
||||
node_map = {}
|
||||
to_update = set()
|
||||
for node in self.nodes[:]:
|
||||
if node.type.is_stmt and node in self.nodes:
|
||||
sucs = self.all_sucs(node)
|
||||
if len(sucs) == 0 or len(sucs) > 1:
|
||||
continue
|
||||
suc = sucs[0]
|
||||
if len(node.get_ins()) == 0:
|
||||
if any(pred.type.is_switch
|
||||
for pred in self.all_preds(node)):
|
||||
continue
|
||||
suc = self.edges.get(node)[0]
|
||||
if node is suc:
|
||||
continue
|
||||
node_map[node] = suc
|
||||
|
||||
add_edge = self.add_edge
|
||||
if node.in_catch:
|
||||
add_edge = self.add_catch_edge
|
||||
for pred in self.all_preds(node):
|
||||
pred.update_attribute_with(node_map)
|
||||
if node not in self.sucs(pred):
|
||||
self.add_catch_edge(pred, suc)
|
||||
continue
|
||||
self.add_edge(pred, suc)
|
||||
redo = True
|
||||
if node is self.entry:
|
||||
self.entry = suc
|
||||
self.remove_node(node)
|
||||
elif (suc.type.is_stmt and
|
||||
len(self.all_preds(suc)) == 1 and
|
||||
not (suc in self.catch_edges) and
|
||||
not ((node is suc) or (suc is self.entry))):
|
||||
ins_to_merge = suc.get_ins()
|
||||
node.add_ins(ins_to_merge)
|
||||
for var in suc.var_to_declare:
|
||||
node.add_variable_declaration(var)
|
||||
new_suc = self.sucs(suc)[0]
|
||||
if new_suc:
|
||||
self.add_edge(node, new_suc)
|
||||
for exception_suc in self.catch_edges.get(suc, []):
|
||||
self.add_catch_edge(node, exception_suc)
|
||||
redo = True
|
||||
self.remove_node(suc)
|
||||
else:
|
||||
to_update.add(node)
|
||||
for node in to_update:
|
||||
node.update_attribute_with(node_map)
|
||||
|
||||
|
||||
def _traverse(self, node, visit, res):
|
||||
if node in visit:
|
||||
return
|
||||
visit.add(node)
|
||||
for suc in self.all_sucs(node):
|
||||
self._traverse(suc, visit, res)
|
||||
res.insert(0, node)
|
||||
|
||||
def compute_rpo(self):
|
||||
'''
|
||||
Number the nodes in reverse post order.
|
||||
An RPO traversal visits as many predecessors of a node as possible
|
||||
before visiting the node itself.
|
||||
'''
|
||||
visit = set()
|
||||
res = []
|
||||
self._traverse(self.entry, visit, res)
|
||||
for i, n in enumerate(res, 1):
|
||||
n.num = i
|
||||
self.rpo.append(n)
|
||||
|
||||
def reset_rpo(self):
|
||||
self.rpo = []
|
||||
self.compute_rpo()
|
||||
|
||||
def post_order(self, start=None, visited=None, res=None):
|
||||
'''
|
||||
Return the nodes of the graph in post-order, i.e. we visit all the
|
||||
children of a node before visiting the node itself.
|
||||
'''
|
||||
if visited is None:
|
||||
res = []
|
||||
visited = set()
|
||||
start = self.entry
|
||||
visited.add(start)
|
||||
for suc in self.all_sucs(start):
|
||||
if suc not in visited:
|
||||
self.post_order(suc, visited, res)
|
||||
res.append(start)
|
||||
return res
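# --- Illustrative example (hand-written diamond graph) ---
# For A -> B, A -> C, B -> D, C -> D, with successors visited in that
# order, post_order() returns [D, B, C, A] (children before parents),
# and compute_rpo() numbers the nodes in the reverse of that order, so
# the entry A gets num == 1 and D the highest number.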
|
||||
|
||||
def draw(self, name, dname, draw_branches=True):
|
||||
from pydot import Dot, Edge
|
||||
g = Dot()
|
||||
g.set_node_defaults(color='lightgray', style='filled', shape='box',
|
||||
fontname='Courier', fontsize='10')
|
||||
for node in sorted(self.nodes, key=lambda x: x.num):
|
||||
if draw_branches and node.type.is_cond:
|
||||
g.add_edge(Edge(str(node), str(node.true), color='green'))
|
||||
g.add_edge(Edge(str(node), str(node.false), color='red'))
|
||||
else:
|
||||
for suc in self.sucs(node):
|
||||
g.add_edge(Edge(str(node), str(suc), color='blue'))
|
||||
for except_node in self.catch_edges.get(node, []):
|
||||
g.add_edge(Edge(str(node), str(except_node),
|
||||
color='black', style='dashed'))
|
||||
|
||||
g.write_png('%s/%s.png' % (dname, name))
|
||||
|
||||
def immediate_dominators(self):
|
||||
'''
|
||||
Create a mapping of the nodes of a graph with their corresponding
|
||||
immediate dominator
|
||||
'''
|
||||
idom = dict((n, None) for n in self.nodes)
|
||||
for node in self.rpo:
|
||||
if node.in_catch:
|
||||
predecessor = self.all_preds
|
||||
else:
|
||||
predecessor = self.preds
|
||||
for pred in predecessor(node):
|
||||
if pred.num < node.num:
|
||||
idom[node] = common_dom(idom, idom[node], pred)
|
||||
return idom
|
||||
|
||||
def __len__(self):
|
||||
return len(self.nodes)
|
||||
|
||||
def __repr__(self):
|
||||
return str(self.nodes)
|
||||
|
||||
def __iter__(self):
|
||||
for node in self.nodes:
|
||||
yield node
|
||||
|
||||
|
||||
def bfs(start):
|
||||
to_visit = [start]
|
||||
visited = set([start])
|
||||
while to_visit:
|
||||
node = to_visit.pop(0)
|
||||
yield node
|
||||
if node.exception_analysis:
|
||||
for _, _, exception in node.exception_analysis.exceptions:
|
||||
if exception not in visited:
|
||||
to_visit.append(exception)
|
||||
visited.add(exception)
|
||||
for _, _, child in node.childs:
|
||||
if child not in visited:
|
||||
to_visit.append(child)
|
||||
visited.add(child)
|
||||
|
||||
|
||||
class GenInvokeRetName(object):
|
||||
def __init__(self):
|
||||
self.num = 0
|
||||
self.ret = None
|
||||
|
||||
def new(self):
|
||||
self.num += 1
|
||||
self.ret = Variable('tmp%d' % self.num)
|
||||
return self.ret
|
||||
|
||||
def set_to(self, ret):
|
||||
self.ret = ret
|
||||
|
||||
def last(self):
|
||||
return self.ret
|
||||
|
||||
|
||||
def make_node(graph, block, block_to_node, vmap, gen_ret):
|
||||
node = block_to_node.get(block)
|
||||
if node is None:
|
||||
node = build_node_from_block(block, vmap, gen_ret)
|
||||
block_to_node[block] = node
|
||||
if block.exception_analysis:
|
||||
for _type, _, exception_target in block.exception_analysis.exceptions:
|
||||
exception_node = block_to_node.get(exception_target)
|
||||
if exception_node is None:
|
||||
exception_node = build_node_from_block(exception_target,
|
||||
vmap, gen_ret, _type)
|
||||
exception_node.in_catch = True
|
||||
block_to_node[exception_target] = exception_node
|
||||
graph.add_catch_edge(node, exception_node)
|
||||
for _, _, child_block in block.childs:
|
||||
child_node = block_to_node.get(child_block)
|
||||
if child_node is None:
|
||||
child_node = build_node_from_block(child_block, vmap, gen_ret)
|
||||
block_to_node[child_block] = child_node
|
||||
graph.add_edge(node, child_node)
|
||||
if node.type.is_switch:
|
||||
node.add_case(child_node)
|
||||
if node.type.is_cond:
|
||||
if_target = ((block.end / 2) - (block.last_length / 2) +
|
||||
node.off_last_ins)
|
||||
child_addr = child_block.start / 2
|
||||
if if_target == child_addr:
|
||||
node.true = child_node
|
||||
else:
|
||||
node.false = child_node
|
||||
|
||||
# Check that both branches of the if point to something.
|
||||
# It may happen that both branches point to the same node; in this case
|
||||
# the false branch will be None. So we set it to the right node.
|
||||
# TODO: In this situation, we should transform the condition node into
|
||||
# a statement node
|
||||
if node.type.is_cond and node.false is None:
|
||||
node.false = node.true
|
||||
|
||||
return node
|
||||
|
||||
|
||||
def construct(start_block, vmap, exceptions):
|
||||
bfs_blocks = bfs(start_block)
|
||||
|
||||
graph = Graph()
|
||||
gen_ret = GenInvokeRetName()
|
||||
|
||||
# Construction of a mapping of basic blocks into Nodes
|
||||
block_to_node = {}
|
||||
|
||||
exceptions_start_block = []
|
||||
for exception in exceptions:
|
||||
for _, _, block in exception.exceptions:
|
||||
exceptions_start_block.append(block)
|
||||
|
||||
for block in bfs_blocks:
|
||||
node = make_node(graph, block, block_to_node, vmap, gen_ret)
|
||||
graph.add_node(node)
|
||||
|
||||
graph.entry = block_to_node[start_block]
|
||||
del block_to_node, bfs_blocks
|
||||
|
||||
graph.compute_rpo()
|
||||
graph.number_ins()
|
||||
|
||||
for node in graph.rpo:
|
||||
preds = [pred for pred in graph.all_preds(node)
|
||||
if pred.num < node.num]
|
||||
if preds and all(pred.in_catch for pred in preds):
|
||||
node.in_catch = True
|
||||
|
||||
# Create a list of Nodes which are 'return' nodes.
|
||||
# There should be one and only one node of this type
|
||||
# If this is not the case, try to continue anyway by setting the exit node
|
||||
# to the one which has the greatest RPO number (which is not necessarily the right one)
|
||||
lexit_nodes = [node for node in graph if node.type.is_return]
|
||||
|
||||
if len(lexit_nodes) > 1:
|
||||
# Not sure that this case is possible...
|
||||
logger.error('Multiple exit nodes found !')
|
||||
graph.exit = graph.rpo[-1]
|
||||
elif len(lexit_nodes) < 1:
|
||||
# A method can have no return if it has throw statement(s) or if its
|
||||
# body is a while(1) without break/return.
|
||||
logger.debug('No exit node found !')
|
||||
else:
|
||||
graph.exit = lexit_nodes[0]
|
||||
|
||||
return graph
|
||||
|
1338
androguard/decompiler/dad/instruction.py
Normal file
File diff suppressed because it is too large
159
androguard/decompiler/dad/node.py
Normal file
@ -0,0 +1,159 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
class MakeProperties(type):
|
||||
def __init__(cls, name, bases, dct):
|
||||
def _wrap_set(names, name):
|
||||
def fun(self, value):
|
||||
for field in names:
|
||||
self.__dict__[field] = (name == field) and value
|
||||
return fun
|
||||
|
||||
def _wrap_get(name):
|
||||
def fun(self):
|
||||
return self.__dict__[name]
|
||||
return fun
|
||||
|
||||
super(MakeProperties, cls).__init__(name, bases, dct)
|
||||
attrs = []
|
||||
prefixes = ('_get_', '_set_')
|
||||
for key in dct.keys():
|
||||
for prefix in prefixes:
|
||||
if key.startswith(prefix):
|
||||
attrs.append(key[4:])
|
||||
delattr(cls, key)
|
||||
for attr in attrs:
|
||||
setattr(cls, attr[1:],
|
||||
property(_wrap_get(attr), _wrap_set(attrs, attr)))
|
||||
cls._attrs = attrs
|
||||
|
||||
def __call__(cls, *args, **kwds):
|
||||
obj = super(MakeProperties, cls).__call__(*args, **kwds)
|
||||
for attr in cls._attrs:
|
||||
obj.__dict__[attr] = False
|
||||
return obj
|
||||
|
||||
|
||||
class LoopType(object):
|
||||
__metaclass__ = MakeProperties
|
||||
_set_is_pretest = _set_is_posttest = _set_is_endless = None
|
||||
_get_is_pretest = _get_is_posttest = _get_is_endless = None
|
||||
|
||||
def copy(self):
|
||||
res = LoopType()
|
||||
for key, value in self.__dict__.iteritems():
|
||||
setattr(res, key, value)
|
||||
return res
|
||||
|
||||
|
||||
class NodeType(object):
|
||||
__metaclass__ = MakeProperties
|
||||
_set_is_cond = _set_is_switch = _set_is_stmt = None
|
||||
_get_is_cond = _get_is_switch = _get_is_stmt = None
|
||||
_set_is_return = _set_is_throw = None
|
||||
_get_is_return = _get_is_throw = None
|
||||
|
||||
def copy(self):
|
||||
res = NodeType()
|
||||
for key, value in self.__dict__.iteritems():
|
||||
setattr(res, key, value)
|
||||
return res
|
||||
|
||||
|
||||
class Node(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.num = 0
|
||||
self.follow = {'if': None, 'loop': None, 'switch': None}
|
||||
self.looptype = LoopType()
|
||||
self.type = NodeType()
|
||||
self.in_catch = False
|
||||
self.interval = None
|
||||
self.startloop = False
|
||||
self.latch = None
|
||||
self.loop_nodes = []
|
||||
|
||||
def copy_from(self, node):
|
||||
self.num = node.num
|
||||
self.looptype = node.looptype.copy()
|
||||
self.interval = node.interval
|
||||
self.startloop = node.startloop
|
||||
self.type = node.type.copy()
|
||||
self.follow = node.follow.copy()
|
||||
self.latch = node.latch
|
||||
self.loop_nodes = node.loop_nodes
|
||||
self.in_catch = node.in_catch
|
||||
|
||||
def update_attribute_with(self, n_map):
|
||||
self.latch = n_map.get(self.latch, self.latch)
|
||||
for follow_type, value in self.follow.iteritems():
|
||||
self.follow[follow_type] = n_map.get(value, value)
|
||||
self.loop_nodes = list(set(n_map.get(n, n) for n in self.loop_nodes))
|
||||
|
||||
def get_head(self):
|
||||
return self
|
||||
|
||||
def get_end(self):
|
||||
return self
|
||||
|
||||
def __repr__(self):
|
||||
return '%s' % self
|
||||
|
||||
|
||||
class Interval(object):
|
||||
def __init__(self, head):
|
||||
self.name = 'Interval-%s' % head.name
|
||||
self.content = set([head])
|
||||
self.end = None
|
||||
self.head = head
|
||||
self.in_catch = head.in_catch
|
||||
head.interval = self
|
||||
|
||||
def __contains__(self, item):
|
||||
# If the interval contains nodes, check if the item is one of them
|
||||
if item in self.content:
|
||||
return True
|
||||
# If the interval contains intervals, we need to check them
|
||||
return any(item in node for node in self.content
|
||||
if isinstance(node, Interval))
|
||||
|
||||
def add_node(self, node):
|
||||
if node in self.content:
|
||||
return False
|
||||
self.content.add(node)
|
||||
node.interval = self
|
||||
return True
|
||||
|
||||
def compute_end(self, graph):
|
||||
for node in self.content:
|
||||
for suc in graph.sucs(node):
|
||||
if suc not in self.content:
|
||||
self.end = node
|
||||
|
||||
def get_end(self):
|
||||
return self.end.get_end()
|
||||
|
||||
def get_head(self):
|
||||
return self.head.get_head()
|
||||
|
||||
def __len__(self):
|
||||
return len(self.content)
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%s)' % (self.name, self.content)
|
||||
|
1983
androguard/decompiler/dad/opcode_ins.py
Normal file
File diff suppressed because it is too large
190
androguard/decompiler/dad/util.py
Normal file
@ -0,0 +1,190 @@
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

logger = logging.getLogger('dad.util')

TYPE_DESCRIPTOR = {
    'V': 'void',
    'Z': 'boolean',
    'B': 'byte',
    'S': 'short',
    'C': 'char',
    'I': 'int',
    'J': 'long',
    'F': 'float',
    'D': 'double',
    'STR': 'String',
    'StringBuilder': 'String',
}

ACCESS_FLAGS_CLASSES = {
    0x1: 'public',
    0x2: 'private',
    0x4: 'protected',
    0x8: 'static',
    0x10: 'final',
    0x200: 'interface',
    0x400: 'abstract',
    0x1000: 'synthetic',
    0x2000: 'annotation',
    0x4000: 'enum',
}

ACCESS_FLAGS_FIELDS = {
    0x1: 'public',
    0x2: 'private',
    0x4: 'protected',
    0x8: 'static',
    0x10: 'final',
    0x40: 'volatile',
    0x80: 'transient',
    0x1000: 'synthetic',
    0x4000: 'enum',
}

ACCESS_FLAGS_METHODS = {
    0x1: 'public',
    0x2: 'private',
    0x4: 'protected',
    0x8: 'static',
    0x10: 'final',
    0x20: 'synchronized',
    0x40: 'bridge',
    0x80: 'varargs',
    0x100: 'native',
    0x400: 'abstract',
    0x800: 'strict',
    0x1000: 'synthetic',
    0x10000: 'constructor',
    0x20000: 'synchronized',
}

TYPE_LEN = {
    'J': 2,
    'D': 2,
}


def build_path(graph, node1, node2, path=None):
    '''
    Build the path from node1 to node2.
    The path is composed of all the nodes between node1 and node2,
    node1 excluded. Although if there is a loop starting from node1, it will be
    included in the path.
    '''
    if path is None:
        path = []
    if node1 is node2:
        return path
    path.append(node2)
    for pred in graph.all_preds(node2):
        if pred in path:
            continue
        build_path(graph, node1, pred, path)
    return path


def common_dom(idom, cur, pred):
    if not (cur and pred):
        return cur or pred
    while cur is not pred:
        while cur.num < pred.num:
            pred = idom[pred]
        while cur.num > pred.num:
            cur = idom[cur]
    return cur


def merge_inner(clsdict):
    '''
    Merge the inner class(es) of a class :
    e.g class A { ... } class A$foo{ ... } class A$bar{ ... }
        ==> class A { class foo{...} class bar{...} ... }
    '''
    samelist = False
    done = {}
    while not samelist:
        samelist = True
        classlist = clsdict.keys()
        for classname in classlist:
            parts_name = classname.rsplit('$', 1)
            if len(parts_name) > 1:
                mainclass, innerclass = parts_name
                innerclass = innerclass[:-1]  # remove ';' of the name
                mainclass += ';'
                if mainclass in clsdict:
                    clsdict[mainclass].add_subclass(innerclass,
                                                    clsdict[classname])
                    clsdict[classname].name = innerclass
                    done[classname] = clsdict[classname]
                    del clsdict[classname]
                    samelist = False
                elif mainclass in done:
                    cls = done[mainclass]
                    cls.add_subclass(innerclass, clsdict[classname])
                    clsdict[classname].name = innerclass
                    done[classname] = done[mainclass]
                    del clsdict[classname]
                    samelist = False


def get_type_size(param):
    '''
    Return the number of register needed by the type @param
    '''
    return TYPE_LEN.get(param, 1)


def get_type(atype, size=None):
    '''
    Retrieve the java type of a descriptor (e.g : I)
    '''
    res = TYPE_DESCRIPTOR.get(atype)
    if res is None:
        if atype[0] == 'L':
            if atype.startswith('Ljava/lang'):
                res = atype[1:-1].lstrip('java/lang/').replace('/', '.')
            else:
                res = atype[1:-1].replace('/', '.')
        elif atype[0] == '[':
            if size is None:
                res = '%s[]' % get_type(atype[1:])
            else:
                res = '%s[%s]' % (get_type(atype[1:]), size)
        else:
            res = atype
            logger.debug('Unknown descriptor: "%s".', atype)
    return res


def get_params_type(descriptor):
    '''
    Return the parameters type of a descriptor (e.g (IC)V)
    '''
    params = descriptor.split(')')[0][1:].split()
    if params:
        return [param for param in params]
    return []


def create_png(cls_name, meth_name, graph, dir_name='graphs2'):
    m_name = ''.join(x for x in meth_name if x.isalnum())
    name = ''.join((cls_name.split('/')[-1][:-1], '#', m_name))
    graph.draw(name, dir_name)
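# Usage sketch (added; not in the original file): the descriptor helpers above
# in action. Androguard passes method parameter lists as a space-separated
# string inside the parentheses, which is what get_params_type() splits on;
# that convention is assumed here.
from androguard.decompiler.dad.util import (get_type, get_type_size,
                                            get_params_type)

print get_type('I')                          # int
print get_type('[Ljava/lang/String;')        # String[]
print get_type_size('J')                     # 2 (a long uses a register pair)
print get_params_type('(I D Lfoo/Bar;)V')    # ['I', 'D', 'Lfoo/Bar;']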
559
androguard/decompiler/dad/writer.py
Normal file
@ -0,0 +1,559 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from androguard.decompiler.dad.util import get_type, ACCESS_FLAGS_METHODS
|
||||
from androguard.decompiler.dad.opcode_ins import Op
|
||||
from androguard.decompiler.dad.instruction import (Constant, ThisParam,
|
||||
BinaryExpression,
|
||||
BinaryCompExpression)
|
||||
|
||||
|
||||
logger = logging.getLogger('dad.writer')
|
||||
|
||||
|
||||
class Writer(object):
|
||||
def __init__(self, graph, method):
|
||||
self.graph = graph
|
||||
self.method = method
|
||||
self.visited_nodes = set()
|
||||
self.ind = 4
|
||||
self.buffer = []
|
||||
self.loop_follow = [None]
|
||||
self.if_follow = [None]
|
||||
self.switch_follow = [None]
|
||||
self.latch_node = [None]
|
||||
self.try_follow = [None]
|
||||
self.next_case = None
|
||||
self.skip = False
|
||||
self.need_break = True
|
||||
|
||||
def __str__(self):
|
||||
return ''.join(self.buffer)
|
||||
|
||||
def inc_ind(self, i=1):
|
||||
self.ind += (4 * i)
|
||||
|
||||
def dec_ind(self, i=1):
|
||||
self.ind -= (4 * i)
|
||||
|
||||
def space(self):
|
||||
if self.skip:
|
||||
self.skip = False
|
||||
return ''
|
||||
return ' ' * self.ind
|
||||
|
||||
def write_ind(self):
|
||||
if self.skip:
|
||||
self.skip = False
|
||||
else:
|
||||
self.write(self.space())
|
||||
|
||||
def write(self, s):
|
||||
self.buffer.append(s)
|
||||
|
||||
def end_ins(self):
|
||||
self.write(';\n')
|
||||
|
||||
def write_ind_visit_end(self, lhs, s, rhs=None):
|
||||
self.write_ind()
|
||||
lhs.visit(self)
|
||||
self.write(s)
|
||||
if rhs is not None:
|
||||
rhs.visit(self)
|
||||
self.end_ins()
|
||||
|
||||
def write_inplace_if_possible(self, lhs, rhs):
|
||||
if isinstance(rhs, BinaryExpression) and lhs == rhs.var_map[rhs.arg1]:
|
||||
exp_rhs = rhs.var_map[rhs.arg2]
|
||||
if rhs.op in '+-' and isinstance(exp_rhs, Constant) and\
|
||||
exp_rhs.get_int_value() == 1:
|
||||
return self.write_ind_visit_end(lhs, rhs.op * 2)
|
||||
return self.write_ind_visit_end(lhs, ' %s= ' % rhs.op, exp_rhs)
|
||||
return self.write_ind_visit_end(lhs, ' = ', rhs)
|
||||
|
||||
def visit_ins(self, ins):
|
||||
ins.visit(self)
|
||||
|
||||
def write_method(self):
|
||||
acc = []
|
||||
access = self.method.access
|
||||
self.constructor = False
|
||||
for modifier in access:
|
||||
if modifier == 'constructor':
|
||||
self.constructor = True
|
||||
continue
|
||||
acc.append(modifier)
|
||||
if self.constructor:
|
||||
name = get_type(self.method.cls_name).split('.')[-1]
|
||||
proto = '%s %s' % (' '.join(acc), name)
|
||||
else:
|
||||
name = self.method.name
|
||||
proto = '%s %s %s' % (
|
||||
' '.join(acc), get_type(self.method.type), name)
|
||||
self.write('\n%s%s' % (self.space(), proto))
|
||||
params = self.method.lparams
|
||||
if 'static' not in access:
|
||||
params = params[1:]
|
||||
proto = ''
|
||||
if self.method.params_type:
|
||||
proto = ', '.join(['%s p%s' % (get_type(p_type), param) for
|
||||
p_type, param in zip(self.method.params_type, params)])
|
||||
self.write('(%s)' % proto)
|
||||
if self.graph is None:
|
||||
return self.write(';\n')
|
||||
self.write('\n%s{\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.visit_node(self.graph.entry)
|
||||
self.dec_ind()
|
||||
self.write('%s}\n' % self.space())
|
||||
|
||||
def visit_node(self, node):
|
||||
if node in (self.if_follow[-1], self.switch_follow[-1],
|
||||
self.loop_follow[-1], self.latch_node[-1],
|
||||
self.try_follow[-1]):
|
||||
return
|
||||
if not node.type.is_return and node in self.visited_nodes:
|
||||
return
|
||||
self.visited_nodes.add(node)
|
||||
for var in node.var_to_declare:
|
||||
var.visit_decl(self)
|
||||
var.declared = True
|
||||
node.visit(self)
|
||||
|
||||
def visit_loop_node(self, loop):
|
||||
follow = loop.follow['loop']
|
||||
if follow is None and not loop.looptype.is_endless:
|
||||
logger.error('Loop has no follow !')
|
||||
if loop.looptype.is_pretest:
|
||||
if loop.true is follow:
|
||||
loop.neg()
|
||||
loop.true, loop.false = loop.false, loop.true
|
||||
self.write('%swhile (' % self.space())
|
||||
loop.visit_cond(self)
|
||||
self.write(') {\n')
|
||||
elif loop.looptype.is_posttest:
|
||||
self.write('%sdo {\n' % self.space())
|
||||
self.latch_node.append(loop.latch)
|
||||
elif loop.looptype.is_endless:
|
||||
self.write('%swhile(true) {\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.loop_follow.append(follow)
|
||||
if loop.looptype.is_pretest:
|
||||
self.visit_node(loop.true)
|
||||
else:
|
||||
self.visit_node(loop.cond)
|
||||
self.loop_follow.pop()
|
||||
self.dec_ind()
|
||||
if loop.looptype.is_pretest:
|
||||
self.write('%s}\n' % self.space())
|
||||
elif loop.looptype.is_posttest:
|
||||
self.latch_node.pop()
|
||||
self.write('%s} while(' % self.space())
|
||||
loop.latch.visit_cond(self)
|
||||
self.write(');\n')
|
||||
else:
|
||||
self.inc_ind()
|
||||
self.visit_node(loop.latch)
|
||||
self.dec_ind()
|
||||
self.write('%s}\n' % self.space())
|
||||
if follow is not None:
|
||||
self.visit_node(follow)
|
||||
|
||||
def visit_cond_node(self, cond):
|
||||
follow = cond.follow['if']
|
||||
if cond.false is cond.true:
|
||||
self.write('%s// Both branches of the conditions point to the same'
|
||||
' code.\n' % self.space())
|
||||
self.write('%s// if (' % self.space())
|
||||
cond.visit_cond(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
self.visit_node(cond.true)
|
||||
self.dec_ind()
|
||||
self.write('%s// }\n' % self.space())
|
||||
return
|
||||
if cond.false is self.loop_follow[-1]:
|
||||
cond.neg()
|
||||
cond.true, cond.false = cond.false, cond.true
|
||||
if self.loop_follow[-1] in (cond.true, cond.false):
|
||||
self.write('%sif (' % self.space())
|
||||
cond.visit_cond(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
self.write('%sbreak;\n' % self.space())
|
||||
self.dec_ind()
|
||||
self.write('%s}\n' % self.space())
|
||||
self.visit_node(cond.false)
|
||||
elif follow is not None:
|
||||
if cond.true in (follow, self.next_case) or\
|
||||
cond.num > cond.true.num:
|
||||
# or cond.true.num > cond.false.num:
|
||||
cond.neg()
|
||||
cond.true, cond.false = cond.false, cond.true
|
||||
self.if_follow.append(follow)
|
||||
if not cond.true in self.visited_nodes:
|
||||
self.write('%sif (' % self.space())
|
||||
cond.visit_cond(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
self.visit_node(cond.true)
|
||||
self.dec_ind()
|
||||
is_else = not (follow in (cond.true, cond.false))
|
||||
if is_else and not cond.false in self.visited_nodes:
|
||||
self.write('%s} else {\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.visit_node(cond.false)
|
||||
self.dec_ind()
|
||||
self.if_follow.pop()
|
||||
self.write('%s}\n' % self.space())
|
||||
self.visit_node(follow)
|
||||
else:
|
||||
self.write('%sif (' % self.space())
|
||||
cond.visit_cond(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
self.visit_node(cond.true)
|
||||
self.dec_ind()
|
||||
self.write('%s} else {\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.visit_node(cond.false)
|
||||
self.dec_ind()
|
||||
self.write('%s}\n' % self.space())
|
||||
|
||||
def visit_short_circuit_condition(self, nnot, aand, cond1, cond2):
|
||||
if nnot:
|
||||
cond1.neg()
|
||||
self.write('(')
|
||||
cond1.visit_cond(self)
|
||||
self.write(') %s (' % ['||', '&&'][aand])
|
||||
cond2.visit_cond(self)
|
||||
self.write(')')
|
||||
|
||||
def visit_switch_node(self, switch):
|
||||
lins = switch.get_ins()
|
||||
for ins in lins[:-1]:
|
||||
self.visit_ins(ins)
|
||||
switch_ins = switch.get_ins()[-1]
|
||||
self.write('%sswitch (' % self.space())
|
||||
self.visit_ins(switch_ins)
|
||||
self.write(') {\n')
|
||||
follow = switch.follow['switch']
|
||||
cases = switch.cases
|
||||
self.switch_follow.append(follow)
|
||||
default = switch.default
|
||||
for i, node in enumerate(cases):
|
||||
if node in self.visited_nodes:
|
||||
continue
|
||||
self.inc_ind()
|
||||
for case in switch.node_to_case[node]:
|
||||
self.write('%scase %d:\n' % (self.space(), case))
|
||||
if i + 1 < len(cases):
|
||||
self.next_case = cases[i + 1]
|
||||
else:
|
||||
self.next_case = None
|
||||
if node is default:
|
||||
self.write('%sdefault:\n' % self.space())
|
||||
default = None
|
||||
self.inc_ind()
|
||||
self.visit_node(node)
|
||||
if self.need_break:
|
||||
self.write('%sbreak;\n' % self.space())
|
||||
else:
|
||||
self.need_break = True
|
||||
self.dec_ind(2)
|
||||
if default not in (None, follow):
|
||||
self.inc_ind()
|
||||
self.write('%sdefault:\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.visit_node(default)
|
||||
self.dec_ind(2)
|
||||
self.write('%s}\n' % self.space())
|
||||
self.switch_follow.pop()
|
||||
self.visit_node(follow)
|
||||
|
||||
def visit_statement_node(self, stmt):
|
||||
sucs = self.graph.sucs(stmt)
|
||||
for ins in stmt.get_ins():
|
||||
self.visit_ins(ins)
|
||||
if len(sucs) == 1:
|
||||
if sucs[0] is self.loop_follow[-1]:
|
||||
self.write('%sbreak;\n' % self.space())
|
||||
elif sucs[0] is self.next_case:
|
||||
self.need_break = False
|
||||
else:
|
||||
self.visit_node(sucs[0])
|
||||
|
||||
def visit_try_node(self, try_node):
|
||||
self.write('%stry {\n' % self.space())
|
||||
self.inc_ind()
|
||||
self.try_follow.append(try_node.follow)
|
||||
self.visit_node(try_node.try_start)
|
||||
self.dec_ind()
|
||||
self.write('%s}' % self.space())
|
||||
for catch in try_node.catch:
|
||||
self.visit_node(catch)
|
||||
self.write('\n')
|
||||
self.visit_node(self.try_follow.pop())
|
||||
|
||||
|
||||
def visit_catch_node(self, catch_node):
|
||||
self.write(' catch (')
|
||||
catch_node.visit_exception(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
self.visit_node(catch_node.catch_start)
|
||||
self.dec_ind()
|
||||
self.write('%s}' % self.space())
|
||||
|
||||
def visit_return_node(self, ret):
|
||||
self.need_break = False
|
||||
for ins in ret.get_ins():
|
||||
self.visit_ins(ins)
|
||||
|
||||
def visit_throw_node(self, throw):
|
||||
for ins in throw.get_ins():
|
||||
self.visit_ins(ins)
|
||||
|
||||
def visit_decl(self, var):
|
||||
if not var.declared:
|
||||
var_type = var.get_type() or 'unknownType'
|
||||
self.write('%s%s v%s' % (
|
||||
self.space(), get_type(var_type), var.value()))
|
||||
self.end_ins()
|
||||
|
||||
def visit_constant(self, cst):
|
||||
if isinstance(cst, str) or isinstance(cst, unicode):
|
||||
return self.write(string(cst))
|
||||
self.write('%r' % cst)
|
||||
|
||||
def visit_base_class(self, cls):
|
||||
self.write(cls)
|
||||
|
||||
def visit_variable(self, var):
|
||||
if not var.declared:
|
||||
var_type = var.get_type() or 'unknownType'
|
||||
self.write('%s ' % get_type(var_type))
|
||||
var.declared = True
|
||||
self.write('v%s' % var.value())
|
||||
|
||||
def visit_param(self, param):
|
||||
self.write('p%s' % param)
|
||||
|
||||
def visit_this(self):
|
||||
self.write('this')
|
||||
|
||||
def visit_assign(self, lhs, rhs):
|
||||
if lhs is not None:
|
||||
return self.write_inplace_if_possible(lhs, rhs)
|
||||
self.write_ind()
|
||||
rhs.visit(self)
|
||||
if not self.skip:
|
||||
self.end_ins()
|
||||
|
||||
def visit_move_result(self, lhs, rhs):
|
||||
self.write_ind_visit_end(lhs, ' = ', rhs)
|
||||
|
||||
def visit_move(self, lhs, rhs):
|
||||
if lhs is not rhs:
|
||||
self.write_inplace_if_possible(lhs, rhs)
|
||||
|
||||
def visit_astore(self, array, index, rhs):
|
||||
self.write_ind()
|
||||
array.visit(self)
|
||||
self.write('[')
|
||||
if isinstance(index, Constant):
|
||||
index.visit(self, 'I')
|
||||
else:
|
||||
index.visit(self)
|
||||
self.write('] = ')
|
||||
rhs.visit(self)
|
||||
self.end_ins()
|
||||
|
||||
def visit_put_static(self, cls, name, rhs):
|
||||
self.write_ind()
|
||||
self.write('%s.%s = ' % (cls, name))
|
||||
rhs.visit(self)
|
||||
self.end_ins()
|
||||
|
||||
def visit_put_instance(self, lhs, name, rhs):
|
||||
self.write_ind_visit_end(lhs, '.%s = ' % name, rhs)
|
||||
|
||||
def visit_new(self, atype):
|
||||
self.write('new %s' % get_type(atype))
|
||||
|
||||
def visit_invoke(self, name, base, ptype, rtype, args):
|
||||
if isinstance(base, ThisParam):
|
||||
if name == '<init>' and self.constructor and len(args) == 0:
|
||||
self.skip = True
|
||||
return
|
||||
base.visit(self)
|
||||
if name != '<init>':
|
||||
self.write('.%s' % name)
|
||||
self.write('(')
|
||||
comma = False
|
||||
for arg in args:
|
||||
if comma:
|
||||
self.write(', ')
|
||||
comma = True
|
||||
arg.visit(self)
|
||||
self.write(')')
|
||||
|
||||
def visit_return_void(self):
|
||||
self.write_ind()
|
||||
self.write('return')
|
||||
self.end_ins()
|
||||
|
||||
def visit_return(self, arg):
|
||||
self.write_ind()
|
||||
self.write('return ')
|
||||
arg.visit(self)
|
||||
self.end_ins()
|
||||
|
||||
def visit_nop(self):
|
||||
pass
|
||||
|
||||
def visit_switch(self, arg):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_check_cast(self, arg, atype):
|
||||
self.write('(checkcast)(')
|
||||
arg.visit(self)
|
||||
self.write(', %s)' % atype)
|
||||
|
||||
def visit_aload(self, array, index):
|
||||
array.visit(self)
|
||||
self.write('[')
|
||||
index.visit(self)
|
||||
self.write(']')
|
||||
|
||||
def visit_alength(self, array):
|
||||
array.visit(self)
|
||||
self.write('.length')
|
||||
|
||||
def visit_new_array(self, atype, size):
|
||||
self.write('new %s[' % get_type(atype[1:]))
|
||||
size.visit(self)
|
||||
self.write(']')
|
||||
|
||||
def visit_filled_new_array(self, atype, size, args):
|
||||
self.write('new %s {' % get_type(atype))
|
||||
for idx, arg in enumerate(args):
|
||||
arg.visit(self)
|
||||
if idx + 1 < len(args):
|
||||
self.write(', ')
|
||||
self.write('})')
|
||||
|
||||
def visit_fill_array(self, array, value):
|
||||
self.write_ind()
|
||||
array.visit(self)
|
||||
self.write(' = {')
|
||||
data = value.get_data()
|
||||
self.write(', '.join(['%d' % ord(c) for c in data[:-1]]))
|
||||
self.write('}')
|
||||
self.end_ins()
|
||||
|
||||
def visit_move_exception(self, var):
|
||||
var.declared = True
|
||||
var_type = var.get_type() or 'unknownType'
|
||||
self.write('%s v%s' % (get_type(var_type), var.value()))
|
||||
|
||||
def visit_monitor_enter(self, ref):
|
||||
self.write_ind()
|
||||
self.write('synchronized(')
|
||||
ref.visit(self)
|
||||
self.write(') {\n')
|
||||
self.inc_ind()
|
||||
|
||||
def visit_monitor_exit(self, ref):
|
||||
self.dec_ind()
|
||||
self.write_ind()
|
||||
self.write('}\n')
|
||||
|
||||
def visit_throw(self, ref):
|
||||
self.write_ind()
|
||||
self.write('throw ')
|
||||
ref.visit(self)
|
||||
self.end_ins()
|
||||
|
||||
def visit_binary_expression(self, op, arg1, arg2):
|
||||
self.write('(')
|
||||
arg1.visit(self)
|
||||
self.write(' %s ' % op)
|
||||
arg2.visit(self)
|
||||
self.write(')')
|
||||
|
||||
def visit_unary_expression(self, op, arg):
|
||||
self.write('(%s ' % op)
|
||||
arg.visit(self)
|
||||
self.write(')')
|
||||
|
||||
def visit_cast(self, op, arg):
|
||||
self.write('(%s ' % op)
|
||||
arg.visit(self)
|
||||
self.write(')')
|
||||
|
||||
def visit_cond_expression(self, op, arg1, arg2):
|
||||
arg1.visit(self)
|
||||
self.write(' %s ' % op)
|
||||
arg2.visit(self)
|
||||
|
||||
def visit_condz_expression(self, op, arg):
|
||||
if isinstance(arg, BinaryCompExpression):
|
||||
arg.op = op
|
||||
return arg.visit(self)
|
||||
atype = arg.get_type()
|
||||
if atype == 'Z':
|
||||
if op is Op.EQUAL:
|
||||
self.write('!')
|
||||
arg.visit(self)
|
||||
else:
|
||||
arg.visit(self)
|
||||
if atype in 'VBSCIJFD':
|
||||
self.write(' %s 0' % op)
|
||||
else:
|
||||
self.write(' %s null' % op)
|
||||
|
||||
def visit_get_instance(self, arg, name):
|
||||
arg.visit(self)
|
||||
self.write('.%s' % name)
|
||||
|
||||
def visit_get_static(self, cls, name):
|
||||
self.write('%s.%s' % (cls, name))
|
||||
|
||||
|
||||
def string(s):
|
||||
ret = ['"']
|
||||
for c in s:
|
||||
if c >= ' ' and c < '\x7f':
|
||||
if c == "'" or c == '"' or c == '\\':
|
||||
ret.append('\\')
|
||||
ret.append(c)
|
||||
continue
|
||||
elif c <= '\x7f':
|
||||
if c in ('\r', '\n', '\t'):
|
||||
ret.append(c.encode('unicode-escape'))
|
||||
continue
|
||||
i = ord(c)
|
||||
ret.append('\\u')
|
||||
ret.append('%x' % (i >> 12))
|
||||
ret.append('%x' % ((i >> 8) & 0x0f))
|
||||
ret.append('%x' % ((i >> 4) & 0x0f))
|
||||
ret.append('%x' % (i & 0x0f))
|
||||
ret.append('"')
|
||||
return ''.join(ret)
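# Usage sketch (added; not in the original file): string() above turns a raw
# Python string into a quoted Java string literal, escaping quotes, control
# characters and non-ASCII code points. The import path mirrors the file
# header of this diff.
from androguard.decompiler.dad.writer import string

print string('say "hi"\tto DAD')   # -> "say \"hi\"\tto DAD" (tab escaped)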
|
||||
|
504
androguard/decompiler/decompiler.py
Normal file
@ -0,0 +1,504 @@
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2013, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS-IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
from androguard.core.androconf import rrmdir
|
||||
from androguard.decompiler.dad import decompile
|
||||
|
||||
PYGMENTS = True
|
||||
try:
|
||||
from pygments.filter import Filter
|
||||
from pygments import highlight
|
||||
from pygments.lexers import get_lexer_by_name
|
||||
from pygments.formatters import TerminalFormatter
|
||||
from pygments.token import Token
|
||||
except ImportError:
|
||||
PYGMENTS = False
|
||||
class Filter:
|
||||
pass
|
||||
|
||||
|
||||
class Dex2Jar:
|
||||
def __init__(self, vm, path_dex2jar="./decompiler/dex2jar/", bin_dex2jar="dex2jar.sh", tmp_dir="/tmp/"):
|
||||
pathtmp = tmp_dir
|
||||
if not os.path.exists(pathtmp):
|
||||
os.makedirs(pathtmp)
|
||||
|
||||
fd, fdname = tempfile.mkstemp(dir=pathtmp)
|
||||
fd = os.fdopen(fd, "w+b")
|
||||
fd.write(vm.get_buff())
|
||||
fd.flush()
|
||||
fd.close()
|
||||
|
||||
compile = Popen([path_dex2jar + bin_dex2jar, fdname], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname)
|
||||
|
||||
self.jarfile = fdname + "_dex2jar.jar"
|
||||
|
||||
def get_jar(self):
|
||||
return self.jarfile
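# Usage sketch (added; not in the original file): converting a loaded DEX into
# a jar through the Dex2Jar wrapper above. This assumes the external dex2jar
# scripts are installed under the given path and that "classes.dex" exists.
from androguard.core.bytecodes.dvm import DalvikVMFormat

vm = DalvikVMFormat(open("classes.dex", "rb").read())
print Dex2Jar(vm, path_dex2jar="./decompiler/dex2jar/").get_jar()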
|
||||
|
||||
|
||||
class DecompilerDex2Jad:
|
||||
def __init__(self, vm, path_dex2jar="./decompiler/dex2jar/", bin_dex2jar="dex2jar.sh", path_jad="./decompiler/jad/", bin_jad="jad", tmp_dir="/tmp/"):
|
||||
self.classes = {}
|
||||
self.classes_failed = []
|
||||
|
||||
pathtmp = tmp_dir
|
||||
if not os.path.exists(pathtmp):
|
||||
os.makedirs(pathtmp)
|
||||
|
||||
fd, fdname = tempfile.mkstemp(dir=pathtmp)
|
||||
fd = os.fdopen(fd, "w+b")
|
||||
fd.write(vm.get_buff())
|
||||
fd.flush()
|
||||
fd.close()
|
||||
|
||||
compile = Popen([path_dex2jar + bin_dex2jar, fdname], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname)
|
||||
|
||||
pathclasses = fdname + "dex2jar/"
|
||||
compile = Popen(["unzip", fdname + "_dex2jar.jar", "-d", pathclasses], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname + "_dex2jar.jar")
|
||||
|
||||
for root, dirs, files in os.walk(pathclasses, followlinks=True):
|
||||
if files != []:
|
||||
for f in files:
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/":
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
compile = Popen([path_jad + bin_jad, "-o", "-d", root, real_filename], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
|
||||
for i in vm.get_classes():
|
||||
fname = pathclasses + "/" + i.get_name()[1:-1] + ".jad"
|
||||
if os.path.isfile(fname) == True:
|
||||
fd = open(fname, "r")
|
||||
self.classes[i.get_name()] = fd.read()
|
||||
fd.close()
|
||||
else:
|
||||
self.classes_failed.append(i.get_name())
|
||||
|
||||
rrmdir(pathclasses)
|
||||
|
||||
def get_source_method(self, method):
|
||||
class_name = method.get_class_name()
|
||||
method_name = method.get_name()
|
||||
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
lexer.add_filter(MethodFilter(method_name=method_name))
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_source(self, method):
|
||||
print self.get_source_method(method)
|
||||
|
||||
def get_source_class(self, _class):
|
||||
return self.classes[_class.get_name()]
|
||||
|
||||
def get_all(self, class_name):
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_all(self, _class):
|
||||
print self.get_all(_class.get_name())
|
||||
|
||||
|
||||
class DecompilerDex2WineJad:
|
||||
def __init__(self, vm, path_dex2jar="./decompiler/dex2jar/", bin_dex2jar="dex2jar.sh", path_jad="./decompiler/jad/", bin_jad="jad", tmp_dir="/tmp/"):
|
||||
self.classes = {}
|
||||
self.classes_failed = []
|
||||
|
||||
pathtmp = tmp_dir
|
||||
if not os.path.exists(pathtmp):
|
||||
os.makedirs(pathtmp)
|
||||
|
||||
fd, fdname = tempfile.mkstemp(dir=pathtmp)
|
||||
fd = os.fdopen(fd, "w+b")
|
||||
fd.write(vm.get_buff())
|
||||
fd.flush()
|
||||
fd.close()
|
||||
|
||||
compile = Popen([path_dex2jar + bin_dex2jar, fdname], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname)
|
||||
|
||||
pathclasses = fdname + "dex2jar/"
|
||||
compile = Popen(["unzip", fdname + "_dex2jar.jar", "-d", pathclasses], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname + "_dex2jar.jar")
|
||||
|
||||
for root, dirs, files in os.walk(pathclasses, followlinks=True):
|
||||
if files != []:
|
||||
for f in files:
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/":
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
compile = Popen(["wine", path_jad + bin_jad, "-o", "-d", root, real_filename], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
|
||||
for i in vm.get_classes():
|
||||
fname = pathclasses + "/" + i.get_name()[1:-1] + ".jad"
|
||||
if os.path.isfile(fname) == True:
|
||||
fd = open(fname, "r")
|
||||
self.classes[i.get_name()] = fd.read()
|
||||
fd.close()
|
||||
else:
|
||||
self.classes_failed.append(i.get_name())
|
||||
|
||||
rrmdir(pathclasses)
|
||||
|
||||
def get_source_method(self, method):
|
||||
class_name = method.get_class_name()
|
||||
method_name = method.get_name()
|
||||
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
lexer.add_filter(MethodFilter(method_name=method_name))
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_source(self, method):
|
||||
print self.get_source_method(method)
|
||||
|
||||
def get_source_class(self, _class):
|
||||
return self.classes[_class.get_name()]
|
||||
|
||||
def get_all(self, class_name):
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_all(self, _class):
|
||||
print self.get_all(_class.get_name())
|
||||
|
||||
class DecompilerDed:
|
||||
def __init__(self, vm, path="./decompiler/ded/", bin_ded="ded.sh", tmp_dir="/tmp/"):
|
||||
self.classes = {}
|
||||
self.classes_failed = []
|
||||
|
||||
pathtmp = tmp_dir
|
||||
if not os.path.exists(pathtmp) :
|
||||
os.makedirs( pathtmp )
|
||||
|
||||
fd, fdname = tempfile.mkstemp( dir=pathtmp )
|
||||
fd = os.fdopen(fd, "w+b")
|
||||
fd.write( vm.get_buff() )
|
||||
fd.flush()
|
||||
fd.close()
|
||||
|
||||
dirname = tempfile.mkdtemp(prefix=fdname + "-src")
|
||||
compile = Popen([ path + bin_ded, "-c", "-o", "-d", dirname, fdname ], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink( fdname )
|
||||
|
||||
findsrc = None
|
||||
for root, dirs, files in os.walk( dirname + "/optimized-decompiled/" ) :
|
||||
if dirs != [] :
|
||||
for f in dirs :
|
||||
if f == "src" :
|
||||
findsrc = root
|
||||
if findsrc[-1] != "/" :
|
||||
findsrc += "/"
|
||||
findsrc += f
|
||||
break
|
||||
if findsrc != None :
|
||||
break
|
||||
|
||||
for i in vm.get_classes() :
|
||||
fname = findsrc + "/" + i.get_name()[1:-1] + ".java"
|
||||
#print fname
|
||||
if os.path.isfile(fname) == True :
|
||||
fd = open(fname, "r")
|
||||
self.classes[ i.get_name() ] = fd.read()
|
||||
fd.close()
|
||||
else :
|
||||
self.classes_failed.append( i.get_name() )
|
||||
|
||||
rrmdir( dirname )
|
||||
|
||||
def get_source_method(self, method):
|
||||
class_name = method.get_class_name()
|
||||
method_name = method.get_name()
|
||||
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
lexer.add_filter(MethodFilter(method_name=method_name))
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
|
||||
def display_source(self, method):
|
||||
print self.get_source_method(method)
|
||||
|
||||
def get_all(self, class_name):
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
|
||||
def get_source_class(self, _class):
|
||||
return self.classes[_class.get_name()]
|
||||
|
||||
def display_all(self, _class):
|
||||
print self.get_all(_class.get_name())
|
||||
|
||||
|
||||
class DecompilerDex2Fernflower:
|
||||
def __init__(self,
|
||||
vm,
|
||||
path_dex2jar="./decompiler/dex2jar/",
|
||||
bin_dex2jar="dex2jar.sh",
|
||||
path_fernflower="./decompiler/fernflower/",
|
||||
bin_fernflower="fernflower.jar",
|
||||
options_fernflower={"dgs": '1', "asc": '1'},
|
||||
tmp_dir="/tmp/"):
|
||||
self.classes = {}
|
||||
self.classes_failed = []
|
||||
|
||||
pathtmp = tmp_dir
|
||||
if not os.path.exists(pathtmp):
|
||||
os.makedirs(pathtmp)
|
||||
|
||||
fd, fdname = tempfile.mkstemp(dir=pathtmp)
|
||||
fd = os.fdopen(fd, "w+b")
|
||||
fd.write(vm.get_buff())
|
||||
fd.flush()
|
||||
fd.close()
|
||||
|
||||
compile = Popen([path_dex2jar + bin_dex2jar, fdname], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname)
|
||||
|
||||
pathclasses = fdname + "dex2jar/"
|
||||
compile = Popen(["unzip", fdname + "_dex2jar.jar", "-d", pathclasses], stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
os.unlink(fdname + "_dex2jar.jar")
|
||||
|
||||
for root, dirs, files in os.walk(pathclasses, followlinks=True):
|
||||
if files != []:
|
||||
for f in files:
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/":
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
l = ["java", "-jar", path_fernflower + bin_fernflower]
|
||||
|
||||
for option in options_fernflower:
|
||||
l.append("-%s:%s" % (option, options_fernflower[option]))
|
||||
l.append(real_filename)
|
||||
l.append(root)
|
||||
|
||||
compile = Popen(l, stdout=PIPE, stderr=STDOUT)
|
||||
stdout, stderr = compile.communicate()
|
||||
|
||||
for i in vm.get_classes():
|
||||
fname = pathclasses + "/" + i.get_name()[1:-1] + ".java"
|
||||
if os.path.isfile(fname) == True:
|
||||
fd = open(fname, "r")
|
||||
self.classes[i.get_name()] = fd.read()
|
||||
fd.close()
|
||||
else:
|
||||
self.classes_failed.append(i.get_name())
|
||||
|
||||
rrmdir(pathclasses)
|
||||
|
||||
def get_source_method(self, method):
|
||||
class_name = method.get_class_name()
|
||||
method_name = method.get_name()
|
||||
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
lexer.add_filter(MethodFilter(method_name=method_name))
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_source(self, method):
|
||||
print self.get_source_method(method)
|
||||
|
||||
def get_source_class(self, _class):
|
||||
return self.classes[_class.get_name()]
|
||||
|
||||
def get_all(self, class_name):
|
||||
if class_name not in self.classes:
|
||||
return ""
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(self.classes[class_name], lexer, formatter)
|
||||
return result
|
||||
return self.classes[class_name]
|
||||
|
||||
def display_all(self, _class):
|
||||
print self.get_all(_class.get_name())
|
||||
|
||||
|
||||
class MethodFilter(Filter):
|
||||
def __init__(self, **options):
|
||||
Filter.__init__(self, **options)
|
||||
|
||||
self.method_name = options["method_name"]
|
||||
#self.descriptor = options["descriptor"]
|
||||
|
||||
self.present = False
|
||||
self.get_desc = True #False
|
||||
|
||||
def filter(self, lexer, stream) :
|
||||
a = []
|
||||
l = []
|
||||
rep = []
|
||||
|
||||
for ttype, value in stream:
|
||||
if self.method_name == value and (ttype is Token.Name.Function or ttype is Token.Name) :
|
||||
#print ttype, value
|
||||
|
||||
item_decl = -1
|
||||
for i in range(len(a)-1, 0, -1) :
|
||||
if a[i][0] is Token.Keyword.Declaration :
|
||||
if a[i][1] != "class" :
|
||||
item_decl = i
|
||||
break
|
||||
|
||||
if item_decl != -1 :
|
||||
self.present = True
|
||||
l.extend( a[item_decl:] )
|
||||
|
||||
|
||||
if self.present and ttype is Token.Keyword.Declaration :
|
||||
item_end = -1
|
||||
for i in range(len(l)-1, 0, -1) :
|
||||
if l[i][0] is Token.Operator and l[i][1] == "}" :
|
||||
item_end = i
|
||||
break
|
||||
|
||||
if item_end != -1 :
|
||||
rep.extend( l[:item_end+1] )
|
||||
l = []
|
||||
self.present = False
|
||||
|
||||
if self.present :
|
||||
l.append( (ttype, value) )
|
||||
|
||||
a.append( (ttype, value) )
|
||||
|
||||
|
||||
if self.present :
|
||||
nb = 0
|
||||
item_end = -1
|
||||
for i in range(len(l)-1, 0, -1) :
|
||||
if l[i][0] is Token.Operator and l[i][1] == "}" :
|
||||
nb += 1
|
||||
if nb == 2 :
|
||||
item_end = i
|
||||
break
|
||||
|
||||
rep.extend( l[:item_end+1] )
|
||||
|
||||
return rep
|
||||
|
||||
|
||||
class DecompilerDAD:
|
||||
def __init__(self, vm, vmx):
|
||||
self.vm = vm
|
||||
self.vmx = vmx
|
||||
|
||||
def get_source_method(self, m):
|
||||
mx = self.vmx.get_method(m)
|
||||
z = decompile.DvMethod(mx)
|
||||
z.process()
|
||||
|
||||
result = z.get_source()
|
||||
return result
|
||||
|
||||
def display_source(self, m):
|
||||
result = self.get_source_method(m)
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(result, lexer, formatter)
|
||||
print result
|
||||
|
||||
def get_source_class(self, _class):
|
||||
c = decompile.DvClass(_class, self.vmx)
|
||||
c.process()
|
||||
|
||||
result = c.get_source()
|
||||
|
||||
return result
|
||||
|
||||
def display_all(self, _class):
|
||||
result = self.get_source_class(_class)
|
||||
|
||||
if PYGMENTS:
|
||||
lexer = get_lexer_by_name("java", stripall=True)
|
||||
formatter = TerminalFormatter()
|
||||
result = highlight(result, lexer, formatter)
|
||||
print result
|
||||
|
||||
def get_all(self, class_name):
|
||||
pass
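# Usage sketch (added; not in the original file): driving DecompilerDAD by
# hand with a DalvikVMFormat/uVMAnalysis pair. "classes.dex" is a placeholder
# input; androlyze.AnalyzeDex below wires up the same objects automatically.
from androguard.core.bytecodes.dvm import DalvikVMFormat
from androguard.core.analysis.analysis import uVMAnalysis

d = DalvikVMFormat(open("classes.dex", "rb").read())
dx = uVMAnalysis(d)
dad = DecompilerDAD(d, dx)
print dad.get_source_class(d.get_classes()[0])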
|
0
androguard/patch/__init__.py
Normal file
1435
androguard/patch/zipfile.py
Normal file
File diff suppressed because it is too large
Load Diff
328
androlyze.py
Executable file
@ -0,0 +1,328 @@
#!/usr/bin/env python

# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.

import sys, os, cmd, threading, code, re

from optparse import OptionParser

from androguard.core import *
from androguard.core.androgen import *
from androguard.core.androconf import *
from androguard.core.bytecode import *
from androguard.core.bytecodes.jvm import *
from androguard.core.bytecodes.dvm import *
from androguard.core.bytecodes.apk import *

from androguard.core.analysis.analysis import *
from androguard.core.analysis.ganalysis import *
from androguard.core.analysis.risk import *
from androguard.decompiler.decompiler import *


from androguard.core import androconf

from IPython.frontend.terminal.embed import InteractiveShellEmbed
from IPython.config.loader import Config

from cPickle import dumps, loads

option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename', 'nargs' : 1 }
option_1 = { 'name' : ('-d', '--display'), 'help' : 'display the file in human readable format', 'action' : 'count' }
option_2 = { 'name' : ('-m', '--method'), 'help' : 'display method(s) respect with a regexp', 'nargs' : 1 }
option_3 = { 'name' : ('-f', '--field'), 'help' : 'display field(s) respect with a regexp', 'nargs' : 1 }
option_4 = { 'name' : ('-s', '--shell'), 'help' : 'open an interactive shell to play more easily with objects', 'action' : 'count' }
option_5 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
option_6 = { 'name' : ('-p', '--pretty'), 'help' : 'pretty print !', 'action' : 'count' }
option_8 = { 'name' : ('-x', '--xpermissions'), 'help' : 'show paths of permissions', 'action' : 'count' }

options = [option_0, option_1, option_2, option_3, option_4, option_5, option_6, option_8]


def init_print_colors():
    from IPython.utils import coloransi, io
    default_colors(coloransi.TermColors)
    CONF["PRINT_FCT"] = io.stdout.write


def interact():
    cfg = Config()
    ipshell = InteractiveShellEmbed(config=cfg, banner1="Androlyze version %s" % androconf.ANDROGUARD_VERSION)
    init_print_colors()
    ipshell()


def save_session(l, filename):
    """
        save your session !

        :param l: a list of objects
        :type: a list of object
        :param filename: output filename to save the session
        :type filename: string

        :Example:
            save_session([a, vm, vmx], "msession.json")
    """
    fd = open(filename, "w")
    fd.write(dumps(l, -1))
    fd.close()


def load_session(filename):
    """
        load your session !

        :param filename: the filename where the session has been saved
        :type filename: string

        :rtype: the elements of your session :)

        :Example:
            a, vm, vmx = load_session("mysession.json")
    """
    return loads(open(filename, "r").read())


def AnalyzeAPK(filename, raw=False, decompiler=None):
    """
        Analyze an android application and setup all stuff for a more quickly analysis !

        :param filename: the filename of the android application or a buffer which represents the application
        :type filename: string
        :param raw: True is you would like to use a buffer (optional)
        :type raw: boolean
        :param decompiler: ded, dex2jad, dad (optional)
        :type decompiler: string

        :rtype: return the :class:`APK`, :class:`DalvikVMFormat`, and :class:`VMAnalysis` objects
    """
    androconf.debug("APK ...")
    a = APK(filename, raw)
    d, dx = AnalyzeDex(a.get_dex(), raw=True, decompiler=decompiler)
    return a, d, dx


def AnalyzeDex(filename, raw=False, decompiler=None):
    """
        Analyze an android dex file and setup all stuff for a more quickly analysis !

        :param filename: the filename of the android dex file or a buffer which represents the dex file
        :type filename: string
        :param raw: True is you would like to use a buffer (optional)
        :type raw: boolean

        :rtype: return the :class:`DalvikVMFormat`, and :class:`VMAnalysis` objects
    """
    androconf.debug("DalvikVMFormat ...")

    d = None
    if raw == False:
        d = DalvikVMFormat(open(filename, "rb").read())
    else:
        d = DalvikVMFormat(filename)

    androconf.debug("Export VM to python namespace")
    d.create_python_export()

    androconf.debug("VMAnalysis ...")
    dx = uVMAnalysis(d)

    androconf.debug("GVMAnalysis ...")
    gx = GVMAnalysis(dx, None)

    d.set_vmanalysis(dx)
    d.set_gvmanalysis(gx)

    RunDecompiler(d, dx, decompiler)

    androconf.debug("XREF ...")
    d.create_xref()
    androconf.debug("DREF ...")
    d.create_dref()

    return d, dx


def AnalyzeODex(filename, raw=False, decompiler=None):
    """
        Analyze an android odex file and setup all stuff for a more quickly analysis !

        :param filename: the filename of the android dex file or a buffer which represents the dex file
        :type filename: string
        :param raw: True is you would like to use a buffer (optional)
        :type raw: boolean

        :rtype: return the :class:`DalvikOdexVMFormat`, and :class:`VMAnalysis` objects
    """
    androconf.debug("DalvikOdexVMFormat ...")
    d = None
    if raw == False:
        d = DalvikOdexVMFormat(open(filename, "rb").read())
    else:
        d = DalvikOdexVMFormat(filename)

    androconf.debug("Export VM to python namespace")
    d.create_python_export()

    androconf.debug("VMAnalysis ...")
    dx = uVMAnalysis(d)

    androconf.debug("GVMAnalysis ...")
    gx = GVMAnalysis(dx, None)

    d.set_vmanalysis(dx)
    d.set_gvmanalysis(gx)

    RunDecompiler(d, dx, decompiler)

    androconf.debug("XREF ...")
    d.create_xref()
    androconf.debug("DREF ...")
    d.create_dref()

    return d, dx


def RunDecompiler(d, dx, decompiler):
    """
        Run the decompiler on a specific analysis

        :param d: the DalvikVMFormat object
        :type d: :class:`DalvikVMFormat` object
        :param dx: the analysis of the format
        :type dx: :class:`VMAnalysis` object
        :param decompiler: the type of decompiler to use ("dad", "dex2jad", "ded")
        :type decompiler: string
    """
    if decompiler != None:
        androconf.debug("Decompiler ...")
        decompiler = decompiler.lower()
        if decompiler == "dex2jad":
            d.set_decompiler(DecompilerDex2Jad(d,
                                               androconf.CONF["PATH_DEX2JAR"],
                                               androconf.CONF["BIN_DEX2JAR"],
                                               androconf.CONF["PATH_JAD"],
                                               androconf.CONF["BIN_JAD"],
                                               androconf.CONF["TMP_DIRECTORY"]))
        elif decompiler == "dex2fernflower":
            d.set_decompiler(DecompilerDex2Fernflower(d,
                                                      androconf.CONF["PATH_DEX2JAR"],
                                                      androconf.CONF["BIN_DEX2JAR"],
                                                      androconf.CONF["PATH_FERNFLOWER"],
                                                      androconf.CONF["BIN_FERNFLOWER"],
                                                      androconf.CONF["OPTIONS_FERNFLOWER"],
                                                      androconf.CONF["TMP_DIRECTORY"]))
        elif decompiler == "ded":
            d.set_decompiler(DecompilerDed(d,
                                           androconf.CONF["PATH_DED"],
                                           androconf.CONF["BIN_DED"],
                                           androconf.CONF["TMP_DIRECTORY"]))
        else:
            d.set_decompiler(DecompilerDAD(d, dx))


def AnalyzeElf(filename, raw=False):
    # avoid to install smiasm for everybody
    from androguard.core.binaries.elf import ELF

    e = None
    if raw == False:
        e = ELF(open(filename, "rb").read())
    else:
        e = ELF(filename)

    ExportElfToPython(e)

    return e


def ExportElfToPython(e):
    for function in e.get_functions():
        name = "FUNCTION_" + function.name
        setattr(e, name, function)


def AnalyzeJAR(filename, raw=False):
    androconf.debug("JAR ...")
    a = JAR(filename, raw)

    d = AnalyzeClasses(a.get_classes())

    return a, d


def AnalyzeClasses(classes):
    d = {}
    for i in classes:
        d[i[0]] = JVMFormat(i[1])

    return d


def main(options, arguments):
    if options.shell != None:
        interact()

    elif options.input != None :
        _a = AndroguardS( options.input )

        if options.pretty != None :
            init_print_colors()

        if options.display != None :
            if options.pretty != None :
                _a.ianalyze()
                _a.pretty_show()
            else :
                _a.show()

        elif options.method != None :
            for method in _a.get("method", options.method) :
                if options.pretty != None :
                    _a.ianalyze()
                    method.pretty_show()
                else :
                    method.show()

        elif options.field != None :
            for field in _a.get("field", options.field) :
                field.show()

        elif options.xpermissions != None :
            _a.ianalyze()
            perms_access = _a.get_analysis().get_permissions( [] )
            for perm in perms_access :
                print "PERM : ", perm
                for path in perms_access[ perm ] :
                    show_Path( _a.get_vm(), path )

    elif options.version != None :
        print "Androlyze version %s" % androconf.ANDROGUARD_VERSION

if __name__ == "__main__" :
    parser = OptionParser()
    for option in options :
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)

    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
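# Usage sketch (added; not part of androlyze.py): the typical analysis entry
# point from a script or the interactive shell. "app.apk" is a placeholder
# and androlyze.py is assumed to be importable from the current directory.
from androlyze import AnalyzeAPK, save_session

a, d, dx = AnalyzeAPK("app.apk", decompiler="dad")
print a.get_package()
for method in d.get_methods()[:5]:
    print method.get_class_name(), method.get_name()
save_session([a, d, dx], "app.session")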
100
andromercury.py
Executable file
@ -0,0 +1,100 @@
#!/usr/bin/env python

# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.

import sys, re, os

from optparse import OptionParser

from androguard.core.bytecodes import apk

sys.path.append("./elsim/")
from elsim.elsign import dalvik_elsign

sys.path.append("./mercury/client")
from merc.lib.common import Session

option_0 = { 'name' : ('-l', '--list'), 'help' : 'list all packages', 'nargs' : 1 }
option_1 = { 'name' : ('-i', '--input'), 'help' : 'get specific packages (a filter)', 'nargs' : 1 }
option_2 = { 'name' : ('-r', '--remotehost'), 'help' : 'specify ip of emulator/device', 'nargs' : 1 }
option_3 = { 'name' : ('-p', '--port'), 'help' : 'specify the port', 'nargs' : 1 }
option_4 = { 'name' : ('-o', '--output'), 'help' : 'output directory to write packages', 'nargs' : 1 }
option_5 = { 'name' : ('-b', '--database'), 'help' : 'database : use this database', 'nargs' : 1 }
option_6 = { 'name' : ('-c', '--config'), 'help' : 'use this configuration', 'nargs' : 1 }
option_7 = { 'name' : ('-v', '--verbose'), 'help' : 'display debug information', 'action' : 'count' }

options = [option_0, option_1, option_2, option_3, option_4, option_5, option_6, option_7]

def display(ret, debug) :
    print "---->", ret[0],

def main(options, arguments) :
    sessionip = "127.0.0.1"
    sessionport = 31415

    if options.remotehost :
        sessionip = options.remotehost

    if options.port :
        sessionport = int(options.port)

    newsession = Session(sessionip, sessionport, "bind")

    # Check if connection can be established
    if newsession.executeCommand("core", "ping", None).data == "pong":

        if options.list :
            request = {'filter': options.list, 'permissions': None }
            apks_info = newsession.executeCommand("packages", "info", {}).getPaddedErrorOrData()
            print apks_info

        elif options.input and options.output :
            s = None
            if options.database != None or options.config != None :
                s = dalvik_elsign.MSignature( options.database, options.config, options.verbose != None, ps = dalvik_elsign.PublicSignature)

            request = {'filter': options.input, 'permissions': None }
            apks_info = newsession.executeCommand("packages", "info", request).getPaddedErrorOrData()
            print apks_info

            for i in apks_info.split("\n") :
                if re.match("APK path:", i) != None :
                    name_app = i.split(":")[1][1:]
                    print name_app,
                    response = newsession.downloadFile(name_app, options.output)
                    print response.data, response.error,

                    if s != None :
                        a = apk.APK( options.output + "/" + os.path.basename(name_app) )
                        if a.is_valid_APK() :
                            display( s.check_apk( a ), options.verbose )
                        print
    else:
        print "\n**Network Error** Could not connect to " + sessionip + ":" + str(sessionport) + "\n"

if __name__ == "__main__" :
    parser = OptionParser()
    for option in options :
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)

    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
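# Usage sketch (added; not part of andromercury.py): the minimal Mercury
# handshake used above, kept separate so it can be tried on its own. The
# Mercury client is assumed to be checked out under ./mercury/client.
import sys
sys.path.append("./mercury/client")
from merc.lib.common import Session

session = Session("127.0.0.1", 31415, "bind")
print session.executeCommand("core", "ping", None).data   # expect "pong"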
96
androrisk.py
Executable file
@ -0,0 +1,96 @@
#!/usr/bin/env python

# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.

import sys, os

from optparse import OptionParser

from androguard.core import androconf
from androguard.core.bytecodes import apk
from androguard.core.analysis import risk

option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 1 }
option_1 = { 'name' : ('-m', '--method'), 'help' : 'perform analysis of each method', 'action' : 'count' }
option_2 = { 'name' : ('-d', '--directory'), 'help' : 'directory : use this directory', 'nargs' : 1 }
option_3 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }

options = [option_0, option_1, option_2, option_3]

def display_result(res) :
    for i in res :
        print "\t", i
        for j in res[i] :
            print "\t\t", j, res[i][j]

def analyze_app(filename, ri, a) :
    print filename
    display_result( ri.with_apk( a ) )

def analyze_dex(filename, ri, d) :
    print filename
    display_result( ri.with_dex( d ) )

def main(options, arguments) :
    ri = risk.RiskIndicator()
    ri.add_risk_analysis( risk.RedFlags() )
    ri.add_risk_analysis( risk.FuzzyRisk() )

    if options.input != None :
        ret_type = androconf.is_android( options.input )
        if ret_type == "APK" :
            a = apk.APK( options.input )
            analyze_app( options.input, ri, a )
        elif ret_type == "DEX" :
            analyze_dex( options.input, ri, open(options.input, "r").read() )


    elif options.directory != None :
        for root, dirs, files in os.walk( options.directory, followlinks=True ) :
            if files != [] :
                for f in files :
                    real_filename = root
                    if real_filename[-1] != "/" :
                        real_filename += "/"
                    real_filename += f

                    ret_type = androconf.is_android( real_filename )
                    if ret_type == "APK" :
                        try :
                            a = apk.APK( real_filename )
                            analyze_app( real_filename, ri, a )
                        except Exception, e :
                            print e

                    elif ret_type == "DEX" :
                        analyze_dex( real_filename, ri, open(real_filename, "r").read() )

    elif options.version != None :
        print "Androrisk version %s" % androconf.ANDROGUARD_VERSION

if __name__ == "__main__" :
    parser = OptionParser()
    for option in options :
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)

    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
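The risk API driven by androrisk.py can also be used directly from Python. A minimal sketch, built only from the calls visible above (RiskIndicator, RedFlags, FuzzyRisk, with_apk); the APK path is a hypothetical placeholder:

from androguard.core.bytecodes import apk
from androguard.core.analysis import risk

a = apk.APK("/tmp/sample.apk")  # hypothetical input path

ri = risk.RiskIndicator()
ri.add_risk_analysis(risk.RedFlags())
ri.add_risk_analysis(risk.FuzzyRisk())

# with_apk() returns the nested dictionary that display_result() prints above
print ri.with_apk(a)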
108
androsign.py
Executable file
@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk
|
||||
|
||||
sys.path.append("./elsim/")
|
||||
from elsim.elsign import dalvik_elsign
|
||||
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-d', '--directory'), 'help' : 'directory : use this directory', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-b', '--database'), 'help' : 'database : use this database', 'nargs' : 1 }
|
||||
option_3 = { 'name' : ('-c', '--config'), 'help' : 'use this configuration', 'nargs' : 1 }
|
||||
option_4 = { 'name' : ('-v', '--verbose'), 'help' : 'display debug information', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3, option_4]
|
||||
|
||||
def display(ret, debug) :
|
||||
print "---->", ret[0]
|
||||
sys.stdout.flush()
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.database == None or options.config == None :
|
||||
return
|
||||
|
||||
s = dalvik_elsign.MSignature( options.database, options.config, options.verbose != None, ps = dalvik_elsign.PublicSignature)
|
||||
|
||||
if options.input != None :
|
||||
ret_type = androconf.is_android( options.input )
|
||||
|
||||
print os.path.basename(options.input), ":",
|
||||
sys.stdout.flush()
|
||||
if ret_type == "APK" :
|
||||
try :
|
||||
a = apk.APK( options.input )
|
||||
if a.is_valid_APK() :
|
||||
display( s.check_apk( a ), options.verbose )
|
||||
else :
|
||||
print "INVALID"
|
||||
except Exception, e :
|
||||
print "ERROR", e
|
||||
|
||||
elif ret_type == "DEX" :
|
||||
display( s.check_dex( open(options.input, "rb").read() ), options.verbose )
|
||||
elif options.directory != None :
|
||||
for root, dirs, files in os.walk( options.directory, followlinks=True ) :
|
||||
if files != [] :
|
||||
for f in files :
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/" :
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
ret_type = androconf.is_android( real_filename )
|
||||
if ret_type == "APK" :
|
||||
print os.path.basename( real_filename ), ":",
|
||||
sys.stdout.flush()
|
||||
try :
|
||||
a = apk.APK( real_filename )
|
||||
if a.is_valid_APK() :
|
||||
display( s.check_apk( a ), options.verbose )
|
||||
else :
|
||||
print "INVALID APK"
|
||||
except Exception, e :
|
||||
print "ERROR", e
|
||||
elif ret_type == "DEX" :
|
||||
try :
|
||||
print os.path.basename( real_filename ), ":",
|
||||
sys.stdout.flush()
|
||||
display( s.check_dex( open(real_filename, "rb").read() ), options.verbose )
|
||||
except Exception, e :
|
||||
print "ERROR", e
|
||||
|
||||
elif options.version != None :
|
||||
print "Androsign version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
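The signature check wrapped by androsign.py boils down to a few calls. A minimal sketch using only the entry points shown above (MSignature, check_apk, is_valid_APK); the database, configuration and APK paths are hypothetical placeholders:

import sys
sys.path.append("./elsim/")

from androguard.core.bytecodes import apk
from elsim.elsign import dalvik_elsign

# All paths below are hypothetical placeholders.
s = dalvik_elsign.MSignature("sigs.db", "sigs.config", False,
                             ps=dalvik_elsign.PublicSignature)

a = apk.APK("/tmp/sample.apk")
if a.is_valid_APK():
    print "---->", s.check_apk(a)[0]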
190
androsim.py
Executable file
@ -0,0 +1,190 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core import androconf
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis import analysis
|
||||
|
||||
sys.path.append("./elsim")
|
||||
from elsim import elsim
|
||||
from elsim.elsim_dalvik import ProxyDalvik, FILTERS_DALVIK_SIM
|
||||
from elsim.elsim_dalvik import ProxyDalvikStringMultiple, ProxyDalvikStringOne, FILTERS_DALVIK_SIM_STRING
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 2 }
|
||||
option_1 = { 'name' : ('-t', '--threshold'), 'help' : 'specify the threshold (0.0 to 1.0) used to decide whether a method is similar. This value drives the filtering: a higher threshold produces more associations', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-c', '--compressor'), 'help' : 'specify the compressor (BZ2, ZLIB, SNAPPY, LZMA, XZ). The final result depends directly on the compressor: LZMA, for example, gives a better result but takes more time', 'nargs' : 1 }
|
||||
option_4 = { 'name' : ('-d', '--display'), 'help' : 'display all information about methods', 'action' : 'count' }
|
||||
option_5 = { 'name' : ('-n', '--new'), 'help' : 'calculate the final score only by using the ratio of included methods', 'action' : 'count' }
|
||||
option_6 = { 'name' : ('-e', '--exclude'), 'help' : 'exclude specific class name (python regexp)', 'nargs' : 1 }
|
||||
option_7 = { 'name' : ('-s', '--size'), 'help' : 'exclude methods below the given size (the minimum length, in bytes, of a Dalvik method to be taken into account)', 'nargs' : 1 }
|
||||
option_8 = { 'name' : ('-x', '--xstrings'), 'help' : 'display similarities of strings', 'action' : 'count' }
|
||||
option_9 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
option_10 = { 'name' : ('-l', '--library'), 'help' : 'use the python library (python) or specify the path of the shared library', 'nargs' : 1 }
|
||||
|
||||
options = [option_0, option_1, option_2, option_4, option_5, option_6, option_7, option_8, option_9, option_10]
|
||||
|
||||
def check_one_file(a, d1, dx1, FS, threshold, file_input, view_strings=False, new=True, library=True) :
|
||||
d2 = None
|
||||
ret_type = androconf.is_android( file_input )
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( file_input )
|
||||
d2 = dvm.DalvikVMFormat( a.get_dex() )
|
||||
elif ret_type == "DEX" :
|
||||
d2 = dvm.DalvikVMFormat( open(file_input, "rb").read() )
|
||||
|
||||
if d2 == None :
|
||||
return
|
||||
dx2 = analysis.VMAnalysis( d2 )
|
||||
|
||||
el = elsim.Elsim( ProxyDalvik(d1, dx1), ProxyDalvik(d2, dx2), FS, threshold, options.compressor, libnative=library )
|
||||
el.show()
|
||||
print "\t--> methods: %f%% of similarities" % el.get_similarity_value(new)
|
||||
|
||||
|
||||
if options.display :
|
||||
print "SIMILAR methods:"
|
||||
diff_methods = el.get_similar_elements()
|
||||
for i in diff_methods :
|
||||
el.show_element( i )
|
||||
|
||||
print "IDENTICAL methods:"
|
||||
new_methods = el.get_identical_elements()
|
||||
for i in new_methods :
|
||||
el.show_element( i )
|
||||
|
||||
print "NEW methods:"
|
||||
new_methods = el.get_new_elements()
|
||||
for i in new_methods :
|
||||
el.show_element( i, False )
|
||||
|
||||
print "DELETED methods:"
|
||||
del_methods = el.get_deleted_elements()
|
||||
for i in del_methods :
|
||||
el.show_element( i )
|
||||
|
||||
print "SKIPPED methods:"
|
||||
skipped_methods = el.get_skipped_elements()
|
||||
for i in skipped_methods :
|
||||
el.show_element( i )
|
||||
|
||||
if view_strings :
|
||||
els = elsim.Elsim( ProxyDalvikStringMultiple(d1, dx1),
|
||||
ProxyDalvikStringMultiple(d2, dx2),
|
||||
FILTERS_DALVIK_SIM_STRING,
|
||||
threshold,
|
||||
options.compressor,
|
||||
libnative=library )
|
||||
#els = elsim.Elsim( ProxyDalvikStringOne(d1, dx1),
|
||||
# ProxyDalvikStringOne(d2, dx2), FILTERS_DALVIK_SIM_STRING, threshold, options.compressor, libnative=library )
|
||||
els.show()
|
||||
print "\t--> strings: %f%% of similarities" % els.get_similarity_value(new)
|
||||
|
||||
if options.display :
|
||||
print "SIMILAR strings:"
|
||||
diff_strings = els.get_similar_elements()
|
||||
for i in diff_strings :
|
||||
els.show_element( i )
|
||||
|
||||
print "IDENTICAL strings:"
|
||||
new_strings = els.get_identical_elements()
|
||||
for i in new_strings :
|
||||
els.show_element( i )
|
||||
|
||||
print "NEW strings:"
|
||||
new_strings = els.get_new_elements()
|
||||
for i in new_strings :
|
||||
els.show_element( i, False )
|
||||
|
||||
print "DELETED strings:"
|
||||
del_strings = els.get_deleted_elements()
|
||||
for i in del_strings :
|
||||
els.show_element( i )
|
||||
|
||||
print "SKIPPED strings:"
|
||||
skipped_strings = els.get_skipped_elements()
|
||||
for i in skipped_strings :
|
||||
els.show_element( i )
|
||||
|
||||
|
||||
def check_one_directory(a, d1, dx1, FS, threshold, directory, view_strings=False, new=True, library=True) :
|
||||
for root, dirs, files in os.walk( directory, followlinks=True ) :
|
||||
if files != [] :
|
||||
for f in files :
|
||||
real_filename = root
|
||||
if real_filename[-1] != "/" :
|
||||
real_filename += "/"
|
||||
real_filename += f
|
||||
|
||||
print "filename: %s ..." % real_filename
|
||||
check_one_file(a, d1, dx1, FS, threshold, real_filename, view_strings, new, library)
|
||||
|
||||
############################################################
|
||||
def main(options, arguments) :
|
||||
if options.input != None :
|
||||
a = None
|
||||
ret_type = androconf.is_android( options.input[0] )
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( options.input[0] )
|
||||
d1 = dvm.DalvikVMFormat( a.get_dex() )
|
||||
elif ret_type == "DEX" :
|
||||
d1 = dvm.DalvikVMFormat( open(options.input[0], "rb").read() )
|
||||
|
||||
dx1 = analysis.VMAnalysis( d1 )
|
||||
|
||||
threshold = None
|
||||
if options.threshold != None :
|
||||
threshold = float(options.threshold)
|
||||
|
||||
FS = FILTERS_DALVIK_SIM
|
||||
FS[elsim.FILTER_SKIPPED_METH].set_regexp( options.exclude )
|
||||
FS[elsim.FILTER_SKIPPED_METH].set_size( options.size )
|
||||
|
||||
new = True
|
||||
if options.new != None :
|
||||
new = False
|
||||
|
||||
library = True
|
||||
if options.library != None :
|
||||
library = options.library
|
||||
if options.library == "python" :
|
||||
library = False
|
||||
|
||||
if os.path.isdir( options.input[1] ) == False :
|
||||
check_one_file( a, d1, dx1, FS, threshold, options.input[1], options.xstrings, new, library )
|
||||
else :
|
||||
check_one_directory(a, d1, dx1, FS, threshold, options.input[1], options.xstrings, new, library )
|
||||
|
||||
elif options.version != None :
|
||||
print "Androsim version %s" % androconf.ANDROGUARD_VERSION
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
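The comparison performed in check_one_file() can be reproduced directly with elsim. A minimal sketch assuming two DEX files on disk (paths are placeholders) and the ZLIB compressor listed in the -c option help; all classes and functions come from the imports shown above:

import sys
sys.path.append("./elsim")

from androguard.core.bytecodes import dvm
from androguard.core.analysis import analysis
from elsim import elsim
from elsim.elsim_dalvik import ProxyDalvik, FILTERS_DALVIK_SIM

# Hypothetical input paths, for illustration only.
d1 = dvm.DalvikVMFormat(open("/tmp/classes1.dex", "rb").read())
d2 = dvm.DalvikVMFormat(open("/tmp/classes2.dex", "rb").read())
dx1 = analysis.VMAnalysis(d1)
dx2 = analysis.VMAnalysis(d2)

# None = default threshold, "ZLIB" = one of the compressors accepted by -c
el = elsim.Elsim(ProxyDalvik(d1, dx1), ProxyDalvik(d2, dx2),
                 FILTERS_DALVIK_SIM, None, "ZLIB", libnative=True)
el.show()
print "%f%% of similarities" % el.get_similarity_value(True)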
275
androxgmml.py
Executable file
@ -0,0 +1,275 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from xml.sax.saxutils import escape
|
||||
import sys, os
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core.androgen import Androguard
|
||||
from androguard.core.analysis import analysis
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--output'), 'help' : 'filename output of the xgmml', 'nargs' : 1 }
|
||||
option_2 = { 'name' : ('-f', '--functions'), 'help' : 'include function calls', 'action' : 'count' }
|
||||
option_3 = { 'name' : ('-e', '--externals'), 'help' : 'include extern function calls', 'action' : 'count' }
|
||||
option_4 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
|
||||
|
||||
options = [option_0, option_1, option_2, option_3, option_4]
|
||||
|
||||
METHODS_ID = {}
|
||||
EXTERNAL_METHODS_ID = {}
|
||||
NODES_ID = {}
|
||||
EDGES_ID = {}
|
||||
|
||||
NODE_GRAPHIC = {
|
||||
"classic" : {
|
||||
"h" : 20.0,
|
||||
"w" : 20.0,
|
||||
"type" : "ELLIPSE",
|
||||
"width" : 1,
|
||||
"fill" : "#e1e1e1",
|
||||
"outline" : "#000000",
|
||||
},
|
||||
|
||||
"extern" : {
|
||||
"h" : 20.0,
|
||||
"w" : 20.0,
|
||||
"type" : "ELLIPSE",
|
||||
"width" : 1,
|
||||
"fill" : "#ff8c00",
|
||||
"outline" : "#000000",
|
||||
}
|
||||
}
|
||||
|
||||
EDGE_GRAPHIC = {
|
||||
"cfg" : {
|
||||
"width" : 2,
|
||||
"fill" : "#0000e1",
|
||||
},
|
||||
|
||||
"fcg" : {
|
||||
"width" : 3,
|
||||
"fill" : "#9acd32",
|
||||
},
|
||||
|
||||
"efcg" : {
|
||||
"width" : 3,
|
||||
"fill" : "#808000",
|
||||
}
|
||||
}
|
||||
|
||||
def get_node_name(method, bb) :
|
||||
return "%s-%s-%s" % ( method.get_class_name(), escape(bb.name), escape(method.get_descriptor()) )
|
||||
|
||||
def export_xgmml_cfg(g, fd) :
|
||||
method = g.get_method()
|
||||
|
||||
name = method.get_name()
|
||||
class_name = method.get_class_name()
|
||||
descriptor = method.get_descriptor()
|
||||
|
||||
if method.get_code() != None :
|
||||
size_ins = method.get_code().get_length()
|
||||
|
||||
for i in g.basic_blocks.get() :
|
||||
fd.write("<node id=\"%d\" label=\"%s\">\n" % (len(NODES_ID), get_node_name(method, i)))
|
||||
|
||||
fd.write("<att type=\"string\" name=\"classname\" value=\"%s\"/>\n" % (escape(class_name)))
|
||||
fd.write("<att type=\"string\" name=\"name\" value=\"%s\"/>\n" % (escape(name)))
|
||||
fd.write("<att type=\"string\" name=\"descriptor\" value=\"%s\"/>\n" % (escape(descriptor)))
|
||||
|
||||
fd.write("<att type=\"integer\" name=\"offset\" value=\"%d\"/>\n" % (i.start))
|
||||
|
||||
cl = NODE_GRAPHIC["classic"]
|
||||
width = cl["width"]
|
||||
fill = cl["fill"]
|
||||
|
||||
# No child ...
|
||||
if i.childs == [] :
|
||||
fill = "#87ceeb"
|
||||
|
||||
if i.start == 0 :
|
||||
fd.write("<att type=\"string\" name=\"node.label\" value=\"%s\\n%s\"/>\n" % (escape(name), i.get_instructions()[-1].get_name()))
|
||||
width = 3
|
||||
fill = "#ff0000"
|
||||
|
||||
METHODS_ID[ class_name + name + descriptor ] = len(NODES_ID)
|
||||
else :
|
||||
fd.write("<att type=\"string\" name=\"node.label\" value=\"0x%x\\n%s\"/>\n" % (i.start, i.get_instructions()[-1].get_name()))
|
||||
|
||||
size = 0
|
||||
for tmp_ins in i.get_instructions() :
|
||||
size += (tmp_ins.get_length() / 2)
|
||||
|
||||
|
||||
h = ((size / float(size_ins)) * 20) + cl["h"]
|
||||
|
||||
fd.write("<graphics type=\"%s\" h=\"%.1f\" w=\"%.1f\" width=\"%d\" fill=\"%s\" outline=\"%s\">\n" % ( cl["type"], h, h, width, fill, cl["outline"]))
|
||||
fd.write("</graphics>\n")
|
||||
|
||||
fd.write("</node>\n")
|
||||
|
||||
NODES_ID[ class_name + i.name + descriptor ] = len(NODES_ID)
|
||||
|
||||
for i in g.basic_blocks.get() :
|
||||
for j in i.childs :
|
||||
if j[-1] != None :
|
||||
label = "%s (cfg) %s" % (get_node_name(method, i), get_node_name(method, j[-1]))
|
||||
id = len(NODES_ID) + len(EDGES_ID)
|
||||
fd.write( "<edge id=\"%d\" label=\"%s\" source=\"%d\" target=\"%d\">\n" % (id, label, NODES_ID[ class_name + i.name + descriptor ], NODES_ID[ class_name + j[-1].name + descriptor ]) )
|
||||
|
||||
cl = EDGE_GRAPHIC["cfg"]
|
||||
fd.write("<graphics width=\"%d\" fill=\"%s\">\n" % (cl["width"], cl["fill"]) )
|
||||
fd.write("</graphics>\n")
|
||||
|
||||
fd.write("</edge>\n")
|
||||
|
||||
EDGES_ID[ label ] = id
|
||||
|
||||
def export_xgmml_fcg(a, x, fd) :
|
||||
classes = a.get_classes_names()
|
||||
|
||||
# Methods flow graph
|
||||
for m, _ in x.get_tainted_packages().get_packages() :
|
||||
paths = m.get_methods()
|
||||
for j in paths :
|
||||
if j.get_method().get_class_name() in classes and m.get_info() in classes :
|
||||
if j.get_access_flag() == analysis.TAINTED_PACKAGE_CALL :
|
||||
t = m.get_info() + j.get_name() + j.get_descriptor()
|
||||
if t not in METHODS_ID :
|
||||
continue
|
||||
|
||||
bb1 = x.get_method( j.get_method() ).basic_blocks.get_basic_block( j.get_idx() )
|
||||
|
||||
node1 = get_node_name(j.get_method(), bb1) + "@0x%x" % j.get_idx()
|
||||
node2 = "%s-%s-%s" % (m.get_info(), escape(j.get_name()), escape(j.get_descriptor()))
|
||||
|
||||
label = "%s (fcg) %s" % (node1, node2)
|
||||
|
||||
if label in EDGES_ID :
|
||||
continue
|
||||
|
||||
id = len(NODES_ID) + len(EDGES_ID)
|
||||
|
||||
fd.write( "<edge id=\"%d\" label=\"%s\" source=\"%d\" target=\"%d\">\n" % (id,
|
||||
label,
|
||||
NODES_ID[ j.get_method().get_class_name() + bb1.name + j.get_method().get_descriptor() ],
|
||||
METHODS_ID[ m.get_info() + j.get_name() + j.get_descriptor() ]) )
|
||||
|
||||
cl = EDGE_GRAPHIC["fcg"]
|
||||
fd.write("<graphics width=\"%d\" fill=\"%s\">\n" % (cl["width"], cl["fill"]) )
|
||||
fd.write("</graphics>\n")
|
||||
|
||||
fd.write("</edge>\n")
|
||||
|
||||
EDGES_ID[ label ] = id
|
||||
|
||||
def export_xgmml_efcg(a, x, fd) :
|
||||
classes = a.get_classes_names()
|
||||
|
||||
# Methods flow graph
|
||||
for m, _ in x.get_tainted_packages().get_packages() :
|
||||
paths = m.get_methods()
|
||||
for j in paths :
|
||||
if j.get_method().get_class_name() in classes and m.get_info() not in classes :
|
||||
if j.get_access_flag() == analysis.TAINTED_PACKAGE_CALL :
|
||||
t = m.get_info() + j.get_name() + j.get_descriptor()
|
||||
if t not in EXTERNAL_METHODS_ID :
|
||||
fd.write("<node id=\"%d\" label=\"%s\">\n" % (len(NODES_ID), escape(t)))
|
||||
|
||||
fd.write("<att type=\"string\" name=\"classname\" value=\"%s\"/>\n" % (escape(m.get_info())))
|
||||
fd.write("<att type=\"string\" name=\"name\" value=\"%s\"/>\n" % (escape(j.get_name())))
|
||||
fd.write("<att type=\"string\" name=\"descriptor\" value=\"%s\"/>\n" % (escape(j.get_descriptor())))
|
||||
|
||||
cl = NODE_GRAPHIC["extern"]
|
||||
|
||||
fd.write("<att type=\"string\" name=\"node.label\" value=\"%s\\n%s\\n%s\"/>\n" % (escape(m.get_info()), escape(j.get_name()), escape(j.get_descriptor())))
|
||||
|
||||
fd.write("<graphics type=\"%s\" h=\"%.1f\" w=\"%.1f\" width=\"%d\" fill=\"%s\" outline=\"%s\">\n" % ( cl["type"], cl["h"], cl["h"], cl["width"], cl["fill"], cl["outline"]))
|
||||
fd.write("</graphics>\n")
|
||||
|
||||
fd.write("</node>\n")
|
||||
|
||||
NODES_ID[ t ] = len(NODES_ID)
|
||||
EXTERNAL_METHODS_ID[ t ] = NODES_ID[ t ]
|
||||
|
||||
bb1 = x.get_method( j.get_method() ).basic_blocks.get_basic_block( j.get_idx() )
|
||||
|
||||
node1 = get_node_name(j.get_method(), bb1) + "@0x%x" % j.get_idx()
|
||||
node2 = "%s-%s-%s" % (m.get_info(), escape(j.get_name()), escape(j.get_descriptor()))
|
||||
|
||||
label = "%s (efcg) %s" % (node1, node2)
|
||||
|
||||
if label in EDGES_ID :
|
||||
continue
|
||||
|
||||
id = len(NODES_ID) + len(EDGES_ID)
|
||||
|
||||
fd.write( "<edge id=\"%d\" label=\"%s\" source=\"%d\" target=\"%d\">\n" % (id,
|
||||
label,
|
||||
NODES_ID[ j.get_method().get_class_name() + bb1.name + j.get_method().get_descriptor() ],
|
||||
EXTERNAL_METHODS_ID[ m.get_info() + j.get_name() + j.get_descriptor() ]) )
|
||||
|
||||
cl = EDGE_GRAPHIC["efcg"]
|
||||
fd.write("<graphics width=\"%d\" fill=\"%s\">\n" % (cl["width"], cl["fill"]) )
|
||||
fd.write("</graphics>\n")
|
||||
|
||||
fd.write("</edge>\n")
|
||||
|
||||
EDGES_ID[ label ] = id
|
||||
|
||||
def export_apps_to_xgmml( input, output, fcg, efcg ) :
|
||||
a = Androguard( [ input ] )
|
||||
|
||||
fd = open(output, "w")
|
||||
fd.write("<?xml version='1.0'?>\n")
|
||||
fd.write("<graph label=\"Androguard XGMML %s\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:ns1=\"http://www.w3.org/1999/xlink\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns=\"http://www.cs.rpi.edu/XGMML\" directed=\"1\">\n" % (os.path.basename(input)))
|
||||
|
||||
for vm in a.get_vms() :
|
||||
x = analysis.VMAnalysis( vm )
|
||||
# CFG
|
||||
for method in vm.get_methods() :
|
||||
g = x.get_method( method )
|
||||
export_xgmml_cfg(g, fd)
|
||||
|
||||
if fcg :
|
||||
export_xgmml_fcg(vm, x, fd)
|
||||
|
||||
if efcg :
|
||||
export_xgmml_efcg(vm, x, fd)
|
||||
|
||||
fd.write("</graph>")
|
||||
fd.close()
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None and options.output != None :
|
||||
export_apps_to_xgmml( options.input, options.output, options.functions, options.externals )
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
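export_apps_to_xgmml() above already wires everything together, so a driver only needs an input and an output path. A minimal sketch with placeholder paths, assuming it runs in the same module as androxgmml.py:

# Both paths are hypothetical placeholders; the two booleans mirror the -f and -e options.
export_apps_to_xgmml("/tmp/sample.apk", "/tmp/sample.xgmml", True, True)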
97
apkviewer.py
Executable file
@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
from optparse import OptionParser
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.data import data
|
||||
from androguard.core.analysis import analysis, ganalysis
|
||||
from androguard.core import androconf
|
||||
|
||||
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (dex, apk)', 'nargs' : 1 }
|
||||
option_1 = { 'name' : ('-o', '--output'), 'help' : 'directory output', 'nargs' : 1 }
|
||||
|
||||
options = [option_0, option_1]
|
||||
|
||||
def create_directory( class_name, output ) :
|
||||
output_name = output
|
||||
if output_name[-1] != "/" :
|
||||
output_name = output_name + "/"
|
||||
|
||||
try :
|
||||
os.makedirs( output_name + class_name )
|
||||
except OSError :
|
||||
pass
|
||||
|
||||
def create_directories( vm, output ) :
|
||||
for class_name in vm.get_classes_names() :
|
||||
z = os.path.split( class_name )[0]
|
||||
create_directory( z[1:], output )
|
||||
|
||||
def main(options, arguments) :
|
||||
if options.input != None and options.output != None :
|
||||
|
||||
ret_type = androconf.is_android( options.input )
|
||||
vm = None
|
||||
a = None
|
||||
if ret_type == "APK" :
|
||||
a = apk.APK( options.input )
|
||||
if a.is_valid_APK() :
|
||||
vm = dvm.DalvikVMFormat( a.get_dex() )
|
||||
else :
|
||||
print "INVALID APK"
|
||||
elif ret_type == "DEX" :
|
||||
try :
|
||||
vm = dvm.DalvikVMFormat( open(options.input, "rb").read() )
|
||||
except Exception, e :
|
||||
print "INVALID DEX", e
|
||||
|
||||
|
||||
vmx = analysis.VMAnalysis( vm )
|
||||
gvmx = ganalysis.GVMAnalysis( vmx, a )
|
||||
|
||||
create_directories( vm, options.output )
|
||||
|
||||
# dv.export_to_gml( options.output )
|
||||
|
||||
dd = data.Data(vm, vmx, gvmx, a)
|
||||
|
||||
buff = dd.export_apk_to_gml()
|
||||
androconf.save_to_disk( buff, options.output + "/" + "apk.graphml" )
|
||||
|
||||
buff = dd.export_methodcalls_to_gml()
|
||||
androconf.save_to_disk( buff, options.output + "/" + "methodcalls.graphml" )
|
||||
|
||||
buff = dd.export_dex_to_gml()
|
||||
for i in buff :
|
||||
androconf.save_to_disk( buff[i], options.output + "/" + i + ".graphml" )
|
||||
|
||||
if __name__ == "__main__" :
|
||||
parser = OptionParser()
|
||||
for option in options :
|
||||
param = option['name']
|
||||
del option['name']
|
||||
parser.add_option(*param, **option)
|
||||
|
||||
|
||||
options, arguments = parser.parse_args()
|
||||
sys.argv[:] = arguments
|
||||
main(options, arguments)
|
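The same GML export can be done without the command-line wrapper. A minimal sketch based only on the calls in apkviewer.py above; the APK path and output location are hypothetical placeholders:

from androguard.core.bytecodes import apk, dvm
from androguard.core.data import data
from androguard.core.analysis import analysis, ganalysis
from androguard.core import androconf

# Hypothetical paths, for illustration only.
a = apk.APK("/tmp/sample.apk")
vm = dvm.DalvikVMFormat(a.get_dex())
vmx = analysis.VMAnalysis(vm)
gvmx = ganalysis.GVMAnalysis(vmx, a)

dd = data.Data(vm, vmx, gvmx, a)
androconf.save_to_disk(dd.export_apk_to_gml(), "/tmp/out/apk.graphml")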
99
demos/androguard_ANALYSIS.py
Executable file
@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys, hashlib
|
||||
PATH_INSTALL = "./"
|
||||
sys.path.append(PATH_INSTALL + "./")
|
||||
|
||||
from androguard.core.androgen import AndroguardS
|
||||
from androguard.core.analysis import analysis
|
||||
|
||||
OUTPUT = "./output/"
|
||||
#TEST = 'examples/java/test/orig/Test1.class'
|
||||
#TEST = 'examples/java/Demo1/orig/DES.class'
|
||||
#TEST = 'examples/java/Demo1/orig/Util.class'
|
||||
#TEST = "apks/DroidDream/tmp/classes.dex"
|
||||
#TEST = "./examples/android/TCDiff/bin/classes.dex"
|
||||
TEST = "apks/iCalendar.apk"
|
||||
#TEST = "apks/adrd/5/8370959.dex"
|
||||
|
||||
def display_CFG(a, x, classes) :
|
||||
for method in a.get_methods() :
|
||||
g = x.get_method( method )
|
||||
|
||||
print method.get_class_name(), method.get_name(), method.get_descriptor()
|
||||
for i in g.basic_blocks.get() :
|
||||
print "\t %s %x %x" % (i.name, i.start, i.end), '[ NEXT = ', ', '.join( "%x-%x-%s" % (j[0], j[1], j[2].get_name()) for j in i.childs ), ']', '[ PREV = ', ', '.join( j[2].get_name() for j in i.fathers ), ']'
|
||||
|
||||
|
||||
def display_STRINGS(a, x, classes) :
|
||||
print "STRINGS"
|
||||
for s, _ in x.get_tainted_variables().get_strings() :
|
||||
print "String : ", repr(s.get_info())
|
||||
analysis.show_PathVariable( a, s.get_paths() )
|
||||
|
||||
def display_FIELDS(a, x, classes) :
|
||||
print "FIELDS"
|
||||
for f, _ in x.get_tainted_variables().get_fields() :
|
||||
print "field : ", repr(f.get_info())
|
||||
analysis.show_PathVariable( a, f.get_paths() )
|
||||
|
||||
def display_PACKAGES(a, x, classes) :
|
||||
print "CREATED PACKAGES"
|
||||
for m, _ in x.get_tainted_packages().get_packages() :
|
||||
m.show()
|
||||
|
||||
def display_PACKAGES_II(a, x, classes) :
|
||||
# Internal Methods -> Internal Methods
|
||||
print "Internal --> Internal"
|
||||
for j in x.get_tainted_packages().get_internal_packages() :
|
||||
analysis.show_Path( a, j )
|
||||
|
||||
def display_PACKAGES_IE(a, x, classes) :
|
||||
# Internal Methods -> External Methods
|
||||
print "Internal --> External"
|
||||
for j in x.get_tainted_packages().get_external_packages() :
|
||||
analysis.show_Path( a, j )
|
||||
|
||||
def display_SEARCH_PACKAGES(a, x, classes, package_name) :
|
||||
print "Search package", package_name
|
||||
analysis.show_Paths( a, x.get_tainted_packages().search_packages( package_name ) )
|
||||
|
||||
def display_SEARCH_METHODS(a, x, classes, package_name, method_name, descriptor) :
|
||||
print "Search method", package_name, method_name, descriptor
|
||||
analysis.show_Paths( a, x.get_tainted_packages().search_methods( package_name, method_name, descriptor) )
|
||||
|
||||
def display_PERMISSION(a, x, classes) :
|
||||
# Show methods used by permission
|
||||
perms_access = x.get_tainted_packages().get_permissions( [] )
|
||||
for perm in perms_access :
|
||||
print "PERM : ", perm
|
||||
analysis.show_Paths( a, perms_access[ perm ] )
|
||||
|
||||
def display_OBJECT_CREATED(a, x, class_name) :
|
||||
print "Search object", class_name
|
||||
analysis.show_Paths( a, x.get_tainted_packages().search_objects( class_name ) )
|
||||
|
||||
a = AndroguardS( TEST )
|
||||
x = analysis.uVMAnalysis( a.get_vm() )
|
||||
|
||||
#print a.get_vm().get_strings()
|
||||
print a.get_vm().get_regex_strings( "access" )
|
||||
print a.get_vm().get_regex_strings( "(long).*2" )
|
||||
print a.get_vm().get_regex_strings( ".*(t\_t).*" )
|
||||
|
||||
classes = a.get_vm().get_classes_names()
|
||||
vm = a.get_vm()
|
||||
|
||||
display_CFG( a, x, classes )
|
||||
display_STRINGS( vm, x, classes )
|
||||
display_FIELDS( vm, x, classes )
|
||||
display_PACKAGES( vm, x, classes )
|
||||
display_PACKAGES_IE( vm, x, classes )
|
||||
display_PACKAGES_II( vm, x, classes )
|
||||
display_PERMISSION( vm, x, classes )
|
||||
|
||||
display_SEARCH_PACKAGES( a, x, classes, "Landroid/telephony/" )
|
||||
display_SEARCH_PACKAGES( a, x, classes, "Ljavax/crypto/" )
|
||||
display_SEARCH_METHODS( a, x, classes, "Ljavax/crypto/", "generateSecret", "." )
|
||||
|
||||
display_OBJECT_CREATED( a, x, "." )
|
18
demos/apk_format_1.py
Executable file
@ -0,0 +1,18 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append( PATH_INSTALL )

from androguard.core.bytecodes import dvm, apk

TEST = "./examples/android/TC/bin/TC-debug.apk"

a = apk.APK( TEST )
a.show()

j = dvm.DalvikVMFormat( a.get_dex() )

# SHOW CLASS (verbose)
#j.show()
18
demos/apk_format_2.py
Executable file
@ -0,0 +1,18 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append( PATH_INSTALL )

from androguard.core.bytecodes import dvm, apk

TEST = "./apks/crash/mikecc/e0399fdd481992bc049b6e9d765da7f007f89875.apk"

a = apk.APK( TEST, zipmodule=2 )
a.show()

j = dvm.DalvikVMFormat( a.get_dex() )

# SHOW CLASS (verbose)
#j.show()
19
demos/arm_format_1.py
Executable file
@ -0,0 +1,19 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL + "/core")
sys.path.append(PATH_INSTALL + "/core/bytecodes")
sys.path.append(PATH_INSTALL + "/core/assembly/")
sys.path.append(PATH_INSTALL + "/core/assembly/libassembly")

import assembly
assembly.ASM()

#import arm
#arm.ARM()

#a = apk.APK( TEST )
#a.show()
15
demos/axml_format_1.py
Executable file
@ -0,0 +1,15 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)

from androguard.core.bytecodes import apk


from xml.dom import minidom

ap = apk.AXMLPrinter( open("examples/axml/AndroidManifest2.xml", "r").read() )

print minidom.parseString( ap.getBuff() ).toxml()
65
demos/benchmark.py
Executable file
@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys, os
|
||||
import cProfile
|
||||
|
||||
# http://code.activestate.com/recipes/286222-memory-usage/
|
||||
_proc_status = '/proc/%d/status' % os.getpid()
|
||||
|
||||
_scale = {'kB': 1024.0, 'mB': 1024.0*1024.0,
|
||||
'KB': 1024.0, 'MB': 1024.0*1024.0}
|
||||
|
||||
def _VmB(VmKey):
|
||||
global _proc_status, _scale
|
||||
# get pseudo file /proc/<pid>/status
|
||||
try:
|
||||
t = open(_proc_status)
|
||||
v = t.read()
|
||||
t.close()
|
||||
except:
|
||||
return 0.0 # non-Linux?
|
||||
# get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
|
||||
i = v.index(VmKey)
|
||||
v = v[i:].split(None, 3) # whitespace
|
||||
if len(v) < 3:
|
||||
return 0.0 # invalid format?
|
||||
# convert Vm value to bytes
|
||||
return float(v[1]) * _scale[v[2]]
|
||||
|
||||
|
||||
def memory(since=0.0):
|
||||
'''Return memory usage in bytes.
|
||||
'''
|
||||
return _VmB('VmSize:') - since
|
||||
|
||||
|
||||
def resident(since=0.0):
|
||||
'''Return resident memory usage in bytes.
|
||||
'''
|
||||
return _VmB('VmRSS:') - since
|
||||
|
||||
|
||||
def stacksize(since=0.0):
|
||||
'''Return stack size in bytes.
|
||||
'''
|
||||
return _VmB('VmStk:') - since
|
||||
|
||||
PATH_INSTALL = "./"
|
||||
sys.path.append(PATH_INSTALL + "./")
|
||||
|
||||
import androguard, analysis
|
||||
|
||||
# a directory with apk files
|
||||
|
||||
TEST = "./apks/"
|
||||
|
||||
l = []
|
||||
for i in os.walk( TEST ) :
|
||||
for j in i[2] :
|
||||
l.append( os.path.join(i[0], j) )  # join with a separator so sub-directory entries form valid paths
|
||||
|
||||
print len(l), l
|
||||
|
||||
_a = androguard.Androguard( l )
|
||||
|
||||
print "MEMORY : ", memory() / _scale["MB"], "RESIDENT ", resident() / _scale["MB"], "STACKSIZE ", stacksize() / _scale["MB"]
|
63
demos/crackme_dexlabs_patch.py
Executable file
@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
|
||||
PATH_INSTALL = "./"
|
||||
sys.path.append(PATH_INSTALL)
|
||||
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.bytecodes import apk
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core import androconf
|
||||
|
||||
class Nop(dvm.Instruction10x) :
|
||||
def __init__(self) :
|
||||
self.OP = 0x00
|
||||
|
||||
def patch_dex( m ) :
|
||||
for i in m.get_methods() :
|
||||
if i.get_class_name() == "Lorg/dexlabs/poc/dexdropper/DropActivity;" :
|
||||
print i.get_class_name(), i.get_name()
|
||||
|
||||
patch_method_3( i )
|
||||
# or
|
||||
# patch_method_X( i )
|
||||
|
||||
|
||||
def patch_method_1( method ) :
|
||||
buff = method.get_code().get_bc().insn
|
||||
buff = "\x00" * 0x12 + buff[0x12:]
|
||||
method.get_code().get_bc().insn = buff
|
||||
|
||||
def patch_method_2( method ) :
|
||||
method.set_code_idx( 0x12 )
|
||||
instructions = [ j for j in method.get_instructions() ]
|
||||
for j in range(0, 9) :
|
||||
instructions.insert(0, Nop() )
|
||||
method.set_instructions( instructions )
|
||||
|
||||
def patch_method_3( method ) :
|
||||
method.set_code_idx( 0x12 )
|
||||
instructions = [ j for j in method.get_instructions() ]
|
||||
for j in range(0, 9) :
|
||||
instructions.insert(0, dvm.Instruction10x(None, "\x00\x00") )
|
||||
method.set_instructions( instructions )
|
||||
|
||||
|
||||
FILENAME_INPUT = "apks/crash/crackme-obfuscator.apk"
|
||||
|
||||
FILENAME_OUTPUT = "./toto.dex"
|
||||
|
||||
androconf.set_debug()
|
||||
|
||||
a = apk.APK( FILENAME_INPUT )
|
||||
vm = dvm.DalvikVMFormat( a.get_dex() )
|
||||
vmx = analysis.VMAnalysis( vm )
|
||||
|
||||
patch_dex( vm )
|
||||
|
||||
new_dex = vm.save()
|
||||
|
||||
fd = open(FILENAME_OUTPUT, "w")
|
||||
fd.write( new_dex )
|
||||
fd.close()
|
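One way to sanity-check the patch is to reload the written DEX and look at the first instructions of the patched method. A minimal sketch, assuming the script above has already produced ./toto.dex; only methods used in the code above (get_methods, get_class_name, get_name, get_instructions) appear here:

from androguard.core.bytecodes import dvm

# Reload the DEX written by the script above.
patched = dvm.DalvikVMFormat(open("./toto.dex", "rb").read())

for m in patched.get_methods():
    if m.get_class_name() == "Lorg/dexlabs/poc/dexdropper/DropActivity;":
        # The first instructions should now be the injected nop (0x00) opcodes.
        print m.get_class_name(), m.get_name()
        for ins in list(m.get_instructions())[:9]:
            print "\t", ins.get_name()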
44
demos/create_invalid_apk.py
Executable file
@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
|
||||
PATH_INSTALL = "./"
|
||||
sys.path.append(PATH_INSTALL)
|
||||
|
||||
from androguard.core.bytecodes import dvm
|
||||
from androguard.core.bytecodes import apk
|
||||
from androguard.core.analysis import analysis
|
||||
from androguard.core import androconf
|
||||
|
||||
|
||||
def patch_dex(m):
|
||||
for i in m.get_methods():
|
||||
if i.get_class_name() == "Lre/androguard/android/invalid/MainActivity;":
|
||||
#if i.get_name() == "testStrings":
|
||||
# instructions = [ins for ins in i.get_instructions()]
|
||||
# instructions[0].BBBB = 10000
|
||||
# i.set_instructions(instructions)
|
||||
if i.get_name() == "testInstances":
|
||||
instructions = [ins for ins in i.get_instructions()]
|
||||
instructions[0].BBBB = 0x4141
|
||||
i.set_instructions(instructions)
|
||||
|
||||
FILENAME_INPUT = "./examples/android/Invalid/Invalid.apk"
|
||||
FILENAME_OUTPUT = "./toto.apk"
|
||||
|
||||
androconf.set_debug()
|
||||
|
||||
a = apk.APK(FILENAME_INPUT)
|
||||
vm = dvm.DalvikVMFormat(a.get_dex())
|
||||
vmx = analysis.VMAnalysis(vm)
|
||||
|
||||
patch_dex(vm)
|
||||
|
||||
new_dex = vm.save()
|
||||
|
||||
a.new_zip(filename=FILENAME_OUTPUT,
|
||||
deleted_files="(META-INF/.)",
|
||||
new_files={"classes.dex": new_dex})
|
||||
|
||||
# Please configure your keystore !! :) follow the tutorial on android website
|
||||
apk.sign_apk(FILENAME_OUTPUT, "./keystore/keystore1", "tototo")
|
25
demos/create_invalid_dex.py
Executable file
@ -0,0 +1,25 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)

from androguard.core.bytecodes import dvm
from androguard.core import androconf


FILENAME_INPUT = "examples/android/TestsAndroguard/bin/classes.dex"
FILENAME_OUTPUT = "./toto.dex"

androconf.set_debug()

vm = dvm.DalvikVMFormat(open(FILENAME_INPUT, "rb").read())

print hex(vm.header.link_off), hex(vm.header.link_size)
vm.header.link_off, vm.header.link_size = 0x41414141, 0x1337
print hex(vm.header.link_off), hex(vm.header.link_size)

new_dex = vm.save()

open(FILENAME_OUTPUT, "wb").write(new_dex)
179
demos/dad_emul.py
Executable file
@ -0,0 +1,179 @@
|
||||
#!/usr/bin/env python
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
sys.path.append('./')
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis.analysis import uVMAnalysis
|
||||
from androguard.decompiler.dad.decompile import DvMethod
|
||||
from androguard.decompiler.dad.instruction import (Constant,
|
||||
BinaryCompExpression)
|
||||
|
||||
|
||||
class DemoEmulator(object):
|
||||
def __init__(self, graph):
|
||||
self.graph = graph
|
||||
self.loop = []
|
||||
self.mem = {}
|
||||
|
||||
def init(self, key, value):
|
||||
self.mem[key] = value
|
||||
|
||||
def visit(self, node):
|
||||
if node not in self.loop:
|
||||
node.visit(self)
|
||||
|
||||
def visit_ins(self, ins):
|
||||
return ins.visit(self)
|
||||
|
||||
def visit_loop_node(self, loop):
|
||||
self.loop.append(loop)
|
||||
follow = loop.get_loop_follow()
|
||||
if loop.looptype.pretest():
|
||||
if loop.true is follow:
|
||||
loop.neg()
|
||||
loop.true, loop.false = loop.false, loop.true
|
||||
while loop.visit_cond(self):
|
||||
loop.true.visit(self)
|
||||
self.loop.pop()
|
||||
if follow is not None:
|
||||
self.visit(follow)
|
||||
|
||||
def visit_cond_node(self, cond):
|
||||
follow = cond.get_if_follow()
|
||||
if follow is not None:
|
||||
has_else = not (follow in (cond.true, cond.false))
|
||||
cnd = cond.visit_cond(self)
|
||||
if cnd:
|
||||
cond.true.visit(self)
|
||||
elif has_else:
|
||||
cond.false.visit(self)
|
||||
self.visit(follow)
|
||||
|
||||
def visit_statement_node(self, stmt):
|
||||
sucs = self.graph.sucs(stmt)
|
||||
for ins in stmt.get_ins():
|
||||
self.visit_ins(ins)
|
||||
if len(sucs):
|
||||
self.visit(sucs[0])
|
||||
|
||||
def visit_return_node(self, ret):
|
||||
for ins in ret.get_ins():
|
||||
self.visit_ins(ins)
|
||||
|
||||
def visit_constant(self, cst):
|
||||
return cst
|
||||
|
||||
def visit_variable(self, var):
|
||||
return self.mem[var]
|
||||
|
||||
def visit_param(self, param):
|
||||
return param
|
||||
|
||||
def visit_assign(self, lhs, rhs):
|
||||
if lhs is None:
|
||||
rhs.visit(self)
|
||||
else:
|
||||
self.mem[lhs.v] = rhs.visit(self)
|
||||
|
||||
def visit_astore(self, array, index, rhs):
|
||||
array = array.visit(self)
|
||||
if isinstance(index, Constant):
|
||||
idx = index.visit(self, 'I')
|
||||
else:
|
||||
idx = index.visit(self)
|
||||
self.mem[array][idx] = rhs.visit(self)
|
||||
|
||||
def visit_return_void(self):
|
||||
pass
|
||||
|
||||
def visit_aload(self, array, index):
|
||||
arr = array.visit(self)
|
||||
idx = index.visit(self)
|
||||
return self.mem[arr][idx]
|
||||
|
||||
def visit_alength(self, array):
|
||||
return len(self.mem[array.visit(self)])
|
||||
|
||||
def visit_binary_expression(self, op, arg1, arg2):
|
||||
arg1 = arg1.visit(self)
|
||||
if not isinstance(arg1, int):
|
||||
arg1 = ord(arg1)
|
||||
arg2 = arg2.visit(self)
|
||||
if not isinstance(arg2, int):
|
||||
arg2 = ord(arg2)
|
||||
return eval('%s %s %s' % (arg1, op, arg2))
|
||||
|
||||
def visit_unary_expression(self, op, arg):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_cast(self, op, arg):
|
||||
return arg.visit(self)
|
||||
|
||||
def visit_cond_expression(self, op, arg1, arg2):
|
||||
arg1 = arg1.visit(self)
|
||||
if not isinstance(arg1, int):
|
||||
arg1 = ord(arg1)
|
||||
arg2 = arg2.visit(self)
|
||||
if not isinstance(arg2, int):
|
||||
arg2 = ord(arg2)
|
||||
return eval('%s %s %s' % (arg1, op, arg2))
|
||||
|
||||
def visit_get_static(self, cls, name):
|
||||
return self.mem[name]
|
||||
|
||||
|
||||
TEST = './apks/pacsec/magicspiral.apk'
|
||||
|
||||
vm = dvm.DalvikVMFormat(apk.APK(TEST).get_dex())
|
||||
vma = uVMAnalysis(vm)
|
||||
|
||||
method = vm.get_method('crypt')[0]
|
||||
|
||||
amethod = vma.get_method(method)
|
||||
dvmethod = DvMethod(amethod)
|
||||
dvmethod.process() # build IR Form / control flow...
|
||||
|
||||
graph = dvmethod.graph
|
||||
visitor = DemoEmulator(graph)
|
||||
|
||||
l = [94, 42, 93, 88, 3, 2, 95, 2, 13, 85, 11, 2, 19, 1, 125, 19, 0, 102,
|
||||
30, 24, 19, 99, 76, 21, 102, 22, 26, 111, 39, 125, 2, 44, 80, 10, 90,
|
||||
5, 119, 100, 119, 60, 4, 87, 79, 42, 52]
|
||||
visitor.init(dvmethod.lparams[0], l)
|
||||
|
||||
KEYVALUE = '6^)(9-p35a%3#4S!4S0)$Yt%^&5(j.g^&o(*0)$Yv!#O@6GpG@=+3j.&6^)(0-=1'
|
||||
visitor.init('KEYVALUE', '[BKEYVALUE')
|
||||
visitor.init('[BKEYVALUE', KEYVALUE)
|
||||
|
||||
visitor.init('keylen', len(KEYVALUE))
|
||||
|
||||
method.show()
|
||||
|
||||
def show_mem(visitor):
|
||||
print 'Memory[4]: %s' % visitor.mem[4]
|
||||
print '==> %r' % ''.join(chr(i) for i in visitor.mem[4])
|
||||
|
||||
show_mem(visitor)
|
||||
print '\nStarting visit...',
|
||||
graph.get_entry().visit(visitor)
|
||||
print ' done !\n'
|
||||
show_mem(visitor)
|
||||
|
329
demos/dad_print.py
Executable file
@ -0,0 +1,329 @@
|
||||
#!/usr/bin/env python
|
||||
# This file is part of Androguard.
|
||||
#
|
||||
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Androguard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Androguard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
sys.path.append('./')
|
||||
|
||||
from androguard.core.bytecodes import apk, dvm
|
||||
from androguard.core.analysis.analysis import uVMAnalysis
|
||||
from androguard.decompiler.dad.decompile import DvMethod
|
||||
from androguard.decompiler.dad.instruction import Constant, BinaryCompExpression
|
||||
|
||||
|
||||
class PrintVisitor(object):
|
||||
def __init__(self, graph):
|
||||
self.graph = graph
|
||||
self.visited_nodes = set()
|
||||
self.loop_follow = [None]
|
||||
self.latch_node = [None]
|
||||
self.if_follow = [None]
|
||||
self.switch_follow = [None]
|
||||
self.next_case = None
|
||||
|
||||
def visit_ins(self, ins):
|
||||
return ins.visit(self)
|
||||
|
||||
def visit_node(self, node):
|
||||
if node in (self.if_follow[-1], self.switch_follow[-1],
|
||||
self.loop_follow[-1], self.latch_node[-1]):
|
||||
return
|
||||
if node in self.visited_nodes:
|
||||
return
|
||||
self.visited_nodes.add(node)
|
||||
node.visit(self)
|
||||
|
||||
def visit_loop_node(self, loop):
|
||||
print '- Loop node', loop.num
|
||||
follow = loop.get_loop_follow()
|
||||
if follow is None and not loop.looptype.endless():
|
||||
exit('Loop has no follow !', 'error')
|
||||
if loop.looptype.pretest():
|
||||
if loop.true is follow:
|
||||
loop.neg()
|
||||
loop.true, loop.false = loop.false, loop.true
|
||||
cnd = loop.visit_cond(self)
|
||||
print 'while(%s) {' % cnd
|
||||
elif loop.looptype.posttest():
|
||||
print 'do {'
|
||||
self.latch_node.append(loop.latch)
|
||||
elif loop.looptype.endless():
|
||||
print 'while(true) {'
|
||||
pass
|
||||
self.loop_follow.append(follow)
|
||||
if loop.looptype.pretest():
|
||||
self.visit_node(loop.true)
|
||||
else:
|
||||
self.visit_node(loop.cond)
|
||||
self.loop_follow.pop()
|
||||
if loop.looptype.pretest():
|
||||
print '}'
|
||||
elif loop.looptype.posttest():
|
||||
print '} while(',
|
||||
self.latch_node.pop()
|
||||
loop.latch.visit_cond(self)
|
||||
print ')'
|
||||
else:
|
||||
self.visit_node(loop.latch)
|
||||
if follow is not None:
|
||||
self.visit_node(follow)
|
||||
|
||||
def visit_cond_node(self, cond):
|
||||
print '- Cond node', cond.num
|
||||
follow = cond.get_if_follow()
|
||||
if cond.false is self.loop_follow[-1]:
|
||||
cond.neg()
|
||||
cond.true, cond.false = cond.false, cond.true
|
||||
cond.visit_cond(self)
|
||||
self.visit_node(cond.false)
|
||||
elif follow is not None:
|
||||
is_else = not (follow in (cond.true, cond.false))
|
||||
if (cond.true in (follow, self.next_case)
|
||||
or cond.num > cond.true.num):
|
||||
cond.neg()
|
||||
cond.true, cond.false = cond.false, cond.true
|
||||
self.if_follow.append(follow)
|
||||
if not cond.true in self.visited_nodes:
|
||||
cnd = cond.visit_cond(self)
|
||||
print 'if (%s) {' % cnd
|
||||
self.visit_node(cond.true)
|
||||
if is_else and not cond.false in self.visited_nodes:
|
||||
print '} else {'
|
||||
self.visit_node(cond.false)
|
||||
print '}'
|
||||
self.if_follow.pop()
|
||||
self.visit_node(follow)
|
||||
else:
|
||||
cond.visit_cond(self)
|
||||
self.visit_node(cond.true)
|
||||
self.visit_node(cond.false)
|
||||
|
||||
def visit_short_circuit_condition(self, nnot, aand, cond1, cond2):
|
||||
if nnot:
|
||||
cond1.neg()
|
||||
cond1.visit_cond(self)
|
||||
cond2.visit_cond(self)
|
||||
|
||||
def visit_switch_node(self, switch):
|
||||
lins = switch.get_ins()
|
||||
for ins in lins[:-1]:
|
||||
self.visit_ins(ins)
|
||||
switch_ins = switch.get_ins()[-1]
|
||||
self.visit_ins(switch_ins)
|
||||
follow = switch.switch_follow
|
||||
cases = switch.cases
|
||||
self.switch_follow.append(follow)
|
||||
default = switch.default
|
||||
for i, node in enumerate(cases):
|
||||
if node in self.visited_nodes:
|
||||
continue
|
||||
for case in switch.node_to_case[node]:
|
||||
pass
|
||||
if i + 1 < len(cases):
|
||||
self.next_case = cases[i + 1]
|
||||
else:
|
||||
self.next_case = None
|
||||
if node is default:
|
||||
default = None
|
||||
self.visit_node(node)
|
||||
if default not in (None, follow):
|
||||
self.visit_node(default)
|
||||
self.switch_follow.pop()
|
||||
self.visit_node(follow)
|
||||
|
||||
def visit_statement_node(self, stmt):
|
||||
print '- Statement node', stmt.num
|
||||
sucs = self.graph.sucs(stmt)
|
||||
for ins in stmt.get_ins():
|
||||
self.visit_ins(ins)
|
||||
if len(sucs) == 0:
|
||||
return
|
||||
follow = sucs[0]
|
||||
self.visit_node(follow)
|
||||
|
||||
def visit_return_node(self, ret):
|
||||
print '- Return node', ret.num
|
||||
for ins in ret.get_ins():
|
||||
self.visit_ins(ins)
|
||||
|
||||
def visit_throw_node(self, throw):
|
||||
for ins in throw.get_ins():
|
||||
self.visit_ins(ins)
|
||||
|
||||
def visit_constant(self, cst):
|
||||
return cst
|
||||
|
||||
def visit_base_class(self, cls):
|
||||
return cls
|
||||
|
||||
def visit_variable(self, var):
|
||||
return 'v%s' % var
|
||||
|
||||
def visit_param(self, param):
|
||||
return 'p%s' % param
|
||||
|
||||
def visit_this(self):
|
||||
return 'this'
|
||||
|
||||
def visit_assign(self, lhs, rhs):
|
||||
if lhs is None:
|
||||
rhs.visit(self)
|
||||
return
|
||||
l = lhs.visit(self)
|
||||
r = rhs.visit(self)
|
||||
print '%s = %s;' % (l, r)
|
||||
|
||||
def visit_move_result(self, lhs, rhs):
|
||||
l = lhs.visit(self)
|
||||
r = rhs.visit(self)
|
||||
print '%s = %s;' % (l, r)
|
||||
|
||||
def visit_move(self, lhs, rhs):
|
||||
if lhs is rhs:
|
||||
return
|
||||
l = lhs.visit(self)
|
||||
r = rhs.visit(self)
|
||||
print '%s = %s;' % (l, r)
|
||||
|
||||
def visit_astore(self, array, index, rhs):
|
||||
arr = array.visit(self)
|
||||
if isinstance(index, Constant):
|
||||
idx = index.visit(self, 'I')
|
||||
else:
|
||||
idx = index.visit(self)
|
||||
r = rhs.visit(self)
|
||||
print '%s[%s] = %s' % (arr, idx, r)
|
||||
|
||||
def visit_put_static(self, cls, name, rhs):
|
||||
r = rhs.visit(self)
|
||||
return '%s.%s = %s' % (cls, name, r)
|
||||
|
||||
def visit_put_instance(self, lhs, name, rhs):
|
||||
l = lhs.visit(self)
|
||||
r = rhs.visit(self)
|
||||
return '%s.%s = %s' % (l, name, r)
|
||||
|
||||
def visit_new(self, atype):
|
||||
pass
|
||||
|
||||
def visit_invoke(self, name, base, args):
|
||||
base.visit(self)
|
||||
for arg in args:
|
||||
arg.visit(self)
|
||||
|
||||
def visit_return_void(self):
|
||||
print 'return;'
|
||||
|
||||
def visit_return(self, arg):
|
||||
a = arg.visit(self)
|
||||
print 'return %s;' % a
|
||||
|
||||
def visit_nop(self):
|
||||
pass
|
||||
|
||||
def visit_switch(self, arg):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_check_cast(self, arg, atype):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_aload(self, array, index):
|
||||
arr = array.visit(self)
|
||||
idx = index.visit(self)
|
||||
return '%s[%s]' % (arr, idx)
|
||||
|
||||
def visit_alength(self, array):
|
||||
res = array.visit(self)
|
||||
return '%s.length' % res
|
||||
|
||||
def visit_new_array(self, atype, size):
|
||||
size.visit(self)
|
||||
|
||||
def visit_filled_new_array(self, atype, size, args):
|
||||
atype.visit(self)
|
||||
size.visit(self)
|
||||
for arg in args:
|
||||
arg.visit(self)
|
||||
|
||||
def visit_fill_array(self, array, value):
|
||||
array.visit(self)
|
||||
|
||||
def visit_monitor_enter(self, ref):
|
||||
ref.visit(self)
|
||||
|
||||
def visit_monitor_exit(self, ref):
|
||||
pass
|
||||
|
||||
def visit_throw(self, ref):
|
||||
ref.visit(self)
|
||||
|
||||
def visit_binary_expression(self, op, arg1, arg2):
|
||||
val1 = arg1.visit(self)
|
||||
val2 = arg2.visit(self)
|
||||
return '%s %s %s' % (val1, op, val2)
|
||||
|
||||
def visit_unary_expression(self, op, arg):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_cast(self, op, arg):
|
||||
a = arg.visit(self)
|
||||
return '(%s %s)' % (op, a)
|
||||
|
||||
def visit_cond_expression(self, op, arg1, arg2):
|
||||
val1 = arg1.visit(self)
|
||||
val2 = arg2.visit(self)
|
||||
return '%s %s %s' % (val1, op, val2)
|
||||
|
||||
def visit_condz_expression(self, op, arg):
|
||||
if isinstance(arg, BinaryCompExpression):
|
||||
arg.op = op
|
||||
arg.visit(self)
|
||||
else:
|
||||
arg.visit(self)
|
||||
|
||||
def visit_get_instance(self, arg, name):
|
||||
arg.visit(self)
|
||||
|
||||
def visit_get_static(self, cls, name):
|
||||
return '%s.%s' % (cls, name)
|
||||
|
||||
TEST = '../DroidDream/magicspiral.apk'
|
||||
|
||||
vm = dvm.DalvikVMFormat(apk.APK(TEST).get_dex())
|
||||
vma = uVMAnalysis(vm)
|
||||
|
||||
method = vm.get_method('crypt')[0]
|
||||
method.show()
|
||||
|
||||
amethod = vma.get_method(method)
|
||||
dvmethod = DvMethod(amethod)
|
||||
|
||||
dvmethod.process() # build IR Form / control flow...
|
||||
|
||||
graph = dvmethod.graph
|
||||
|
||||
print 'Entry block : %s\n' % graph.get_entry()
|
||||
|
||||
for block in graph: # graph.get_rpo() to iterate in reverse post order
|
||||
print 'Block : %s' % block
|
||||
for ins in block.get_ins():
|
||||
print ' - %s' % ins
|
||||
print
|
||||
|
||||
visitor = PrintVisitor(graph)
|
||||
graph.get_entry().visit(visitor)
|
24
demos/dalvikvm_format_1.py
Executable file
@ -0,0 +1,24 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)
from androguard.core.bytecodes import dvm

TEST = "./examples/dalvik/test/bin/classes.dex"

j = dvm.DalvikVMFormat(open(TEST).read())

# SHOW CLASS (verbose)
j.show()

# SHOW FIELDS
for i in j.get_fields():
    print i.get_access_flags(), i.get_name(), i.get_descriptor()

print

# SHOW METHODS
for i in j.get_methods():
    print i.get_access_flags(), i.get_name(), i.get_descriptor()
27
demos/dalvikvm_format_2.py
Executable file
@ -0,0 +1,27 @@
#!/usr/bin/env python

import sys, random, string

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)

from androguard.core.bytecodes import dvm

TEST = "./examples/dalvik/test/bin/classes.dex"
TEST_OUTPUT = "./examples/dalvik/test/bin/classes_output.dex"

j = dvm.DalvikVMFormat( open(TEST).read() )

# Modify the name of each field
#for field in j.get_fields() :
#    field.set_name( random.choice( string.letters ) + ''.join([ random.choice(string.letters + string.digits) for i in range(10 - 1) ] ) )

# Modify the name of each method (except the constructor (<init>) and an externally called method (go))
#for method in j.get_methods() :
#    if method.get_name() != "go" and method.get_name() != "<init>" :
#        method.set_name( random.choice( string.letters ) + ''.join([ random.choice(string.letters + string.digits) for i in range(10 - 1) ] ) )

# SAVE CLASS
fd = open( TEST_OUTPUT, "w" )
fd.write( j.save() )
fd.close()
22
demos/dalvikvm_format_3.py
Executable file
@ -0,0 +1,22 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)

from androguard.core.bytecodes import dvm
from androguard.core.analysis import analysis

TEST = "examples/android/TestsAndroguard/bin/classes.dex"

j = dvm.DalvikVMFormat( open(TEST).read() )
x = analysis.VMAnalysis( j )
j.set_vmanalysis( x )

# SHOW CLASSES (verbose and pretty)
j.pretty_show()

# SHOW METHODS
for i in j.get_methods() :
    i.pretty_show( )
30
demos/dalvikvm_format_4.py
Executable file
@ -0,0 +1,30 @@
#!/usr/bin/env python

import sys

PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)

from androguard.core.bytecodes import dvm
from androguard.core.analysis import analysis
from androguard.decompiler import decompiler

TEST = "examples/android/TestsAndroguard/bin/classes.dex"

j = dvm.DalvikVMFormat( open(TEST).read() )
jx = analysis.VMAnalysis( j )

#d = decompiler.DecompilerDex2Jad( j )
#d = decompiler.DecompilerDed( j )
d = decompiler.DecompilerDAD( j, jx )

j.set_decompiler( d )

# SHOW METHODS
for i in j.get_methods() :
    if i.get_name() == "onCreate" :
        print i.get_class_name(), i.get_name()
        i.source()

#    if i.get_name() == "testWhileTrue" :
#        i.source()
Some files were not shown because too many files have changed in this diff.