Reduce stackframe in fcn_recurse ##anal

* This function is called recursively for each basic block
* Limits the size of functions that can be analyzed (anal.depth)
* The r2 -A Ch34 almost works now.
* Moving from stack to heap is not the right solution
This commit is contained in:
pancake 2021-02-03 18:55:18 +01:00 committed by GitHub
parent 545f9197f1
commit b6d1476674
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 204 additions and 188 deletions

View File

@ -1,9 +1,8 @@
/* radare - LGPL - Copyright 2010-2020 - nibble, alvaro, pancake */
/* radare - LGPL - Copyright 2010-2021 - nibble, alvaro, pancake */
#include <r_anal.h>
#include <r_parse.h>
#include <r_util.h>
#include <r_list.h>
#define READ_AHEAD 1
#define SDB_KEY_BB "bb.0x%"PFMT64x ".0x%"PFMT64x
@ -11,6 +10,7 @@
#define JMPTBLSZ 512
#define JMPTBL_LEA_SEARCH_SZ 64
#define JMPTBL_MAXFCNSIZE 4096
#define R_ANAL_MAX_INCSTACK 8096
#define BB_ALIGN 0x10
#define MAX_SCAN_SIZE 0x7ffffff
@ -289,13 +289,14 @@ static bool regs_exist(RAnalValue *src, RAnalValue *dst) {
// 0 if not skipped; 1 if skipped; 2 if skipped before
static int skip_hp(RAnal *anal, RAnalFunction *fcn, RAnalOp *op, RAnalBlock *bb, ut64 addr,
char *tmp_buf, int oplen, int un_idx, int *idx) {
int oplen, int un_idx, int *idx) {
// this step is required in order to prevent infinite recursion in some cases
if ((addr + un_idx - oplen) == fcn->addr) {
// use addr instead of op->addr to mark repeat
if (!anal->flb.exist_at (anal->flb.f, "skip", 4, addr)) {
snprintf (tmp_buf + 5, MAX_FLG_NAME_SIZE - 6, "%"PFMT64u, addr);
anal->flb.set (anal->flb.f, tmp_buf, addr, oplen);
char *name = r_str_newf ("skip.%"PFMT64u, addr);
anal->flb.set (anal->flb.f, name, addr, oplen);
free (name);
fcn->addr += oplen;
r_anal_block_relocate (bb, bb->addr + oplen, bb->size - oplen);
*idx = un_idx;
@ -515,14 +516,23 @@ static inline bool does_arch_destroys_dst(const char *arch) {
}
static int fcn_recurse(RAnal *anal, RAnalFunction *fcn, ut64 addr, ut64 len, int depth) {
const int continue_after_jump = anal->opt.afterjmp;
#if 0
if (depth < 1) {
if (anal->verbose) {
eprintf ("Too deep fcn_recurse at 0x%"PFMT64x "\n", addr);
}
return R_ANAL_RET_ERROR; // MUST BE TOO DEEP
}
// TODO Store all this stuff in the heap so we save memory in the stack
#endif
RAnalOp *op = NULL;
const bool continue_after_jump = anal->opt.afterjmp;
const int addrbytes = anal->iob.io ? anal->iob.io->addrbytes : 1;
char *last_reg_mov_lea_name = NULL;
RAnalBlock *bb = NULL;
RAnalBlock *bbg = NULL;
int ret = R_ANAL_RET_END, skip_ret = 0;
bool overlapped = false;
RAnalOp op = {0};
int oplen, idx = 0;
static ut64 cmpval = UT64_MAX; // inherited across functions, otherwise it breaks :?
bool varset = false;
@ -537,11 +547,10 @@ static int fcn_recurse(RAnal *anal, RAnalFunction *fcn, ut64 addr, ut64 len, int
0
};
bool arch_destroys_dst = does_arch_destroys_dst (anal->cur->arch);
bool is_arm = anal->cur->arch && !strncmp (anal->cur->arch, "arm", 3);
char tmp_buf[MAX_FLG_NAME_SIZE + 5] = "skip";
bool is_x86 = is_arm ? false: anal->cur->arch && !strncmp (anal->cur->arch, "x86", 3);
bool is_amd64 = is_x86 ? fcn->cc && !strcmp (fcn->cc, "amd64") : false;
bool is_dalvik = is_x86? false: anal->cur->arch && !strncmp (anal->cur->arch, "dalvik", 6);
const bool is_arm = anal->cur->arch && !strncmp (anal->cur->arch, "arm", 3);
const bool is_x86 = is_arm ? false: anal->cur->arch && !strncmp (anal->cur->arch, "x86", 3);
const bool is_amd64 = is_x86 ? fcn->cc && !strcmp (fcn->cc, "amd64") : false;
const bool is_dalvik = is_x86? false: anal->cur->arch && !strncmp (anal->cur->arch, "dalvik", 6);
RRegItem *variadic_reg = NULL;
if (is_amd64) {
variadic_reg = r_reg_get (anal->reg, "rax", R_REG_TYPE_GPR);
@ -594,7 +603,7 @@ static int fcn_recurse(RAnal *anal, RAnalFunction *fcn, ut64 addr, ut64 len, int
return R_ANAL_RET_END;
}
if (anal->verbose) {
eprintf ("r_anal_fcn_bb() fails at 0x%"PFMT64x ".\n", addr);
eprintf ("r_anal_fcn_bb() fails at 0x%"PFMT64x "\n", addr);
}
return R_ANAL_RET_ERROR; // MUST BE NOT DUP
}
@ -629,7 +638,6 @@ static int fcn_recurse(RAnal *anal, RAnalFunction *fcn, ut64 addr, ut64 len, int
r_list_free (list);
}
ut64 movdisp = UT64_MAX; // used by jmptbl when coded as "mov reg,[R*4+B]"
ut8 buf[32]; // 32 bytes is enough to hold any instruction.
int maxlen = len * addrbytes;
if (is_dalvik) {
bool skipAnalysis = false;
@ -652,6 +660,7 @@ static int fcn_recurse(RAnal *anal, RAnalFunction *fcn, ut64 addr, ut64 len, int
maxlen = 0;
}
op = r_anal_op_new ();
while (addrbytes * idx < maxlen) {
if (!last_is_reg_mov_lea) {
free (last_reg_mov_lea_name);
@ -664,6 +673,7 @@ repeat:
if (r_cons_is_breaked ()) {
break;
}
ut8 buf[32]; // 32 bytes is enough to hold any instruction.
ut32 at_delta = addrbytes * idx;
ut64 at = addr + at_delta;
ut64 bytes_read = R_MIN (len - at_delta, sizeof (buf));
@ -679,8 +689,8 @@ repeat:
}
gotoBeach (R_ANAL_RET_ERROR)
}
r_anal_op_fini (&op);
if ((oplen = r_anal_op (anal, &op, at, buf, bytes_read, R_ANAL_OP_MASK_ESIL | R_ANAL_OP_MASK_VAL | R_ANAL_OP_MASK_HINT)) < 1) {
r_anal_op_fini (op);
if ((oplen = r_anal_op (anal, op, at, buf, bytes_read, R_ANAL_OP_MASK_ESIL | R_ANAL_OP_MASK_VAL | R_ANAL_OP_MASK_HINT)) < 1) {
if (anal->verbose) {
eprintf ("Invalid instruction at 0x%"PFMT64x" with %d bits\n", at, anal->bits);
}
@ -690,7 +700,7 @@ repeat:
}
const char *bp_reg = anal->reg->name[R_REG_NAME_BP];
const char *sp_reg = anal->reg->name[R_REG_NAME_SP];
bool has_stack_regs = bp_reg && sp_reg;
const bool has_stack_regs = bp_reg && sp_reg;
if (anal->opt.nopskip && fcn->addr == at) {
RFlagItem *fi = anal->flb.get_at (anal->flb.f, addr, false);
@ -703,19 +713,19 @@ repeat:
}
}
}
switch (op.type & R_ANAL_OP_TYPE_MASK) {
switch (op->type & R_ANAL_OP_TYPE_MASK) {
case R_ANAL_OP_TYPE_TRAP:
case R_ANAL_OP_TYPE_ILL:
case R_ANAL_OP_TYPE_NOP:
if (r_anal_block_relocate (bb, at + op.size, bb->size)) {
addr = at + op.size;
if (r_anal_block_relocate (bb, at + op->size, bb->size)) {
addr = at + op->size;
fcn->addr = addr;
goto repeat;
}
}
}
if (op.hint.new_bits) {
r_anal_hint_set_bits (anal, op.jump, op.hint.new_bits);
if (op->hint.new_bits) {
r_anal_hint_set_bits (anal, op->jump, op->hint.new_bits);
}
if (idx > 0 && !overlapped) {
bbg = bbget (anal, at, anal->opt.jmpmid && is_x86);
@ -735,7 +745,7 @@ repeat:
}
}
if (!overlapped) {
ut64 newbbsize = bb->size + oplen;
const ut64 newbbsize = bb->size + oplen;
if (newbbsize > MAX_FCN_SIZE) {
gotoBeach (R_ANAL_RET_ERROR);
}
@ -771,7 +781,7 @@ repeat:
}
idx += oplen;
delay.un_idx = idx;
if (anal->opt.delay && op.delay > 0 && !delay.pending) {
if (anal->opt.delay && op->delay > 0 && !delay.pending) {
// Handle first pass through a branch delay jump:
// Come back and handle the current instruction later.
// Save the location of it in `delay.idx`
@ -781,7 +791,7 @@ repeat:
eprintf("Enter branch delay at 0x%08"PFMT64x ". bb->sz=%"PFMT64u"\n", at - oplen, bb->size);
}
delay.idx = idx - oplen;
delay.cnt = op.delay;
delay.cnt = op->delay;
delay.pending = 1; // we need this in case the actual idx is zero...
delay.adjust = !overlapped; // adjustment is required later to avoid double count
continue;
@ -803,7 +813,7 @@ repeat:
// at the original instruction that entered
// the branch delay.
}
} else if (op.delay > 0 && delay.pending) {
} else if (op->delay > 0 && delay.pending) {
if (anal->verbose) {
eprintf ("Revisit branch delay jump at 0x%08"PFMT64x ". bb->sz=%"PFMT64u"\n", addr + idx - oplen, bb->size);
}
@ -825,15 +835,15 @@ repeat:
}
// Note: if we got two branch delay instructions in a row due to an
// compiler bug or junk or something it wont get treated as a delay
switch (op.stackop) {
switch (op->stackop) {
case R_ANAL_STACK_INC:
if (R_ABS (op.stackptr) < 8096) {
fcn->stack += op.stackptr;
if (R_ABS (op->stackptr) < R_ANAL_MAX_INCSTACK) {
fcn->stack += op->stackptr;
if (fcn->stack > fcn->maxstack) {
fcn->maxstack = fcn->stack;
}
}
bb->stackptr += op.stackptr;
bb->stackptr += op->stackptr;
break;
case R_ANAL_STACK_RESET:
bb->stackptr = 0;
@ -841,41 +851,41 @@ repeat:
default:
break;
}
if (op.ptr && op.ptr != UT64_MAX && op.ptr != UT32_MAX) {
if (op->ptr && op->ptr != UT64_MAX && op->ptr != UT32_MAX) {
// swapped parameters wtf
r_anal_xrefs_set (anal, op.addr, op.ptr, R_ANAL_REF_TYPE_DATA);
r_anal_xrefs_set (anal, op->addr, op->ptr, R_ANAL_REF_TYPE_DATA);
}
analyze_retpoline (anal, &op);
switch (op.type & R_ANAL_OP_TYPE_MASK) {
analyze_retpoline (anal, op);
switch (op->type & R_ANAL_OP_TYPE_MASK) {
case R_ANAL_OP_TYPE_CMOV:
case R_ANAL_OP_TYPE_MOV:
last_is_reg_mov_lea = false;
if (is_arm) { // mov lr, pc
const char *esil = r_strbuf_get (&op.esil);
const char *esil = r_strbuf_get (&op->esil);
if (!r_str_cmp (esil, "pc,lr,=", -1)) {
last_is_mov_lr_pc = true;
}
}
if (has_stack_regs && op_is_set_bp (&op, bp_reg, sp_reg)) {
if (has_stack_regs && op_is_set_bp (op, bp_reg, sp_reg)) {
fcn->bp_off = fcn->stack;
}
// Is this a mov of immediate value into a register?
if (op.dst && op.dst->reg && op.dst->reg->name && op.val > 0 && op.val != UT64_MAX) {
if (op->dst && op->dst->reg && op->dst->reg->name && op->val > 0 && op->val != UT64_MAX) {
free (last_reg_mov_lea_name);
if ((last_reg_mov_lea_name = strdup (op.dst->reg->name))) {
last_reg_mov_lea_val = op.val;
if ((last_reg_mov_lea_name = strdup (op->dst->reg->name))) {
last_reg_mov_lea_val = op->val;
last_is_reg_mov_lea = true;
}
}
// skip mov reg, reg
if (anal->opt.jmptbl) {
if (op.scale && op.ireg) {
movdisp = op.disp;
if (op->scale && op->ireg) {
movdisp = op->disp;
}
}
if (anal->opt.hpskip && regs_exist (op.src[0], op.dst)
&& !strcmp (op.src[0]->reg->name, op.dst->reg->name)) {
skip_ret = skip_hp (anal, fcn, &op, bb, addr, tmp_buf, oplen, delay.un_idx, &idx);
if (anal->opt.hpskip && regs_exist (op->src[0], op->dst)
&& !strcmp (op->src[0]->reg->name, op->dst->reg->name)) {
skip_ret = skip_hp (anal, fcn, op, bb, addr, oplen, delay.un_idx, &idx);
if (skip_ret == 1) {
goto repeat;
}
@ -886,35 +896,35 @@ repeat:
break;
case R_ANAL_OP_TYPE_LEA:
last_is_reg_mov_lea = false;
// if first byte in op.ptr is 0xff, then set leaddr assuming its a jumptable
// if first byte in op->ptr is 0xff, then set leaddr assuming its a jumptable
{
ut8 buf[4];
anal->iob.read_at (anal->iob.io, op.ptr, buf, sizeof (buf));
anal->iob.read_at (anal->iob.io, op->ptr, buf, sizeof (buf));
if ((buf[2] == 0xff || buf[2] == 0xfe) && buf[3] == 0xff) {
leaddr_pair *pair = R_NEW (leaddr_pair);
if (!pair) {
eprintf ("Cannot create leaddr_pair\n");
gotoBeach (R_ANAL_RET_ERROR);
}
pair->op_addr = op.addr;
pair->leaddr = op.ptr; // XXX movdisp is dupped but seems to be trashed sometimes(?), better track leaddr separately
pair->op_addr = op->addr;
pair->leaddr = op->ptr; // XXX movdisp is dupped but seems to be trashed sometimes(?), better track leaddr separately
r_list_append (anal->leaddrs, pair);
}
if (has_stack_regs && op_is_set_bp (&op, bp_reg, sp_reg)) {
fcn->bp_off = fcn->stack - op.src[0]->delta;
if (has_stack_regs && op_is_set_bp (op, bp_reg, sp_reg)) {
fcn->bp_off = fcn->stack - op->src[0]->delta;
}
if (op.dst && op.dst->reg && op.dst->reg->name && op.ptr > 0 && op.ptr != UT64_MAX) {
if (op->dst && op->dst->reg && op->dst->reg->name && op->ptr > 0 && op->ptr != UT64_MAX) {
free (last_reg_mov_lea_name);
if ((last_reg_mov_lea_name = strdup (op.dst->reg->name))) {
last_reg_mov_lea_val = op.ptr;
if ((last_reg_mov_lea_name = strdup (op->dst->reg->name))) {
last_reg_mov_lea_val = op->ptr;
last_is_reg_mov_lea = true;
}
}
}
// skip lea reg,[reg]
if (anal->opt.hpskip && regs_exist (op.src[0], op.dst)
&& !strcmp (op.src[0]->reg->name, op.dst->reg->name)) {
skip_ret = skip_hp (anal, fcn, &op, bb, at, tmp_buf, oplen, delay.un_idx, &idx);
if (anal->opt.hpskip && regs_exist (op->src[0], op->dst)
&& !strcmp (op->src[0]->reg->name, op->dst->reg->name)) {
skip_ret = skip_hp (anal, fcn, op, bb, at, oplen, delay.un_idx, &idx);
if (skip_ret == 1) {
goto repeat;
}
@ -923,10 +933,10 @@ repeat:
}
}
if (anal->opt.jmptbl) {
RAnalOp jmp_aop = {0};
ut64 jmptbl_addr = op.ptr;
ut64 casetbl_addr = op.ptr;
if (is_delta_pointer_table (anal, fcn, op.addr, op.ptr, &jmptbl_addr, &casetbl_addr, &jmp_aop)) {
RAnalOp *jmp_aop = r_anal_op_new ();
ut64 jmptbl_addr = op->ptr;
ut64 casetbl_addr = op->ptr;
if (is_delta_pointer_table (anal, fcn, op->addr, op->ptr, &jmptbl_addr, &casetbl_addr, jmp_aop)) {
ut64 table_size, default_case = 0;
// we require both checks here since try_get_jmptbl_info uses
// BB info of the final jmptbl jump, which is no present with
@ -934,32 +944,34 @@ repeat:
// try_get_delta_jmptbl_info doesn't work at times where the
// lea comes after the cmp/default case cjmp, which can be
// handled with try_get_jmptbl_info
if (try_get_jmptbl_info (anal, fcn, jmp_aop.addr, bb, &table_size, &default_case)
|| try_get_delta_jmptbl_info (anal, fcn, jmp_aop.addr, op.addr, &table_size, &default_case)) {
ret = casetbl_addr == op.ptr
? try_walkthrough_jmptbl (anal, fcn, bb, depth, jmp_aop.addr, jmptbl_addr, op.ptr, 4, table_size, default_case, 4)
: try_walkthrough_casetbl (anal, fcn, bb, depth, jmp_aop.addr, jmptbl_addr, casetbl_addr, op.ptr, 4, table_size, default_case, 4);
ut64 addr = jmp_aop->addr;
if (try_get_jmptbl_info (anal, fcn, addr, bb, &table_size, &default_case)
|| try_get_delta_jmptbl_info (anal, fcn, addr, op->addr, &table_size, &default_case)) {
// TODO: -1-
ret = casetbl_addr == op->ptr
? try_walkthrough_jmptbl (anal, fcn, bb, depth, addr, jmptbl_addr, op->ptr, 4, table_size, default_case, 4)
: try_walkthrough_casetbl (anal, fcn, bb, depth, addr, jmptbl_addr, casetbl_addr, op->ptr, 4, table_size, default_case, 4);
if (ret) {
lea_jmptbl_ip = jmp_aop.addr;
lea_jmptbl_ip = addr;
}
}
}
r_anal_op_fini (&jmp_aop);
r_anal_op_free (jmp_aop);
}
break;
case R_ANAL_OP_TYPE_LOAD:
if (anal->opt.loads) {
if (anal->iob.is_valid_offset (anal->iob.io, op.ptr, 0)) {
r_meta_set (anal, R_META_TYPE_DATA, op.ptr, 4, "");
if (anal->iob.is_valid_offset (anal->iob.io, op->ptr, 0)) {
r_meta_set (anal, R_META_TYPE_DATA, op->ptr, 4, "");
}
}
break;
// Case of valid but unused "add [rax], al"
case R_ANAL_OP_TYPE_ADD:
if (anal->opt.ijmp) {
if ((op.size + 4 <= bytes_read) && !memcmp (buf + op.size, "\x00\x00\x00\x00", 4)) {
if ((op->size + 4 <= bytes_read) && !memcmp (buf + op->size, "\x00\x00\x00\x00", 4)) {
r_anal_block_set_size (bb, bb->size - oplen);
op.type = R_ANAL_OP_TYPE_RET;
op->type = R_ANAL_OP_TYPE_RET;
gotoBeach (R_ANAL_RET_END);
}
}
@ -972,11 +984,11 @@ repeat:
// do nothing, because the nopskip goes before this switch
break;
case R_ANAL_OP_TYPE_JMP:
if (op.jump == UT64_MAX) {
if (op->jump == UT64_MAX) {
gotoBeach (R_ANAL_RET_END);
}
{
RFlagItem *fi = anal->flb.get_at (anal->flb.f, op.jump, false);
RFlagItem *fi = anal->flb.get_at (anal->flb.f, op->jump, false);
if (fi && strstr (fi->name, "imp.")) {
gotoBeach (R_ANAL_RET_END);
}
@ -985,49 +997,50 @@ repeat:
gotoBeach (R_ANAL_RET_END);
}
if (anal->opt.jmpref) {
(void) r_anal_xrefs_set (anal, op.addr, op.jump, R_ANAL_REF_TYPE_CODE);
(void) r_anal_xrefs_set (anal, op->addr, op->jump, R_ANAL_REF_TYPE_CODE);
}
if (!anal->opt.jmpabove && (op.jump < fcn->addr)) {
if (!anal->opt.jmpabove && (op->jump < fcn->addr)) {
gotoBeach (R_ANAL_RET_END);
}
if (r_anal_noreturn_at (anal, op.jump)) {
if (r_anal_noreturn_at (anal, op->jump)) {
gotoBeach (R_ANAL_RET_END);
}
{
bool must_eob = true;
RIOMap *map = anal->iob.map_get (anal->iob.io, addr);
if (map) {
must_eob = ( ! r_io_map_contain (map, op.jump) );
must_eob = ( ! r_io_map_contain (map, op->jump) );
}
if (must_eob) {
op.jump = UT64_MAX;
op->jump = UT64_MAX;
gotoBeach (R_ANAL_RET_END);
}
}
#if FIX_JMP_FWD
bb->jump = op.jump;
bb->jump = op->jump;
bb->fail = UT64_MAX;
FITFCNSZ ();
gotoBeach (R_ANAL_RET_END);
#else
if (!overlapped) {
bb->jump = op.jump;
bb->jump = op->jump;
bb->fail = UT64_MAX;
}
ret = r_anal_fcn_bb (anal, fcn, op.jump, depth);
// -1
ret = r_anal_fcn_bb (anal, fcn, op->jump, depth);
int tc = anal->opt.tailcall;
if (tc) {
// eprintf ("TAIL CALL AT 0x%llx\n", op.addr);
int diff = op.jump - op.addr;
// eprintf ("TAIL CALL AT 0x%llx\n", op->addr);
int diff = op->jump - op->addr;
if (tc < 0) {
ut8 buf[32];
(void)anal->iob.read_at (anal->iob.io, op.jump, (ut8 *) buf, sizeof (buf));
(void)anal->iob.read_at (anal->iob.io, op->jump, (ut8 *) buf, sizeof (buf));
if (r_anal_is_prelude (anal, buf, sizeof (buf))) {
fcn_recurse (anal, fcn, op.jump, anal->opt.bb_max_size, depth - 1);
fcn_recurse (anal, fcn, op->jump, anal->opt.bb_max_size, depth - 1);
}
} else if (R_ABS (diff) > tc) {
(void) r_anal_xrefs_set (anal, op.addr, op.jump, R_ANAL_REF_TYPE_CALL);
fcn_recurse (anal, fcn, op.jump, anal->opt.bb_max_size, depth - 1);
(void) r_anal_xrefs_set (anal, op->addr, op->jump, R_ANAL_REF_TYPE_CALL);
fcn_recurse (anal, fcn, op->jump, anal->opt.bb_max_size, depth - 1);
gotoBeach (R_ANAL_RET_END);
}
}
@ -1035,19 +1048,19 @@ repeat:
#endif
break;
case R_ANAL_OP_TYPE_SUB:
if (op.val != UT64_MAX && op.val > 0) {
if (op->val != UT64_MAX && op->val > 0) {
// if register is not stack
cmpval = op.val;
cmpval = op->val;
}
break;
case R_ANAL_OP_TYPE_CMP: {
ut64 val = is_x86 ? op.val : op.ptr;
ut64 val = is_x86 ? op->val : op->ptr;
if (val) {
cmpval = val;
bb->cmpval = cmpval;
bb->cmpreg = op.reg;
bb->cmpreg = op->reg;
r_anal_cond_free (bb->cond);
bb->cond = r_anal_cond_new_from_op (&op);
bb->cond = r_anal_cond_new_from_op (op);
}
}
break;
@ -1056,58 +1069,60 @@ repeat:
case R_ANAL_OP_TYPE_RCJMP:
case R_ANAL_OP_TYPE_UCJMP:
if (anal->opt.cjmpref) {
(void) r_anal_xrefs_set (anal, op.addr, op.jump, R_ANAL_REF_TYPE_CODE);
(void) r_anal_xrefs_set (anal, op->addr, op->jump, R_ANAL_REF_TYPE_CODE);
}
if (!overlapped) {
bb->jump = op.jump;
bb->fail = op.fail;
bb->jump = op->jump;
bb->fail = op->fail;
}
if (bb->cond) {
bb->cond->type = op.cond;
bb->cond->type = op->cond;
}
if (anal->opt.jmptbl) {
if (op.ptr != UT64_MAX) {
if (op->ptr != UT64_MAX) {
ut64 table_size, default_case;
table_size = cmpval + 1;
default_case = op.fail; // is this really default case?
if (cmpval != UT64_MAX && default_case != UT64_MAX && (op.reg || op.ireg)) {
if (op.ireg) {
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.ptr, op.ptr, anal->bits >> 3, table_size, default_case, ret);
} else { // op.reg
ret = walkthrough_arm_jmptbl_style (anal, fcn, bb, depth, op.addr, op.ptr, anal->bits >> 3, table_size, default_case, ret);
default_case = op->fail; // is this really default case?
if (cmpval != UT64_MAX && default_case != UT64_MAX && (op->reg || op->ireg)) {
// TODO -1
if (op->ireg) {
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op->addr, op->ptr, op->ptr, anal->bits >> 3, table_size, default_case, ret);
} else { // op->reg
ret = walkthrough_arm_jmptbl_style (anal, fcn, bb, depth, op->addr, op->ptr, anal->bits >> 3, table_size, default_case, ret);
}
// check if op.jump and op.fail contain jump table location
// check if op->jump and op->fail contain jump table location
// clear jump address, because it's jump table location
if (op.jump == op.ptr) {
op.jump = UT64_MAX;
} else if (op.fail == op.ptr) {
op.fail = UT64_MAX;
if (op->jump == op->ptr) {
op->jump = UT64_MAX;
} else if (op->fail == op->ptr) {
op->fail = UT64_MAX;
}
cmpval = UT64_MAX;
}
}
}
int saved_stack = fcn->stack;
// TODO: depth -1 in here
if (continue_after_jump) {
r_anal_fcn_bb (anal, fcn, op.jump, depth);
r_anal_fcn_bb (anal, fcn, op->jump, depth);
fcn->stack = saved_stack;
ret = r_anal_fcn_bb (anal, fcn, op.fail, depth);
ret = r_anal_fcn_bb (anal, fcn, op->fail, depth);
fcn->stack = saved_stack;
} else {
ret = r_anal_fcn_bb (anal, fcn, op.jump, depth);
ret = r_anal_fcn_bb (anal, fcn, op->jump, depth);
fcn->stack = saved_stack;
ret = r_anal_fcn_bb (anal, fcn, op.fail, depth);
ret = r_anal_fcn_bb (anal, fcn, op->fail, depth);
fcn->stack = saved_stack;
if (op.jump < fcn->addr) {
if (op->jump < fcn->addr) {
if (!overlapped) {
bb->jump = op.jump;
bb->jump = op->jump;
bb->fail = UT64_MAX;
}
gotoBeach (R_ANAL_RET_END);
}
}
// XXX breaks mips analysis too !op.delay
// XXX breaks mips analysis too !op->delay
// this will be all x86, arm (at least)
// without which the analysis is really slow,
// presumably because each opcode would get revisited
@ -1121,10 +1136,10 @@ repeat:
case R_ANAL_OP_TYPE_IRCALL:
/* call [dst] */
// XXX: this is TYPE_MCALL or indirect-call
(void) r_anal_xrefs_set (anal, op.addr, op.ptr, R_ANAL_REF_TYPE_CALL);
(void) r_anal_xrefs_set (anal, op->addr, op->ptr, R_ANAL_REF_TYPE_CALL);
if (r_anal_noreturn_at (anal, op.ptr)) {
RAnalFunction *f = r_anal_get_function_at (anal, op.ptr);
if (r_anal_noreturn_at (anal, op->ptr)) {
RAnalFunction *f = r_anal_get_function_at (anal, op->ptr);
if (f) {
f->is_noreturn = true;
}
@ -1134,10 +1149,10 @@ repeat:
case R_ANAL_OP_TYPE_CCALL:
case R_ANAL_OP_TYPE_CALL:
/* call dst */
(void) r_anal_xrefs_set (anal, op.addr, op.jump, R_ANAL_REF_TYPE_CALL);
(void) r_anal_xrefs_set (anal, op->addr, op->jump, R_ANAL_REF_TYPE_CALL);
if (r_anal_noreturn_at (anal, op.jump)) {
RAnalFunction *f = r_anal_get_function_at (anal, op.jump);
if (r_anal_noreturn_at (anal, op->jump)) {
RAnalFunction *f = r_anal_get_function_at (anal, op->jump);
if (f) {
f->is_noreturn = true;
}
@ -1154,41 +1169,42 @@ repeat:
case R_ANAL_OP_TYPE_IJMP:
case R_ANAL_OP_TYPE_IRJMP:
// if the next instruction is a symbol
if (anal->opt.ijmp && isSymbolNextInstruction (anal, &op)) {
if (anal->opt.ijmp && isSymbolNextInstruction (anal, op)) {
gotoBeach (R_ANAL_RET_END);
}
// switch statement
if (anal->opt.jmptbl && lea_jmptbl_ip != op.addr) {
// op.ireg since rip relative addressing produces way too many false positives otherwise
// op.ireg is 0 for rip relative, "rax", etc otherwise
if (op.ptr != UT64_MAX && op.ireg) { // direct jump
if (anal->opt.jmptbl && lea_jmptbl_ip != op->addr) {
ut8 buf[32]; // 32 bytes is enough to hold any instruction.
// op->ireg since rip relative addressing produces way too many false positives otherwise
// op->ireg is 0 for rip relative, "rax", etc otherwise
if (op->ptr != UT64_MAX && op->ireg) { // direct jump
ut64 table_size, default_case;
if (try_get_jmptbl_info (anal, fcn, op.addr, bb, &table_size, &default_case)) {
if (try_get_jmptbl_info (anal, fcn, op->addr, bb, &table_size, &default_case)) {
bool case_table = false;
RAnalOp prev_op;
anal->iob.read_at (anal->iob.io, op.addr - op.size, buf, sizeof (buf));
if (r_anal_op (anal, &prev_op, op.addr - op.size, buf, sizeof (buf), R_ANAL_OP_MASK_VAL) > 0) {
bool prev_op_has_dst_name = prev_op.dst && prev_op.dst->reg && prev_op.dst->reg->name;
bool op_has_src_name = op.src[0] && op.src[0]->reg && op.src[0]->reg->name;
bool same_reg = (op.ireg && prev_op_has_dst_name && !strcmp (op.ireg, prev_op.dst->reg->name))
|| (op_has_src_name && prev_op_has_dst_name && !strcmp (op.src[0]->reg->name, prev_op.dst->reg->name));
if (prev_op.type == R_ANAL_OP_TYPE_MOV && prev_op.disp && prev_op.disp != UT64_MAX && same_reg) {
RAnalOp *prev_op = r_anal_op_new ();
anal->iob.read_at (anal->iob.io, op->addr - op->size, buf, sizeof (buf));
if (r_anal_op (anal, prev_op, op->addr - op->size, buf, sizeof (buf), R_ANAL_OP_MASK_VAL) > 0) {
bool prev_op_has_dst_name = prev_op->dst && prev_op->dst->reg && prev_op->dst->reg->name;
bool op_has_src_name = op->src[0] && op->src[0]->reg && op->src[0]->reg->name;
bool same_reg = (op->ireg && prev_op_has_dst_name && !strcmp (op->ireg, prev_op->dst->reg->name))
|| (op_has_src_name && prev_op_has_dst_name && !strcmp (op->src[0]->reg->name, prev_op->dst->reg->name));
if (prev_op->type == R_ANAL_OP_TYPE_MOV && prev_op->disp && prev_op->disp != UT64_MAX && same_reg) {
// movzx reg, byte [reg + case_table]
// jmp dword [reg*4 + jump_table]
if (try_walkthrough_casetbl (anal, fcn, bb, depth, op.addr, op.ptr, prev_op.disp, op.ptr, anal->bits >> 3, table_size, default_case, ret)) {
if (try_walkthrough_casetbl (anal, fcn, bb, depth - 1, op->addr, op->ptr, prev_op->disp, op->ptr, anal->bits >> 3, table_size, default_case, ret)) {
ret = case_table = true;
}
}
}
r_anal_op_fini (&prev_op);
r_anal_op_free (prev_op);
if (!case_table) {
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.ptr, op.ptr, anal->bits >> 3, table_size, default_case, ret);
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op->addr, op->ptr, op->ptr, anal->bits >> 3, table_size, default_case, ret);
}
}
} else if (op.ptr != UT64_MAX && op.reg) { // direct jump
} else if (op->ptr != UT64_MAX && op->reg) { // direct jump
ut64 table_size, default_case;
if (try_get_jmptbl_info (anal, fcn, op.addr, bb, &table_size, &default_case)) {
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.ptr, op.ptr, anal->bits >> 3, table_size, default_case, ret);
if (try_get_jmptbl_info (anal, fcn, op->addr, bb, &table_size, &default_case)) {
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth - 1, op->addr, op->ptr, op->ptr, anal->bits >> 3, table_size, default_case, ret);
}
} else if (movdisp == 0) {
ut64 jmptbl_base = UT64_MAX;
@ -1196,13 +1212,13 @@ repeat:
RListIter *lea_op_iter = NULL;
RListIter *iter;
leaddr_pair *pair;
// find nearest candidate leaddr before op.addr
// find nearest candidate leaddr before op->addr
r_list_foreach (anal->leaddrs, iter, pair) {
if (pair->op_addr >= op.addr) {
if (pair->op_addr >= op->addr) {
continue;
}
if (lea_op_off == UT64_MAX || lea_op_off > op.addr - pair->op_addr) {
lea_op_off = op.addr - pair->op_addr;
if (lea_op_off == UT64_MAX || lea_op_off > op->addr - pair->op_addr) {
lea_op_off = op->addr - pair->op_addr;
jmptbl_base = pair->leaddr;
lea_op_iter = iter;
}
@ -1211,62 +1227,62 @@ repeat:
r_list_delete (anal->leaddrs, lea_op_iter);
}
ut64 table_size = cmpval + 1;
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, jmptbl_base, jmptbl_base, 4, table_size, -1, ret);
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth - 1, op->addr, jmptbl_base, jmptbl_base, 4, table_size, -1, ret);
cmpval = UT64_MAX;
} else if (movdisp != UT64_MAX) {
ut64 table_size, default_case;
if (try_get_jmptbl_info (anal, fcn, op.addr, bb, &table_size, &default_case)) {
op.ptr = movdisp;
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.ptr, op.ptr, anal->bits >> 3, table_size, default_case, ret);
if (try_get_jmptbl_info (anal, fcn, op->addr, bb, &table_size, &default_case)) {
op->ptr = movdisp;
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth - 1, op->addr, op->ptr, op->ptr, anal->bits >> 3, table_size, default_case, ret);
}
movdisp = UT64_MAX;
} else if (is_arm) {
if (op.ptrsize == 1) { // TBB
ut64 pred_cmpval = try_get_cmpval_from_parents(anal, fcn, bb, op.ireg);
if (op->ptrsize == 1) { // TBB
ut64 pred_cmpval = try_get_cmpval_from_parents(anal, fcn, bb, op->ireg);
ut64 table_size = 0;
if (pred_cmpval != UT64_MAX) {
table_size += pred_cmpval;
} else {
table_size += cmpval;
}
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.addr + op.size,
op.addr + 4, 1, table_size, UT64_MAX, ret);
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth - 1, op->addr, op->addr + op->size,
op->addr + 4, 1, table_size, UT64_MAX, ret);
// skip inlined jumptable
idx += table_size;
}
if (op.ptrsize == 2) { // LDRH on thumb/arm
ut64 pred_cmpval = try_get_cmpval_from_parents(anal, fcn, bb, op.ireg);
if (op->ptrsize == 2) { // LDRH on thumb/arm
ut64 pred_cmpval = try_get_cmpval_from_parents(anal, fcn, bb, op->ireg);
int tablesize = 1;
if (pred_cmpval != UT64_MAX) {
tablesize += pred_cmpval;
} else {
tablesize += cmpval;
}
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth, op.addr, op.addr + op.size,
op.addr + 4, 2, tablesize, UT64_MAX, ret);
ret = try_walkthrough_jmptbl (anal, fcn, bb, depth - 1, op->addr, op->addr + op->size,
op->addr + 4, 2, tablesize, UT64_MAX, ret);
// skip inlined jumptable
idx += (tablesize * 2);
}
}
}
if (lea_jmptbl_ip == op.addr) {
if (lea_jmptbl_ip == op->addr) {
lea_jmptbl_ip = UT64_MAX;
}
if (anal->opt.ijmp) {
if (continue_after_jump) {
r_anal_fcn_bb (anal, fcn, op.jump, depth);
ret = r_anal_fcn_bb (anal, fcn, op.fail, depth);
r_anal_fcn_bb (anal, fcn, op->jump, depth - 1);
ret = r_anal_fcn_bb (anal, fcn, op->fail, depth - 1);
if (overlapped) {
goto analopfinish;
}
}
if (r_anal_noreturn_at (anal, op.jump) || op.eob) {
if (r_anal_noreturn_at (anal, op->jump) || op->eob) {
goto analopfinish;
}
} else {
analopfinish:
if (op.type == R_ANAL_OP_TYPE_RJMP) {
if (op->type == R_ANAL_OP_TYPE_RJMP) {
gotoBeach (R_ANAL_RET_NOP);
} else {
gotoBeach (R_ANAL_RET_END);
@ -1276,33 +1292,33 @@ analopfinish:
/* fallthru */
case R_ANAL_OP_TYPE_PUSH:
last_is_push = true;
last_push_addr = op.val;
last_push_addr = op->val;
if (anal->iob.is_valid_offset (anal->iob.io, last_push_addr, 1)) {
(void) r_anal_xrefs_set (anal, op.addr, last_push_addr, R_ANAL_REF_TYPE_DATA);
(void) r_anal_xrefs_set (anal, op->addr, last_push_addr, R_ANAL_REF_TYPE_DATA);
}
break;
case R_ANAL_OP_TYPE_UPUSH:
if ((op.type & R_ANAL_OP_TYPE_REG) && last_is_reg_mov_lea && op.src[0] && op.src[0]->reg
&& op.src[0]->reg->name && !strcmp (op.src[0]->reg->name, last_reg_mov_lea_name)) {
if ((op->type & R_ANAL_OP_TYPE_REG) && last_is_reg_mov_lea && op->src[0] && op->src[0]->reg
&& op->src[0]->reg->name && !strcmp (op->src[0]->reg->name, last_reg_mov_lea_name)) {
last_is_push = true;
last_push_addr = last_reg_mov_lea_val;
if (anal->iob.is_valid_offset (anal->iob.io, last_push_addr, 1)) {
(void) r_anal_xrefs_set (anal, op.addr, last_push_addr, R_ANAL_REF_TYPE_DATA);
(void) r_anal_xrefs_set (anal, op->addr, last_push_addr, R_ANAL_REF_TYPE_DATA);
}
}
break;
case R_ANAL_OP_TYPE_RET:
if (op.family == R_ANAL_OP_FAMILY_PRIV) {
if (op->family == R_ANAL_OP_FAMILY_PRIV) {
fcn->type = R_ANAL_FCN_TYPE_INT;
}
if (last_is_push && anal->opt.pushret) {
op.type = R_ANAL_OP_TYPE_JMP;
op.jump = last_push_addr;
bb->jump = op.jump;
ret = r_anal_fcn_bb (anal, fcn, op.jump, depth);
op->type = R_ANAL_OP_TYPE_JMP;
op->jump = last_push_addr;
bb->jump = op->jump;
ret = r_anal_fcn_bb (anal, fcn, op->jump, depth - 1);
goto beach;
}
if (!op.cond) {
if (!op->cond) {
if (anal->verbose) {
eprintf ("RET 0x%08"PFMT64x ". overlap=%s %"PFMT64u" %"PFMT64u"\n",
addr + delay.un_idx - oplen, r_str_bool (overlapped),
@ -1313,45 +1329,45 @@ analopfinish:
break;
}
if (has_stack_regs && arch_destroys_dst) {
if (op_is_set_bp (&op, bp_reg, sp_reg) && op.src[1]) {
switch (op.type & R_ANAL_OP_TYPE_MASK) {
if (op_is_set_bp (op, bp_reg, sp_reg) && op->src[1]) {
switch (op->type & R_ANAL_OP_TYPE_MASK) {
case R_ANAL_OP_TYPE_ADD:
fcn->bp_off = fcn->stack - op.src[1]->imm;
fcn->bp_off = fcn->stack - op->src[1]->imm;
break;
case R_ANAL_OP_TYPE_SUB:
fcn->bp_off = fcn->stack + op.src[1]->imm;
fcn->bp_off = fcn->stack + op->src[1]->imm;
break;
}
}
}
if (anal->opt.vars && !varset) {
r_anal_extract_vars (anal, fcn, &op);
r_anal_extract_vars (anal, fcn, op);
}
if (op.type != R_ANAL_OP_TYPE_MOV && op.type != R_ANAL_OP_TYPE_CMOV && op.type != R_ANAL_OP_TYPE_LEA) {
if (op->type != R_ANAL_OP_TYPE_MOV && op->type != R_ANAL_OP_TYPE_CMOV && op->type != R_ANAL_OP_TYPE_LEA) {
last_is_reg_mov_lea = false;
}
if (op.type != R_ANAL_OP_TYPE_PUSH && op.type != R_ANAL_OP_TYPE_RPUSH) {
if (op->type != R_ANAL_OP_TYPE_PUSH && op->type != R_ANAL_OP_TYPE_RPUSH) {
last_is_push = false;
}
if (is_arm && op.type != R_ANAL_OP_TYPE_MOV) {
if (is_arm && op->type != R_ANAL_OP_TYPE_MOV) {
last_is_mov_lr_pc = false;
}
if (has_variadic_reg && !fcn->is_variadic) {
variadic_reg = r_reg_get (anal->reg, "rax", R_REG_TYPE_GPR);
bool dst_is_variadic = op.dst && op.dst->reg
&& variadic_reg && op.dst->reg->offset == variadic_reg->offset;
bool op_is_cmp = (op.type == R_ANAL_OP_TYPE_CMP) || op.type == R_ANAL_OP_TYPE_ACMP;
bool dst_is_variadic = op->dst && op->dst->reg
&& variadic_reg && op->dst->reg->offset == variadic_reg->offset;
bool op_is_cmp = (op->type == R_ANAL_OP_TYPE_CMP) || op->type == R_ANAL_OP_TYPE_ACMP;
if (dst_is_variadic && !op_is_cmp) {
has_variadic_reg = false;
} else if (op_is_cmp) {
if (op.src[0] && op.src[0]->reg && (op.dst->reg == op.src[0]->reg) && dst_is_variadic) {
if (op->src[0] && op->src[0]->reg && (op->dst->reg == op->src[0]->reg) && dst_is_variadic) {
fcn->is_variadic = true;
}
}
}
}
beach:
r_anal_op_fini (&op);
r_anal_op_free (op);
R_FREE (last_reg_mov_lea_name);
if (bb && bb->size == 0) {
r_anal_function_remove_block (fcn, bb);

View File

@ -77,14 +77,14 @@ static int cmpcalls(const void *_a, const void *_b) {
return (as > bs)? 1: (as < bs)? -1: 0;
}
/* qsort-style comparator: orders functions by basic-block count, ascending. */
static int cmpnbbs(const void *_a, const void *_b) {
	const RAnalFunction *fa = _a;
	const RAnalFunction *fb = _b;
	ut64 na = r_list_length (fa->bbs);
	ut64 nb = r_list_length (fb->bbs);
	if (na > nb) {
		return 1;
	}
	return (na < nb)? -1: 0;
}
/* qsort-style comparator: orders functions by entry address, ascending. */
static int cmpaddr(const void *_a, const void *_b) {
	const RAnalFunction *fa = _a;
	const RAnalFunction *fb = _b;
	if (fa->addr > fb->addr) {
		return 1;
	}
	return (fa->addr < fb->addr)? -1: 0;
}