Mirror of https://github.com/capstone-engine/capstone.git (synced 2024-11-23 21:49:46 +00:00)

Commit b57ddf8bc5: merge master to next branch
@@ -5,12 +5,18 @@ before_install:
 before_script:
 - wget https://github.com/groundx/capstonefuzz/raw/master/corpus/corpus-libFuzzer-capstone_fuzz_disasmnext-latest.zip
 - unzip -q corpus-libFuzzer-capstone_fuzz_disasmnext-latest.zip -d suite/fuzz
+- git clone https://git.cryptomilk.org/projects/cmocka.git suite/cstest/cmocka
+- chmod +x suite/cstest/build_cstest.sh
 script:
 - ./make.sh
 - make check
+- sudo make install
 - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then cp libcapstone.so.* bindings/python/libcapstone.so; fi
 - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then cp libcapstone.*.dylib bindings/python/libcapstone.dylib; fi
-- if [[ "$NOPYTEST" != "true" ]]; then cd bindings/python && make check; fi
+- if [[ "$NOPYTEST" != "true" ]]; then cd bindings/python && make check; cd ../..; fi
+- if [[ "$NOPYTEST" != "true" ]]; then cd suite/cstest && ./build_cstest.sh; fi
+- if [[ "$NOPYTEST" != "true" ]]; then python cstest_report.py -D -t build/cstest -d ../MC; fi
+- if [[ "$NOPYTEST" != "true" ]]; then python cstest_report.py -D -t build/cstest -f issues.cs; fi
 compiler:
 - clang
 - gcc
@@ -80,3 +80,4 @@ Stephen Eckels (stevemk14ebr): x86 encoding features
 Tong Yu(Spike) & Kai Jern, Lau (xwings): WASM architecture.
 Sebastian Macke: MOS65XX architecture
 Ilya Leoshkevich: SystemZ architecture improvements.
+Do Minh Tuan: Regression testing tool (cstest)
@@ -3,6 +3,8 @@ Capstone Engine
 
 [![Build Status](https://travis-ci.org/aquynh/capstone.svg?branch=next)](https://travis-ci.org/aquynh/capstone)
 [![Build status](https://ci.appveyor.com/api/projects/status/a4wvbn89wu3pinas/branch/next?svg=true)](https://ci.appveyor.com/project/aquynh/capstone/branch/next)
+[![pypi package](https://badge.fury.io/py/capstone.svg)](https://pypi.python.org/pypi/capstone)
+[![pypi downloads](https://pepy.tech/badge/capstone)](https://pepy.tech/project/capstone)
 
 Capstone is a disassembly framework with the target of becoming the ultimate
 disasm engine for binary analysis and reversing in the security community.
@@ -763,7 +763,7 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 } else {
 if (MI->csh->doing_mem) {
 if (MI->csh->imm_unsigned) {
-printInt64Bang(O, imm & 0xffff);
+printUInt64Bang(O, imm);
 } else {
 printInt64Bang(O, imm);
 }
@@ -331,10 +331,29 @@ static void _printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 if (MCOperand_isReg(Op)) {
 printRegName(O, MCOperand_getReg(Op));
 } else if (MCOperand_isImm(Op)) {
+uint8_t encsize;
+uint8_t opsize = X86_immediate_size(MCInst_getOpcode(MI), &encsize);
+
 // Print X86 immediates as signed values.
 int64_t imm = MCOperand_getImm(Op);
 if (imm < 0) {
 if (MI->csh->imm_unsigned) {
+if (opsize) {
+switch(opsize) {
+default:
+break;
+case 1:
+imm &= 0xff;
+break;
+case 2:
+imm &= 0xffff;
+break;
+case 4:
+imm &= 0xffffffff;
+break;
+}
+}
+
 SStream_concat(O, "$0x%"PRIx64, imm);
 } else {
 if (imm < -HEX_THRESHOLD)
@@ -678,6 +697,22 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 SStream_concat(O, "$%"PRIu64, imm);
 } else {
 if (MI->csh->imm_unsigned) {
+if (opsize) {
+switch(opsize) {
+default:
+break;
+case 1:
+imm &= 0xff;
+break;
+case 2:
+imm &= 0xffff;
+break;
+case 4:
+imm &= 0xffffffff;
+break;
+}
+}
+
 SStream_concat(O, "$0x%"PRIx64, imm);
 } else {
 if (imm == 0x8000000000000000LL) // imm == -imm
@@ -373,12 +373,28 @@ static bool need_zero_prefix(uint64_t imm)
 return true;
 }
 
-static void printImm(int syntax, SStream *O, int64_t imm, bool positive)
+static void printImm(MCInst *MI, SStream *O, int64_t imm, bool positive)
 {
 if (positive) {
 // always print this number in positive form
-if (syntax == CS_OPT_SYNTAX_MASM) {
+if (MI->csh->syntax == CS_OPT_SYNTAX_MASM) {
 if (imm < 0) {
+if (MI->op1_size) {
+switch(MI->op1_size) {
+default:
+break;
+case 1:
+imm &= 0xff;
+break;
+case 2:
+imm &= 0xffff;
+break;
+case 4:
+imm &= 0xffffffff;
+break;
+}
+}
+
 if (imm == 0x8000000000000000LL) // imm == -imm
 SStream_concat0(O, "8000000000000000h");
 else if (need_zero_prefix(imm))
@@ -396,6 +412,22 @@ static void printImm(int syntax, SStream *O, int64_t imm, bool positive)
 }
 } else { // Intel syntax
 if (imm < 0) {
+if (MI->op1_size) {
+switch(MI->op1_size) {
+default:
+break;
+case 1:
+imm &= 0xff;
+break;
+case 2:
+imm &= 0xffff;
+break;
+case 4:
+imm &= 0xffffffff;
+break;
+}
+}
+
 SStream_concat(O, "0x%"PRIx64, imm);
 } else {
 if (imm > HEX_THRESHOLD)
@@ -405,7 +437,7 @@ static void printImm(int syntax, SStream *O, int64_t imm, bool positive)
 }
 }
 } else {
-if (syntax == CS_OPT_SYNTAX_MASM) {
+if (MI->csh->syntax == CS_OPT_SYNTAX_MASM) {
 if (imm < 0) {
 if (imm == 0x8000000000000000LL) // imm == -imm
 SStream_concat0(O, "8000000000000000h");
@@ -452,7 +484,7 @@ static void _printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 printRegName(O, MCOperand_getReg(Op));
 } else if (MCOperand_isImm(Op)) {
 int64_t imm = MCOperand_getImm(Op);
-printImm(MI->csh->syntax, O, imm, MI->csh->imm_unsigned);
+printImm(MI, O, imm, MI->csh->imm_unsigned);
 }
 }
 
@@ -661,9 +693,9 @@ static void printMemOffset(MCInst *MI, unsigned Op, SStream *O)
 MI->flat_insn->detail->x86.operands[MI->flat_insn->detail->x86.op_count].mem.disp = imm;
 
 if (imm < 0)
-printImm(MI->csh->syntax, O, arch_masks[MI->csh->mode] & imm, true);
+printImm(MI, O, arch_masks[MI->csh->mode] & imm, true);
 else
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 }
 
 SStream_concat0(O, "]");
@@ -680,7 +712,7 @@ static void printU8Imm(MCInst *MI, unsigned Op, SStream *O)
 {
 uint8_t val = MCOperand_getImm(MCInst_getOperand(MI, Op)) & 0xff;
 
-printImm(MI->csh->syntax, O, val, true);
+printImm(MI, O, val, true);
 
 if (MI->csh->detail) {
 #ifndef CAPSTONE_DIET
@@ -825,7 +857,7 @@ static void printPCRelImm(MCInst *MI, unsigned OpNo, SStream *O)
 if (MI->Opcode == X86_CALLpcrel16 || MI->Opcode == X86_JMP_2)
 imm = imm & 0xffff;
 
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 
 if (MI->csh->detail) {
 #ifndef CAPSTONE_DIET
@@ -897,12 +929,12 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 // printf(">>> id = %u\n", MI->flat_insn->id);
 switch(MI->flat_insn->id) {
 default:
-printImm(MI->csh->syntax, O, imm, MI->csh->imm_unsigned);
+printImm(MI, O, imm, MI->csh->imm_unsigned);
 break;
 
 case X86_INS_MOVABS:
 // do not print number in negative form
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 break;
 
 case X86_INS_IN:
@@ -910,7 +942,7 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 case X86_INS_INT:
 // do not print number in negative form
 imm = imm & 0xff;
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 break;
 
 case X86_INS_LCALL:
@@ -920,7 +952,7 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 imm = imm & 0xffff;
 opsize = 2;
 }
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 break;
 
 case X86_INS_AND:
@@ -928,10 +960,10 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 case X86_INS_XOR:
 // do not print number in negative form
 if (imm >= 0 && imm <= HEX_THRESHOLD)
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 else {
 imm = arch_masks[opsize? opsize : MI->imm_size] & imm;
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 }
 break;
 
@@ -939,10 +971,10 @@ static void printOperand(MCInst *MI, unsigned OpNo, SStream *O)
 case X86_INS_RETF:
 // RET imm16
 if (imm >= 0 && imm <= HEX_THRESHOLD)
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 else {
 imm = 0xffff & imm;
-printImm(MI->csh->syntax, O, imm, true);
+printImm(MI, O, imm, true);
 }
 break;
 }
@@ -1042,17 +1074,17 @@ static void printMemReference(MCInst *MI, unsigned Op, SStream *O)
 if (NeedPlus) {
 if (DispVal < 0) {
 SStream_concat0(O, " - ");
-printImm(MI->csh->syntax, O, -DispVal, true);
+printImm(MI, O, -DispVal, true);
 } else {
 SStream_concat0(O, " + ");
-printImm(MI->csh->syntax, O, DispVal, true);
+printImm(MI, O, DispVal, true);
 }
 } else {
 // memory reference to an immediate address
 if (DispVal < 0) {
-printImm(MI->csh->syntax, O, arch_masks[MI->csh->mode] & DispVal, true);
+printImm(MI, O, arch_masks[MI->csh->mode] & DispVal, true);
 } else {
-printImm(MI->csh->syntax, O, DispVal, true);
+printImm(MI, O, DispVal, true);
 }
 }
 
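Note: the X86 printer hunks above make both the ATT and Intel printers truncate an unsigned immediate to its encoded operand size (1, 2 or 4 bytes) before formatting it. Below is a minimal sketch, not part of this commit, of how that code path is reached from the C API; the sample bytes and the exact rendering are illustrative assumptions, while cs_open, cs_option, CS_OPT_UNSIGNED and cs_disasm are real Capstone API.

#include <stdio.h>
#include <capstone/capstone.h>

int main(void)
{
	csh handle;
	cs_insn *insn;
	const uint8_t code[] = { 0x83, 0xc0, 0xff };	/* assumed sample: add eax, -1 */

	if (cs_open(CS_ARCH_X86, CS_MODE_32, &handle) != CS_ERR_OK)
		return 1;
	/* CS_OPT_UNSIGNED sets the imm_unsigned flag tested in the hunks above;
	 * with this change a negative immediate is masked to its operand size
	 * instead of being printed sign-extended. */
	cs_option(handle, CS_OPT_UNSIGNED, CS_OPT_ON);

	size_t count = cs_disasm(handle, code, sizeof(code), 0x1000, 0, &insn);
	if (count > 0) {
		printf("%s %s\n", insn[0].mnemonic, insn[0].op_str);
		cs_free(insn, count);
	}
	cs_close(&handle);
	return 0;
}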
@@ -312,8 +312,8 @@ typedef struct cs_detail {
 cs_tms320c64x tms320c64x; ///< TMS320C64x architecture
 cs_m680x m680x; ///< M680X architecture
 cs_evm evm; ///< Ethereum architecture
-cs_wasm wasm; ///< Web Assembly architecture
 cs_mos65xx mos65xx; ///< MOS65XX architecture (including MOS6502)
+cs_wasm wasm; ///< Web Assembly architecture
 };
 } cs_detail;
 
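Note: since cs_detail exposes the per-architecture structures through a union, moving cs_wasm below cs_mos65xx does not change how callers reach any member. As a hedged illustration (the helper name and its arguments are hypothetical; CS_OPT_DETAIL, the cs_x86 fields and cs_free are real Capstone API):

#include <inttypes.h>
#include <stdio.h>
#include <capstone/capstone.h>

/* Hypothetical helper: walk the x86 member of the detail union shown above
 * and print every immediate operand.  Expects a handle opened with CS_ARCH_X86. */
static void dump_x86_imms(csh handle, const uint8_t *code, size_t size, uint64_t addr)
{
	cs_insn *insn;
	size_t count;

	cs_option(handle, CS_OPT_DETAIL, CS_OPT_ON);	/* insn->detail is NULL without this */
	count = cs_disasm(handle, code, size, addr, 0, &insn);
	for (size_t i = 0; i < count; i++) {
		cs_x86 *x86 = &insn[i].detail->x86;	/* pick the member matching the opened arch */
		for (uint8_t j = 0; j < x86->op_count; j++)
			if (x86->operands[j].type == X86_OP_IMM)
				printf("0x%" PRIx64 ": imm 0x%" PRIx64 "\n",
						insn[i].address, (uint64_t)x86->operands[j].imm);
	}
	if (count > 0)
		cs_free(insn, count);
}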
@@ -39,9 +39,9 @@
 0xf4,0x33,0x33,0xab = adds x20, sp, w19, uxth #4
 0x2c,0x40,0x34,0xab = adds x12, x1, w20, uxtw
 0x74,0x60,0x2d,0xab = adds x20, x3, x13, uxtx
-0x3f,0x8f,0x34,0xab = adds xzr, x25, w20, sxtb #3
+// 0x3f,0x8f,0x34,0xab = adds xzr, x25, w20, sxtb #3
 0xf2,0xa3,0x33,0xab = adds x18, sp, w19, sxth
-0x5f,0xc0,0x23,0xab = adds xzr, x2, w3, sxtw
+// 0x5f,0xc0,0x23,0xab = adds xzr, x2, w3, sxtw
 0xa3,0xe8,0x29,0xab = adds x3, x5, x9, sxtx #2
 0xa2,0x00,0x27,0x2b = adds w2, w5, w7, uxtb
 0xf5,0x21,0x31,0x2b = adds w21, w15, w17, uxth
@@ -49,15 +49,15 @@
 0x33,0x62,0x21,0x2b = adds w19, w17, w1, uxtx
 0xa2,0x84,0x21,0x2b = adds w2, w5, w1, sxtb #1
 0xfa,0xa3,0x33,0x2b = adds w26, wsp, w19, sxth
-0x5f,0xc0,0x23,0x2b = adds wzr, w2, w3, sxtw
+// 0x5f,0xc0,0x23,0x2b = adds wzr, w2, w3, sxtw
 0x62,0xe0,0x25,0x2b = adds w2, w3, w5, sxtx
 0x82,0x08,0x25,0xeb = subs x2, x4, w5, uxtb #2
 0xf4,0x33,0x33,0xeb = subs x20, sp, w19, uxth #4
 0x2c,0x40,0x34,0xeb = subs x12, x1, w20, uxtw
 0x74,0x60,0x2d,0xeb = subs x20, x3, x13, uxtx
-0x3f,0x8f,0x34,0xeb = subs xzr, x25, w20, sxtb #3
+// 0x3f,0x8f,0x34,0xeb = subs xzr, x25, w20, sxtb #3
 0xf2,0xa3,0x33,0xeb = subs x18, sp, w19, sxth
-0x5f,0xc0,0x23,0xeb = subs xzr, x2, w3, sxtw
+// 0x5f,0xc0,0x23,0xeb = subs xzr, x2, w3, sxtw
 0xa3,0xe8,0x29,0xeb = subs x3, x5, x9, sxtx #2
 0xa2,0x00,0x27,0x6b = subs w2, w5, w7, uxtb
 0xf5,0x21,0x31,0x6b = subs w21, w15, w17, uxth
@@ -65,15 +65,15 @@
 0x33,0x62,0x21,0x6b = subs w19, w17, w1, uxtx
 0xa2,0x84,0x21,0x6b = subs w2, w5, w1, sxtb #1
 0xfa,0xa3,0x33,0x6b = subs w26, wsp, w19, sxth
-0x5f,0xc0,0x23,0x6b = subs wzr, w2, w3, sxtw
+// 0x5f,0xc0,0x23,0x6b = subs wzr, w2, w3, sxtw
 0x62,0xe0,0x25,0x6b = subs w2, w3, w5, sxtx
 0x9f,0x08,0x25,0xeb = cmp x4, w5, uxtb #2
 0xff,0x33,0x33,0xeb = cmp sp, w19, uxth #4
 0x3f,0x40,0x34,0xeb = cmp x1, w20, uxtw
 0x7f,0x60,0x2d,0xeb = cmp x3, x13, uxtx
-0x3f,0x8f,0x34,0xeb = cmp x25, w20, sxtb #3
+// 0x3f,0x8f,0x34,0xeb = cmp x25, w20, sxtb #3
 0xff,0xa3,0x33,0xeb = cmp sp, w19, sxth
-0x5f,0xc0,0x23,0xeb = cmp x2, w3, sxtw
+// 0x5f,0xc0,0x23,0xeb = cmp x2, w3, sxtw
 0xbf,0xe8,0x29,0xeb = cmp x5, x9, sxtx #2
 0xbf,0x00,0x27,0x6b = cmp w5, w7, uxtb
 0xff,0x21,0x31,0x6b = cmp w15, w17, uxth
@@ -81,15 +81,15 @@
 0x3f,0x62,0x21,0x6b = cmp w17, w1, uxtx
 0xbf,0x84,0x21,0x6b = cmp w5, w1, sxtb #1
 0xff,0xa3,0x33,0x6b = cmp wsp, w19, sxth
-0x5f,0xc0,0x23,0x6b = cmp w2, w3, sxtw
+// 0x5f,0xc0,0x23,0x6b = cmp w2, w3, sxtw
 0x7f,0xe0,0x25,0x6b = cmp w3, w5, sxtx
 0x9f,0x08,0x25,0xab = cmn x4, w5, uxtb #2
 0xff,0x33,0x33,0xab = cmn sp, w19, uxth #4
 0x3f,0x40,0x34,0xab = cmn x1, w20, uxtw
 0x7f,0x60,0x2d,0xab = cmn x3, x13, uxtx
-0x3f,0x8f,0x34,0xab = cmn x25, w20, sxtb #3
+// 0x3f,0x8f,0x34,0xab = cmn x25, w20, sxtb #3
 0xff,0xa3,0x33,0xab = cmn sp, w19, sxth
-0x5f,0xc0,0x23,0xab = cmn x2, w3, sxtw
+// 0x5f,0xc0,0x23,0xab = cmn x2, w3, sxtw
 0xbf,0xe8,0x29,0xab = cmn x5, x9, sxtx #2
 0xbf,0x00,0x27,0x2b = cmn w5, w7, uxtb
 0xff,0x21,0x31,0x2b = cmn w15, w17, uxth
@@ -97,7 +97,7 @@
 0x3f,0x62,0x21,0x2b = cmn w17, w1, uxtx
 0xbf,0x84,0x21,0x2b = cmn w5, w1, sxtb #1
 0xff,0xa3,0x33,0x2b = cmn wsp, w19, sxth
-0x5f,0xc0,0x23,0x2b = cmn w2, w3, sxtw
+// 0x5f,0xc0,0x23,0x2b = cmn w2, w3, sxtw
 0x7f,0xe0,0x25,0x2b = cmn w3, w5, sxtx
 0x9f,0x0e,0x3d,0xeb = cmp x20, w29, uxtb #3
 0x9f,0x71,0x2d,0xeb = cmp x12, x13, uxtx #4
@@ -106,37 +106,37 @@
 0x7f,0x70,0x27,0xcb = sub sp, x3, x7, lsl #4
 0xe2,0x47,0x23,0x0b = add w2, wsp, w3, lsl #1
 0xff,0x43,0x29,0x6b = cmp wsp, w9
-0xff,0x53,0x23,0x2b = adds wzr, wsp, w3, lsl #4
+// 0xff,0x53,0x23,0x2b = adds wzr, wsp, w3, lsl #4
 0xe3,0x6b,0x29,0xeb = subs x3, sp, x9, lsl #2
 0xa4,0x00,0x00,0x11 = add w4, w5, #0
-0x62,0xfc,0x3f,0x11 = add w2, w3, #4095
+0x62,0xfc,0x3f,0x11 = add w2, w3, #0xfff
 0xbe,0x07,0x40,0x11 = add w30, w29, #1, lsl #12
-0xad,0xfc,0x7f,0x11 = add w13, w5, #4095, lsl #12
+0xad,0xfc,0x7f,0x11 = add w13, w5, #0xfff, lsl #12
-0xe5,0x98,0x19,0x91 = add x5, x7, #1638
+0xe5,0x98,0x19,0x91 = add x5, x7, #0x666
-0xf4,0x87,0x0c,0x11 = add w20, wsp, #801
+0xf4,0x87,0x0c,0x11 = add w20, wsp, #0x321
-0xff,0x43,0x11,0x11 = add wsp, wsp, #1104
+0xff,0x43,0x11,0x11 = add wsp, wsp, #0x450
-0xdf,0xd3,0x3f,0x11 = add wsp, w30, #4084
+0xdf,0xd3,0x3f,0x11 = add wsp, w30, #0xff4
-0x00,0x8f,0x04,0x91 = add x0, x24, #291
+0x00,0x8f,0x04,0x91 = add x0, x24, #0x123
-0x03,0xff,0x7f,0x91 = add x3, x24, #4095, lsl #12
+0x03,0xff,0x7f,0x91 = add x3, x24, #0xfff, lsl #12
-0xe8,0xcb,0x10,0x91 = add x8, sp, #1074
+0xe8,0xcb,0x10,0x91 = add x8, sp, #0x432
-0xbf,0xa3,0x3b,0x91 = add sp, x29, #3816
+0xbf,0xa3,0x3b,0x91 = add sp, x29, #0xee8
-0xe0,0xb7,0x3f,0x51 = sub w0, wsp, #4077
+0xe0,0xb7,0x3f,0x51 = sub w0, wsp, #0xfed
-0x84,0x8a,0x48,0x51 = sub w4, w20, #546, lsl #12
+0x84,0x8a,0x48,0x51 = sub w4, w20, #0x222, lsl #12
-0xff,0x83,0x04,0xd1 = sub sp, sp, #288
+0xff,0x83,0x04,0xd1 = sub sp, sp, #0x120
-0x7f,0x42,0x00,0x51 = sub wsp, w19, #16
+0x7f,0x42,0x00,0x51 = sub wsp, w19, #0x10
-0xed,0x8e,0x44,0x31 = adds w13, w23, #291, lsl #12
+0xed,0x8e,0x44,0x31 = adds w13, w23, #0x123, lsl #12
-0x5f,0xfc,0x3f,0x31 = adds wzr, w2, #4095
+// 0x5f,0xfc,0x3f,0x31 = adds wzr, w2, #0xfff
 0xf4,0x03,0x00,0x31 = adds w20, wsp, #0
-0x7f,0x04,0x40,0xb1 = adds xzr, x3, #1, lsl #12
+// 0x7f,0x04,0x40,0xb1 = adds xzr, x3, #1, lsl #12
-0xff,0x53,0x40,0xf1 = subs xzr, sp, #20, lsl #12
+// 0xff,0x53,0x40,0xf1 = subs xzr, sp, #0x14, lsl #12
-0xdf,0xff,0x3f,0xf1 = subs xzr, x30, #4095
+// 0xdf,0xff,0x3f,0xf1 = subs xzr, x30, #0xfff
-0xe4,0xbb,0x3b,0xf1 = subs x4, sp, #3822
+0xe4,0xbb,0x3b,0xf1 = subs x4, sp, #0xeee
-0x7f,0x8c,0x44,0x31 = cmn w3, #291, lsl #12
+0x7f,0x8c,0x44,0x31 = cmn w3, #0x123, lsl #12
-0xff,0x57,0x15,0x31 = cmn wsp, #1365
+0xff,0x57,0x15,0x31 = cmn wsp, #0x555
-0xff,0x13,0x51,0xb1 = cmn sp, #1092, lsl #12
+0xff,0x13,0x51,0xb1 = cmn sp, #0x444, lsl #12
-0x9f,0xb0,0x44,0xf1 = cmp x4, #300, lsl #12
+0x9f,0xb0,0x44,0xf1 = cmp x4, #0x12c, lsl #12
-0xff,0xd3,0x07,0x71 = cmp wsp, #500
+0xff,0xd3,0x07,0x71 = cmp wsp, #0x1f4
-0xff,0x23,0x03,0xf1 = cmp sp, #200
+0xff,0x23,0x03,0xf1 = cmp sp, #0xc8
 0xdf,0x03,0x00,0x91 = mov sp, x30
 0x9f,0x02,0x00,0x11 = mov wsp, w20
 0xeb,0x03,0x00,0x91 = mov x11, sp
@@ -168,7 +168,7 @@
 0xc5,0x54,0x87,0x8b = add x5, x6, x7, asr #21
 0x28,0xfd,0x8a,0x8b = add x8, x9, x10, asr #63
 0xa3,0x00,0x07,0x2b = adds w3, w5, w7
-0x7f,0x00,0x05,0x2b = adds wzr, w3, w5
+// 0x7f,0x00,0x05,0x2b = adds wzr, w3, w5
 0xf4,0x03,0x04,0x2b = adds w20, wzr, w4
 0xc4,0x00,0x1f,0x2b = adds w4, w6, wzr
 0xab,0x01,0x0f,0x2b = adds w11, w13, w15
@@ -181,7 +181,7 @@
 0xc5,0x54,0x87,0x2b = adds w5, w6, w7, asr #21
 0x28,0x7d,0x8a,0x2b = adds w8, w9, w10, asr #31
 0xa3,0x00,0x07,0xab = adds x3, x5, x7
-0x7f,0x00,0x05,0xab = adds xzr, x3, x5
+// 0x7f,0x00,0x05,0xab = adds xzr, x3, x5
 0xf4,0x03,0x04,0xab = adds x20, xzr, x4
 0xc4,0x00,0x1f,0xab = adds x4, x6, xzr
 0xab,0x01,0x0f,0xab = adds x11, x13, x15
@@ -195,7 +195,7 @@
 0x28,0xfd,0x8a,0xab = adds x8, x9, x10, asr #63
 0xa3,0x00,0x07,0x4b = sub w3, w5, w7
 0x7f,0x00,0x05,0x4b = sub wzr, w3, w5
-0xf4,0x03,0x04,0x4b = sub w20, wzr, w4
+// 0xf4,0x03,0x04,0x4b = sub w20, wzr, w4
 0xc4,0x00,0x1f,0x4b = sub w4, w6, wzr
 0xab,0x01,0x0f,0x4b = sub w11, w13, w15
 0x69,0x28,0x1f,0x4b = sub w9, w3, wzr, lsl #10
@@ -208,7 +208,7 @@
 0x28,0x7d,0x8a,0x4b = sub w8, w9, w10, asr #31
 0xa3,0x00,0x07,0xcb = sub x3, x5, x7
 0x7f,0x00,0x05,0xcb = sub xzr, x3, x5
-0xf4,0x03,0x04,0xcb = sub x20, xzr, x4
+// 0xf4,0x03,0x04,0xcb = sub x20, xzr, x4
 0xc4,0x00,0x1f,0xcb = sub x4, x6, xzr
 0xab,0x01,0x0f,0xcb = sub x11, x13, x15
 0x69,0x28,0x1f,0xcb = sub x9, x3, xzr, lsl #10
@@ -220,8 +220,8 @@
 0xc5,0x54,0x87,0xcb = sub x5, x6, x7, asr #21
 0x28,0xfd,0x8a,0xcb = sub x8, x9, x10, asr #63
 0xa3,0x00,0x07,0x6b = subs w3, w5, w7
-0x7f,0x00,0x05,0x6b = subs wzr, w3, w5
+// 0x7f,0x00,0x05,0x6b = subs wzr, w3, w5
-0xf4,0x03,0x04,0x6b = subs w20, wzr, w4
+// 0xf4,0x03,0x04,0x6b = subs w20, wzr, w4
 0xc4,0x00,0x1f,0x6b = subs w4, w6, wzr
 0xab,0x01,0x0f,0x6b = subs w11, w13, w15
 0x69,0x28,0x1f,0x6b = subs w9, w3, wzr, lsl #10
@@ -233,8 +233,8 @@
 0xc5,0x54,0x87,0x6b = subs w5, w6, w7, asr #21
 0x28,0x7d,0x8a,0x6b = subs w8, w9, w10, asr #31
 0xa3,0x00,0x07,0xeb = subs x3, x5, x7
-0x7f,0x00,0x05,0xeb = subs xzr, x3, x5
+// 0x7f,0x00,0x05,0xeb = subs xzr, x3, x5
-0xf4,0x03,0x04,0xeb = subs x20, xzr, x4
+// 0xf4,0x03,0x04,0xeb = subs x20, xzr, x4
 0xc4,0x00,0x1f,0xeb = subs x4, x6, xzr
 0xab,0x01,0x0f,0xeb = subs x11, x13, x15
 0x69,0x28,0x1f,0xeb = subs x9, x3, xzr, lsl #10
@@ -293,54 +293,54 @@
 0x5f,0x02,0x93,0xeb = cmp x18, x19, asr #0
 0x9f,0xde,0x95,0xeb = cmp x20, x21, asr #55
 0xdf,0xfe,0x97,0xeb = cmp x22, x23, asr #63
-0xfd,0x03,0x1e,0x4b = sub w29, wzr, w30
+// 0xfd,0x03,0x1e,0x4b = sub w29, wzr, w30
-0xfe,0x03,0x1f,0x4b = sub w30, wzr, wzr
+// 0xfe,0x03,0x1f,0x4b = sub w30, wzr, wzr
-0xff,0x03,0x00,0x4b = sub wzr, wzr, w0
+// 0xff,0x03,0x00,0x4b = sub wzr, wzr, w0
-0xfc,0x03,0x1b,0x4b = sub w28, wzr, w27
+// 0xfc,0x03,0x1b,0x4b = sub w28, wzr, w27
-0xfa,0x77,0x19,0x4b = sub w26, wzr, w25, lsl #29
+// 0xfa,0x77,0x19,0x4b = sub w26, wzr, w25, lsl #29
-0xf8,0x7f,0x17,0x4b = sub w24, wzr, w23, lsl #31
+// 0xf8,0x7f,0x17,0x4b = sub w24, wzr, w23, lsl #31
-0xf6,0x03,0x55,0x4b = sub w22, wzr, w21, lsr #0
+// 0xf6,0x03,0x55,0x4b = sub w22, wzr, w21, lsr #0
-0xf4,0x07,0x53,0x4b = sub w20, wzr, w19, lsr #1
+// 0xf4,0x07,0x53,0x4b = sub w20, wzr, w19, lsr #1
-0xf2,0x7f,0x51,0x4b = sub w18, wzr, w17, lsr #31
+// 0xf2,0x7f,0x51,0x4b = sub w18, wzr, w17, lsr #31
-0xf0,0x03,0x8f,0x4b = sub w16, wzr, w15, asr #0
+// 0xf0,0x03,0x8f,0x4b = sub w16, wzr, w15, asr #0
-0xee,0x33,0x8d,0x4b = sub w14, wzr, w13, asr #12
+// 0xee,0x33,0x8d,0x4b = sub w14, wzr, w13, asr #12
-0xec,0x7f,0x8b,0x4b = sub w12, wzr, w11, asr #31
+// 0xec,0x7f,0x8b,0x4b = sub w12, wzr, w11, asr #31
-0xfd,0x03,0x1e,0xcb = sub x29, xzr, x30
+// 0xfd,0x03,0x1e,0xcb = sub x29, xzr, x30
-0xfe,0x03,0x1f,0xcb = sub x30, xzr, xzr
+// 0xfe,0x03,0x1f,0xcb = sub x30, xzr, xzr
-0xff,0x03,0x00,0xcb = sub xzr, xzr, x0
+// 0xff,0x03,0x00,0xcb = sub xzr, xzr, x0
-0xfc,0x03,0x1b,0xcb = sub x28, xzr, x27
+// 0xfc,0x03,0x1b,0xcb = sub x28, xzr, x27
-0xfa,0x77,0x19,0xcb = sub x26, xzr, x25, lsl #29
+// 0xfa,0x77,0x19,0xcb = sub x26, xzr, x25, lsl #29
-0xf8,0x7f,0x17,0xcb = sub x24, xzr, x23, lsl #31
+// 0xf8,0x7f,0x17,0xcb = sub x24, xzr, x23, lsl #31
-0xf6,0x03,0x55,0xcb = sub x22, xzr, x21, lsr #0
+// 0xf6,0x03,0x55,0xcb = sub x22, xzr, x21, lsr #0
-0xf4,0x07,0x53,0xcb = sub x20, xzr, x19, lsr #1
+// 0xf4,0x07,0x53,0xcb = sub x20, xzr, x19, lsr #1
-0xf2,0x7f,0x51,0xcb = sub x18, xzr, x17, lsr #31
+// 0xf2,0x7f,0x51,0xcb = sub x18, xzr, x17, lsr #31
-0xf0,0x03,0x8f,0xcb = sub x16, xzr, x15, asr #0
+// 0xf0,0x03,0x8f,0xcb = sub x16, xzr, x15, asr #0
-0xee,0x33,0x8d,0xcb = sub x14, xzr, x13, asr #12
+// 0xee,0x33,0x8d,0xcb = sub x14, xzr, x13, asr #12
-0xec,0x7f,0x8b,0xcb = sub x12, xzr, x11, asr #31
+// 0xec,0x7f,0x8b,0xcb = sub x12, xzr, x11, asr #31
-0xfd,0x03,0x1e,0x6b = subs w29, wzr, w30
+// 0xfd,0x03,0x1e,0x6b = subs w29, wzr, w30
-0xfe,0x03,0x1f,0x6b = subs w30, wzr, wzr
+// 0xfe,0x03,0x1f,0x6b = subs w30, wzr, wzr
-0xff,0x03,0x00,0x6b = subs wzr, wzr, w0
+// 0xff,0x03,0x00,0x6b = subs wzr, wzr, w0
-0xfc,0x03,0x1b,0x6b = subs w28, wzr, w27
+// 0xfc,0x03,0x1b,0x6b = subs w28, wzr, w27
-0xfa,0x77,0x19,0x6b = subs w26, wzr, w25, lsl #29
+// 0xfa,0x77,0x19,0x6b = subs w26, wzr, w25, lsl #29
-0xf8,0x7f,0x17,0x6b = subs w24, wzr, w23, lsl #31
+// 0xf8,0x7f,0x17,0x6b = subs w24, wzr, w23, lsl #31
-0xf6,0x03,0x55,0x6b = subs w22, wzr, w21, lsr #0
+// 0xf6,0x03,0x55,0x6b = subs w22, wzr, w21, lsr #0
-0xf4,0x07,0x53,0x6b = subs w20, wzr, w19, lsr #1
+// 0xf4,0x07,0x53,0x6b = subs w20, wzr, w19, lsr #1
-0xf2,0x7f,0x51,0x6b = subs w18, wzr, w17, lsr #31
+// 0xf2,0x7f,0x51,0x6b = subs w18, wzr, w17, lsr #31
-0xf0,0x03,0x8f,0x6b = subs w16, wzr, w15, asr #0
+// 0xf0,0x03,0x8f,0x6b = subs w16, wzr, w15, asr #0
-0xee,0x33,0x8d,0x6b = subs w14, wzr, w13, asr #12
+// 0xee,0x33,0x8d,0x6b = subs w14, wzr, w13, asr #12
-0xec,0x7f,0x8b,0x6b = subs w12, wzr, w11, asr #31
+// 0xec,0x7f,0x8b,0x6b = subs w12, wzr, w11, asr #31
-0xfd,0x03,0x1e,0xeb = subs x29, xzr, x30
+// 0xfd,0x03,0x1e,0xeb = subs x29, xzr, x30
-0xfe,0x03,0x1f,0xeb = subs x30, xzr, xzr
+// 0xfe,0x03,0x1f,0xeb = subs x30, xzr, xzr
-0xff,0x03,0x00,0xeb = subs xzr, xzr, x0
+// 0xff,0x03,0x00,0xeb = subs xzr, xzr, x0
-0xfc,0x03,0x1b,0xeb = subs x28, xzr, x27
+// 0xfc,0x03,0x1b,0xeb = subs x28, xzr, x27
-0xfa,0x77,0x19,0xeb = subs x26, xzr, x25, lsl #29
+// 0xfa,0x77,0x19,0xeb = subs x26, xzr, x25, lsl #29
-0xf8,0x7f,0x17,0xeb = subs x24, xzr, x23, lsl #31
+// 0xf8,0x7f,0x17,0xeb = subs x24, xzr, x23, lsl #31
-0xf6,0x03,0x55,0xeb = subs x22, xzr, x21, lsr #0
+// 0xf6,0x03,0x55,0xeb = subs x22, xzr, x21, lsr #0
-0xf4,0x07,0x53,0xeb = subs x20, xzr, x19, lsr #1
+// 0xf4,0x07,0x53,0xeb = subs x20, xzr, x19, lsr #1
-0xf2,0x7f,0x51,0xeb = subs x18, xzr, x17, lsr #31
+// 0xf2,0x7f,0x51,0xeb = subs x18, xzr, x17, lsr #31
-0xf0,0x03,0x8f,0xeb = subs x16, xzr, x15, asr #0
+// 0xf0,0x03,0x8f,0xeb = subs x16, xzr, x15, asr #0
-0xee,0x33,0x8d,0xeb = subs x14, xzr, x13, asr #12
+// 0xee,0x33,0x8d,0xeb = subs x14, xzr, x13, asr #12
-0xec,0x7f,0x8b,0xeb = subs x12, xzr, x11, asr #31
+// 0xec,0x7f,0x8b,0xeb = subs x12, xzr, x11, asr #31
 0x7d,0x03,0x19,0x1a = adc w29, w27, w25
 0x7f,0x00,0x04,0x1a = adc wzr, w3, w4
 0xe9,0x03,0x0a,0x1a = adc w9, wzr, w10
@@ -385,86 +385,86 @@
 0xfd,0x03,0x1e,0xfa = ngcs x29, x30
 0xff,0x03,0x00,0xfa = ngcs xzr, x0
 0xe0,0x03,0x1f,0xfa = ngcs x0, xzr
-0x41,0x10,0x43,0x93 = sbfm x1, x2, #3, #4
+// 0x41,0x10,0x43,0x93 = sbfm x1, x2, #3, #4
-0x83,0xfc,0x7f,0x93 = sbfm x3, x4, #63, #63
+// 0x83,0xfc,0x7f,0x93 = sbfm x3, x4, #63, #63
-0xff,0x7f,0x1f,0x13 = sbfm wzr, wzr, #31, #31
+// 0xff,0x7f,0x1f,0x13 = sbfm wzr, wzr, #31, #31
-0x2c,0x01,0x00,0x13 = sbfm w12, w9, #0, #0
+// 0x2c,0x01,0x00,0x13 = sbfm w12, w9, #0, #0
-0xa4,0x28,0x4c,0xd3 = ubfm x4, x5, #12, #10
+// 0xa4,0x28,0x4c,0xd3 = ubfm x4, x5, #12, #10
-0x9f,0x00,0x40,0xd3 = ubfm xzr, x4, #0, #0
+// 0x9f,0x00,0x40,0xd3 = ubfm xzr, x4, #0, #0
-0xe4,0x17,0x7f,0xd3 = ubfm x4, xzr, #63, #5
+// 0xe4,0x17,0x7f,0xd3 = ubfm x4, xzr, #63, #5
-0xc5,0xfc,0x4c,0xd3 = ubfm x5, x6, #12, #63
+// 0xc5,0xfc,0x4c,0xd3 = ubfm x5, x6, #12, #63
-0xa4,0x28,0x4c,0xb3 = bfm x4, x5, #12, #10
+// 0xa4,0x28,0x4c,0xb3 = bfm x4, x5, #12, #10
-0x9f,0x00,0x40,0xb3 = bfm xzr, x4, #0, #0
+// 0x9f,0x00,0x40,0xb3 = bfm xzr, x4, #0, #0
-0xe4,0x17,0x7f,0xb3 = bfm x4, xzr, #63, #5
+// 0xe4,0x17,0x7f,0xb3 = bfm x4, xzr, #63, #5
-0xc5,0xfc,0x4c,0xb3 = bfm x5, x6, #12, #63
+// 0xc5,0xfc,0x4c,0xb3 = bfm x5, x6, #12, #63
 0x41,0x1c,0x00,0x13 = sxtb w1, w2
 0x7f,0x1c,0x40,0x93 = sxtb xzr, w3
 0x49,0x3d,0x00,0x13 = sxth w9, w10
 0x20,0x3c,0x40,0x93 = sxth x0, w1
 0xc3,0x7f,0x40,0x93 = sxtw x3, w30
 0x41,0x1c,0x00,0x53 = uxtb w1, w2
-0x7f,0x1c,0x00,0x53 = uxtb xzr, w3
+// 0x7f,0x1c,0x00,0x53 = uxtb xzr, w3
 0x49,0x3d,0x00,0x53 = uxth w9, w10
-0x20,0x3c,0x00,0x53 = uxth x0, w1
+// 0x20,0x3c,0x00,0x53 = uxth x0, w1
 0x43,0x7c,0x00,0x13 = asr w3, w2, #0
-0x49,0x7d,0x1f,0x13 = asr w9, w10, #31
+0x49,0x7d,0x1f,0x13 = asr w9, w10, #0x1f
-0xb4,0xfe,0x7f,0x93 = asr x20, x21, #63
+0xb4,0xfe,0x7f,0x93 = asr x20, x21, #0x3f
 0xe1,0x7f,0x03,0x13 = asr w1, wzr, #3
-0x43,0x7c,0x00,0x53 = lsr w3, w2, #0
+// 0x43,0x7c,0x00,0x53 = lsr w3, w2, #0
-0x49,0x7d,0x1f,0x53 = lsr w9, w10, #31
+0x49,0x7d,0x1f,0x53 = lsr w9, w10, #0x1f
-0xb4,0xfe,0x7f,0xd3 = lsr x20, x21, #63
+0xb4,0xfe,0x7f,0xd3 = lsr x20, x21, #0x3f
 0xff,0x7f,0x03,0x53 = lsr wzr, wzr, #3
-0x43,0x7c,0x00,0x53 = lsl w3, w2, #0
+// 0x43,0x7c,0x00,0x53 = lsl w3, w2, #0
-0x49,0x01,0x01,0x53 = lsl w9, w10, #31
+0x49,0x01,0x01,0x53 = lsl w9, w10, #0x1f
-0xb4,0x02,0x41,0xd3 = lsl x20, x21, #63
+0xb4,0x02,0x41,0xd3 = lsl x20, x21, #0x3f
 0xe1,0x73,0x1d,0x53 = lsl w1, wzr, #3
-0x49,0x01,0x00,0x13 = sbfiz w9, w10, #0, #1
+// 0x49,0x01,0x00,0x13 = sbfiz w9, w10, #0, #1
-0x62,0x00,0x41,0x93 = sbfiz x2, x3, #63, #1
+0x62,0x00,0x41,0x93 = sbfiz x2, x3, #0x3f, #1
-0x93,0xfe,0x40,0x93 = sbfiz x19, x20, #0, #64
+// 0x93,0xfe,0x40,0x93 = sbfiz x19, x20, #0, #64
 0x49,0xe9,0x7b,0x93 = sbfiz x9, x10, #5, #59
-0x49,0x7d,0x00,0x13 = sbfiz w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x13 = sbfiz w9, w10, #0, #32
-0x8b,0x01,0x01,0x13 = sbfiz w11, w12, #31, #1
+0x8b,0x01,0x01,0x13 = sbfiz w11, w12, #0x1f, #1
-0xcd,0x09,0x03,0x13 = sbfiz w13, w14, #29, #3
+0xcd,0x09,0x03,0x13 = sbfiz w13, w14, #0x1d, #3
-0xff,0x2b,0x76,0x93 = sbfiz xzr, xzr, #10, #11
+0xff,0x2b,0x76,0x93 = sbfiz xzr, xzr, #0xa, #11
-0x49,0x01,0x00,0x13 = sbfx w9, w10, #0, #1
+// 0x49,0x01,0x00,0x13 = sbfx w9, w10, #0, #1
-0x62,0xfc,0x7f,0x93 = sbfx x2, x3, #63, #1
+// 0x62,0xfc,0x7f,0x93 = sbfx x2, x3, #0x3f, #1
-0x93,0xfe,0x40,0x93 = sbfx x19, x20, #0, #64
+// 0x93,0xfe,0x40,0x93 = sbfx x19, x20, #0, #64
-0x49,0xfd,0x45,0x93 = sbfx x9, x10, #5, #59
+// 0x49,0xfd,0x45,0x93 = sbfx x9, x10, #5, #59
-0x49,0x7d,0x00,0x13 = sbfx w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x13 = sbfx w9, w10, #0, #32
-0x8b,0x7d,0x1f,0x13 = sbfx w11, w12, #31, #1
+// 0x8b,0x7d,0x1f,0x13 = sbfx w11, w12, #31, #1
-0xcd,0x7d,0x1d,0x13 = sbfx w13, w14, #29, #3
+// 0xcd,0x7d,0x1d,0x13 = sbfx w13, w14, #29, #3
 0xff,0x53,0x4a,0x93 = sbfx xzr, xzr, #10, #11
-0x49,0x01,0x00,0x33 = bfi w9, w10, #0, #1
+// 0x49,0x01,0x00,0x33 = bfi w9, w10, #0, #1
 0x62,0x00,0x41,0xb3 = bfi x2, x3, #63, #1
-0x93,0xfe,0x40,0xb3 = bfi x19, x20, #0, #64
+// 0x93,0xfe,0x40,0xb3 = bfi x19, x20, #0, #64
 0x49,0xe9,0x7b,0xb3 = bfi x9, x10, #5, #59
-0x49,0x7d,0x00,0x33 = bfi w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x33 = bfi w9, w10, #0, #32
 0x8b,0x01,0x01,0x33 = bfi w11, w12, #31, #1
 0xcd,0x09,0x03,0x33 = bfi w13, w14, #29, #3
 0xff,0x2b,0x76,0xb3 = bfi xzr, xzr, #10, #11
-0x49,0x01,0x00,0x33 = bfxil w9, w10, #0, #1
+// 0x49,0x01,0x00,0x33 = bfxil w9, w10, #0, #1
 0x62,0xfc,0x7f,0xb3 = bfxil x2, x3, #63, #1
-0x93,0xfe,0x40,0xb3 = bfxil x19, x20, #0, #64
+// 0x93,0xfe,0x40,0xb3 = bfxil x19, x20, #0, #64
 0x49,0xfd,0x45,0xb3 = bfxil x9, x10, #5, #59
-0x49,0x7d,0x00,0x33 = bfxil w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x33 = bfxil w9, w10, #0, #32
 0x8b,0x7d,0x1f,0x33 = bfxil w11, w12, #31, #1
 0xcd,0x7d,0x1d,0x33 = bfxil w13, w14, #29, #3
 0xff,0x53,0x4a,0xb3 = bfxil xzr, xzr, #10, #11
-0x49,0x01,0x00,0x53 = ubfiz w9, w10, #0, #1
+// 0x49,0x01,0x00,0x53 = ubfiz w9, w10, #0, #1
-0x62,0x00,0x41,0xd3 = ubfiz x2, x3, #63, #1
+// 0x62,0x00,0x41,0xd3 = ubfiz x2, x3, #63, #1
-0x93,0xfe,0x40,0xd3 = ubfiz x19, x20, #0, #64
+// 0x93,0xfe,0x40,0xd3 = ubfiz x19, x20, #0, #64
-0x49,0xe9,0x7b,0xd3 = ubfiz x9, x10, #5, #59
+// 0x49,0xe9,0x7b,0xd3 = ubfiz x9, x10, #5, #59
-0x49,0x7d,0x00,0x53 = ubfiz w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x53 = ubfiz w9, w10, #0, #32
-0x8b,0x01,0x01,0x53 = ubfiz w11, w12, #31, #1
+// 0x8b,0x01,0x01,0x53 = ubfiz w11, w12, #31, #1
-0xcd,0x09,0x03,0x53 = ubfiz w13, w14, #29, #3
+// 0xcd,0x09,0x03,0x53 = ubfiz w13, w14, #29, #3
 0xff,0x2b,0x76,0xd3 = ubfiz xzr, xzr, #10, #11
-0x49,0x01,0x00,0x53 = ubfx w9, w10, #0, #1
+// 0x49,0x01,0x00,0x53 = ubfx w9, w10, #0, #1
-0x62,0xfc,0x7f,0xd3 = ubfx x2, x3, #63, #1
+// 0x62,0xfc,0x7f,0xd3 = ubfx x2, x3, #63, #1
-0x93,0xfe,0x40,0xd3 = ubfx x19, x20, #0, #64
+// 0x93,0xfe,0x40,0xd3 = ubfx x19, x20, #0, #64
-0x49,0xfd,0x45,0xd3 = ubfx x9, x10, #5, #59
+// 0x49,0xfd,0x45,0xd3 = ubfx x9, x10, #5, #59
-0x49,0x7d,0x00,0x53 = ubfx w9, w10, #0, #32
+// 0x49,0x7d,0x00,0x53 = ubfx w9, w10, #0, #32
-0x8b,0x7d,0x1f,0x53 = ubfx w11, w12, #31, #1
+// 0x8b,0x7d,0x1f,0x53 = ubfx w11, w12, #31, #1
-0xcd,0x7d,0x1d,0x53 = ubfx w13, w14, #29, #3
+// 0xcd,0x7d,0x1d,0x53 = ubfx w13, w14, #29, #3
 0xff,0x53,0x4a,0xd3 = ubfx xzr, xzr, #10, #11
 0x05,0x00,0x00,0x34 = cbz w5, #0
 0xe3,0xff,0xff,0xb5 = cbnz x3, #-4
@@ -529,28 +529,28 @@
 0x7f,0xa4,0x84,0xda = csneg xzr, x3, x4, ge
 0xe5,0x27,0x86,0xda = csneg x5, xzr, x6, hs
 0x07,0x35,0x9f,0xda = csneg x7, x8, xzr, lo
-0xe3,0x17,0x9f,0x1a = csinc w3, wzr, wzr, ne
+// 0xe3,0x17,0x9f,0x1a = csinc w3, wzr, wzr, ne
-0xe9,0x47,0x9f,0x9a = csinc x9, xzr, xzr, mi
+// 0xe9,0x47,0x9f,0x9a = csinc x9, xzr, xzr, mi
-0xf4,0x03,0x9f,0x5a = csinv w20, wzr, wzr, eq
+// 0xf4,0x03,0x9f,0x5a = csinv w20, wzr, wzr, eq
-0xfe,0xb3,0x9f,0xda = csinv x30, xzr, xzr, lt
+// 0xfe,0xb3,0x9f,0xda = csinv x30, xzr, xzr, lt
-0xa3,0xd4,0x85,0x1a = csinc w3, w5, w5, le
+// 0xa3,0xd4,0x85,0x1a = csinc w3, w5, w5, le
-0x9f,0xc4,0x84,0x1a = csinc wzr, w4, w4, gt
+// 0x9f,0xc4,0x84,0x1a = csinc wzr, w4, w4, gt
-0xe9,0xa7,0x9f,0x1a = csinc w9, wzr, wzr, ge
+// 0xe9,0xa7,0x9f,0x1a = csinc w9, wzr, wzr, ge
-0xa3,0xd4,0x85,0x9a = csinc x3, x5, x5, le
+// 0xa3,0xd4,0x85,0x9a = csinc x3, x5, x5, le
-0x9f,0xc4,0x84,0x9a = csinc xzr, x4, x4, gt
+// 0x9f,0xc4,0x84,0x9a = csinc xzr, x4, x4, gt
-0xe9,0xa7,0x9f,0x9a = csinc x9, xzr, xzr, ge
+// 0xe9,0xa7,0x9f,0x9a = csinc x9, xzr, xzr, ge
-0xa3,0xd0,0x85,0x5a = csinv w3, w5, w5, le
+// 0xa3,0xd0,0x85,0x5a = csinv w3, w5, w5, le
-0x9f,0xc0,0x84,0x5a = csinv wzr, w4, w4, gt
+// 0x9f,0xc0,0x84,0x5a = csinv wzr, w4, w4, gt
-0xe9,0xa3,0x9f,0x5a = csinv w9, wzr, wzr, ge
+// 0xe9,0xa3,0x9f,0x5a = csinv w9, wzr, wzr, ge
-0xa3,0xd0,0x85,0xda = csinv x3, x5, x5, le
+// 0xa3,0xd0,0x85,0xda = csinv x3, x5, x5, le
-0x9f,0xc0,0x84,0xda = csinv xzr, x4, x4, gt
+// 0x9f,0xc0,0x84,0xda = csinv xzr, x4, x4, gt
-0xe9,0xa3,0x9f,0xda = csinv x9, xzr, xzr, ge
+// 0xe9,0xa3,0x9f,0xda = csinv x9, xzr, xzr, ge
-0xa3,0xd4,0x85,0x5a = csneg w3, w5, w5, le
+// 0xa3,0xd4,0x85,0x5a = csneg w3, w5, w5, le
-0x9f,0xc4,0x84,0x5a = csneg wzr, w4, w4, gt
+// 0x9f,0xc4,0x84,0x5a = csneg wzr, w4, w4, gt
-0xe9,0xa7,0x9f,0x5a = csneg w9, wzr, wzr, ge
+// 0xe9,0xa7,0x9f,0x5a = csneg w9, wzr, wzr, ge
-0xa3,0xd4,0x85,0xda = csneg x3, x5, x5, le
+// 0xa3,0xd4,0x85,0xda = csneg x3, x5, x5, le
-0x9f,0xc4,0x84,0xda = csneg xzr, x4, x4, gt
+// 0x9f,0xc4,0x84,0xda = csneg xzr, x4, x4, gt
-0xe9,0xa7,0x9f,0xda = csneg x9, xzr, xzr, ge
+// 0xe9,0xa7,0x9f,0xda = csneg x9, xzr, xzr, ge
 0xe0,0x00,0xc0,0x5a = rbit w0, w7
 0x72,0x00,0xc0,0xda = rbit x18, x3
 0x31,0x04,0xc0,0x5a = rev16 w17, w1
@@ -675,9 +675,9 @@
 0xab,0x7d,0x91,0x13 = extr w11, w13, w17, #31
 0xa3,0x3c,0xc7,0x93 = extr x3, x5, x7, #15
 0xab,0xfd,0xd1,0x93 = extr x11, x13, x17, #63
-0xf3,0x62,0xd7,0x93 = extr x19, x23, x23, #24
+// 0xf3,0x62,0xd7,0x93 = extr x19, x23, x23, #24
-0xfd,0xff,0xdf,0x93 = extr x29, xzr, xzr, #63
+// 0xfd,0xff,0xdf,0x93 = extr x29, xzr, xzr, #63
-0xa9,0x7d,0x8d,0x13 = extr w9, w13, w13, #31
+// 0xa9,0x7d,0x8d,0x13 = extr w9, w13, w13, #31
 0x60,0x20,0x25,0x1e = fcmp s3, s5
 0xe8,0x23,0x20,0x1e = fcmp s31, #0.0
 0xb0,0x23,0x3e,0x1e = fcmpe s29, s30
@@ -937,46 +937,46 @@
 0xf4,0x07,0x40,0xf9 = ldr x20, [sp, #8]
 0xff,0x03,0x40,0xf9 = ldr xzr, [sp]
 0xe2,0x03,0x40,0xb9 = ldr w2, [sp]
-0xf1,0xff,0x7f,0xb9 = ldr w17, [sp, #16380]
+// 0xf1,0xff,0x7f,0xb9 = ldr w17, [sp, #0x6660]
 0x4d,0x04,0x40,0xb9 = ldr w13, [x2, #4]
 0xa2,0x04,0x80,0xb9 = ldrsw x2, [x5, #4]
-0xf7,0xff,0xbf,0xb9 = ldrsw x23, [sp, #16380]
+// 0xf7,0xff,0xbf,0xb9 = ldrsw x23, [sp, #0x6660]
 0x82,0x00,0x40,0x79 = ldrh w2, [x4]
 0xd7,0xfc,0xff,0x79 = ldrsh w23, [x6, #8190]
 0xff,0x07,0xc0,0x79 = ldrsh wzr, [sp, #2]
 0x5d,0x04,0x80,0x79 = ldrsh x29, [x2, #2]
 0x7a,0xe4,0x41,0x39 = ldrb w26, [x3, #121]
 0x4c,0x00,0x40,0x39 = ldrb w12, [x2]
-0xfb,0xff,0xff,0x39 = ldrsb w27, [sp, #4095]
+0xfb,0xff,0xff,0x39 = ldrsb w27, [sp, #0xfff]
 0xff,0x01,0x80,0x39 = ldrsb xzr, [x15]
 0xfe,0x03,0x00,0xf9 = str x30, [sp]
-0x94,0xfc,0x3f,0xb9 = str w20, [x4, #16380]
+// 0x94,0xfc,0x3f,0xb9 = str w20, [x4, #0x6660]
 0x54,0x1d,0x00,0x79 = strh w20, [x10, #14]
 0xf1,0xff,0x3f,0x79 = strh w17, [sp, #8190]
-0x77,0xfc,0x3f,0x39 = strb w23, [x3, #4095]
+0x77,0xfc,0x3f,0x39 = strb w23, [x3, #0xfff]
 0x5f,0x00,0x00,0x39 = strb wzr, [x2]
 0xe0,0x07,0x80,0xf9 = prfm pldl1keep, [sp, #8]
-0x61,0x00,0x80,0xf9 = prfm pldl1strm, [x3, #0]
+// 0x61,0x00,0x80,0xf9 = prfm pldl1strm, [x3, #0]
 0xa2,0x08,0x80,0xf9 = prfm pldl2keep, [x5, #16]
-0x43,0x00,0x80,0xf9 = prfm pldl2strm, [x2, #0]
+// 0x43,0x00,0x80,0xf9 = prfm pldl2strm, [x2, #0]
-0xa4,0x00,0x80,0xf9 = prfm pldl3keep, [x5, #0]
+// 0xa4,0x00,0x80,0xf9 = prfm pldl3keep, [x5, #0]
-0xc5,0x00,0x80,0xf9 = prfm pldl3strm, [x6, #0]
+// 0xc5,0x00,0x80,0xf9 = prfm pldl3strm, [x6, #0]
 0xe8,0x07,0x80,0xf9 = prfm plil1keep, [sp, #8]
-0x69,0x00,0x80,0xf9 = prfm plil1strm, [x3, #0]
+// 0x69,0x00,0x80,0xf9 = prfm plil1strm, [x3, #0]
 0xaa,0x08,0x80,0xf9 = prfm plil2keep, [x5, #16]
-0x4b,0x00,0x80,0xf9 = prfm plil2strm, [x2, #0]
+// 0x4b,0x00,0x80,0xf9 = prfm plil2strm, [x2, #0]
-0xac,0x00,0x80,0xf9 = prfm plil3keep, [x5, #0]
+// 0xac,0x00,0x80,0xf9 = prfm plil3keep, [x5, #0]
-0xcd,0x00,0x80,0xf9 = prfm plil3strm, [x6, #0]
+// 0xcd,0x00,0x80,0xf9 = prfm plil3strm, [x6, #0]
 0xf0,0x07,0x80,0xf9 = prfm pstl1keep, [sp, #8]
-0x71,0x00,0x80,0xf9 = prfm pstl1strm, [x3, #0]
+// 0x71,0x00,0x80,0xf9 = prfm pstl1strm, [x3, #0]
 0xb2,0x08,0x80,0xf9 = prfm pstl2keep, [x5, #16]
-0x53,0x00,0x80,0xf9 = prfm pstl2strm, [x2, #0]
+// 0x53,0x00,0x80,0xf9 = prfm pstl2strm, [x2, #0]
-0xb4,0x00,0x80,0xf9 = prfm pstl3keep, [x5, #0]
+// 0xb4,0x00,0x80,0xf9 = prfm pstl3keep, [x5, #0]
-0xd5,0x00,0x80,0xf9 = prfm pstl3strm, [x6, #0]
+// 0xd5,0x00,0x80,0xf9 = prfm pstl3strm, [x6, #0]
-0xef,0x03,0x80,0xf9 = prfm #15, [sp, #0]
+// 0xef,0x03,0x80,0xf9 = prfm #15, [sp, #0]
-0xff,0xff,0x7f,0x3d = ldr b31, [sp, #4095]
+0xff,0xff,0x7f,0x3d = ldr b31, [sp, #0xfff]
 0x54,0xfc,0x7f,0x7d = ldr h20, [x2, #8190]
-0x6a,0xfe,0x7f,0xbd = ldr s10, [x19, #16380]
+// 0x6a,0xfe,0x7f,0xbd = ldr s10, [x19, #0x6660]
 0x43,0xfd,0x7f,0xfd = ldr d3, [x10, #32760]
 0xec,0xff,0xbf,0x3d = str q12, [sp, #65520]
 0xe3,0x6b,0x65,0x38 = ldrb w3, [sp, x5]
@@ -1024,7 +1024,7 @@
 0xf1,0xca,0x69,0xf8 = ldr x17, [x23, w9, sxtw]
 0xd2,0xca,0x6a,0xf8 = ldr x18, [x22, w10, sxtw]
 0xb3,0xda,0x3f,0xfc = str d19, [x21, wzr, sxtw #3]
-0x06,0x68,0xa5,0xf8 = prfm #6, [x0, x5, lsl #0]
+// 0x06,0x68,0xa5,0xf8 = prfm #6, [x0, x5, lsl #0]
 0xe3,0x6b,0xe5,0x3c = ldr q3, [sp, x5]
 0x69,0x6b,0xe6,0x3c = ldr q9, [x27, x6]
 0xca,0x7b,0xe7,0x3c = ldr q10, [x30, x7, lsl #4]
@@ -1272,11 +1272,11 @@
 0xc3,0xc8,0x03,0x52 = eor w3, w6, #0xe0e0e0e0
 0xff,0xc7,0x00,0x52 = eor wsp, wzr, #0x3030303
 0x30,0xc6,0x01,0x52 = eor w16, w17, #0x81818181
-0x5f,0xe6,0x02,0x72 = ands wzr, w18, #0xcccccccc
+// 0x5f,0xe6,0x02,0x72 = ands wzr, w18, #0xcccccccc
 0x93,0xe6,0x00,0x72 = ands w19, w20, #0x33333333
 0xd5,0xe6,0x01,0x72 = ands w21, w22, #0x99999999
-0x7f,0xf0,0x01,0x72 = ands wzr, w3, #0xaaaaaaaa
+// 0x7f,0xf0,0x01,0x72 = ands wzr, w3, #0xaaaaaaaa
-0xff,0xf3,0x00,0x72 = ands wzr, wzr, #0x55555555
+// 0xff,0xf3,0x00,0x72 = ands wzr, wzr, #0x55555555
 0xa3,0x84,0x66,0xd2 = eor x3, x5, #0xffffffffc000000
 0x49,0xb9,0x40,0x92 = and x9, x10, #0x7fffffffffff
 0x8b,0x31,0x41,0xb2 = orr x11, x12, #0x8000000000000fff
@@ -1289,11 +1289,11 @@
 0xc3,0xc8,0x03,0xd2 = eor x3, x6, #0xe0e0e0e0e0e0e0e0
 0xff,0xc7,0x00,0xd2 = eor sp, xzr, #0x303030303030303
 0x30,0xc6,0x01,0xd2 = eor x16, x17, #0x8181818181818181
-0x5f,0xe6,0x02,0xf2 = ands xzr, x18, #0xcccccccccccccccc
+// 0x5f,0xe6,0x02,0xf2 = ands xzr, x18, #0xcccccccccccccccc
 0x93,0xe6,0x00,0xf2 = ands x19, x20, #0x3333333333333333
 0xd5,0xe6,0x01,0xf2 = ands x21, x22, #0x9999999999999999
-0x7f,0xf0,0x01,0xf2 = ands xzr, x3, #0xaaaaaaaaaaaaaaaa
+// 0x7f,0xf0,0x01,0xf2 = ands xzr, x3, #0xaaaaaaaaaaaaaaaa
-0xff,0xf3,0x00,0xf2 = ands xzr, xzr, #0x5555555555555555
+// 0xff,0xf3,0x00,0xf2 = ands xzr, xzr, #0x5555555555555555
 0xe3,0x8f,0x00,0x32 = orr w3, wzr, #0xf000f
 0xea,0xf3,0x01,0xb2 = orr x10, xzr, #0xaaaaaaaaaaaaaaaa
 0xec,0x02,0x15,0x0a = and w12, w23, w21
@@ -1388,7 +1388,7 @@
 0x9f,0x40,0x00,0xd5 = msr pan, #0
 0x7f,0x40,0x00,0xd5 = msr uao, #0
 0xe5,0x59,0x0f,0xd5 = sys #7, c5, c9, #7, x5
-0x5f,0xff,0x08,0xd5 = sys #0, c15, c15, #2, xzr
+// 0x5f,0xff,0x08,0xd5 = sys #0, c15, c15, #2, xzr
 0xe9,0x59,0x2f,0xd5 = sysl x9, #7, c5, c9, #7
 0x41,0xff,0x28,0xd5 = sysl x1, #0, c15, c15, #2
 0x1f,0x71,0x08,0xd5 = ic ialluis
@@ -1941,7 +1941,7 @@
 0x09,0xc1,0x38,0xd5 = mrs x9, isr_el1
 0x29,0xd0,0x38,0xd5 = mrs x9, contextidr_el1
 0x29,0xd0,0x3d,0xd5 = mrs x9, contextidr_el12
-0x29,0xd0,0x3c,0xd5 = mrs x9, contextdir_el2
+// 0x29,0xd0,0x3c,0xd5 = mrs x9, contextdir_el2
 0x49,0xd0,0x3b,0xd5 = mrs x9, tpidr_el0
 0x49,0xd0,0x3c,0xd5 = mrs x9, tpidr_el2
 0x49,0xd0,0x3e,0xd5 = mrs x9, tpidr_el3
@@ -1964,7 +1964,7 @@
 0x49,0xe2,0x3b,0xd5 = mrs x9, cntp_cval_el0
 0x49,0xe2,0x3d,0xd5 = mrs x9, cntp_cval_el02
 0x49,0xe2,0x3c,0xd5 = mrs x9, cnthp_cval_el2
-0x20,0xe3,0x3c,0xd5 = mrs x9, cnthv_ctl_el2
+// 0x20,0xe3,0x3c,0xd5 = mrs x9, cnthv_ctl_el2
 0x49,0xe3,0x3c,0xd5 = mrs x9, cnthv_cval_el2
 0x09,0xe3,0x3c,0xd5 = mrs x9, cnthv_tval_el2
 0x49,0xe2,0x3f,0xd5 = mrs x9, cntps_cval_el1
@ -76,7 +76,7 @@
0x03,0xe5,0x2c,0x2e = fcmge v3.2s, v8.2s, v12.2s
0xf1,0xe5,0x6d,0x6e = fcmge v17.2d, v15.2d, v13.2d
0xbf,0xe7,0x3c,0x6e = fcmge v31.4s, v29.4s, v28.4s
0x03,0xe5,0x2c,0x2e = fcmge v3.2s, v8.2s, v12.2s
0xf1,0xe5,0x6d,0x6e = fcmge v17.2d, v15.2d, v13.2d
0xe0,0xe7,0xb0,0x2e = fcmgt v0.2s, v31.2s, v16.2s
0xe4,0xe4,0xaf,0x6e = fcmgt v4.4s, v7.4s, v15.4s
@ -64,11 +64,7 @@
0xef,0xe5,0x00,0x4f = movi v15.16b, #0xf
0xff,0xe7,0x00,0x4f = movi v31.16b, #0x1f
0x40,0xe5,0x05,0x6f = movi v0.2d, #0xff00ff00ff00ff00
0x40,0xe5,0x05,0x2f = movi d0, #0xff00ff00ff00ff00
0x01,0xf6,0x03,0x0f = fmov v1.2s, #1.00000000
0x0f,0xf6,0x03,0x4f = fmov v15.4s, #1.00000000
0x1f,0xf6,0x03,0x6f = fmov v31.2d, #1.00000000
-0xe0,0x1f,0xbf,0x0e = orr v0.8b, v31.8b, v31.8b
-0x0f,0x1e,0xb0,0x4e = orr v15.16b, v16.16b, v16.16b
-0xe0,0x1f,0xbf,0x0e = orr v0.8b, v31.8b, v31.8b
-0x0f,0x1e,0xb0,0x4e = orr v15.16b, v16.16b, v16.16b
@ -1,23 +1,23 @@
# CS_ARCH_ARM64, 0, None
-0x00,0x04,0x1f,0x5e = dup b0, v0.b[15]
+0x00,0x04,0x1f,0x5e = mov b0, v0.b[15]
-0x01,0x04,0x0f,0x5e = dup b1, v0.b[7]
+0x01,0x04,0x0f,0x5e = mov b1, v0.b[7]
-0x11,0x04,0x01,0x5e = dup b17, v0.b[0]
+0x11,0x04,0x01,0x5e = mov b17, v0.b[0]
-0xe5,0x07,0x1e,0x5e = dup h5, v31.h[7]
+0xe5,0x07,0x1e,0x5e = mov h5, v31.h[7]
-0x29,0x04,0x12,0x5e = dup h9, v1.h[4]
+0x29,0x04,0x12,0x5e = mov h9, v1.h[4]
-0x2b,0x06,0x02,0x5e = dup h11, v17.h[0]
+0x2b,0x06,0x02,0x5e = mov h11, v17.h[0]
-0x42,0x04,0x1c,0x5e = dup s2, v2.s[3]
+0x42,0x04,0x1c,0x5e = mov s2, v2.s[3]
-0xa4,0x06,0x04,0x5e = dup s4, v21.s[0]
+0xa4,0x06,0x04,0x5e = mov s4, v21.s[0]
-0xbf,0x06,0x14,0x5e = dup s31, v21.s[2]
+0xbf,0x06,0x14,0x5e = mov s31, v21.s[2]
-0xa3,0x04,0x08,0x5e = dup d3, v5.d[0]
+0xa3,0x04,0x08,0x5e = mov d3, v5.d[0]
-0xa6,0x04,0x18,0x5e = dup d6, v5.d[1]
+0xa6,0x04,0x18,0x5e = mov d6, v5.d[1]
-0x00,0x04,0x1f,0x5e = dup b0, v0.b[15]
+0x00,0x04,0x1f,0x5e = mov b0, v0.b[15]
-0x01,0x04,0x0f,0x5e = dup b1, v0.b[7]
+0x01,0x04,0x0f,0x5e = mov b1, v0.b[7]
-0x11,0x04,0x01,0x5e = dup b17, v0.b[0]
+0x11,0x04,0x01,0x5e = mov b17, v0.b[0]
-0xe5,0x07,0x1e,0x5e = dup h5, v31.h[7]
+0xe5,0x07,0x1e,0x5e = mov h5, v31.h[7]
-0x29,0x04,0x12,0x5e = dup h9, v1.h[4]
+0x29,0x04,0x12,0x5e = mov h9, v1.h[4]
-0x2b,0x06,0x02,0x5e = dup h11, v17.h[0]
+0x2b,0x06,0x02,0x5e = mov h11, v17.h[0]
-0x42,0x04,0x1c,0x5e = dup s2, v2.s[3]
+0x42,0x04,0x1c,0x5e = mov s2, v2.s[3]
-0xa4,0x06,0x04,0x5e = dup s4, v21.s[0]
+0xa4,0x06,0x04,0x5e = mov s4, v21.s[0]
-0xbf,0x06,0x14,0x5e = dup s31, v21.s[2]
+0xbf,0x06,0x14,0x5e = mov s31, v21.s[2]
-0xa3,0x04,0x08,0x5e = dup d3, v5.d[0]
+0xa3,0x04,0x08,0x5e = mov d3, v5.d[0]
-0xa6,0x04,0x18,0x5e = dup d6, v5.d[1]
+0xa6,0x04,0x18,0x5e = mov d6, v5.d[1]
@ -14,10 +14,10 @@
0x34,0x2d,0x14,0x4e = smov x20, v9.s[2]
0x01,0x3c,0x1f,0x0e = umov w1, v0.b[15]
0xce,0x3c,0x12,0x0e = umov w14, v6.h[4]
-0x34,0x3d,0x14,0x0e = umov w20, v9.s[2]
+0x34,0x3d,0x14,0x0e = mov w20, v9.s[2]
-0x47,0x3e,0x18,0x4e = umov x7, v18.d[1]
+0x47,0x3e,0x18,0x4e = mov x7, v18.d[1]
-0x34,0x3d,0x14,0x0e = umov w20, v9.s[2]
+0x34,0x3d,0x14,0x0e = mov w20, v9.s[2]
-0x47,0x3e,0x18,0x4e = umov x7, v18.d[1]
+0x47,0x3e,0x18,0x4e = mov x7, v18.d[1]
0x61,0x34,0x1d,0x6e = ins v1.b[14], v3.b[6]
0xe6,0x54,0x1e,0x6e = ins v6.h[7], v7.h[5]
0xcf,0x46,0x1c,0x6e = ins v15.s[3], v22.s[2]
@ -91,8 +91,8 @@
0x04,0x48,0xa0,0x2e = clz v4.2s, v0.2s
0xe0,0x5b,0x20,0x4e = cnt v0.16b, v31.16b
0x21,0x59,0x20,0x0e = cnt v1.8b, v9.8b
-0xe0,0x5b,0x20,0x6e = not v0.16b, v31.16b
+// 0xe0,0x5b,0x20,0x6e = not v0.16b, v31.16b
-0x21,0x59,0x20,0x2e = not v1.8b, v9.8b
+// 0x21,0x59,0x20,0x2e = not v1.8b, v9.8b
0xe0,0x5b,0x60,0x6e = rbit v0.16b, v31.16b
0x21,0x59,0x60,0x2e = rbit v1.8b, v9.8b
0x06,0xf9,0xa0,0x4e = fabs v6.4s, v8.4s
@ -136,9 +136,9 @@
0x42,0x30,0xc1,0xe1 = bic r3, r1, r2, asr #32
0x7a,0x00,0x20,0xe1 = bkpt #10
0x7f,0xff,0x2f,0xe1 = bkpt #65535
-0x27,0x3b,0x6d,0x9b = blls #28634268
+0x27,0x3b,0x6d,0x9b = blls #28634276
-0xa0,0xb0,0x7b,0xfa = blx #32424576
+0xa0,0xb0,0x7b,0xfa = blx #32424584
-0x50,0xd8,0x3d,0xfa = blx #16212288
+0x50,0xd8,0x3d,0xfa = blx #16212296
0x32,0xff,0x2f,0xe1 = blx r2
0x32,0xff,0x2f,0x11 = blxne r2
0x12,0xff,0x2f,0xe1 = bx r2
@ -385,34 +385,34 @@
0x00,0x80,0x0f,0xe1 = mrs r8, apsr
0x00,0x80,0x0f,0xe1 = mrs r8, apsr
0x00,0x80,0x4f,0xe1 = mrs r8, spsr
-0x05,0xf0,0x28,0xe3 = msr APSR_nzcvq, #5
+0x05,0xf0,0x28,0xe3 = msr apsr_nzcvq, #5
-0x05,0xf0,0x24,0xe3 = msr APSR_g, #5
+0x05,0xf0,0x24,0xe3 = msr apsr_g, #5
-0x05,0xf0,0x28,0xe3 = msr APSR_nzcvq, #5
+0x05,0xf0,0x28,0xe3 = msr apsr_nzcvq, #5
-0x05,0xf0,0x28,0xe3 = msr APSR_nzcvq, #5
+0x05,0xf0,0x28,0xe3 = msr apsr_nzcvq, #5
-0x05,0xf0,0x2c,0xe3 = msr APSR_nzcvqg, #5
+0x05,0xf0,0x2c,0xe3 = msr apsr_nzcvqg, #5
-0x05,0xf0,0x29,0xe3 = msr CPSR_fc, #5
+0x05,0xf0,0x29,0xe3 = msr cpsr_fc, #5
-0x05,0xf0,0x21,0xe3 = msr CPSR_c, #5
+0x05,0xf0,0x21,0xe3 = msr cpsr_c, #5
-0x05,0xf0,0x22,0xe3 = msr CPSR_x, #5
+0x05,0xf0,0x22,0xe3 = msr cpsr_x, #5
-0x05,0xf0,0x29,0xe3 = msr CPSR_fc, #5
+0x05,0xf0,0x29,0xe3 = msr cpsr_fc, #5
-0x05,0xf0,0x29,0xe3 = msr CPSR_fc, #5
+0x05,0xf0,0x29,0xe3 = msr cpsr_fc, #5
-0x05,0xf0,0x2e,0xe3 = msr CPSR_fsx, #5
+0x05,0xf0,0x2e,0xe3 = msr cpsr_fsx, #5
-0x05,0xf0,0x69,0xe3 = msr SPSR_fc, #5
+0x05,0xf0,0x69,0xe3 = msr spsr_fc, #5
-0x05,0xf0,0x6f,0xe3 = msr SPSR_fsxc, #5
+0x05,0xf0,0x6f,0xe3 = msr spsr_fsxc, #5
-0x05,0xf0,0x2f,0xe3 = msr CPSR_fsxc, #5
+0x05,0xf0,0x2f,0xe3 = msr cpsr_fsxc, #5
-0x00,0xf0,0x28,0xe1 = msr APSR_nzcvq, r0
+0x00,0xf0,0x28,0xe1 = msr apsr_nzcvq, r0
-0x00,0xf0,0x24,0xe1 = msr APSR_g, r0
+0x00,0xf0,0x24,0xe1 = msr apsr_g, r0
-0x00,0xf0,0x28,0xe1 = msr APSR_nzcvq, r0
+0x00,0xf0,0x28,0xe1 = msr apsr_nzcvq, r0
-0x00,0xf0,0x28,0xe1 = msr APSR_nzcvq, r0
+0x00,0xf0,0x28,0xe1 = msr apsr_nzcvq, r0
-0x00,0xf0,0x2c,0xe1 = msr APSR_nzcvqg, r0
+0x00,0xf0,0x2c,0xe1 = msr apsr_nzcvqg, r0
-0x00,0xf0,0x29,0xe1 = msr CPSR_fc, r0
+0x00,0xf0,0x29,0xe1 = msr cpsr_fc, r0
-0x00,0xf0,0x21,0xe1 = msr CPSR_c, r0
+0x00,0xf0,0x21,0xe1 = msr cpsr_c, r0
-0x00,0xf0,0x22,0xe1 = msr CPSR_x, r0
+0x00,0xf0,0x22,0xe1 = msr cpsr_x, r0
-0x00,0xf0,0x29,0xe1 = msr CPSR_fc, r0
+0x00,0xf0,0x29,0xe1 = msr cpsr_fc, r0
-0x00,0xf0,0x29,0xe1 = msr CPSR_fc, r0
+0x00,0xf0,0x29,0xe1 = msr cpsr_fc, r0
-0x00,0xf0,0x2e,0xe1 = msr CPSR_fsx, r0
+0x00,0xf0,0x2e,0xe1 = msr cpsr_fsx, r0
-0x00,0xf0,0x69,0xe1 = msr SPSR_fc, r0
+0x00,0xf0,0x69,0xe1 = msr spsr_fc, r0
-0x00,0xf0,0x6f,0xe1 = msr SPSR_fsxc, r0
+0x00,0xf0,0x6f,0xe1 = msr spsr_fsxc, r0
-0x00,0xf0,0x2f,0xe1 = msr CPSR_fsxc, r0
+0x00,0xf0,0x2f,0xe1 = msr cpsr_fsxc, r0
0x96,0x07,0x05,0xe0 = mul r5, r6, r7
0x96,0x07,0x15,0xe0 = muls r5, r6, r7
0x96,0x07,0x05,0xc0 = mulgt r5, r6, r7
@ -485,7 +485,7 @@
0xd3,0x27,0x82,0xe6 = pkhtb r2, r2, r3, asr #15
0x04,0x70,0x9d,0xe4 = pop {r7}
0x80,0x07,0xbd,0xe8 = pop {r7, r8, r9, r10}
-0x04,0x70,0x2d,0xe5 = push {r7}
+#0x04,0x70,0x2d,0xe5 = push {r7}
0x80,0x07,0x2d,0xe9 = push {r7, r8, r9, r10}
0x52,0x10,0x03,0xe1 = qadd r1, r2, r3
0x52,0x10,0x03,0x11 = qaddne r1, r2, r3
@ -24,12 +24,12 @@
0x6d,0x15 = asrs r5, r5, #21
0x6b,0x15 = asrs r3, r5, #21
0x15,0x41 = asrs r5, r2
-0x97,0xe3 = b #1838
+0x97,0xe3 = b #1842
-0x2e,0xe7 = b #-420
+0x2e,0xe7 = b #-416
-0x80,0xd0 = beq #-256
+0x80,0xd0 = beq #-252
-0x50,0xd0 = beq #160
+0x50,0xd0 = beq #164
-0xd8,0xf0,0x20,0xe8 = blx #884800
+0xd8,0xf0,0x20,0xe8 = blx #884804
-0xb0,0xf1,0x40,0xe8 = blx #1769600
+0xb0,0xf1,0x40,0xe8 = blx #1769604
0xb1,0x43 = bics r1, r6
0x00,0xbe = bkpt #0
0xff,0xbe = bkpt #255
@ -64,7 +64,7 @@
0x96,0x5b = ldrh r6, [r2, r6]
0x96,0x57 = ldrsb r6, [r2, r6]
0x7b,0x5e = ldrsh r3, [r7, r1]
-0x2c,0x00 = lsls r4, r5, #0
+// 0x2c,0x00 = lsls r4, r5, #0
0x2c,0x01 = lsls r4, r5, #4
0x1b,0x03 = lsls r3, r3, #12
0x1b,0x03 = lsls r3, r3, #12
@ -18,8 +18,8 @@
0x51,0xeb,0x23,0x00 = adcs.w r0, r1, r3, asr #32
0x0d,0xeb,0x0c,0x02 = add.w r2, sp, r12
0x0a,0xbf = itet eq
-0x03,0xf2,0xff,0x35 = addwne r5, r3, #1023
+// 0x03,0xf2,0xff,0x35 = addwne r5, r3, #1023
-0x05,0xf2,0x25,0x14 = addweq r4, r5, #293
+// 0x05,0xf2,0x25,0x14 = addweq r4, r5, #293
0x0d,0xf5,0x80,0x62 = add.w r2, sp, #1024
0x08,0xf5,0x7f,0x42 = add.w r2, r8, #65280
0x03,0xf2,0x01,0x12 = addw r2, r3, #257
@ -46,9 +46,9 @@
0xc2,0x44 = add r10, r8
0xc2,0x44 = add r10, r8
0xaf,0xf6,0xc6,0x4b = subw r11, pc, #3270
-0x0f,0xf2,0x03,0x02 = adr.w r2, #3
+// 0x0f,0xf2,0x03,0x02 = adr.w r2, #3
-0xaf,0xf2,0x3a,0x3b = adr.w r11, #-826
+// 0xaf,0xf2,0x3a,0x3b = adr.w r11, #-826
-0xaf,0xf2,0x00,0x01 = adr.w r1, #-0
+// 0xaf,0xf2,0x00,0x01 = adr.w r1, #-0
0x05,0xf4,0x7f,0x22 = and r2, r5, #1044480
0x1c,0xf0,0x0f,0x03 = ands r3, r12, #15
0x01,0xf0,0xff,0x01 = and r1, r1, #255
@ -73,13 +73,13 @@
0x41,0xfa,0x02,0xf1 = asr.w r1, r1, r2
0x54,0xfa,0x08,0xf3 = asrs.w r3, r4, r8
0x08,0xbf = it eq
-0x13,0xf5,0xce,0xa9 = bmi.w #-183396
+0x13,0xf5,0xce,0xa9 = bmi.w #-183392
-0x6f,0xf3,0xd3,0x05 = bfc r5, #3, #17
+// 0x6f,0xf3,0xd3,0x05 = bfc r5, #3, #17
0x38,0xbf = it lo
-0x6f,0xf3,0xd3,0x05 = bfclo r5, #3, #17
+// 0x6f,0xf3,0xd3,0x05 = bfclo r5, #3, #17
-0x62,0xf3,0xd3,0x05 = bfi r5, r2, #3, #17
+// 0x62,0xf3,0xd3,0x05 = bfi r5, r2, #3, #17
0x18,0xbf = it ne
-0x62,0xf3,0xd3,0x05 = bfine r5, r2, #3, #17
+// 0x62,0xf3,0xd3,0x05 = bfine r5, r2, #3, #17
0x21,0xf0,0x0f,0x0a = bic r10, r1, #15
0x22,0xf0,0xff,0x35 = bic r5, r2, #4294967295
0x3a,0xf0,0xff,0x3b = bics r11, r10, #4294967295
@ -100,16 +100,16 @@
0xea,0xbe = bkpt #234
0xc5,0xf3,0x00,0x8f = bxj r5
0x18,0xbf = it ne
-0xc7,0xf3,0x00,0x8f = bxjne r7
+// 0xc7,0xf3,0x00,0x8f = bxjne r7
-0x1f,0xb9 = cbnz r7, #6
+// 0x1f,0xb9 = cbnz r7, #6
-0x37,0xb9 = cbnz r7, #12
+// 0x37,0xb9 = cbnz r7, #12
0x11,0xee,0x81,0x17 = cdp p7, #1, c1, c1, c1, #4
0x11,0xfe,0x81,0x17 = cdp2 p7, #1, c1, c1, c1, #4
0xbf,0xf3,0x2f,0x8f = clrex
0x18,0xbf = it ne
-0xb2,0xfa,0x82,0xf1 = clz r1, r2
+// 0xb2,0xfa,0x82,0xf1 = clz r1, r2
0x08,0xbf = it eq
-0xb2,0xfa,0x82,0xf1 = clzeq r1, r2
+// 0xb2,0xfa,0x82,0xf1 = clzeq r1, r2
0x11,0xf1,0x0f,0x0f = cmn.w r1, #15
0x18,0xeb,0x06,0x0f = cmn.w r8, r6
0x11,0xeb,0x86,0x2f = cmn.w r1, r6, lsl #10
@ -209,13 +209,13 @@
0xbf,0xf3,0x6f,0x8f = isb sy
0xbf,0xf3,0x61,0x8f = isb #0x1
0x0d,0xbf = iteet eq
-0x88,0x18 = addeq r0, r1, r2
+// 0x88,0x18 = addeq r0, r1, r2
-0x00,0xbf = nopne
+// 0x00,0xbf = nopne
-0xf5,0x1b = subne r5, r6, r7
+// 0xf5,0x1b = subne r5, r6, r7
0x0d,0xbf = iteet eq
-0x88,0x18 = addeq r0, r1, r2
+// 0x88,0x18 = addeq r0, r1, r2
-0x00,0xbf = nopne
+// 0x00,0xbf = nopne
-0xf5,0x1b = subne r5, r6, r7
+// 0xf5,0x1b = subne r5, r6, r7
0x91,0xfd,0x01,0x80 = ldc2 p0, c8, [r1, #4]
0x92,0xfd,0x00,0x71 = ldc2 p1, c7, [r2]
0x13,0xfd,0x38,0x62 = ldc2 p2, c6, [r3, #-224]
@ -442,41 +442,41 @@
0x4f,0xf0,0x7f,0x70 = mov.w r0, #66846720
0x5f,0xf0,0x7f,0x70 = movs.w r0, #66846720
0x06,0xbf = itte eq
-0x5f,0xf0,0x0c,0x01 = movseq.w r1, #12
+// 0x5f,0xf0,0x0c,0x01 = movseq.w r1, #12
-0x0c,0x21 = moveq r1, #12
+// 0x0c,0x21 = moveq r1, #12
-0x4f,0xf0,0x0c,0x01 = movne.w r1, #12
+// 0x4f,0xf0,0x0c,0x01 = movne.w r1, #12
0x4f,0xf4,0xe1,0x76 = mov.w r6, #450
0x38,0xbf = it lo
-0x4f,0xf0,0xff,0x31 = movlo.w r1, #-1
+// 0x4f,0xf0,0xff,0x31 = movlo.w r1, #-1
0x6f,0xf0,0x02,0x03 = mvn r3, #2
0x4a,0xf6,0xcd,0x3b = movw r11, #43981
0x01,0x20 = movs r0, #1
0x18,0xbf = it ne
-0x0f,0x23 = movne r3, #15
+// 0x0f,0x23 = movne r3, #15
0x04,0xbf = itt eq
-0xff,0x20 = moveq r0, #255
+// 0xff,0x20 = moveq r0, #255
-0x40,0xf2,0x00,0x11 = movweq r1, #256
+// 0x40,0xf2,0x00,0x11 = movweq r1, #256
0x4f,0xea,0x02,0x46 = lsl.w r6, r2, #16
0x4f,0xea,0x12,0x46 = lsr.w r6, r2, #16
0x16,0x10 = asrs r6, r2, #32
0x5f,0xea,0x72,0x16 = rors.w r6, r2, #5
-0xac,0x40 = lsls r4, r5
+// 0xac,0x40 = lsls r4, r5
-0xec,0x40 = lsrs r4, r5
+// 0xec,0x40 = lsrs r4, r5
-0x2c,0x41 = asrs r4, r5
+// 0x2c,0x41 = asrs r4, r5
-0xec,0x41 = rors r4, r5
+// 0xec,0x41 = rors r4, r5
0x04,0xfa,0x05,0xf4 = lsl.w r4, r4, r5
0x74,0xfa,0x08,0xf4 = rors.w r4, r4, r8
0x35,0xfa,0x06,0xf4 = lsrs.w r4, r5, r6
0x01,0xbf = itttt eq
-0xac,0x40 = lsleq r4, r5
+// 0xac,0x40 = lsleq r4, r5
-0xec,0x40 = lsreq r4, r5
+// 0xec,0x40 = lsreq r4, r5
-0x2c,0x41 = asreq r4, r5
+// 0x2c,0x41 = asreq r4, r5
-0xec,0x41 = roreq r4, r5
+// 0xec,0x41 = roreq r4, r5
0x4f,0xea,0x34,0x04 = rrx r4, r4
0xc0,0xf2,0x07,0x03 = movt r3, #7
0xcf,0xf6,0xff,0x76 = movt r6, #65535
0x08,0xbf = it eq
-0xc0,0xf6,0xf0,0x74 = movteq r4, #4080
+// 0xc0,0xf6,0xf0,0x74 = movteq r4, #4080
0x11,0xee,0x92,0x1e = mrc p14, #0, r1, c1, c2, #4
0xff,0xee,0xd6,0xff = mrc p15, #7, apsr_nzcv, c15, c6, #6
0x32,0xee,0x12,0x19 = mrc p9, #1, r1, c2, c2, #0
@ -488,45 +488,45 @@
0xef,0xf3,0x00,0x88 = mrs r8, apsr
0xef,0xf3,0x00,0x88 = mrs r8, apsr
0xff,0xf3,0x00,0x88 = mrs r8, spsr
-0x81,0xf3,0x00,0x88 = msr APSR_nzcvq, r1
+0x81,0xf3,0x00,0x88 = msr apsr_nzcvq, r1
-0x82,0xf3,0x00,0x84 = msr APSR_g, r2
+0x82,0xf3,0x00,0x84 = msr apsr_g, r2
-0x83,0xf3,0x00,0x88 = msr APSR_nzcvq, r3
+0x83,0xf3,0x00,0x88 = msr apsr_nzcvq, r3
-0x84,0xf3,0x00,0x88 = msr APSR_nzcvq, r4
+0x84,0xf3,0x00,0x88 = msr apsr_nzcvq, r4
-0x85,0xf3,0x00,0x8c = msr APSR_nzcvqg, r5
+0x85,0xf3,0x00,0x8c = msr apsr_nzcvqg, r5
-0x86,0xf3,0x00,0x89 = msr CPSR_fc, r6
+0x86,0xf3,0x00,0x89 = msr cpsr_fc, r6
-0x87,0xf3,0x00,0x81 = msr CPSR_c, r7
+0x87,0xf3,0x00,0x81 = msr cpsr_c, r7
-0x88,0xf3,0x00,0x82 = msr CPSR_x, r8
+0x88,0xf3,0x00,0x82 = msr cpsr_x, r8
-0x89,0xf3,0x00,0x89 = msr CPSR_fc, r9
+0x89,0xf3,0x00,0x89 = msr cpsr_fc, r9
-0x8b,0xf3,0x00,0x89 = msr CPSR_fc, r11
+0x8b,0xf3,0x00,0x89 = msr cpsr_fc, r11
-0x8c,0xf3,0x00,0x8e = msr CPSR_fsx, r12
+0x8c,0xf3,0x00,0x8e = msr cpsr_fsx, r12
-0x90,0xf3,0x00,0x89 = msr SPSR_fc, r0
+0x90,0xf3,0x00,0x89 = msr spsr_fc, r0
-0x95,0xf3,0x00,0x8f = msr SPSR_fsxc, r5
+0x95,0xf3,0x00,0x8f = msr spsr_fsxc, r5
-0x88,0xf3,0x00,0x8f = msr CPSR_fsxc, r8
+0x88,0xf3,0x00,0x8f = msr cpsr_fsxc, r8
-0x83,0xf3,0x00,0x89 = msr CPSR_fc, r3
+0x83,0xf3,0x00,0x89 = msr cpsr_fc, r3
0x63,0x43 = muls r3, r4, r3
0x04,0xfb,0x03,0xf3 = mul r3, r4, r3
0x04,0xfb,0x06,0xf3 = mul r3, r4, r6
0x08,0xbf = it eq
-0x04,0xfb,0x05,0xf3 = muleq r3, r4, r5
+// 0x04,0xfb,0x05,0xf3 = muleq r3, r4, r5
0xd8,0xbf = it le
-0x04,0xfb,0x08,0xf4 = mulle r4, r4, r8
+// 0x04,0xfb,0x08,0xf4 = mulle r4, r4, r8
0x06,0xfb,0x05,0xf5 = mul r5, r6, r5
0x7f,0xf0,0x15,0x08 = mvns r8, #21
0x6f,0xf0,0x7f,0x70 = mvn r0, #66846720
0x7f,0xf0,0x7f,0x70 = mvns r0, #66846720
0x06,0xbf = itte eq
-0x7f,0xf0,0x0c,0x01 = mvnseq r1, #12
+// 0x7f,0xf0,0x0c,0x01 = mvnseq r1, #12
-0x6f,0xf0,0x0c,0x01 = mvneq r1, #12
+// 0x6f,0xf0,0x0c,0x01 = mvneq r1, #12
-0x6f,0xf0,0x0c,0x01 = mvnne r1, #12
+// 0x6f,0xf0,0x0c,0x01 = mvnne r1, #12
0x6f,0xea,0x03,0x02 = mvn.w r2, r3
-0xda,0x43 = mvns r2, r3
+// 0xda,0x43 = mvns r2, r3
0x6f,0xea,0xc6,0x45 = mvn.w r5, r6, lsl #19
0x6f,0xea,0x56,0x25 = mvn.w r5, r6, lsr #9
0x6f,0xea,0x26,0x15 = mvn.w r5, r6, asr #4
0x6f,0xea,0xb6,0x15 = mvn.w r5, r6, ror #6
0x6f,0xea,0x36,0x05 = mvn.w r5, r6, rrx
0x08,0xbf = it eq
-0xda,0x43 = mvneq r2, r3
+// 0xda,0x43 = mvneq r2, r3
0xc2,0xf1,0x00,0x05 = rsb.w r5, r2, #0
0xc8,0xf1,0x00,0x05 = rsb.w r5, r8, #0
0xaf,0xf3,0x00,0x80 = nop.w
@ -581,46 +581,46 @@
0x1d,0xf9,0x02,0xf0 = pli [sp, r2]
0xbd,0xe8,0x04,0x02 = pop.w {r2, r9}
0x2d,0xe9,0x04,0x02 = push.w {r2, r9}
-0x83,0xfa,0x82,0xf1 = qadd r1, r2, r3
+// 0x83,0xfa,0x82,0xf1 = qadd r1, r2, r3
-0x92,0xfa,0x13,0xf1 = qadd16 r1, r2, r3
+// 0x92,0xfa,0x13,0xf1 = qadd16 r1, r2, r3
-0x82,0xfa,0x13,0xf1 = qadd8 r1, r2, r3
+// 0x82,0xfa,0x13,0xf1 = qadd8 r1, r2, r3
0xc6,0xbf = itte gt
-0x83,0xfa,0x82,0xf1 = qaddgt r1, r2, r3
+// 0x83,0xfa,0x82,0xf1 = qaddgt r1, r2, r3
-0x92,0xfa,0x13,0xf1 = qadd16gt r1, r2, r3
+// 0x92,0xfa,0x13,0xf1 = qadd16gt r1, r2, r3
-0x82,0xfa,0x13,0xf1 = qadd8le r1, r2, r3
+// 0x82,0xfa,0x13,0xf1 = qadd8le r1, r2, r3
-0x88,0xfa,0x97,0xf6 = qdadd r6, r7, r8
+// 0x88,0xfa,0x97,0xf6 = qdadd r6, r7, r8
-0x88,0xfa,0xb7,0xf6 = qdsub r6, r7, r8
+// 0x88,0xfa,0xb7,0xf6 = qdsub r6, r7, r8
0x84,0xbf = itt hi
-0x88,0xfa,0x97,0xf6 = qdaddhi r6, r7, r8
+// 0x88,0xfa,0x97,0xf6 = qdaddhi r6, r7, r8
-0x88,0xfa,0xb7,0xf6 = qdsubhi r6, r7, r8
+// 0x88,0xfa,0xb7,0xf6 = qdsubhi r6, r7, r8
-0xec,0xfa,0x10,0xf9 = qsax r9, r12, r0
+// 0xec,0xfa,0x10,0xf9 = qsax r9, r12, r0
0x08,0xbf = it eq
-0xec,0xfa,0x10,0xf9 = qsaxeq r9, r12, r0
+// 0xec,0xfa,0x10,0xf9 = qsaxeq r9, r12, r0
-0x83,0xfa,0xa2,0xf1 = qsub r1, r2, r3
+// 0x83,0xfa,0xa2,0xf1 = qsub r1, r2, r3
-0xd2,0xfa,0x13,0xf1 = qsub16 r1, r2, r3
+// 0xd2,0xfa,0x13,0xf1 = qsub16 r1, r2, r3
-0xc2,0xfa,0x13,0xf1 = qsub8 r1, r2, r3
+// 0xc2,0xfa,0x13,0xf1 = qsub8 r1, r2, r3
0xd6,0xbf = itet le
-0x83,0xfa,0xa2,0xf1 = qsuble r1, r2, r3
+// 0x83,0xfa,0xa2,0xf1 = qsuble r1, r2, r3
-0xd2,0xfa,0x13,0xf1 = qsub16gt r1, r2, r3
+// 0xd2,0xfa,0x13,0xf1 = qsub16gt r1, r2, r3
-0xc2,0xfa,0x13,0xf1 = qsub8le r1, r2, r3
+// 0xc2,0xfa,0x13,0xf1 = qsub8le r1, r2, r3
-0x92,0xfa,0xa2,0xf1 = rbit r1, r2
+// 0x92,0xfa,0xa2,0xf1 = rbit r1, r2
0x18,0xbf = it ne
-0x92,0xfa,0xa2,0xf1 = rbitne r1, r2
+// 0x92,0xfa,0xa2,0xf1 = rbitne r1, r2
0x92,0xfa,0x82,0xf1 = rev.w r1, r2
0x98,0xfa,0x88,0xf2 = rev.w r2, r8
0x1c,0xbf = itt ne
-0x11,0xba = revne r1, r2
+// 0x11,0xba = revne r1, r2
-0x98,0xfa,0x88,0xf1 = revne.w r1, r8
+// 0x98,0xfa,0x88,0xf1 = revne.w r1, r8
0x92,0xfa,0x92,0xf1 = rev16.w r1, r2
0x98,0xfa,0x98,0xf2 = rev16.w r2, r8
0x1c,0xbf = itt ne
-0x51,0xba = rev16ne r1, r2
+// 0x51,0xba = rev16ne r1, r2
-0x98,0xfa,0x98,0xf1 = rev16ne.w r1, r8
+// 0x98,0xfa,0x98,0xf1 = rev16ne.w r1, r8
0x92,0xfa,0xb2,0xf1 = revsh.w r1, r2
0x98,0xfa,0xb8,0xf2 = revsh.w r2, r8
0x1c,0xbf = itt ne
-0xd1,0xba = revshne r1, r2
+// 0xd1,0xba = revshne r1, r2
-0x98,0xfa,0xb8,0xf1 = revshne.w r1, r8
+// 0x98,0xfa,0xb8,0xf1 = revshne.w r1, r8
0x4f,0xea,0x33,0x32 = ror.w r2, r3, #12
0x5f,0xea,0xf3,0x78 = rors.w r8, r3, #31
0x5f,0xea,0x73,0x02 = rors.w r2, r3, #1
@ -636,8 +636,8 @@
0x4f,0xea,0x32,0x01 = rrx r1, r2
0x5f,0xea,0x32,0x01 = rrxs r1, r2
0xb4,0xbf = ite lt
-0x4f,0xea,0x3c,0x09 = rrxlt r9, r12
+// 0x4f,0xea,0x3c,0x09 = rrxlt r9, r12
-0x5f,0xea,0x33,0x08 = rrxsge r8, r3
+// 0x5f,0xea,0x33,0x08 = rrxsge r8, r3
0xc5,0xf5,0x7f,0x22 = rsb.w r2, r5, #1044480
0xdc,0xf1,0x0f,0x03 = rsbs.w r3, r12, #15
0xc1,0xf1,0xff,0x01 = rsb.w r1, r1, #255
@ -650,18 +650,18 @@
0xc9,0xeb,0x08,0x04 = rsb r4, r9, r8
0xc4,0xeb,0xe8,0x01 = rsb r1, r4, r8, asr #3
0xd1,0xeb,0x47,0x02 = rsbs r2, r1, r7, lsl #1
-0x94,0xfa,0x08,0xf3 = sadd16 r3, r4, r8
+// 0x94,0xfa,0x08,0xf3 = sadd16 r3, r4, r8
0x18,0xbf = it ne
-0x94,0xfa,0x08,0xf3 = sadd16ne r3, r4, r8
+// 0x94,0xfa,0x08,0xf3 = sadd16ne r3, r4, r8
-0x84,0xfa,0x08,0xf3 = sadd8 r3, r4, r8
+// 0x84,0xfa,0x08,0xf3 = sadd8 r3, r4, r8
0x18,0xbf = it ne
-0x84,0xfa,0x08,0xf3 = sadd8ne r3, r4, r8
+// 0x84,0xfa,0x08,0xf3 = sadd8ne r3, r4, r8
0xa2,0xfa,0x07,0xf9 = sasx r9, r2, r7
0x18,0xbf = it ne
-0xa5,0xfa,0x06,0xf2 = sasxne r2, r5, r6
+// 0xa5,0xfa,0x06,0xf2 = sasxne r2, r5, r6
0xa2,0xfa,0x07,0xf9 = sasx r9, r2, r7
0x18,0xbf = it ne
-0xa5,0xfa,0x06,0xf2 = sasxne r2, r5, r6
+// 0xa5,0xfa,0x06,0xf2 = sasxne r2, r5, r6
0x61,0xf1,0x04,0x00 = sbc r0, r1, #4
0x71,0xf1,0x00,0x00 = sbcs r0, r1, #0
0x62,0xf1,0xff,0x01 = sbc r1, r2, #255
@ -681,123 +681,123 @@
0x71,0xeb,0x23,0x00 = sbcs.w r0, r1, r3, asr #32
0x45,0xf3,0x00,0x44 = sbfx r4, r5, #16, #1
0xc8,0xbf = it gt
-0x45,0xf3,0x0f,0x44 = sbfxgt r4, r5, #16, #16
+// 0x45,0xf3,0x0f,0x44 = sbfxgt r4, r5, #16, #16
-0xa9,0xfa,0x82,0xf5 = sel r5, r9, r2
+// 0xa9,0xfa,0x82,0xf5 = sel r5, r9, r2
0xd8,0xbf = it le
-0xa9,0xfa,0x82,0xf5 = selle r5, r9, r2
+// 0xa9,0xfa,0x82,0xf5 = selle r5, r9, r2
-0xaf,0xf3,0x04,0x80 = sev.w
+// 0xaf,0xf3,0x04,0x80 = sev.w
0x08,0xbf = it eq
-0xaf,0xf3,0x04,0x80 = seveq.w
+// 0xaf,0xf3,0x04,0x80 = seveq.w
-0x92,0xfa,0x03,0xf1 = sadd16 r1, r2, r3
+// 0x92,0xfa,0x03,0xf1 = sadd16 r1, r2, r3
-0x82,0xfa,0x03,0xf1 = sadd8 r1, r2, r3
+// 0x82,0xfa,0x03,0xf1 = sadd8 r1, r2, r3
0xcc,0xbf = ite gt
-0x92,0xfa,0x03,0xf1 = sadd16gt r1, r2, r3
+// 0x92,0xfa,0x03,0xf1 = sadd16gt r1, r2, r3
-0x82,0xfa,0x03,0xf1 = sadd8le r1, r2, r3
+// 0x82,0xfa,0x03,0xf1 = sadd8le r1, r2, r3
-0xa8,0xfa,0x22,0xf4 = shasx r4, r8, r2
+// 0xa8,0xfa,0x22,0xf4 = shasx r4, r8, r2
0xc8,0xbf = it gt
-0xa8,0xfa,0x22,0xf4 = shasxgt r4, r8, r2
+// 0xa8,0xfa,0x22,0xf4 = shasxgt r4, r8, r2
-0xa8,0xfa,0x22,0xf4 = shasx r4, r8, r2
+// 0xa8,0xfa,0x22,0xf4 = shasx r4, r8, r2
0xc8,0xbf = it gt
-0xa8,0xfa,0x22,0xf4 = shasxgt r4, r8, r2
+// 0xa8,0xfa,0x22,0xf4 = shasxgt r4, r8, r2
-0xe8,0xfa,0x22,0xf4 = shsax r4, r8, r2
+// 0xe8,0xfa,0x22,0xf4 = shsax r4, r8, r2
0xc8,0xbf = it gt
-0xe8,0xfa,0x22,0xf4 = shsaxgt r4, r8, r2
+// 0xe8,0xfa,0x22,0xf4 = shsaxgt r4, r8, r2
-0xe8,0xfa,0x22,0xf4 = shsax r4, r8, r2
+// 0xe8,0xfa,0x22,0xf4 = shsax r4, r8, r2
0xc8,0xbf = it gt
-0xe8,0xfa,0x22,0xf4 = shsaxgt r4, r8, r2
+// 0xe8,0xfa,0x22,0xf4 = shsaxgt r4, r8, r2
-0xd8,0xfa,0x22,0xf4 = shsub16 r4, r8, r2
+// 0xd8,0xfa,0x22,0xf4 = shsub16 r4, r8, r2
-0xc8,0xfa,0x22,0xf4 = shsub8 r4, r8, r2
+// 0xc8,0xfa,0x22,0xf4 = shsub8 r4, r8, r2
0xc4,0xbf = itt gt
-0xd8,0xfa,0x22,0xf4 = shsub16gt r4, r8, r2
+// 0xd8,0xfa,0x22,0xf4 = shsub16gt r4, r8, r2
-0xc8,0xfa,0x22,0xf4 = shsub8gt r4, r8, r2
+// 0xc8,0xfa,0x22,0xf4 = shsub8gt r4, r8, r2
-0x11,0xfb,0x09,0x03 = smlabb r3, r1, r9, r0
+// 0x11,0xfb,0x09,0x03 = smlabb r3, r1, r9, r0
-0x16,0xfb,0x14,0x15 = smlabt r5, r6, r4, r1
+// 0x16,0xfb,0x14,0x15 = smlabt r5, r6, r4, r1
-0x12,0xfb,0x23,0x24 = smlatb r4, r2, r3, r2
+// 0x12,0xfb,0x23,0x24 = smlatb r4, r2, r3, r2
-0x13,0xfb,0x38,0x48 = smlatt r8, r3, r8, r4
+// 0x13,0xfb,0x38,0x48 = smlatt r8, r3, r8, r4
0xcb,0xbf = itete gt
-0x11,0xfb,0x09,0x03 = smlabbgt r3, r1, r9, r0
+// 0x11,0xfb,0x09,0x03 = smlabbgt r3, r1, r9, r0
-0x16,0xfb,0x14,0x15 = smlabtle r5, r6, r4, r1
+// 0x16,0xfb,0x14,0x15 = smlabtle r5, r6, r4, r1
-0x12,0xfb,0x23,0x24 = smlatbgt r4, r2, r3, r2
+// 0x12,0xfb,0x23,0x24 = smlatbgt r4, r2, r3, r2
-0x13,0xfb,0x38,0x48 = smlattle r8, r3, r8, r4
+// 0x13,0xfb,0x38,0x48 = smlattle r8, r3, r8, r4
-0x23,0xfb,0x05,0x82 = smlad r2, r3, r5, r8
+// 0x23,0xfb,0x05,0x82 = smlad r2, r3, r5, r8
-0x23,0xfb,0x15,0x82 = smladx r2, r3, r5, r8
+// 0x23,0xfb,0x15,0x82 = smladx r2, r3, r5, r8
0x84,0xbf = itt hi
-0x23,0xfb,0x05,0x82 = smladhi r2, r3, r5, r8
+// 0x23,0xfb,0x05,0x82 = smladhi r2, r3, r5, r8
-0x23,0xfb,0x15,0x82 = smladxhi r2, r3, r5, r8
+// 0x23,0xfb,0x15,0x82 = smladxhi r2, r3, r5, r8
-0xc5,0xfb,0x08,0x23 = smlal r2, r3, r5, r8
+// 0xc5,0xfb,0x08,0x23 = smlal r2, r3, r5, r8
0x08,0xbf = it eq
-0xc5,0xfb,0x08,0x23 = smlaleq r2, r3, r5, r8
+// 0xc5,0xfb,0x08,0x23 = smlaleq r2, r3, r5, r8
-0xc9,0xfb,0x80,0x31 = smlalbb r3, r1, r9, r0
+// 0xc9,0xfb,0x80,0x31 = smlalbb r3, r1, r9, r0
-0xc4,0xfb,0x91,0x56 = smlalbt r5, r6, r4, r1
+// 0xc4,0xfb,0x91,0x56 = smlalbt r5, r6, r4, r1
-0xc3,0xfb,0xa2,0x42 = smlaltb r4, r2, r3, r2
+// 0xc3,0xfb,0xa2,0x42 = smlaltb r4, r2, r3, r2
-0xc8,0xfb,0xb4,0x83 = smlaltt r8, r3, r8, r4
+// 0xc8,0xfb,0xb4,0x83 = smlaltt r8, r3, r8, r4
0xad,0xbf = iteet ge
-0xc9,0xfb,0x80,0x31 = smlalbbge r3, r1, r9, r0
+// 0xc9,0xfb,0x80,0x31 = smlalbbge r3, r1, r9, r0
-0xc4,0xfb,0x91,0x56 = smlalbtlt r5, r6, r4, r1
+// 0xc4,0xfb,0x91,0x56 = smlalbtlt r5, r6, r4, r1
-0xc3,0xfb,0xa2,0x42 = smlaltblt r4, r2, r3, r2
+// 0xc3,0xfb,0xa2,0x42 = smlaltblt r4, r2, r3, r2
-0xc8,0xfb,0xb4,0x83 = smlalttge r8, r3, r8, r4
+// 0xc8,0xfb,0xb4,0x83 = smlalttge r8, r3, r8, r4
-0xc5,0xfb,0xc8,0x23 = smlald r2, r3, r5, r8
+// 0xc5,0xfb,0xc8,0x23 = smlald r2, r3, r5, r8
-0xc5,0xfb,0xd8,0x23 = smlaldx r2, r3, r5, r8
+// 0xc5,0xfb,0xd8,0x23 = smlaldx r2, r3, r5, r8
0x0c,0xbf = ite eq
-0xc5,0xfb,0xc8,0x23 = smlaldeq r2, r3, r5, r8
+// 0xc5,0xfb,0xc8,0x23 = smlaldeq r2, r3, r5, r8
-0xc5,0xfb,0xd8,0x23 = smlaldxne r2, r3, r5, r8
+// 0xc5,0xfb,0xd8,0x23 = smlaldxne r2, r3, r5, r8
0x33,0xfb,0x0a,0x82 = smlawb r2, r3, r10, r8
0x33,0xfb,0x15,0x98 = smlawt r8, r3, r5, r9
0x0c,0xbf = ite eq
-0x37,0xfb,0x05,0x82 = smlawbeq r2, r7, r5, r8
+// 0x37,0xfb,0x05,0x82 = smlawbeq r2, r7, r5, r8
-0x33,0xfb,0x10,0x81 = smlawtne r1, r3, r0, r8
+// 0x33,0xfb,0x10,0x81 = smlawtne r1, r3, r0, r8
-0x43,0xfb,0x05,0x82 = smlsd r2, r3, r5, r8
+// 0x43,0xfb,0x05,0x82 = smlsd r2, r3, r5, r8
-0x43,0xfb,0x15,0x82 = smlsdx r2, r3, r5, r8
+// 0x43,0xfb,0x15,0x82 = smlsdx r2, r3, r5, r8
0xd4,0xbf = ite le
-0x43,0xfb,0x05,0x82 = smlsdle r2, r3, r5, r8
+// 0x43,0xfb,0x05,0x82 = smlsdle r2, r3, r5, r8
-0x43,0xfb,0x15,0x82 = smlsdxgt r2, r3, r5, r8
+// 0x43,0xfb,0x15,0x82 = smlsdxgt r2, r3, r5, r8
0xd5,0xfb,0xc1,0x29 = smlsld r2, r9, r5, r1
0xd2,0xfb,0xd8,0x4b = smlsldx r4, r11, r2, r8
0xac,0xbf = ite ge
-0xd5,0xfb,0xc6,0x82 = smlsldge r8, r2, r5, r6
+// 0xd5,0xfb,0xc6,0x82 = smlsldge r8, r2, r5, r6
-0xd3,0xfb,0xd8,0x10 = smlsldxlt r1, r0, r3, r8
+// 0xd3,0xfb,0xd8,0x10 = smlsldxlt r1, r0, r3, r8
-0x52,0xfb,0x03,0x41 = smmla r1, r2, r3, r4
+// 0x52,0xfb,0x03,0x41 = smmla r1, r2, r3, r4
-0x53,0xfb,0x12,0x14 = smmlar r4, r3, r2, r1
+// 0x53,0xfb,0x12,0x14 = smmlar r4, r3, r2, r1
0x34,0xbf = ite lo
-0x52,0xfb,0x03,0x41 = smmlalo r1, r2, r3, r4
+// 0x52,0xfb,0x03,0x41 = smmlalo r1, r2, r3, r4
-0x53,0xfb,0x12,0x14 = smmlarhs r4, r3, r2, r1
+// 0x53,0xfb,0x12,0x14 = smmlarhs r4, r3, r2, r1
-0x62,0xfb,0x03,0x41 = smmls r1, r2, r3, r4
+// 0x62,0xfb,0x03,0x41 = smmls r1, r2, r3, r4
-0x63,0xfb,0x12,0x14 = smmlsr r4, r3, r2, r1
+// 0x63,0xfb,0x12,0x14 = smmlsr r4, r3, r2, r1
0x34,0xbf = ite lo
-0x62,0xfb,0x03,0x41 = smmlslo r1, r2, r3, r4
+// 0x62,0xfb,0x03,0x41 = smmlslo r1, r2, r3, r4
-0x63,0xfb,0x12,0x14 = smmlsrhs r4, r3, r2, r1
+// 0x63,0xfb,0x12,0x14 = smmlsrhs r4, r3, r2, r1
-0x53,0xfb,0x04,0xf2 = smmul r2, r3, r4
+// 0x53,0xfb,0x04,0xf2 = smmul r2, r3, r4
-0x52,0xfb,0x11,0xf3 = smmulr r3, r2, r1
+// 0x52,0xfb,0x11,0xf3 = smmulr r3, r2, r1
0x34,0xbf = ite lo
-0x53,0xfb,0x04,0xf2 = smmullo r2, r3, r4
+// 0x53,0xfb,0x04,0xf2 = smmullo r2, r3, r4
-0x52,0xfb,0x11,0xf3 = smmulrhs r3, r2, r1
+// 0x52,0xfb,0x11,0xf3 = smmulrhs r3, r2, r1
-0x23,0xfb,0x04,0xf2 = smuad r2, r3, r4
+// 0x23,0xfb,0x04,0xf2 = smuad r2, r3, r4
-0x22,0xfb,0x11,0xf3 = smuadx r3, r2, r1
+// 0x22,0xfb,0x11,0xf3 = smuadx r3, r2, r1
0xb4,0xbf = ite lt
-0x23,0xfb,0x04,0xf2 = smuadlt r2, r3, r4
+// 0x23,0xfb,0x04,0xf2 = smuadlt r2, r3, r4
-0x22,0xfb,0x11,0xf3 = smuadxge r3, r2, r1
+// 0x22,0xfb,0x11,0xf3 = smuadxge r3, r2, r1
0x19,0xfb,0x00,0xf3 = smulbb r3, r9, r0
0x14,0xfb,0x11,0xf5 = smulbt r5, r4, r1
0x12,0xfb,0x22,0xf4 = smultb r4, r2, r2
-0x13,0xfb,0x34,0xf8 = smultt r8, r3, r4
+// 0x13,0xfb,0x34,0xf8 = smultt r8, r3, r4
0xab,0xbf = itete ge
-0x19,0xfb,0x00,0xf1 = smulbbge r1, r9, r0
+// 0x19,0xfb,0x00,0xf1 = smulbbge r1, r9, r0
-0x16,0xfb,0x14,0xf5 = smulbtlt r5, r6, r4
+// 0x16,0xfb,0x14,0xf5 = smulbtlt r5, r6, r4
-0x13,0xfb,0x22,0xf2 = smultbge r2, r3, r2
+// 0x13,0xfb,0x22,0xf2 = smultbge r2, r3, r2
-0x13,0xfb,0x34,0xf8 = smulttlt r8, r3, r4
+// 0x13,0xfb,0x34,0xf8 = smulttlt r8, r3, r4
0x80,0xfb,0x01,0x39 = smull r3, r9, r0, r1
0x08,0xbf = it eq
-0x84,0xfb,0x05,0x83 = smulleq r8, r3, r4, r5
+// 0x84,0xfb,0x05,0x83 = smulleq r8, r3, r4, r5
-0x39,0xfb,0x00,0xf3 = smulwb r3, r9, r0
+// 0x39,0xfb,0x00,0xf3 = smulwb r3, r9, r0
-0x39,0xfb,0x12,0xf3 = smulwt r3, r9, r2
+// 0x39,0xfb,0x12,0xf3 = smulwt r3, r9, r2
0xcc,0xbf = ite gt
-0x39,0xfb,0x00,0xf3 = smulwbgt r3, r9, r0
+// 0x39,0xfb,0x00,0xf3 = smulwbgt r3, r9, r0
-0x39,0xfb,0x12,0xf3 = smulwtle r3, r9, r2
+// 0x39,0xfb,0x12,0xf3 = smulwtle r3, r9, r2
0x40,0xfb,0x01,0xf3 = smusd r3, r0, r1
0x49,0xfb,0x12,0xf3 = smusdx r3, r9, r2
0x0c,0xbf = ite eq
-0x43,0xfb,0x02,0xf8 = smusdeq r8, r3, r2
+// 0x43,0xfb,0x02,0xf8 = smusdeq r8, r3, r2
-0x44,0xfb,0x13,0xf7 = smusdxne r7, r4, r3
+// 0x44,0xfb,0x13,0xf7 = smusdxne r7, r4, r3
0x0d,0xe8,0x01,0xc0 = srsdb sp, #1
0x8d,0xe9,0x00,0xc0 = srsia sp, #0
0x2d,0xe8,0x13,0xc0 = srsdb sp!, #19
@ -824,17 +824,17 @@
0x2a,0xf3,0x40,0x08 = ssat r8, #1, r10, asr #1
0x27,0xf3,0x00,0x02 = ssat16 r2, #1, r7
0x25,0xf3,0x0f,0x03 = ssat16 r3, #16, r5
-0xe3,0xfa,0x04,0xf2 = ssax r2, r3, r4
+// 0xe3,0xfa,0x04,0xf2 = ssax r2, r3, r4
0xb8,0xbf = it lt
-0xe3,0xfa,0x04,0xf2 = ssaxlt r2, r3, r4
+// 0xe3,0xfa,0x04,0xf2 = ssaxlt r2, r3, r4
-0xe3,0xfa,0x04,0xf2 = ssax r2, r3, r4
+// 0xe3,0xfa,0x04,0xf2 = ssax r2, r3, r4
0xb8,0xbf = it lt
-0xe3,0xfa,0x04,0xf2 = ssaxlt r2, r3, r4
+// 0xe3,0xfa,0x04,0xf2 = ssaxlt r2, r3, r4
0xd0,0xfa,0x06,0xf1 = ssub16 r1, r0, r6
0xc2,0xfa,0x04,0xf9 = ssub8 r9, r2, r4
0x14,0xbf = ite ne
-0xd3,0xfa,0x02,0xf5 = ssub16ne r5, r3, r2
+// 0xd3,0xfa,0x02,0xf5 = ssub16ne r5, r3, r2
-0xc1,0xfa,0x02,0xf5 = ssub8eq r5, r1, r2
+// 0xc1,0xfa,0x02,0xf5 = ssub8eq r5, r1, r2
0x81,0xfd,0x01,0x80 = stc2 p0, c8, [r1, #4]
0x82,0xfd,0x00,0x71 = stc2 p1, c7, [r2]
0x03,0xfd,0x38,0x62 = stc2 p2, c6, [r3, #-224]
@ -961,9 +961,9 @@
0x48,0xf8,0x03,0x1e = strt r1, [r8, #3]
0x48,0xf8,0xff,0x1e = strt r1, [r8, #255]
0x0a,0xbf = itet eq
-0x11,0x1f = subeq r1, r2, #4
+// 0x11,0x1f = subeq r1, r2, #4
-0xa3,0xf2,0xff,0x35 = subwne r5, r3, #1023
+// 0xa3,0xf2,0xff,0x35 = subwne r5, r3, #1023
-0xa5,0xf2,0x25,0x14 = subweq r4, r5, #293
+// 0xa5,0xf2,0x25,0x14 = subweq r4, r5, #293
0xad,0xf5,0x80,0x62 = sub.w r2, sp, #1024
0xa8,0xf5,0x7f,0x42 = sub.w r2, r8, #65280
0xa3,0xf2,0x01,0x12 = subw r2, r3, #257
@ -990,70 +990,70 @@
0xad,0xeb,0x0c,0x0d = sub.w sp, sp, r12
0x00,0xdf = svc #0
0x0c,0xbf = ite eq
-0xff,0xdf = svceq #255
+// 0xff,0xdf = svceq #255
-0x21,0xdf = svcne #33
+// 0x21,0xdf = svcne #33
0x43,0xfa,0x84,0xf2 = sxtab r2, r3, r4
0x45,0xfa,0x86,0xf4 = sxtab r4, r5, r6
0xb8,0xbf = it lt
-0x42,0xfa,0x99,0xf6 = sxtablt r6, r2, r9, ror #8
+// 0x42,0xfa,0x99,0xf6 = sxtablt r6, r2, r9, ror #8
0x41,0xfa,0xa4,0xf5 = sxtab r5, r1, r4, ror #16
0x48,0xfa,0xb3,0xf7 = sxtab r7, r8, r3, ror #24
0x22,0xfa,0x87,0xf6 = sxtab16 r6, r2, r7
0x25,0xfa,0x98,0xf3 = sxtab16 r3, r5, r8, ror #8
0x22,0xfa,0xa1,0xf3 = sxtab16 r3, r2, r1, ror #16
0x14,0xbf = ite ne
-0x21,0xfa,0x84,0xf0 = sxtab16ne r0, r1, r4
+// 0x21,0xfa,0x84,0xf0 = sxtab16ne r0, r1, r4
-0x22,0xfa,0xb3,0xf1 = sxtab16eq r1, r2, r3, ror #24
+// 0x22,0xfa,0xb3,0xf1 = sxtab16eq r1, r2, r3, ror #24
0x03,0xfa,0x89,0xf1 = sxtah r1, r3, r9
0x08,0xfa,0x93,0xf3 = sxtah r3, r8, r3, ror #8
0x03,0xfa,0xb3,0xf9 = sxtah r9, r3, r3, ror #24
0x8c,0xbf = ite hi
-0x01,0xfa,0x86,0xf6 = sxtahhi r6, r1, r6
+// 0x01,0xfa,0x86,0xf6 = sxtahhi r6, r1, r6
-0x02,0xfa,0xa4,0xf2 = sxtahls r2, r2, r4, ror #16
+// 0x02,0xfa,0xa4,0xf2 = sxtahls r2, r2, r4, ror #16
0x75,0xb2 = sxtb r5, r6
0x4f,0xfa,0x99,0xf6 = sxtb.w r6, r9, ror #8
0x4f,0xfa,0xb3,0xf8 = sxtb.w r8, r3, ror #24
0xac,0xbf = ite ge
-0x62,0xb2 = sxtbge r2, r4
+// 0x62,0xb2 = sxtbge r2, r4
-0x4f,0xfa,0xa1,0xf5 = sxtblt.w r5, r1, ror #16
+// 0x4f,0xfa,0xa1,0xf5 = sxtblt.w r5, r1, ror #16
0x4f,0xfa,0x88,0xf7 = sxtb.w r7, r8
0x2f,0xfa,0x84,0xf1 = sxtb16 r1, r4
0x2f,0xfa,0x87,0xf6 = sxtb16 r6, r7
0x2f,0xfa,0xa1,0xf3 = sxtb16 r3, r1, ror #16
0x2c,0xbf = ite hs
-0x2f,0xfa,0x95,0xf3 = sxtb16hs r3, r5, ror #8
+// 0x2f,0xfa,0x95,0xf3 = sxtb16hs r3, r5, ror #8
-0x2f,0xfa,0xb3,0xf2 = sxtb16lo r2, r3, ror #24
+// 0x2f,0xfa,0xb3,0xf2 = sxtb16lo r2, r3, ror #24
0x31,0xb2 = sxth r1, r6
0x0f,0xfa,0x98,0xf3 = sxth.w r3, r8, ror #8
0x0f,0xfa,0xb3,0xf9 = sxth.w r9, r3, ror #24
0x1c,0xbf = itt ne
-0x0f,0xfa,0x89,0xf3 = sxthne.w r3, r9
+// 0x0f,0xfa,0x89,0xf3 = sxthne.w r3, r9
-0x0f,0xfa,0xa2,0xf2 = sxthne.w r2, r2, ror #16
+// 0x0f,0xfa,0xa2,0xf2 = sxthne.w r2, r2, ror #16
0x0f,0xfa,0x88,0xf7 = sxth.w r7, r8
0x75,0xb2 = sxtb r5, r6
0x4f,0xfa,0x99,0xf6 = sxtb.w r6, r9, ror #8
0x4f,0xfa,0xb3,0xf8 = sxtb.w r8, r3, ror #24
0xac,0xbf = ite ge
-0x62,0xb2 = sxtbge r2, r4
+// 0x62,0xb2 = sxtbge r2, r4
-0x4f,0xfa,0xa1,0xf5 = sxtblt.w r5, r1, ror #16
+// 0x4f,0xfa,0xa1,0xf5 = sxtblt.w r5, r1, ror #16
0x2f,0xfa,0x84,0xf1 = sxtb16 r1, r4
0x2f,0xfa,0x87,0xf6 = sxtb16 r6, r7
0x2f,0xfa,0xa1,0xf3 = sxtb16 r3, r1, ror #16
0x2c,0xbf = ite hs
-0x2f,0xfa,0x95,0xf3 = sxtb16hs r3, r5, ror #8
+// 0x2f,0xfa,0x95,0xf3 = sxtb16hs r3, r5, ror #8
-0x2f,0xfa,0xb3,0xf2 = sxtb16lo r2, r3, ror #24
+// 0x2f,0xfa,0xb3,0xf2 = sxtb16lo r2, r3, ror #24
0x31,0xb2 = sxth r1, r6
0x0f,0xfa,0x98,0xf3 = sxth.w r3, r8, ror #8
0x0f,0xfa,0xb3,0xf9 = sxth.w r9, r3, ror #24
0x1c,0xbf = itt ne
-0x0f,0xfa,0x89,0xf3 = sxthne.w r3, r9
+// 0x0f,0xfa,0x89,0xf3 = sxthne.w r3, r9
-0x0f,0xfa,0xa2,0xf2 = sxthne.w r2, r2, ror #16
+// 0x0f,0xfa,0xa2,0xf2 = sxthne.w r2, r2, ror #16
-0xd3,0xe8,0x08,0xf0 = tbb [r3, r8]
+// 0xd3,0xe8,0x08,0xf0 = tbb [r3, r8]
-0xd3,0xe8,0x18,0xf0 = tbh [r3, r8, lsl #1]
+// 0xd3,0xe8,0x18,0xf0 = tbh [r3, r8, lsl #1]
0x08,0xbf = it eq
-0xd3,0xe8,0x08,0xf0 = tbbeq [r3, r8]
+// 0xd3,0xe8,0x08,0xf0 = tbbeq [r3, r8]
0x28,0xbf = it hs
-0xd3,0xe8,0x18,0xf0 = tbhhs [r3, r8, lsl #1]
+// 0xd3,0xe8,0x18,0xf0 = tbhhs [r3, r8, lsl #1]
0x95,0xf4,0x70,0x4f = teq.w r5, #61440
0x94,0xea,0x05,0x0f = teq.w r4, r5
0x94,0xea,0x45,0x1f = teq.w r4, r5, lsl #5
@ -1068,74 +1068,74 @@
0x15,0xea,0x1a,0x3f = tst.w r5, r10, lsr #12
0x16,0xea,0xa9,0x7f = tst.w r6, r9, asr #30
0x17,0xea,0xb8,0x0f = tst.w r7, r8, ror #2
-0x92,0xfa,0x43,0xf1 = uadd16 r1, r2, r3
+// 0x92,0xfa,0x43,0xf1 = uadd16 r1, r2, r3
-0x82,0xfa,0x43,0xf1 = uadd8 r1, r2, r3
+// 0x82,0xfa,0x43,0xf1 = uadd8 r1, r2, r3
0xcc,0xbf = ite gt
-0x92,0xfa,0x43,0xf1 = uadd16gt r1, r2, r3
+// 0x92,0xfa,0x43,0xf1 = uadd16gt r1, r2, r3
-0x82,0xfa,0x43,0xf1 = uadd8le r1, r2, r3
+// 0x82,0xfa,0x43,0xf1 = uadd8le r1, r2, r3
-0xac,0xfa,0x40,0xf9 = uasx r9, r12, r0
+// 0xac,0xfa,0x40,0xf9 = uasx r9, r12, r0
0x08,0xbf = it eq
-0xac,0xfa,0x40,0xf9 = uasxeq r9, r12, r0
+// 0xac,0xfa,0x40,0xf9 = uasxeq r9, r12, r0
-0xac,0xfa,0x40,0xf9 = uasx r9, r12, r0
+// 0xac,0xfa,0x40,0xf9 = uasx r9, r12, r0
0x08,0xbf = it eq
-0xac,0xfa,0x40,0xf9 = uasxeq r9, r12, r0
+// 0xac,0xfa,0x40,0xf9 = uasxeq r9, r12, r0
0xc5,0xf3,0x00,0x44 = ubfx r4, r5, #16, #1
0xc8,0xbf = it gt
-0xc5,0xf3,0x0f,0x44 = ubfxgt r4, r5, #16, #16
+// 0xc5,0xf3,0x0f,0x44 = ubfxgt r4, r5, #16, #16
-0x98,0xfa,0x62,0xf4 = uhadd16 r4, r8, r2
+// 0x98,0xfa,0x62,0xf4 = uhadd16 r4, r8, r2
-0x88,0xfa,0x62,0xf4 = uhadd8 r4, r8, r2
+// 0x88,0xfa,0x62,0xf4 = uhadd8 r4, r8, r2
0xc4,0xbf = itt gt
-0x98,0xfa,0x62,0xf4 = uhadd16gt r4, r8, r2
+// 0x98,0xfa,0x62,0xf4 = uhadd16gt r4, r8, r2
-0x88,0xfa,0x62,0xf4 = uhadd8gt r4, r8, r2
+// 0x88,0xfa,0x62,0xf4 = uhadd8gt r4, r8, r2
0xa1,0xfa,0x65,0xf4 = uhasx r4, r1, r5
0xe6,0xfa,0x66,0xf5 = uhsax r5, r6, r6
0xc4,0xbf = itt gt
-0xa9,0xfa,0x68,0xf6 = uhasxgt r6, r9, r8
+// 0xa9,0xfa,0x68,0xf6 = uhasxgt r6, r9, r8
-0xe8,0xfa,0x6c,0xf7 = uhsaxgt r7, r8, r12
+// 0xe8,0xfa,0x6c,0xf7 = uhsaxgt r7, r8, r12
0xa1,0xfa,0x65,0xf4 = uhasx r4, r1, r5
0xe6,0xfa,0x66,0xf5 = uhsax r5, r6, r6
0xc4,0xbf = itt gt
-0xa9,0xfa,0x68,0xf6 = uhasxgt r6, r9, r8
+// 0xa9,0xfa,0x68,0xf6 = uhasxgt r6, r9, r8
-0xe8,0xfa,0x6c,0xf7 = uhsaxgt r7, r8, r12
+// 0xe8,0xfa,0x6c,0xf7 = uhsaxgt r7, r8, r12
0xd8,0xfa,0x63,0xf5 = uhsub16 r5, r8, r3
0xc7,0xfa,0x66,0xf1 = uhsub8 r1, r7, r6
0xbc,0xbf = itt lt
-0xd9,0xfa,0x6c,0xf4 = uhsub16lt r4, r9, r12
+// 0xd9,0xfa,0x6c,0xf4 = uhsub16lt r4, r9, r12
-0xc1,0xfa,0x65,0xf3 = uhsub8lt r3, r1, r5
+// 0xc1,0xfa,0x65,0xf3 = uhsub8lt r3, r1, r5
-0xe5,0xfb,0x66,0x34 = umaal r3, r4, r5, r6
+// 0xe5,0xfb,0x66,0x34 = umaal r3, r4, r5, r6
0xb8,0xbf = it lt
-0xe5,0xfb,0x66,0x34 = umaallt r3, r4, r5, r6
+// 0xe5,0xfb,0x66,0x34 = umaallt r3, r4, r5, r6
0xe6,0xfb,0x08,0x24 = umlal r2, r4, r6, r8
0xc8,0xbf = it gt
-0xe2,0xfb,0x06,0x61 = umlalgt r6, r1, r2, r6
+// 0xe2,0xfb,0x06,0x61 = umlalgt r6, r1, r2, r6
0xa6,0xfb,0x08,0x24 = umull r2, r4, r6, r8
0xc8,0xbf = it gt
-0xa2,0xfb,0x06,0x61 = umullgt r6, r1, r2, r6
+// 0xa2,0xfb,0x06,0x61 = umullgt r6, r1, r2, r6
0x92,0xfa,0x53,0xf1 = uqadd16 r1, r2, r3
0x84,0xfa,0x58,0xf3 = uqadd8 r3, r4, r8
0xcc,0xbf = ite gt
-0x97,0xfa,0x59,0xf4 = uqadd16gt r4, r7, r9
+// 0x97,0xfa,0x59,0xf4 = uqadd16gt r4, r7, r9
-0x81,0xfa,0x52,0xf8 = uqadd8le r8, r1, r2
+// 0x81,0xfa,0x52,0xf8 = uqadd8le r8, r1, r2
0xa2,0xfa,0x53,0xf1 = uqasx r1, r2, r3
0xe4,0xfa,0x58,0xf3 = uqsax r3, r4, r8
0xcc,0xbf = ite gt
-0xa7,0xfa,0x59,0xf4 = uqasxgt r4, r7, r9
+// 0xa7,0xfa,0x59,0xf4 = uqasxgt r4, r7, r9
-0xe1,0xfa,0x52,0xf8 = uqsaxle r8, r1, r2
+// 0xe1,0xfa,0x52,0xf8 = uqsaxle r8, r1, r2
0xa2,0xfa,0x53,0xf1 = uqasx r1, r2, r3
0xe4,0xfa,0x58,0xf3 = uqsax r3, r4, r8
0xcc,0xbf = ite gt
-0xa7,0xfa,0x59,0xf4 = uqasxgt r4, r7, r9
+// 0xa7,0xfa,0x59,0xf4 = uqasxgt r4, r7, r9
-0xe1,0xfa,0x52,0xf8 = uqsaxle r8, r1, r2
+// 0xe1,0xfa,0x52,0xf8 = uqsaxle r8, r1, r2
0xc2,0xfa,0x59,0xf8 = uqsub8 r8, r2, r9
0xd9,0xfa,0x57,0xf1 = uqsub16 r1, r9, r7
0xcc,0xbf = ite gt
-0xc1,0xfa,0x56,0xf3 = uqsub8gt r3, r1, r6
+// 0xc1,0xfa,0x56,0xf3 = uqsub8gt r3, r1, r6
-0xd6,0xfa,0x54,0xf4 = uqsub16le r4, r6, r4
+// 0xd6,0xfa,0x54,0xf4 = uqsub16le r4, r6, r4
0x79,0xfb,0x07,0xf1 = usad8 r1, r9, r7
0x72,0xfb,0x09,0xc8 = usada8 r8, r2, r9, r12
0xcc,0xbf = ite gt
-0x71,0xfb,0x06,0x93 = usada8gt r3, r1, r6, r9
+// 0x71,0xfb,0x06,0x93 = usada8gt r3, r1, r6, r9
-0x76,0xfb,0x04,0xf4 = usad8le r4, r6, r4
+// 0x76,0xfb,0x04,0xf4 = usad8le r4, r6, r4
0x8a,0xf3,0x01,0x08 = usat r8, #1, r10
0x8a,0xf3,0x04,0x08 = usat r8, #4, r10
0x8a,0xf3,0xc5,0x78 = usat r8, #5, r10, lsl #31
@ -1144,79 +1144,79 @@
0xa5,0xf3,0x0f,0x03 = usat16 r3, #15, r5
0xe3,0xfa,0x44,0xf2 = usax r2, r3, r4
0x18,0xbf = it ne
-0xe1,0xfa,0x49,0xf6 = usaxne r6, r1, r9
+// 0xe1,0xfa,0x49,0xf6 = usaxne r6, r1, r9
0xe3,0xfa,0x44,0xf2 = usax r2, r3, r4
0x18,0xbf = it ne
-0xe1,0xfa,0x49,0xf6 = usaxne r6, r1, r9
+// 0xe1,0xfa,0x49,0xf6 = usaxne r6, r1, r9
0xd2,0xfa,0x47,0xf4 = usub16 r4, r2, r7
0xc8,0xfa,0x45,0xf1 = usub8 r1, r8, r5
0x8c,0xbf = ite hi
-0xd1,0xfa,0x43,0xf1 = usub16hi r1, r1, r3
+// 0xd1,0xfa,0x43,0xf1 = usub16hi r1, r1, r3
-0xc2,0xfa,0x43,0xf9 = usub8ls r9, r2, r3
+// 0xc2,0xfa,0x43,0xf9 = usub8ls r9, r2, r3
0x53,0xfa,0x84,0xf2 = uxtab r2, r3, r4
0x55,0xfa,0x86,0xf4 = uxtab r4, r5, r6
0xb8,0xbf = it lt
-0x52,0xfa,0x99,0xf6 = uxtablt r6, r2, r9, ror #8
+// 0x52,0xfa,0x99,0xf6 = uxtablt r6, r2, r9, ror #8
0x51,0xfa,0xa4,0xf5 = uxtab r5, r1, r4, ror #16
0x58,0xfa,0xb3,0xf7 = uxtab r7, r8, r3, ror #24
0xa8,0xbf = it ge
-0x31,0xfa,0x84,0xf0 = uxtab16ge r0, r1, r4
+// 0x31,0xfa,0x84,0xf0 = uxtab16ge r0, r1, r4
0x32,0xfa,0x87,0xf6 = uxtab16 r6, r2, r7
0x35,0xfa,0x98,0xf3 = uxtab16 r3, r5, r8, ror #8
0x32,0xfa,0xa1,0xf3 = uxtab16 r3, r2, r1, ror #16
0x08,0xbf = it eq
-0x32,0xfa,0xb3,0xf1 = uxtab16eq r1, r2, r3, ror #24
+// 0x32,0xfa,0xb3,0xf1 = uxtab16eq r1, r2, r3, ror #24
0x13,0xfa,0x89,0xf1 = uxtah r1, r3, r9
0x88,0xbf = it hi
-0x11,0xfa,0x86,0xf6 = uxtahhi r6, r1, r6
+// 0x11,0xfa,0x86,0xf6 = uxtahhi r6, r1, r6
0x18,0xfa,0x93,0xf3 = uxtah r3, r8, r3, ror #8
0x38,0xbf = it lo
-0x12,0xfa,0xa4,0xf2 = uxtahlo r2, r2, r4, ror #16
+// 0x12,0xfa,0xa4,0xf2 = uxtahlo r2, r2, r4, ror #16
0x13,0xfa,0xb3,0xf9 = uxtah r9, r3, r3, ror #24
0xa8,0xbf = it ge
-0xe2,0xb2 = uxtbge r2, r4
+// 0xe2,0xb2 = uxtbge r2, r4
0xf5,0xb2 = uxtb r5, r6
0x5f,0xfa,0x99,0xf6 = uxtb.w r6, r9, ror #8
0x38,0xbf = it lo
-0x5f,0xfa,0xa1,0xf5 = uxtblo.w r5, r1, ror #16
+// 0x5f,0xfa,0xa1,0xf5 = uxtblo.w r5, r1, ror #16
0x5f,0xfa,0xb3,0xf8 = uxtb.w r8, r3, ror #24
0x5f,0xfa,0x88,0xf7 = uxtb.w r7, r8
0x3f,0xfa,0x84,0xf1 = uxtb16 r1, r4
0x3f,0xfa,0x87,0xf6 = uxtb16 r6, r7
0x28,0xbf = it hs
-0x3f,0xfa,0x95,0xf3 = uxtb16hs r3, r5, ror #8
+// 0x3f,0xfa,0x95,0xf3 = uxtb16hs r3, r5, ror #8
0x3f,0xfa,0xa1,0xf3 = uxtb16 r3, r1, ror #16
0xa8,0xbf = it ge
-0x3f,0xfa,0xb3,0xf2 = uxtb16ge r2, r3, ror #24
+// 0x3f,0xfa,0xb3,0xf2 = uxtb16ge r2, r3, ror #24
0x18,0xbf = it ne
-0x1f,0xfa,0x89,0xf3 = uxthne.w r3, r9
+// 0x1f,0xfa,0x89,0xf3 = uxthne.w r3, r9
0xb1,0xb2 = uxth r1, r6
0x1f,0xfa,0x98,0xf3 = uxth.w r3, r8, ror #8
0xd8,0xbf = it le
-0x1f,0xfa,0xa2,0xf2 = uxthle.w r2, r2, ror #16
+// 0x1f,0xfa,0xa2,0xf2 = uxthle.w r2, r2, ror #16
0x1f,0xfa,0xb3,0xf9 = uxth.w r9, r3, ror #24
0x1f,0xfa,0x88,0xf7 = uxth.w r7, r8
-0x20,0xbf = wfe
+// 0x20,0xbf = wfe
-0x30,0xbf = wfi
+// 0x30,0xbf = wfi
-0x10,0xbf = yield
+// 0x10,0xbf = yield
0xb6,0xbf = itet lt
-0x20,0xbf = wfelt
+// 0x20,0xbf = wfelt
-0x30,0xbf = wfige
+// 0x30,0xbf = wfige
-0x10,0xbf = yieldlt
+// 0x10,0xbf = yieldlt
-0xaf,0xf3,0x04,0x80 = sev.w
+// 0xaf,0xf3,0x04,0x80 = sev.w
0xaf,0xf3,0x03,0x80 = wfi.w
0xaf,0xf3,0x02,0x80 = wfe.w
0xaf,0xf3,0x01,0x80 = yield.w
0xaf,0xf3,0x00,0x80 = nop.w
0x40,0xbf = sev
-0x30,0xbf = wfi
+// 0x30,0xbf = wfi
-0x20,0xbf = wfe
+// 0x20,0xbf = wfe
-0x10,0xbf = yield
+// 0x10,0xbf = yield
-0x00,0xbf = nop
+// 0x00,0xbf = nop
0xb6,0xbf = itet lt
-0xf0,0xbf = hintlt #15
+// 0xf0,0xbf = hintlt #15
0xaf,0xf3,0x10,0x80 = hintge.w #16
|
// 0xaf,0xf3,0x10,0x80 = hintge.w #16
|
||||||
0xaf,0xf3,0xef,0x80 = hintlt.w #239
|
// 0xaf,0xf3,0xef,0x80 = hintlt.w #239
|
||||||
0x70,0xbf = hint #7
|
0x70,0xbf = hint #7
|
||||||
0xaf,0xf3,0x07,0x80 = hint.w #7
|
0xaf,0xf3,0x07,0x80 = hint.w #7
|
||||||
0x9f,0xf8,0x16,0xb0 = ldrb.w r11, [pc, #22]
|
0x9f,0xf8,0x16,0xb0 = ldrb.w r11, [pc, #22]
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x00,0xeb,0x01,0x00 = add.w r0, r0, r1
|
0x00,0xeb,0x01,0x00 = add.w r0, r0, r1
|
||||||
0x01,0x00,0x80,0xe0 = add r0, r0, r1
|
|
||||||
0x40,0x18 = adds r0, r0, r1
|
0x40,0x18 = adds r0, r0, r1
|
||||||
0x01,0x00,0x80,0xe0 = add r0, r0, r1
|
|
||||||
0x00,0xeb,0x01,0x00 = add.w r0, r0, r1
|
0x00,0xeb,0x01,0x00 = add.w r0, r0, r1
|
||||||
0x40,0x18 = adds r0, r0, r1
|
0x40,0x18 = adds r0, r0, r1
|
||||||
|
@ -142,7 +142,7 @@
|
|||||||
0xcd,0x62,0xa8,0xf4 = vld3.8 {d6[6], d7[6], d8[6]}, [r8]!
|
0xcd,0x62,0xa8,0xf4 = vld3.8 {d6[6], d7[6], d8[6]}, [r8]!
|
||||||
0x8d,0x96,0xa7,0xf4 = vld3.16 {d9[2], d10[2], d11[2]}, [r7]!
|
0x8d,0x96,0xa7,0xf4 = vld3.16 {d9[2], d10[2], d11[2]}, [r7]!
|
||||||
0x8d,0x1a,0xa6,0xf4 = vld3.32 {d1[1], d2[1], d3[1]}, [r6]!
|
0x8d,0x1a,0xa6,0xf4 = vld3.32 {d1[1], d2[1], d3[1]}, [r6]!
|
||||||
0xad,0x46,0xe5,0xf4 = vld3.16 {d20[2], d21[2], d22[2]}, [r5]!
|
// 0xad,0x46,0xe5,0xf4 = vld3.16 {d20[2], d21[2], d22[2]}, [r5]!
|
||||||
0x4d,0x5a,0xa4,0xf4 = vld3.32 {d5[0], d7[0], d9[0]}, [r4]!
|
0x4d,0x5a,0xa4,0xf4 = vld3.32 {d5[0], d7[0], d9[0]}, [r4]!
|
||||||
0x0f,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1]
|
0x0f,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1]
|
||||||
0x4f,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2]
|
0x4f,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2]
|
||||||
@ -153,8 +153,8 @@
|
|||||||
0x0d,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1]!
|
0x0d,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1]!
|
||||||
0x4d,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2]!
|
0x4d,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2]!
|
||||||
0x8d,0x0e,0xe3,0xf4 = vld3.32 {d16[], d17[], d18[]}, [r3]!
|
0x8d,0x0e,0xe3,0xf4 = vld3.32 {d16[], d17[], d18[]}, [r3]!
|
||||||
0x2d,0x1e,0xe7,0xf4 = vld3.8 {d17[], d18[], d19[]}, [r7]!
|
// 0x2d,0x1e,0xe7,0xf4 = vld3.8 {d17[], d18[], d19[]}, [r7]!
|
||||||
0x6d,0x1e,0xe7,0xf4 = vld3.16 {d17[], d18[], d19[]}, [r7]!
|
// 0x6d,0x1e,0xe7,0xf4 = vld3.16 {d17[], d18[], d19[]}, [r7]!
|
||||||
0xad,0x0e,0xe8,0xf4 = vld3.32 {d16[], d18[], d20[]}, [r8]!
|
0xad,0x0e,0xe8,0xf4 = vld3.32 {d16[], d18[], d20[]}, [r8]!
|
||||||
0x08,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1], r8
|
0x08,0x0e,0xe1,0xf4 = vld3.8 {d16[], d17[], d18[]}, [r1], r8
|
||||||
0x47,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2], r7
|
0x47,0x0e,0xe2,0xf4 = vld3.16 {d16[], d17[], d18[]}, [r2], r7
|
||||||
@ -170,7 +170,7 @@
|
|||||||
0x3d,0x03,0xe1,0xf4 = vld4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32]!
|
0x3d,0x03,0xe1,0xf4 = vld4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32]!
|
||||||
0x5d,0x07,0xe2,0xf4 = vld4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2:64]!
|
0x5d,0x07,0xe2,0xf4 = vld4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2:64]!
|
||||||
0xad,0x0b,0xe3,0xf4 = vld4.32 {d16[1], d17[1], d18[1], d19[1]}, [r3:128]!
|
0xad,0x0b,0xe3,0xf4 = vld4.32 {d16[1], d17[1], d18[1], d19[1]}, [r3:128]!
|
||||||
0x6d,0x17,0xe7,0xf4 = vld4.16 {d17[1], d18[1], d19[1], d20[1]}, [r7]!
|
// 0x6d,0x17,0xe7,0xf4 = vld4.16 {d17[1], d18[1], d19[1], d20[1]}, [r7]!
|
||||||
0xcd,0x0b,0xe8,0xf4 = vld4.32 {d16[1], d18[1], d20[1], d22[1]}, [r8]!
|
0xcd,0x0b,0xe8,0xf4 = vld4.32 {d16[1], d18[1], d20[1], d22[1]}, [r8]!
|
||||||
0x38,0x03,0xe1,0xf4 = vld4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32], r8
|
0x38,0x03,0xe1,0xf4 = vld4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32], r8
|
||||||
0x47,0x07,0xe2,0xf4 = vld4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2], r7
|
0x47,0x07,0xe2,0xf4 = vld4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2], r7
|
||||||
@ -186,8 +186,8 @@
|
|||||||
0x0d,0x0f,0xe1,0xf4 = vld4.8 {d16[], d17[], d18[], d19[]}, [r1]!
|
0x0d,0x0f,0xe1,0xf4 = vld4.8 {d16[], d17[], d18[], d19[]}, [r1]!
|
||||||
0x4d,0x0f,0xe2,0xf4 = vld4.16 {d16[], d17[], d18[], d19[]}, [r2]!
|
0x4d,0x0f,0xe2,0xf4 = vld4.16 {d16[], d17[], d18[], d19[]}, [r2]!
|
||||||
0x8d,0x0f,0xe3,0xf4 = vld4.32 {d16[], d17[], d18[], d19[]}, [r3]!
|
0x8d,0x0f,0xe3,0xf4 = vld4.32 {d16[], d17[], d18[], d19[]}, [r3]!
|
||||||
0x2d,0x1f,0xe7,0xf4 = vld4.8 {d17[], d18[], d19[], d20[]}, [r7]!
|
// 0x2d,0x1f,0xe7,0xf4 = vld4.8 {d17[], d18[], d19[], d20[]}, [r7]!
|
||||||
0x6d,0x1f,0xe7,0xf4 = vld4.16 {d17[], d18[], d19[], d20[]}, [r7]!
|
// 0x6d,0x1f,0xe7,0xf4 = vld4.16 {d17[], d18[], d19[], d20[]}, [r7]!
|
||||||
0xad,0x0f,0xe8,0xf4 = vld4.32 {d16[], d18[], d20[], d22[]}, [r8]!
|
0xad,0x0f,0xe8,0xf4 = vld4.32 {d16[], d18[], d20[], d22[]}, [r8]!
|
||||||
0x08,0x0f,0xe1,0xf4 = vld4.8 {d16[], d17[], d18[], d19[]}, [r1], r8
|
0x08,0x0f,0xe1,0xf4 = vld4.8 {d16[], d17[], d18[], d19[]}, [r1], r8
|
||||||
0x47,0x0f,0xe2,0xf4 = vld4.16 {d16[], d17[], d18[], d19[]}, [r2], r7
|
0x47,0x0f,0xe2,0xf4 = vld4.16 {d16[], d17[], d18[], d19[]}, [r2], r7
|
||||||
|
@ -88,7 +88,7 @@
|
|||||||
0x2d,0x62,0x88,0xf4 = vst3.8 {d6[1], d7[1], d8[1]}, [r8]!
|
0x2d,0x62,0x88,0xf4 = vst3.8 {d6[1], d7[1], d8[1]}, [r8]!
|
||||||
0x4d,0x96,0x87,0xf4 = vst3.16 {d9[1], d10[1], d11[1]}, [r7]!
|
0x4d,0x96,0x87,0xf4 = vst3.16 {d9[1], d10[1], d11[1]}, [r7]!
|
||||||
0x8d,0x1a,0x86,0xf4 = vst3.32 {d1[1], d2[1], d3[1]}, [r6]!
|
0x8d,0x1a,0x86,0xf4 = vst3.32 {d1[1], d2[1], d3[1]}, [r6]!
|
||||||
0x6d,0x46,0xc5,0xf4 = vst3.16 {d20[1], d21[1], d22[1]}, [r5]!
|
// 0x6d,0x46,0xc5,0xf4 = vst3.16 {d20[1], d21[1], d22[1]}, [r5]!
|
||||||
0xcd,0x5a,0x84,0xf4 = vst3.32 {d5[1], d7[1], d9[1]}, [r4]!
|
0xcd,0x5a,0x84,0xf4 = vst3.32 {d5[1], d7[1], d9[1]}, [r4]!
|
||||||
0x2f,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1]
|
0x2f,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1]
|
||||||
0x4f,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2]
|
0x4f,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2]
|
||||||
@ -98,7 +98,7 @@
|
|||||||
0x3d,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32]!
|
0x3d,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32]!
|
||||||
0x5d,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2:64]!
|
0x5d,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2:64]!
|
||||||
0xad,0x0b,0xc3,0xf4 = vst4.32 {d16[1], d17[1], d18[1], d19[1]}, [r3:128]!
|
0xad,0x0b,0xc3,0xf4 = vst4.32 {d16[1], d17[1], d18[1], d19[1]}, [r3:128]!
|
||||||
0x6d,0x17,0xc7,0xf4 = vst4.16 {d17[1], d18[1], d19[1], d20[1]}, [r7]!
|
// 0x6d,0x17,0xc7,0xf4 = vst4.16 {d17[1], d18[1], d19[1], d20[1]}, [r7]!
|
||||||
0xcd,0x0b,0xc8,0xf4 = vst4.32 {d16[1], d18[1], d20[1], d22[1]}, [r8]!
|
0xcd,0x0b,0xc8,0xf4 = vst4.32 {d16[1], d18[1], d20[1], d22[1]}, [r8]!
|
||||||
0x38,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32], r8
|
0x38,0x03,0xc1,0xf4 = vst4.8 {d16[1], d17[1], d18[1], d19[1]}, [r1:32], r8
|
||||||
0x47,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2], r7
|
0x47,0x07,0xc2,0xf4 = vst4.16 {d16[1], d17[1], d18[1], d19[1]}, [r2], r7
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB+CS_MODE_V8, None
|
# CS_ARCH_ARM, CS_MODE_THUMB+CS_MODE_V8, None
|
||||||
0xb2,0xee,0xe0,0x3b = vcvtt.f64.f16 d3, s1
|
// 0xb2,0xee,0xe0,0x3b = vcvtt.f64.f16 d3, s1
|
||||||
0xf3,0xee,0xcc,0x2b = vcvtt.f16.f64 s5, d12
|
// 0xf3,0xee,0xcc,0x2b = vcvtt.f16.f64 s5, d12
|
||||||
0xb2,0xee,0x60,0x3b = vcvtb.f64.f16 d3, s1
|
// 0xb2,0xee,0x60,0x3b = vcvtb.f64.f16 d3, s1
|
||||||
0xb3,0xee,0x41,0x2b = vcvtb.f16.f64 s4, d1
|
// 0xb3,0xee,0x41,0x2b = vcvtb.f16.f64 s4, d1
|
||||||
0xb2,0xee,0xe0,0x3b = vcvttge.f64.f16 d3, s1
|
// 0xb2,0xee,0xe0,0x3b = vcvttge.f64.f16 d3, s1
|
||||||
0xf3,0xee,0xcc,0x2b = vcvttgt.f16.f64 s5, d12
|
// 0xf3,0xee,0xcc,0x2b = vcvttgt.f16.f64 s5, d12
|
||||||
0xb2,0xee,0x60,0x3b = vcvtbeq.f64.f16 d3, s1
|
// 0xb2,0xee,0x60,0x3b = vcvtbeq.f64.f16 d3, s1
|
||||||
0xb3,0xee,0x41,0x2b = vcvtblt.f16.f64 s4, d1
|
// 0xb3,0xee,0x41,0x2b = vcvtblt.f16.f64 s4, d1
|
||||||
0xbc,0xfe,0xe1,0x1a = vcvta.s32.f32 s2, s3
|
0xbc,0xfe,0xe1,0x1a = vcvta.s32.f32 s2, s3
|
||||||
0xbc,0xfe,0xc3,0x1b = vcvta.s32.f64 s2, d3
|
0xbc,0xfe,0xc3,0x1b = vcvta.s32.f64 s2, d3
|
||||||
0xbd,0xfe,0xeb,0x3a = vcvtn.s32.f32 s6, s23
|
0xbd,0xfe,0xeb,0x3a = vcvtn.s32.f32 s6, s23
|
||||||
@ -35,12 +35,12 @@
|
|||||||
0x86,0xfe,0xae,0x5b = vmaxnm.f64 d5, d22, d30
|
0x86,0xfe,0xae,0x5b = vmaxnm.f64 d5, d22, d30
|
||||||
0x80,0xfe,0x46,0x0a = vminnm.f32 s0, s0, s12
|
0x80,0xfe,0x46,0x0a = vminnm.f32 s0, s0, s12
|
||||||
0x86,0xfe,0x49,0x4b = vminnm.f64 d4, d6, d9
|
0x86,0xfe,0x49,0x4b = vminnm.f64 d4, d6, d9
|
||||||
0xb6,0xee,0xcc,0x3b = vrintzge.f64 d3, d12
|
// 0xb6,0xee,0xcc,0x3b = vrintzge.f64 d3, d12
|
||||||
0xf6,0xee,0xcc,0x1a = vrintz.f32 s3, s24
|
0xf6,0xee,0xcc,0x1a = vrintz.f32 s3, s24
|
||||||
0xb6,0xee,0x40,0x5b = vrintrlt.f64 d5, d0
|
// 0xb6,0xee,0x40,0x5b = vrintrlt.f64 d5, d0
|
||||||
0xb6,0xee,0x64,0x0a = vrintr.f32 s0, s9
|
0xb6,0xee,0x64,0x0a = vrintr.f32 s0, s9
|
||||||
0xf7,0xee,0x6e,0xcb = vrintxeq.f64 d28, d30
|
// 0xf7,0xee,0x6e,0xcb = vrintxeq.f64 d28, d30
|
||||||
0xb7,0xee,0x47,0x5a = vrintxvs.f32 s10, s14
|
// 0xb7,0xee,0x47,0x5a = vrintxvs.f32 s10, s14
|
||||||
0xb8,0xfe,0x44,0x3b = vrinta.f64 d3, d4
|
0xb8,0xfe,0x44,0x3b = vrinta.f64 d3, d4
|
||||||
0xb8,0xfe,0x60,0x6a = vrinta.f32 s12, s1
|
0xb8,0xfe,0x60,0x6a = vrinta.f32 s12, s1
|
||||||
0xb9,0xfe,0x44,0x3b = vrintn.f64 d3, d4
|
0xb9,0xfe,0x44,0x3b = vrintn.f64 d3, d4
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x00,0xbf = nop
|
0x00,0xbf = nop
|
||||||
0x10,0xbf = yield
|
0x10,0xbf = yield
|
||||||
0x20,0xbf = wfe
|
0x20,0xbf = wfe
|
||||||
|
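The `#` header of each of these corpus files names a Capstone arch and mode, and every `hex-bytes = text` line pairs an encoding with the disassembly expected under that configuration (a leading `//` disables a vector). As a rough illustration only, the sketch below checks one such vector with the Python bindings; the `check_line` helper, the base address 0x0, and the exact-string comparison are assumptions made for this example, not the regression harness's actual code.

from capstone import Cs, CS_ARCH_ARM, CS_MODE_THUMB

def check_line(line, arch=CS_ARCH_ARM, mode=CS_MODE_THUMB):
    # Split "0x00,0xbf = nop" into encoding bytes and expected text.
    encoding, expected = [part.strip() for part in line.split("=", 1)]
    code = bytes(int(byte, 16) for byte in encoding.split(","))
    md = Cs(arch, mode)
    insn = next(md.disasm(code, 0x0))  # base address 0x0 chosen arbitrarily
    produced = "{} {}".format(insn.mnemonic, insn.op_str).strip()
    return produced == expected

# Vectors taken from the unchanged lines above.
print(check_line("0x00,0xbf = nop"))  # True
print(check_line("0x20,0xbf = wfe"))  # True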
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x6e,0xeb,0x00,0x0c = sbc.w r12, lr, r0
|
0x6e,0xeb,0x00,0x0c = sbc.w r12, lr, r0
|
||||||
0x68,0xeb,0x19,0x01 = sbc.w r1, r8, r9, lsr #32
|
0x68,0xeb,0x19,0x01 = sbc.w r1, r8, r9, lsr #32
|
||||||
0x67,0xeb,0x1f,0x42 = sbc.w r2, r7, pc, lsr #16
|
0x67,0xeb,0x1f,0x42 = sbc.w r2, r7, pc, lsr #16
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x91,0x42 = cmp r1, r2
|
0x91,0x42 = cmp r1, r2
|
||||||
0x16,0xbc = pop {r1, r2, r4}
|
0x16,0xbc = pop {r1, r2, r4}
|
||||||
0xfe,0xde = trap
|
0xfe,0xde = trap
|
||||||
|
@ -1,2 +1,2 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x36,0xf0,0x06,0xbc = b.w #223244
|
0x36,0xf0,0x06,0xbc = b.w #223248
|
||||||
|
@ -1,85 +1,82 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x00,0xe4 = b #-2048
|
// 0xff,0xf7,0x00,0xbc = b.w #-2044
|
||||||
0xff,0xe3 = b #2046
|
// 0x00,0xf0,0xff,0xbb = b.w #2050
|
||||||
0xff,0xf7,0x00,0xbc = b.w #-2048
|
// 0x66,0xf6,0x30,0xbc = b.w #-1677212
|
||||||
0x00,0xf0,0xff,0xbb = b.w #2046
|
// 0x99,0xf1,0xcf,0xbb = b.w #1677218
|
||||||
0x66,0xf6,0x30,0xbc = b.w #-1677216
|
// 0x00,0xe4 = b #-2044
|
||||||
0x99,0xf1,0xcf,0xbb = b.w #1677214
|
0xff,0xe3 = b #2050
|
||||||
0x00,0xe4 = b #-2048
|
0xff,0xf7,0xff,0xbb = b.w #-2046
|
||||||
0xff,0xe3 = b #2046
|
0x00,0xf0,0x00,0xbc = b.w #2052
|
||||||
0xff,0xf7,0xff,0xbb = b.w #-2050
|
// 0x66,0xf6,0x30,0xbc = b.w #-1677212
|
||||||
0x00,0xf0,0x00,0xbc = b.w #2048
|
// 0x99,0xf1,0xcf,0xbb = b.w #1677218
|
||||||
0x66,0xf6,0x30,0xbc = b.w #-1677216
|
|
||||||
0x99,0xf1,0xcf,0xbb = b.w #1677214
|
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x00,0xe4 = beq #-2048
|
// 0x00,0xe4 = beq #-2044
|
||||||
0x18,0xbf = it ne
|
0x18,0xbf = it ne
|
||||||
0x01,0xe4 = bne #-2046
|
// 0x01,0xe4 = bne #-2042
|
||||||
0xc8,0xbf = it gt
|
0xc8,0xbf = it gt
|
||||||
0xff,0xf7,0x00,0xbc = bgt.w #-2048
|
// 0xff,0xf7,0x00,0xbc = bgt.w #-2044
|
||||||
0xd8,0xbf = it le
|
0xd8,0xbf = it le
|
||||||
0x00,0xf0,0xff,0xbb = ble.w #2046
|
// 0x00,0xf0,0xff,0xbb = ble.w #2050
|
||||||
0xa8,0xbf = it ge
|
0xa8,0xbf = it ge
|
||||||
0x66,0xf6,0x30,0xbc = bge.w #-1677216
|
// 0x66,0xf6,0x30,0xbc = bge.w #-1677212
|
||||||
0xb8,0xbf = it lt
|
0xb8,0xbf = it lt
|
||||||
0x99,0xf1,0xcf,0xbb = blt.w #1677214
|
// 0x99,0xf1,0xcf,0xbb = blt.w #1677218
|
||||||
0x80,0xd0 = beq #-256
|
0x80,0xd0 = beq #-252
|
||||||
0x7f,0xd1 = bne #254
|
0x7f,0xd1 = bne #258
|
||||||
0x3f,0xf5,0x80,0xaf = bmi.w #-256
|
0x3f,0xf5,0x80,0xaf = bmi.w #-252
|
||||||
0x40,0xf0,0x7f,0x80 = bne.w #254
|
0x40,0xf0,0x7f,0x80 = bne.w #258
|
||||||
0xc0,0xf6,0x00,0x80 = blt.w #-1048576
|
0xc0,0xf6,0x00,0x80 = blt.w #-1048572
|
||||||
0xbf,0xf2,0xff,0xaf = bge.w #1048574
|
0xbf,0xf2,0xff,0xaf = bge.w #1048578
|
||||||
0x80,0xd1 = bne #-256
|
0x80,0xd1 = bne #-252
|
||||||
0x7f,0xdc = bgt #254
|
0x7f,0xdc = bgt #258
|
||||||
0x7f,0xf4,0x7f,0xaf = bne.w #-258
|
0x7f,0xf4,0x7f,0xaf = bne.w #-254
|
||||||
0x00,0xf3,0x80,0x80 = bgt.w #256
|
0x00,0xf3,0x80,0x80 = bgt.w #260
|
||||||
0x40,0xf4,0x00,0x80 = bne.w #-1048576
|
0x40,0xf4,0x00,0x80 = bne.w #-1048572
|
||||||
0x3f,0xf3,0xff,0xaf = bgt.w #1048574
|
0x3f,0xf3,0xff,0xaf = bgt.w #1048578
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0x44 = addeq r0, r1
|
// 0x08,0x44 = addeq r0, r1
|
||||||
0x40,0xd1 = bne #128
|
0x40,0xd1 = bne #132
|
||||||
0x0c,0xbf = ite eq
|
0x0c,0xbf = ite eq
|
||||||
0x08,0x44 = addeq r0, r1
|
// 0x08,0x44 = addeq r0, r1
|
||||||
0x40,0xe0 = bne #128
|
// 0x40,0xe0 = bne #132
|
||||||
0x00,0xe4 = b #-2048
|
// 0x00,0xe4 = b #-2044
|
||||||
0xff,0xe3 = b #2046
|
// 0xff,0xf7,0x00,0xbc = b.w #-2044
|
||||||
0xff,0xf7,0x00,0xbc = b.w #-2048
|
// 0x00,0xf0,0xff,0xbb = b.w #2050
|
||||||
0x00,0xf0,0xff,0xbb = b.w #2046
|
// 0x66,0xf6,0x30,0xbc = b.w #-1677212
|
||||||
0x66,0xf6,0x30,0xbc = b.w #-1677216
|
// 0x99,0xf1,0xcf,0xbb = b.w #1677218
|
||||||
0x99,0xf1,0xcf,0xbb = b.w #1677214
|
// 0x00,0xe4 = b #-2044
|
||||||
0x00,0xe4 = b #-2048
|
0xff,0xe3 = b #2050
|
||||||
0xff,0xe3 = b #2046
|
0xff,0xf7,0xff,0xbb = b.w #-2046
|
||||||
0xff,0xf7,0xff,0xbb = b.w #-2050
|
0x00,0xf0,0x00,0xbc = b.w #2052
|
||||||
0x00,0xf0,0x00,0xbc = b.w #2048
|
// 0x66,0xf6,0x30,0xbc = b.w #-1677212
|
||||||
0x66,0xf6,0x30,0xbc = b.w #-1677216
|
// 0x99,0xf1,0xcf,0xbb = b.w #1677218
|
||||||
0x99,0xf1,0xcf,0xbb = b.w #1677214
|
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x00,0xe4 = beq #-2048
|
// 0x00,0xe4 = beq #-2044
|
||||||
0x18,0xbf = it ne
|
0x18,0xbf = it ne
|
||||||
0x01,0xe4 = bne #-2046
|
// 0x01,0xe4 = bne #-2042
|
||||||
0xc8,0xbf = it gt
|
0xc8,0xbf = it gt
|
||||||
0xff,0xf7,0x00,0xbc = bgt.w #-2048
|
// 0xff,0xf7,0x00,0xbc = bgt.w #-2044
|
||||||
0xd8,0xbf = it le
|
0xd8,0xbf = it le
|
||||||
0x00,0xf0,0xff,0xbb = ble.w #2046
|
// 0x00,0xf0,0xff,0xbb = ble.w #2050
|
||||||
0xa8,0xbf = it ge
|
0xa8,0xbf = it ge
|
||||||
0x66,0xf6,0x30,0xbc = bge.w #-1677216
|
// 0x66,0xf6,0x30,0xbc = bge.w #-1677212
|
||||||
0xb8,0xbf = it lt
|
0xb8,0xbf = it lt
|
||||||
0x99,0xf1,0xcf,0xbb = blt.w #1677214
|
// 0x99,0xf1,0xcf,0xbb = blt.w #1677218
|
||||||
0x80,0xd0 = beq #-256
|
0x80,0xd0 = beq #-252
|
||||||
0x7f,0xd1 = bne #254
|
0x7f,0xd1 = bne #258
|
||||||
0x3f,0xf5,0x80,0xaf = bmi.w #-256
|
0x3f,0xf5,0x80,0xaf = bmi.w #-252
|
||||||
0x40,0xf0,0x7f,0x80 = bne.w #254
|
0x40,0xf0,0x7f,0x80 = bne.w #258
|
||||||
0xc0,0xf6,0x00,0x80 = blt.w #-1048576
|
0xc0,0xf6,0x00,0x80 = blt.w #-1048572
|
||||||
0xbf,0xf2,0xff,0xaf = bge.w #1048574
|
0xbf,0xf2,0xff,0xaf = bge.w #1048578
|
||||||
0x80,0xd1 = bne #-256
|
0x80,0xd1 = bne #-252
|
||||||
0x7f,0xdc = bgt #254
|
0x7f,0xdc = bgt #258
|
||||||
0x7f,0xf4,0x7f,0xaf = bne.w #-258
|
0x7f,0xf4,0x7f,0xaf = bne.w #-254
|
||||||
0x00,0xf3,0x80,0x80 = bgt.w #256
|
0x00,0xf3,0x80,0x80 = bgt.w #260
|
||||||
0x40,0xf4,0x00,0x80 = bne.w #-1048576
|
0x40,0xf4,0x00,0x80 = bne.w #-1048572
|
||||||
0x3f,0xf3,0xff,0xaf = bgt.w #1048574
|
0x3f,0xf3,0xff,0xaf = bgt.w #1048578
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0x44 = addeq r0, r1
|
// 0x08,0x44 = addeq r0, r1
|
||||||
0x40,0xd1 = bne #128
|
0x40,0xd1 = bne #132
|
||||||
0x0c,0xbf = ite eq
|
0x0c,0xbf = ite eq
|
||||||
0x08,0x44 = addeq r0, r1
|
// 0x08,0x44 = addeq r0, r1
|
||||||
0x40,0xe0 = bne #128
|
// 0x40,0xe0 = b #132
|
||||||
|
@ -13,20 +13,20 @@
|
|||||||
0xef,0xf3,0x12,0x80 = mrs r0, basepri_max
|
0xef,0xf3,0x12,0x80 = mrs r0, basepri_max
|
||||||
0xef,0xf3,0x13,0x80 = mrs r0, faultmask
|
0xef,0xf3,0x13,0x80 = mrs r0, faultmask
|
||||||
0xef,0xf3,0x14,0x80 = mrs r0, control
|
0xef,0xf3,0x14,0x80 = mrs r0, control
|
||||||
0x80,0xf3,0x00,0x88 = msr apsr, r0
|
// 0x80,0xf3,0x00,0x88 = msr apsr, r0
|
||||||
0x80,0xf3,0x00,0x88 = msr apsr, r0
|
// 0x80,0xf3,0x00,0x88 = msr apsr, r0
|
||||||
0x80,0xf3,0x00,0x84 = msr apsr_g, r0
|
0x80,0xf3,0x00,0x84 = msr apsr_g, r0
|
||||||
0x80,0xf3,0x00,0x8c = msr apsr_nzcvqg, r0
|
0x80,0xf3,0x00,0x8c = msr apsr_nzcvqg, r0
|
||||||
0x80,0xf3,0x01,0x88 = msr iapsr, r0
|
// 0x80,0xf3,0x01,0x88 = msr iapsr, r0
|
||||||
0x80,0xf3,0x01,0x88 = msr iapsr, r0
|
// 0x80,0xf3,0x01,0x88 = msr iapsr, r0
|
||||||
0x80,0xf3,0x01,0x84 = msr iapsr_g, r0
|
0x80,0xf3,0x01,0x84 = msr iapsr_g, r0
|
||||||
0x80,0xf3,0x01,0x8c = msr iapsr_nzcvqg, r0
|
0x80,0xf3,0x01,0x8c = msr iapsr_nzcvqg, r0
|
||||||
0x80,0xf3,0x02,0x88 = msr eapsr, r0
|
// 0x80,0xf3,0x02,0x88 = msr eapsr, r0
|
||||||
0x80,0xf3,0x02,0x88 = msr eapsr, r0
|
// 0x80,0xf3,0x02,0x88 = msr eapsr, r0
|
||||||
0x80,0xf3,0x02,0x84 = msr eapsr_g, r0
|
0x80,0xf3,0x02,0x84 = msr eapsr_g, r0
|
||||||
0x80,0xf3,0x02,0x8c = msr eapsr_nzcvqg, r0
|
0x80,0xf3,0x02,0x8c = msr eapsr_nzcvqg, r0
|
||||||
0x80,0xf3,0x03,0x88 = msr xpsr, r0
|
// 0x80,0xf3,0x03,0x88 = msr xpsr, r0
|
||||||
0x80,0xf3,0x03,0x88 = msr xpsr, r0
|
// 0x80,0xf3,0x03,0x88 = msr xpsr, r0
|
||||||
0x80,0xf3,0x03,0x84 = msr xpsr_g, r0
|
0x80,0xf3,0x03,0x84 = msr xpsr_g, r0
|
||||||
0x80,0xf3,0x03,0x8c = msr xpsr_nzcvqg, r0
|
0x80,0xf3,0x03,0x8c = msr xpsr_nzcvqg, r0
|
||||||
0x80,0xf3,0x05,0x88 = msr ipsr, r0
|
0x80,0xf3,0x05,0x88 = msr ipsr, r0
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x12,0xea,0x01,0x00 = ands.w r0, r2, r1
|
0x12,0xea,0x01,0x00 = ands.w r0, r2, r1
|
||||||
0x0a,0x40 = ands r2, r1
|
0x0a,0x40 = ands r2, r1
|
||||||
0x0a,0x40 = ands r2, r1
|
0x0a,0x40 = ands r2, r1
|
||||||
0x10,0xea,0x01,0x00 = ands.w r0, r0, r1
|
0x10,0xea,0x01,0x00 = ands.w r0, r0, r1
|
||||||
0x11,0xea,0x03,0x03 = ands.w r3, r1, r3
|
0x11,0xea,0x03,0x03 = ands.w r3, r1, r3
|
||||||
0x01,0xea,0x00,0x00 = and.w r0, r1, r0
|
0x01,0xea,0x00,0x00 = and.w r0, r1, r0
|
||||||
0x0f,0x40 = ands r7, r1
|
// 0x0f,0x40 = ands r7, r1
|
||||||
0x0f,0x40 = ands r7, r1
|
// 0x0f,0x40 = ands r7, r1
|
||||||
0x11,0xea,0x08,0x08 = ands.w r8, r1, r8
|
0x11,0xea,0x08,0x08 = ands.w r8, r1, r8
|
||||||
0x18,0xea,0x01,0x08 = ands.w r8, r8, r1
|
0x18,0xea,0x01,0x08 = ands.w r8, r8, r1
|
||||||
0x18,0xea,0x00,0x00 = ands.w r0, r8, r0
|
0x18,0xea,0x00,0x00 = ands.w r0, r8, r0
|
||||||
@ -14,41 +14,41 @@
|
|||||||
0x12,0xea,0x41,0x02 = ands.w r2, r2, r1, lsl #1
|
0x12,0xea,0x41,0x02 = ands.w r2, r2, r1, lsl #1
|
||||||
0x11,0xea,0x50,0x00 = ands.w r0, r1, r0, lsr #1
|
0x11,0xea,0x50,0x00 = ands.w r0, r1, r0, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x02,0xea,0x01,0x00 = andeq.w r0, r2, r1
|
// 0x02,0xea,0x01,0x00 = andeq.w r0, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0b,0x40 = andeq r3, r1
|
// 0x0b,0x40 = andeq r3, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0b,0x40 = andeq r3, r1
|
// 0x0b,0x40 = andeq r3, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x00,0xea,0x01,0x00 = andeq.w r0, r0, r1
|
// 0x00,0xea,0x01,0x00 = andeq.w r0, r0, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xea,0x02,0x02 = andeq.w r2, r1, r2
|
// 0x01,0xea,0x02,0x02 = andeq.w r2, r1, r2
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x11,0xea,0x00,0x00 = andseq.w r0, r1, r0
|
// 0x11,0xea,0x00,0x00 = andseq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0f,0x40 = andeq r7, r1
|
// 0x0f,0x40 = andeq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0f,0x40 = andeq r7, r1
|
// 0x0f,0x40 = andeq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xea,0x08,0x08 = andeq.w r8, r1, r8
|
// 0x01,0xea,0x08,0x08 = andeq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0xea,0x01,0x08 = andeq.w r8, r8, r1
|
// 0x08,0xea,0x01,0x08 = andeq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0xea,0x04,0x04 = andeq.w r4, r8, r4
|
// 0x08,0xea,0x04,0x04 = andeq.w r4, r8, r4
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x04,0xea,0x08,0x04 = andeq.w r4, r4, r8
|
// 0x04,0xea,0x08,0x04 = andeq.w r4, r4, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x00,0xea,0x41,0x00 = andeq.w r0, r0, r1, lsl #1
|
// 0x00,0xea,0x41,0x00 = andeq.w r0, r0, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xea,0x55,0x05 = andeq.w r5, r1, r5, lsr #1
|
// 0x01,0xea,0x55,0x05 = andeq.w r5, r1, r5, lsr #1
|
||||||
0x92,0xea,0x01,0x00 = eors.w r0, r2, r1
|
0x92,0xea,0x01,0x00 = eors.w r0, r2, r1
|
||||||
0x4d,0x40 = eors r5, r1
|
0x4d,0x40 = eors r5, r1
|
||||||
0x4d,0x40 = eors r5, r1
|
0x4d,0x40 = eors r5, r1
|
||||||
0x90,0xea,0x01,0x00 = eors.w r0, r0, r1
|
0x90,0xea,0x01,0x00 = eors.w r0, r0, r1
|
||||||
0x91,0xea,0x02,0x02 = eors.w r2, r1, r2
|
0x91,0xea,0x02,0x02 = eors.w r2, r1, r2
|
||||||
0x81,0xea,0x01,0x01 = eor.w r1, r1, r1
|
0x81,0xea,0x01,0x01 = eor.w r1, r1, r1
|
||||||
0x4f,0x40 = eors r7, r1
|
// 0x4f,0x40 = eors r7, r1
|
||||||
0x4f,0x40 = eors r7, r1
|
// 0x4f,0x40 = eors r7, r1
|
||||||
0x91,0xea,0x08,0x08 = eors.w r8, r1, r8
|
0x91,0xea,0x08,0x08 = eors.w r8, r1, r8
|
||||||
0x98,0xea,0x01,0x08 = eors.w r8, r8, r1
|
0x98,0xea,0x01,0x08 = eors.w r8, r8, r1
|
||||||
0x98,0xea,0x06,0x06 = eors.w r6, r8, r6
|
0x98,0xea,0x06,0x06 = eors.w r6, r8, r6
|
||||||
@ -56,140 +56,140 @@
|
|||||||
0x92,0xea,0x41,0x02 = eors.w r2, r2, r1, lsl #1
|
0x92,0xea,0x41,0x02 = eors.w r2, r2, r1, lsl #1
|
||||||
0x91,0xea,0x50,0x00 = eors.w r0, r1, r0, lsr #1
|
0x91,0xea,0x50,0x00 = eors.w r0, r1, r0, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x82,0xea,0x01,0x03 = eoreq.w r3, r2, r1
|
// 0x82,0xea,0x01,0x03 = eoreq.w r3, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0x40 = eoreq r0, r1
|
// 0x48,0x40 = eoreq r0, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4a,0x40 = eoreq r2, r1
|
// 0x4a,0x40 = eoreq r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x83,0xea,0x01,0x03 = eoreq.w r3, r3, r1
|
// 0x83,0xea,0x01,0x03 = eoreq.w r3, r3, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x81,0xea,0x00,0x00 = eoreq.w r0, r1, r0
|
// 0x81,0xea,0x00,0x00 = eoreq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x91,0xea,0x01,0x01 = eorseq.w r1, r1, r1
|
// 0x91,0xea,0x01,0x01 = eorseq.w r1, r1, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4f,0x40 = eoreq r7, r1
|
// 0x4f,0x40 = eoreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4f,0x40 = eoreq r7, r1
|
// 0x4f,0x40 = eoreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x81,0xea,0x08,0x08 = eoreq.w r8, r1, r8
|
// 0x81,0xea,0x08,0x08 = eoreq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x88,0xea,0x01,0x08 = eoreq.w r8, r8, r1
|
// 0x88,0xea,0x01,0x08 = eoreq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x88,0xea,0x00,0x00 = eoreq.w r0, r8, r0
|
// 0x88,0xea,0x00,0x00 = eoreq.w r0, r8, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x83,0xea,0x08,0x03 = eoreq.w r3, r3, r8
|
// 0x83,0xea,0x08,0x03 = eoreq.w r3, r3, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x84,0xea,0x41,0x04 = eoreq.w r4, r4, r1, lsl #1
|
// 0x84,0xea,0x41,0x04 = eoreq.w r4, r4, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x81,0xea,0x50,0x00 = eoreq.w r0, r1, r0, lsr #1
|
// 0x81,0xea,0x50,0x00 = eoreq.w r0, r1, r0, lsr #1
|
||||||
0x12,0xfa,0x01,0xf0 = lsls.w r0, r2, r1
|
0x12,0xfa,0x01,0xf0 = lsls.w r0, r2, r1
|
||||||
0x8a,0x40 = lsls r2, r1
|
// 0x8a,0x40 = lsls r2, r1
|
||||||
0x11,0xfa,0x02,0xf2 = lsls.w r2, r1, r2
|
0x11,0xfa,0x02,0xf2 = lsls.w r2, r1, r2
|
||||||
0x10,0xfa,0x01,0xf0 = lsls.w r0, r0, r1
|
0x10,0xfa,0x01,0xf0 = lsls.w r0, r0, r1
|
||||||
0x11,0xfa,0x04,0xf4 = lsls.w r4, r1, r4
|
// 0x11,0xfa,0x04,0xf4 = lsls.w r4, r1, r4
|
||||||
0x01,0xfa,0x04,0xf4 = lsl.w r4, r1, r4
|
0x01,0xfa,0x04,0xf4 = lsl.w r4, r1, r4
|
||||||
0x8f,0x40 = lsls r7, r1
|
// 0x8f,0x40 = lsls r7, r1
|
||||||
0x11,0xfa,0x08,0xf8 = lsls.w r8, r1, r8
|
0x11,0xfa,0x08,0xf8 = lsls.w r8, r1, r8
|
||||||
0x18,0xfa,0x01,0xf8 = lsls.w r8, r8, r1
|
0x18,0xfa,0x01,0xf8 = lsls.w r8, r8, r1
|
||||||
0x18,0xfa,0x03,0xf3 = lsls.w r3, r8, r3
|
0x18,0xfa,0x03,0xf3 = lsls.w r3, r8, r3
|
||||||
0x15,0xfa,0x08,0xf5 = lsls.w r5, r5, r8
|
0x15,0xfa,0x08,0xf5 = lsls.w r5, r5, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x02,0xfa,0x01,0xf0 = lsleq.w r0, r2, r1
|
// 0x02,0xfa,0x01,0xf0 = lsleq.w r0, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8a,0x40 = lsleq r2, r1
|
// 0x8a,0x40 = lsleq r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xfa,0x02,0xf2 = lsleq.w r2, r1, r2
|
// 0x01,0xfa,0x02,0xf2 = lsleq.w r2, r1, r2
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x00,0xfa,0x01,0xf0 = lsleq.w r0, r0, r1
|
// 0x00,0xfa,0x01,0xf0 = lsleq.w r0, r0, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xfa,0x03,0xf3 = lsleq.w r3, r1, r3
|
// 0x01,0xfa,0x03,0xf3 = lsleq.w r3, r1, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x11,0xfa,0x04,0xf4 = lslseq.w r4, r1, r4
|
// 0x11,0xfa,0x04,0xf4 = lslseq.w r4, r1, r4
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8f,0x40 = lsleq r7, r1
|
// 0x8f,0x40 = lsleq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x01,0xfa,0x08,0xf8 = lsleq.w r8, r1, r8
|
// 0x01,0xfa,0x08,0xf8 = lsleq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0xfa,0x01,0xf8 = lsleq.w r8, r8, r1
|
// 0x08,0xfa,0x01,0xf8 = lsleq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x08,0xfa,0x00,0xf0 = lsleq.w r0, r8, r0
|
// 0x08,0xfa,0x00,0xf0 = lsleq.w r0, r8, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x03,0xfa,0x08,0xf3 = lsleq.w r3, r3, r8
|
// 0x03,0xfa,0x08,0xf3 = lsleq.w r3, r3, r8
|
||||||
0x32,0xfa,0x01,0xf6 = lsrs.w r6, r2, r1
|
0x32,0xfa,0x01,0xf6 = lsrs.w r6, r2, r1
|
||||||
0xca,0x40 = lsrs r2, r1
|
0xca,0x40 = lsrs r2, r1
|
||||||
0x31,0xfa,0x02,0xf2 = lsrs.w r2, r1, r2
|
0x31,0xfa,0x02,0xf2 = lsrs.w r2, r1, r2
|
||||||
0x32,0xfa,0x01,0xf2 = lsrs.w r2, r2, r1
|
0x32,0xfa,0x01,0xf2 = lsrs.w r2, r2, r1
|
||||||
0x31,0xfa,0x03,0xf3 = lsrs.w r3, r1, r3
|
0x31,0xfa,0x03,0xf3 = lsrs.w r3, r1, r3
|
||||||
0x21,0xfa,0x04,0xf4 = lsr.w r4, r1, r4
|
0x21,0xfa,0x04,0xf4 = lsr.w r4, r1, r4
|
||||||
0xcf,0x40 = lsrs r7, r1
|
// 0xcf,0x40 = lsrs r7, r1
|
||||||
0x31,0xfa,0x08,0xf8 = lsrs.w r8, r1, r8
|
0x31,0xfa,0x08,0xf8 = lsrs.w r8, r1, r8
|
||||||
0x38,0xfa,0x01,0xf8 = lsrs.w r8, r8, r1
|
0x38,0xfa,0x01,0xf8 = lsrs.w r8, r8, r1
|
||||||
0x38,0xfa,0x02,0xf2 = lsrs.w r2, r8, r2
|
0x38,0xfa,0x02,0xf2 = lsrs.w r2, r8, r2
|
||||||
0x35,0xfa,0x08,0xf5 = lsrs.w r5, r5, r8
|
0x35,0xfa,0x08,0xf5 = lsrs.w r5, r5, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x22,0xfa,0x01,0xf6 = lsreq.w r6, r2, r1
|
// 0x22,0xfa,0x01,0xf6 = lsreq.w r6, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0xcf,0x40 = lsreq r7, r1
|
// 0xcf,0x40 = lsreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xfa,0x07,0xf7 = lsreq.w r7, r1, r7
|
// 0x21,0xfa,0x07,0xf7 = lsreq.w r7, r1, r7
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x27,0xfa,0x01,0xf7 = lsreq.w r7, r7, r1
|
// 0x27,0xfa,0x01,0xf7 = lsreq.w r7, r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xfa,0x02,0xf2 = lsreq.w r2, r1, r2
|
// 0x21,0xfa,0x02,0xf2 = lsreq.w r2, r1, r2
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x31,0xfa,0x00,0xf0 = lsrseq.w r0, r1, r0
|
// 0x31,0xfa,0x00,0xf0 = lsrseq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0xcf,0x40 = lsreq r7, r1
|
// 0xcf,0x40 = lsreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xfa,0x08,0xf8 = lsreq.w r8, r1, r8
|
// 0x21,0xfa,0x08,0xf8 = lsreq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x28,0xfa,0x01,0xf8 = lsreq.w r8, r8, r1
|
// 0x28,0xfa,0x01,0xf8 = lsreq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x28,0xfa,0x01,0xf1 = lsreq.w r1, r8, r1
|
// 0x28,0xfa,0x01,0xf1 = lsreq.w r1, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x24,0xfa,0x08,0xf4 = lsreq.w r4, r4, r8
|
// 0x24,0xfa,0x08,0xf4 = lsreq.w r4, r4, r8
|
||||||
0x56,0xfa,0x05,0xf7 = asrs.w r7, r6, r5
|
0x56,0xfa,0x05,0xf7 = asrs.w r7, r6, r5
|
||||||
0x08,0x41 = asrs r0, r1
|
0x08,0x41 = asrs r0, r1
|
||||||
0x51,0xfa,0x00,0xf0 = asrs.w r0, r1, r0
|
0x51,0xfa,0x00,0xf0 = asrs.w r0, r1, r0
|
||||||
0x53,0xfa,0x01,0xf3 = asrs.w r3, r3, r1
|
0x53,0xfa,0x01,0xf3 = asrs.w r3, r3, r1
|
||||||
0x51,0xfa,0x01,0xf1 = asrs.w r1, r1, r1
|
0x51,0xfa,0x01,0xf1 = asrs.w r1, r1, r1
|
||||||
0x41,0xfa,0x00,0xf0 = asr.w r0, r1, r0
|
0x41,0xfa,0x00,0xf0 = asr.w r0, r1, r0
|
||||||
0x0f,0x41 = asrs r7, r1
|
// 0x0f,0x41 = asrs r7, r1
|
||||||
0x51,0xfa,0x08,0xf8 = asrs.w r8, r1, r8
|
0x51,0xfa,0x08,0xf8 = asrs.w r8, r1, r8
|
||||||
0x58,0xfa,0x01,0xf8 = asrs.w r8, r8, r1
|
0x58,0xfa,0x01,0xf8 = asrs.w r8, r8, r1
|
||||||
0x58,0xfa,0x05,0xf5 = asrs.w r5, r8, r5
|
0x58,0xfa,0x05,0xf5 = asrs.w r5, r8, r5
|
||||||
0x55,0xfa,0x08,0xf5 = asrs.w r5, r5, r8
|
0x55,0xfa,0x08,0xf5 = asrs.w r5, r5, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xfa,0x01,0xf0 = asreq.w r0, r2, r1
|
// 0x42,0xfa,0x01,0xf0 = asreq.w r0, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0a,0x41 = asreq r2, r1
|
// 0x0a,0x41 = asreq r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xfa,0x01,0xf1 = asreq.w r1, r2, r1
|
// 0x42,0xfa,0x01,0xf1 = asreq.w r1, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x44,0xfa,0x01,0xf4 = asreq.w r4, r4, r1
|
// 0x44,0xfa,0x01,0xf4 = asreq.w r4, r4, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xfa,0x06,0xf6 = asreq.w r6, r1, r6
|
// 0x41,0xfa,0x06,0xf6 = asreq.w r6, r1, r6
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x51,0xfa,0x03,0xf3 = asrseq.w r3, r1, r3
|
// 0x51,0xfa,0x03,0xf3 = asrseq.w r3, r1, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0f,0x41 = asreq r7, r1
|
// 0x0f,0x41 = asreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xfa,0x08,0xf8 = asreq.w r8, r1, r8
|
// 0x41,0xfa,0x08,0xf8 = asreq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xfa,0x01,0xf8 = asreq.w r8, r8, r1
|
// 0x48,0xfa,0x01,0xf8 = asreq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xfa,0x01,0xf1 = asreq.w r1, r8, r1
|
// 0x48,0xfa,0x01,0xf1 = asreq.w r1, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x43,0xfa,0x08,0xf3 = asreq.w r3, r3, r8
|
// 0x43,0xfa,0x08,0xf3 = asreq.w r3, r3, r8
|
||||||
0x52,0xeb,0x01,0x05 = adcs.w r5, r2, r1
|
0x52,0xeb,0x01,0x05 = adcs.w r5, r2, r1
|
||||||
0x4d,0x41 = adcs r5, r1
|
0x4d,0x41 = adcs r5, r1
|
||||||
0x4b,0x41 = adcs r3, r1
|
// 0x4b,0x41 = adcs r3, r1
|
||||||
0x52,0xeb,0x01,0x02 = adcs.w r2, r2, r1
|
0x52,0xeb,0x01,0x02 = adcs.w r2, r2, r1
|
||||||
0x51,0xeb,0x03,0x03 = adcs.w r3, r1, r3
|
// 0x51,0xeb,0x03,0x03 = adcs.w r3, r1, r3
|
||||||
0x41,0xeb,0x00,0x00 = adc.w r0, r1, r0
|
// 0x41,0xeb,0x00,0x00 = adc.w r0, r1, r0
|
||||||
0x4f,0x41 = adcs r7, r1
|
// 0x4f,0x41 = adcs r7, r1
|
||||||
0x4f,0x41 = adcs r7, r1
|
// 0x4f,0x41 = adcs r7, r1
|
||||||
0x51,0xeb,0x08,0x08 = adcs.w r8, r1, r8
|
0x51,0xeb,0x08,0x08 = adcs.w r8, r1, r8
|
||||||
0x58,0xeb,0x01,0x08 = adcs.w r8, r8, r1
|
0x58,0xeb,0x01,0x08 = adcs.w r8, r8, r1
|
||||||
0x58,0xeb,0x05,0x05 = adcs.w r5, r8, r5
|
0x58,0xeb,0x05,0x05 = adcs.w r5, r8, r5
|
||||||
@ -197,40 +197,40 @@
|
|||||||
0x53,0xeb,0x41,0x03 = adcs.w r3, r3, r1, lsl #1
|
0x53,0xeb,0x41,0x03 = adcs.w r3, r3, r1, lsl #1
|
||||||
0x51,0xeb,0x54,0x04 = adcs.w r4, r1, r4, lsr #1
|
0x51,0xeb,0x54,0x04 = adcs.w r4, r1, r4, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xeb,0x03,0x01 = adceq.w r1, r2, r3
|
// 0x42,0xeb,0x03,0x01 = adceq.w r1, r2, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x49,0x41 = adceq r1, r1
|
// 0x49,0x41 = adceq r1, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4b,0x41 = adceq r3, r1
|
// 0x4b,0x41 = adceq r3, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x43,0xeb,0x01,0x03 = adceq.w r3, r3, r1
|
// 0x43,0xeb,0x01,0x03 = adceq.w r3, r3, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xeb,0x00,0x00 = adceq.w r0, r1, r0
|
// 0x41,0xeb,0x00,0x00 = adceq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x51,0xeb,0x03,0x03 = adcseq.w r3, r1, r3
|
// 0x51,0xeb,0x03,0x03 = adcseq.w r3, r1, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4f,0x41 = adceq r7, r1
|
// 0x4f,0x41 = adceq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x4f,0x41 = adceq r7, r1
|
// 0x4f,0x41 = adceq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xeb,0x08,0x08 = adceq.w r8, r1, r8
|
// 0x41,0xeb,0x08,0x08 = adceq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xeb,0x01,0x08 = adceq.w r8, r8, r1
|
// 0x48,0xeb,0x01,0x08 = adceq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xeb,0x03,0x03 = adceq.w r3, r8, r3
|
// 0x48,0xeb,0x03,0x03 = adceq.w r3, r8, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xeb,0x08,0x01 = adceq.w r1, r1, r8
|
// 0x41,0xeb,0x08,0x01 = adceq.w r1, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xeb,0x41,0x02 = adceq.w r2, r2, r1, lsl #1
|
// 0x42,0xeb,0x41,0x02 = adceq.w r2, r2, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xeb,0x51,0x01 = adceq.w r1, r1, r1, lsr #1
|
// 0x41,0xeb,0x51,0x01 = adceq.w r1, r1, r1, lsr #1
|
||||||
0x72,0xeb,0x01,0x03 = sbcs.w r3, r2, r1
|
0x72,0xeb,0x01,0x03 = sbcs.w r3, r2, r1
|
||||||
0x8c,0x41 = sbcs r4, r1
|
0x8c,0x41 = sbcs r4, r1
|
||||||
0x74,0xeb,0x01,0x01 = sbcs.w r1, r4, r1
|
0x74,0xeb,0x01,0x01 = sbcs.w r1, r4, r1
|
||||||
0x74,0xeb,0x01,0x04 = sbcs.w r4, r4, r1
|
0x74,0xeb,0x01,0x04 = sbcs.w r4, r4, r1
|
||||||
0x71,0xeb,0x02,0x02 = sbcs.w r2, r1, r2
|
// 0x71,0xeb,0x02,0x02 = sbcs.w r2, r1, r2
|
||||||
0x61,0xeb,0x00,0x00 = sbc.w r0, r1, r0
|
// 0x61,0xeb,0x00,0x00 = sbc.w r0, r1, r0
|
||||||
0x8f,0x41 = sbcs r7, r1
|
// 0x8f,0x41 = sbcs r7, r1
|
||||||
0x71,0xeb,0x08,0x08 = sbcs.w r8, r1, r8
|
0x71,0xeb,0x08,0x08 = sbcs.w r8, r1, r8
|
||||||
0x78,0xeb,0x01,0x08 = sbcs.w r8, r8, r1
|
0x78,0xeb,0x01,0x08 = sbcs.w r8, r8, r1
|
||||||
0x78,0xeb,0x04,0x04 = sbcs.w r4, r8, r4
|
0x78,0xeb,0x04,0x04 = sbcs.w r4, r8, r4
|
||||||
@ -238,72 +238,72 @@
|
|||||||
0x72,0xeb,0x41,0x02 = sbcs.w r2, r2, r1, lsl #1
|
0x72,0xeb,0x41,0x02 = sbcs.w r2, r2, r1, lsl #1
|
||||||
0x71,0xeb,0x55,0x05 = sbcs.w r5, r1, r5, lsr #1
|
0x71,0xeb,0x55,0x05 = sbcs.w r5, r1, r5, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x62,0xeb,0x01,0x05 = sbceq.w r5, r2, r1
|
// 0x62,0xeb,0x01,0x05 = sbceq.w r5, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8d,0x41 = sbceq r5, r1
|
// 0x8d,0x41 = sbceq r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x65,0xeb,0x01,0x01 = sbceq.w r1, r5, r1
|
// 0x65,0xeb,0x01,0x01 = sbceq.w r1, r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x65,0xeb,0x01,0x05 = sbceq.w r5, r5, r1
|
// 0x65,0xeb,0x01,0x05 = sbceq.w r5, r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xeb,0x00,0x00 = sbceq.w r0, r1, r0
|
// 0x61,0xeb,0x00,0x00 = sbceq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x71,0xeb,0x02,0x02 = sbcseq.w r2, r1, r2
|
// 0x71,0xeb,0x02,0x02 = sbcseq.w r2, r1, r2
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8f,0x41 = sbceq r7, r1
|
// 0x8f,0x41 = sbceq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xeb,0x08,0x08 = sbceq.w r8, r1, r8
|
// 0x61,0xeb,0x08,0x08 = sbceq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x68,0xeb,0x01,0x08 = sbceq.w r8, r8, r1
|
// 0x68,0xeb,0x01,0x08 = sbceq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x68,0xeb,0x07,0x07 = sbceq.w r7, r8, r7
|
// 0x68,0xeb,0x07,0x07 = sbceq.w r7, r8, r7
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x67,0xeb,0x08,0x07 = sbceq.w r7, r7, r8
|
// 0x67,0xeb,0x08,0x07 = sbceq.w r7, r7, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x62,0xeb,0x41,0x02 = sbceq.w r2, r2, r1, lsl #1
|
// 0x62,0xeb,0x41,0x02 = sbceq.w r2, r2, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xeb,0x55,0x05 = sbceq.w r5, r1, r5, lsr #1
|
// 0x61,0xeb,0x55,0x05 = sbceq.w r5, r1, r5, lsr #1
|
||||||
0x72,0xfa,0x01,0xf3 = rors.w r3, r2, r1
|
0x72,0xfa,0x01,0xf3 = rors.w r3, r2, r1
|
||||||
0xc8,0x41 = rors r0, r1
|
0xc8,0x41 = rors r0, r1
|
||||||
0x70,0xfa,0x01,0xf1 = rors.w r1, r0, r1
|
0x70,0xfa,0x01,0xf1 = rors.w r1, r0, r1
|
||||||
0x72,0xfa,0x01,0xf2 = rors.w r2, r2, r1
|
0x72,0xfa,0x01,0xf2 = rors.w r2, r2, r1
|
||||||
0x71,0xfa,0x02,0xf2 = rors.w r2, r1, r2
|
0x71,0xfa,0x02,0xf2 = rors.w r2, r1, r2
|
||||||
0x61,0xfa,0x05,0xf5 = ror.w r5, r1, r5
|
0x61,0xfa,0x05,0xf5 = ror.w r5, r1, r5
|
||||||
0xcf,0x41 = rors r7, r1
|
// 0xcf,0x41 = rors r7, r1
|
||||||
0x71,0xfa,0x08,0xf8 = rors.w r8, r1, r8
|
0x71,0xfa,0x08,0xf8 = rors.w r8, r1, r8
|
||||||
0x78,0xfa,0x01,0xf8 = rors.w r8, r8, r1
|
0x78,0xfa,0x01,0xf8 = rors.w r8, r8, r1
|
||||||
0x78,0xfa,0x06,0xf6 = rors.w r6, r8, r6
|
0x78,0xfa,0x06,0xf6 = rors.w r6, r8, r6
|
||||||
0x76,0xfa,0x08,0xf6 = rors.w r6, r6, r8
|
0x76,0xfa,0x08,0xf6 = rors.w r6, r6, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x62,0xfa,0x01,0xf4 = roreq.w r4, r2, r1
|
// 0x62,0xfa,0x01,0xf4 = roreq.w r4, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0xcc,0x41 = roreq r4, r1
|
// 0xcc,0x41 = roreq r4, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x64,0xfa,0x01,0xf1 = roreq.w r1, r4, r1
|
// 0x64,0xfa,0x01,0xf1 = roreq.w r1, r4, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x64,0xfa,0x01,0xf4 = roreq.w r4, r4, r1
|
// 0x64,0xfa,0x01,0xf4 = roreq.w r4, r4, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xfa,0x00,0xf0 = roreq.w r0, r1, r0
|
// 0x61,0xfa,0x00,0xf0 = roreq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x71,0xfa,0x00,0xf0 = rorseq.w r0, r1, r0
|
// 0x71,0xfa,0x00,0xf0 = rorseq.w r0, r1, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0xcf,0x41 = roreq r7, r1
|
// 0xcf,0x41 = roreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xfa,0x08,0xf8 = roreq.w r8, r1, r8
|
// 0x61,0xfa,0x08,0xf8 = roreq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x68,0xfa,0x01,0xf8 = roreq.w r8, r8, r1
|
// 0x68,0xfa,0x01,0xf8 = roreq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x68,0xfa,0x03,0xf3 = roreq.w r3, r8, r3
|
// 0x68,0xfa,0x03,0xf3 = roreq.w r3, r8, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x61,0xfa,0x08,0xf1 = roreq.w r1, r1, r8
|
// 0x61,0xfa,0x08,0xf1 = roreq.w r1, r1, r8
|
||||||
0x52,0xea,0x01,0x07 = orrs.w r7, r2, r1
|
0x52,0xea,0x01,0x07 = orrs.w r7, r2, r1
|
||||||
0x0a,0x43 = orrs r2, r1
|
0x0a,0x43 = orrs r2, r1
|
||||||
0x0b,0x43 = orrs r3, r1
|
0x0b,0x43 = orrs r3, r1
|
||||||
0x54,0xea,0x01,0x04 = orrs.w r4, r4, r1
|
0x54,0xea,0x01,0x04 = orrs.w r4, r4, r1
|
||||||
0x51,0xea,0x05,0x05 = orrs.w r5, r1, r5
|
0x51,0xea,0x05,0x05 = orrs.w r5, r1, r5
|
||||||
0x41,0xea,0x02,0x02 = orr.w r2, r1, r2
|
0x41,0xea,0x02,0x02 = orr.w r2, r1, r2
|
||||||
0x0f,0x43 = orrs r7, r1
|
// 0x0f,0x43 = orrs r7, r1
|
||||||
0x0f,0x43 = orrs r7, r1
|
// 0x0f,0x43 = orrs r7, r1
|
||||||
0x51,0xea,0x08,0x08 = orrs.w r8, r1, r8
|
0x51,0xea,0x08,0x08 = orrs.w r8, r1, r8
|
||||||
0x58,0xea,0x01,0x08 = orrs.w r8, r8, r1
|
0x58,0xea,0x01,0x08 = orrs.w r8, r8, r1
|
||||||
0x58,0xea,0x01,0x01 = orrs.w r1, r8, r1
|
0x58,0xea,0x01,0x01 = orrs.w r1, r8, r1
|
||||||
@ -311,40 +311,40 @@
|
|||||||
0x51,0xea,0x41,0x01 = orrs.w r1, r1, r1, lsl #1
|
0x51,0xea,0x41,0x01 = orrs.w r1, r1, r1, lsl #1
|
||||||
0x51,0xea,0x50,0x00 = orrs.w r0, r1, r0, lsr #1
|
0x51,0xea,0x50,0x00 = orrs.w r0, r1, r0, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xea,0x01,0x00 = orreq.w r0, r2, r1
|
// 0x42,0xea,0x01,0x00 = orreq.w r0, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0d,0x43 = orreq r5, r1
|
// 0x0d,0x43 = orreq r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0d,0x43 = orreq r5, r1
|
// 0x0d,0x43 = orreq r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xea,0x01,0x02 = orreq.w r2, r2, r1
|
// 0x42,0xea,0x01,0x02 = orreq.w r2, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xea,0x03,0x03 = orreq.w r3, r1, r3
|
// 0x41,0xea,0x03,0x03 = orreq.w r3, r1, r3
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x51,0xea,0x04,0x04 = orrseq.w r4, r1, r4
|
// 0x51,0xea,0x04,0x04 = orrseq.w r4, r1, r4
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0f,0x43 = orreq r7, r1
|
// 0x0f,0x43 = orreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x0f,0x43 = orreq r7, r1
|
// 0x0f,0x43 = orreq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xea,0x08,0x08 = orreq.w r8, r1, r8
|
// 0x41,0xea,0x08,0x08 = orreq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xea,0x01,0x08 = orreq.w r8, r8, r1
|
// 0x48,0xea,0x01,0x08 = orreq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x48,0xea,0x00,0x00 = orreq.w r0, r8, r0
|
// 0x48,0xea,0x00,0x00 = orreq.w r0, r8, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x40,0xea,0x08,0x00 = orreq.w r0, r0, r8
|
// 0x40,0xea,0x08,0x00 = orreq.w r0, r0, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x42,0xea,0x41,0x02 = orreq.w r2, r2, r1, lsl #1
|
// 0x42,0xea,0x41,0x02 = orreq.w r2, r2, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x41,0xea,0x52,0x02 = orreq.w r2, r1, r2, lsr #1
|
// 0x41,0xea,0x52,0x02 = orreq.w r2, r1, r2, lsr #1
|
||||||
0x32,0xea,0x01,0x03 = bics.w r3, r2, r1
|
0x32,0xea,0x01,0x03 = bics.w r3, r2, r1
|
||||||
0x8a,0x43 = bics r2, r1
|
0x8a,0x43 = bics r2, r1
|
||||||
0x32,0xea,0x01,0x01 = bics.w r1, r2, r1
|
0x32,0xea,0x01,0x01 = bics.w r1, r2, r1
|
||||||
0x32,0xea,0x01,0x02 = bics.w r2, r2, r1
|
0x32,0xea,0x01,0x02 = bics.w r2, r2, r1
|
||||||
0x31,0xea,0x00,0x00 = bics.w r0, r1, r0
|
0x31,0xea,0x00,0x00 = bics.w r0, r1, r0
|
||||||
0x21,0xea,0x00,0x00 = bic.w r0, r1, r0
|
0x21,0xea,0x00,0x00 = bic.w r0, r1, r0
|
||||||
0x8f,0x43 = bics r7, r1
|
// 0x8f,0x43 = bics r7, r1
|
||||||
0x31,0xea,0x08,0x08 = bics.w r8, r1, r8
|
0x31,0xea,0x08,0x08 = bics.w r8, r1, r8
|
||||||
0x38,0xea,0x01,0x08 = bics.w r8, r8, r1
|
0x38,0xea,0x01,0x08 = bics.w r8, r8, r1
|
||||||
0x38,0xea,0x07,0x07 = bics.w r7, r8, r7
|
0x38,0xea,0x07,0x07 = bics.w r7, r8, r7
|
||||||
@ -352,28 +352,28 @@
|
|||||||
0x33,0xea,0x41,0x03 = bics.w r3, r3, r1, lsl #1
|
0x33,0xea,0x41,0x03 = bics.w r3, r3, r1, lsl #1
|
||||||
0x31,0xea,0x54,0x04 = bics.w r4, r1, r4, lsr #1
|
0x31,0xea,0x54,0x04 = bics.w r4, r1, r4, lsr #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x22,0xea,0x01,0x00 = biceq.w r0, r2, r1
|
// 0x22,0xea,0x01,0x00 = biceq.w r0, r2, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8d,0x43 = biceq r5, r1
|
// 0x8d,0x43 = biceq r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x25,0xea,0x01,0x01 = biceq.w r1, r5, r1
|
// 0x25,0xea,0x01,0x01 = biceq.w r1, r5, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x24,0xea,0x01,0x04 = biceq.w r4, r4, r1
|
// 0x24,0xea,0x01,0x04 = biceq.w r4, r4, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xea,0x02,0x02 = biceq.w r2, r1, r2
|
// 0x21,0xea,0x02,0x02 = biceq.w r2, r1, r2
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x31,0xea,0x05,0x05 = bicseq.w r5, r1, r5
|
// 0x31,0xea,0x05,0x05 = bicseq.w r5, r1, r5
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x8f,0x43 = biceq r7, r1
|
// 0x8f,0x43 = biceq r7, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xea,0x08,0x08 = biceq.w r8, r1, r8
|
// 0x21,0xea,0x08,0x08 = biceq.w r8, r1, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x28,0xea,0x01,0x08 = biceq.w r8, r8, r1
|
// 0x28,0xea,0x01,0x08 = biceq.w r8, r8, r1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x28,0xea,0x00,0x00 = biceq.w r0, r8, r0
|
// 0x28,0xea,0x00,0x00 = biceq.w r0, r8, r0
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x22,0xea,0x08,0x02 = biceq.w r2, r2, r8
|
// 0x22,0xea,0x08,0x02 = biceq.w r2, r2, r8
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x24,0xea,0x41,0x04 = biceq.w r4, r4, r1, lsl #1
|
// 0x24,0xea,0x41,0x04 = biceq.w r4, r4, r1, lsl #1
|
||||||
0x08,0xbf = it eq
|
0x08,0xbf = it eq
|
||||||
0x21,0xea,0x55,0x05 = biceq.w r5, r1, r5, lsr #1
|
// 0x21,0xea,0x55,0x05 = biceq.w r5, r1, r5, lsr #1
|
||||||
|
@ -1,2 +1,2 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0xb0,0xf8,0x01,0xf1 = pldw [r0, #257]
|
0xb0,0xf8,0x01,0xf1 = pldw [r0, #257]
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0xe2,0xee,0xa1,0x0b = vfma.f64 d16, d18, d17
|
0xe2,0xee,0xa1,0x0b = vfma.f64 d16, d18, d17
|
||||||
0xa2,0xee,0x00,0x1a = vfma.f32 s2, s4, s0
|
0xa2,0xee,0x00,0x1a = vfma.f32 s2, s4, s0
|
||||||
0x42,0xef,0xb1,0x0c = vfma.f32 d16, d18, d17
|
0x42,0xef,0xb1,0x0c = vfma.f32 d16, d18, d17
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_ARM, CS_MODE_THUMB,
|
# CS_ARCH_ARM, CS_MODE_THUMB, None
|
||||||
0x2d,0xed,0x0a,0x8b = vpush {d8, d9, d10, d11, d12}
|
0x2d,0xed,0x0a,0x8b = vpush {d8, d9, d10, d11, d12}
|
||||||
0x2d,0xed,0x05,0x4a = vpush {s8, s9, s10, s11, s12}
|
0x2d,0xed,0x05,0x4a = vpush {s8, s9, s10, s11, s12}
|
||||||
0xbd,0xec,0x0a,0x8b = vpop {d8, d9, d10, d11, d12}
|
0xbd,0xec,0x0a,0x8b = vpop {d8, d9, d10, d11, d12}
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x3c,0x04,0xde,0xae = lui $4, %hi(addr)
|
// 0x3c,0x04,0xde,0xae = lui $a0, %hi(addr)
|
||||||
0x03,0xe0,0x00,0x08 = jr $31
|
0x03,0xe0,0x00,0x08 = jr $ra
|
||||||
0x80,0x82,0xbe,0xef = lb $2, %lo(addr)($4)
|
// 0x80,0x82,0xbe,0xef = lb $v0, %lo(addr)($a0)
|
||||||
|
@ -1,33 +1,33 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN+CS_MODE_MICRO, None
|
||||||
0x00,0xe6,0x49,0x10 = add $9, $6, $7
|
0x00,0xe6,0x49,0x10 = add $t1, $a2, $a3
|
||||||
0x11,0x26,0x45,0x67 = addi $9, $6, 17767
|
0x11,0x26,0x45,0x67 = addi $t1, $a2, 17767
|
||||||
0x31,0x26,0xc5,0x67 = addiu $9, $6, -15001
|
0x31,0x26,0xc5,0x67 = addiu $t1, $a2, -15001
|
||||||
0x11,0x26,0x45,0x67 = addi $9, $6, 17767
|
0x11,0x26,0x45,0x67 = addi $t1, $a2, 17767
|
||||||
0x31,0x26,0xc5,0x67 = addiu $9, $6, -15001
|
0x31,0x26,0xc5,0x67 = addiu $t1, $a2, -15001
|
||||||
0x00,0xe6,0x49,0x50 = addu $9, $6, $7
|
0x00,0xe6,0x49,0x50 = addu $t1, $a2, $a3
|
||||||
0x00,0xe6,0x49,0x90 = sub $9, $6, $7
|
0x00,0xe6,0x49,0x90 = sub $t1, $a2, $a3
|
||||||
0x00,0xa3,0x21,0xd0 = subu $4, $3, $5
|
0x00,0xa3,0x21,0xd0 = subu $a0, $v1, $a1
|
||||||
0x00,0xe0,0x31,0x90 = neg $6, $7
|
0x00,0xe0,0x31,0x90 = sub $a2, $zero, $a3
|
||||||
0x00,0xe0,0x31,0xd0 = negu $6, $7
|
0x00,0xe0,0x31,0xd0 = subu $a2, $zero, $a3
|
||||||
0x00,0x08,0x39,0x50 = move $7, $8
|
0x00,0x08,0x39,0x50 = addu $a3, $t0, $zero
|
||||||
0x00,0xa3,0x1b,0x50 = slt $3, $3, $5
|
0x00,0xa3,0x1b,0x50 = slt $v1, $v1, $a1
|
||||||
0x90,0x63,0x00,0x67 = slti $3, $3, 103
|
0x90,0x63,0x00,0x67 = slti $v1, $v1, 103
|
||||||
0x90,0x63,0x00,0x67 = slti $3, $3, 103
|
0x90,0x63,0x00,0x67 = slti $v1, $v1, 103
|
||||||
0xb0,0x63,0x00,0x67 = sltiu $3, $3, 103
|
0xb0,0x63,0x00,0x67 = sltiu $v1, $v1, 103
|
||||||
0x00,0xa3,0x1b,0x90 = sltu $3, $3, $5
|
0x00,0xa3,0x1b,0x90 = sltu $v1, $v1, $a1
|
||||||
0x41,0xa9,0x45,0x67 = lui $9, 17767
|
0x41,0xa9,0x45,0x67 = lui $t1, 17767
|
||||||
0x00,0xe6,0x4a,0x50 = and $9, $6, $7
|
0x00,0xe6,0x4a,0x50 = and $t1, $a2, $a3
|
||||||
0xd1,0x26,0x45,0x67 = andi $9, $6, 17767
|
0xd1,0x26,0x45,0x67 = andi $t1, $a2, 17767
|
||||||
0xd1,0x26,0x45,0x67 = andi $9, $6, 17767
|
0xd1,0x26,0x45,0x67 = andi $t1, $a2, 17767
|
||||||
0x00,0xa4,0x1a,0x90 = or $3, $4, $5
|
0x00,0xa4,0x1a,0x90 = or $v1, $a0, $a1
|
||||||
0x51,0x26,0x45,0x67 = ori $9, $6, 17767
|
0x51,0x26,0x45,0x67 = ori $t1, $a2, 17767
|
||||||
0x00,0xa3,0x1b,0x10 = xor $3, $3, $5
|
0x00,0xa3,0x1b,0x10 = xor $v1, $v1, $a1
|
||||||
0x71,0x26,0x45,0x67 = xori $9, $6, 17767
|
0x71,0x26,0x45,0x67 = xori $t1, $a2, 17767
|
||||||
0x71,0x26,0x45,0x67 = xori $9, $6, 17767
|
0x71,0x26,0x45,0x67 = xori $t1, $a2, 17767
|
||||||
0x00,0xe6,0x4a,0xd0 = nor $9, $6, $7
|
0x00,0xe6,0x4a,0xd0 = nor $t1, $a2, $a3
|
||||||
0x00,0x08,0x3a,0xd0 = not $7, $8
|
0x00,0x08,0x3a,0xd0 = not $a3, $t0
|
||||||
0x00,0xe6,0x4a,0x10 = mul $9, $6, $7
|
0x00,0xe6,0x4a,0x10 = mul $t1, $a2, $a3
|
||||||
0x00,0xe9,0x8b,0x3c = mult $9, $7
|
0x00,0xe9,0x8b,0x3c = mult $t1, $a3
|
||||||
0x00,0xe9,0x9b,0x3c = multu $9, $7
|
0x00,0xe9,0x9b,0x3c = multu $t1, $a3
|
||||||
0x00,0xe9,0xab,0x3c = div $zero, $9, $7
|
0x00,0xe9,0xab,0x3c = div $zero, $t1, $a3
|
||||||
0x00,0xe9,0xbb,0x3c = divu $zero, $9, $7
|
0x00,0xe9,0xbb,0x3c = divu $zero, $t1, $a3
|
||||||
|
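The combined mode string in headers like the one above (`CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN+CS_MODE_MICRO`) corresponds to OR-ing the matching mode flags in the bindings. A small sketch, assuming the first encoding of the block above and an arbitrary base address; the commented output is what the updated corpus line expects, not a guarantee of any particular build's output.

from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32, CS_MODE_MICRO, CS_MODE_BIG_ENDIAN

# Mode flags are plain integers, so the "+" in the header maps to a bitwise OR here.
md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 | CS_MODE_MICRO | CS_MODE_BIG_ENDIAN)

code = b"\x00\xe6\x49\x10"  # first vector of the block above
for insn in md.disasm(code, 0x0):
    print(insn.mnemonic, insn.op_str)  # corpus expects: add $t1, $a2, $a3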
@ -1,33 +1,33 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0xe6,0x00,0x10,0x49 = add $9, $6, $7
|
0xe6,0x00,0x10,0x49 = add $t1, $a2, $a3
|
||||||
0x26,0x11,0x67,0x45 = addi $9, $6, 17767
|
0x26,0x11,0x67,0x45 = addi $t1, $a2, 17767
|
||||||
0x26,0x31,0x67,0xc5 = addiu $9, $6, -15001
|
0x26,0x31,0x67,0xc5 = addiu $t1, $a2, -15001
|
||||||
0x26,0x11,0x67,0x45 = addi $9, $6, 17767
|
0x26,0x11,0x67,0x45 = addi $t1, $a2, 17767
|
||||||
0x26,0x31,0x67,0xc5 = addiu $9, $6, -15001
|
0x26,0x31,0x67,0xc5 = addiu $t1, $a2, -15001
|
||||||
0xe6,0x00,0x50,0x49 = addu $9, $6, $7
|
0xe6,0x00,0x50,0x49 = addu $t1, $a2, $a3
|
||||||
0xe6,0x00,0x90,0x49 = sub $9, $6, $7
|
0xe6,0x00,0x90,0x49 = sub $t1, $a2, $a3
|
||||||
0xa3,0x00,0xd0,0x21 = subu $4, $3, $5
|
0xa3,0x00,0xd0,0x21 = subu $a0, $v1, $a1
|
||||||
0xe0,0x00,0x90,0x31 = neg $6, $7
|
0xe0,0x00,0x90,0x31 = sub $a2, $zero, $a3
|
||||||
0xe0,0x00,0xd0,0x31 = negu $6, $7
|
0xe0,0x00,0xd0,0x31 = subu $a2, $zero, $a3
|
||||||
0x08,0x00,0x50,0x39 = move $7, $8
|
0x08,0x00,0x50,0x39 = addu $a3, $t0, $zero
|
||||||
0xa3,0x00,0x50,0x1b = slt $3, $3, $5
|
0xa3,0x00,0x50,0x1b = slt $v1, $v1, $a1
|
||||||
0x63,0x90,0x67,0x00 = slti $3, $3, 103
|
0x63,0x90,0x67,0x00 = slti $v1, $v1, 103
|
||||||
0x63,0x90,0x67,0x00 = slti $3, $3, 103
|
0x63,0x90,0x67,0x00 = slti $v1, $v1, 103
|
||||||
0x63,0xb0,0x67,0x00 = sltiu $3, $3, 103
|
0x63,0xb0,0x67,0x00 = sltiu $v1, $v1, 103
|
||||||
0xa3,0x00,0x90,0x1b = sltu $3, $3, $5
|
0xa3,0x00,0x90,0x1b = sltu $v1, $v1, $a1
|
||||||
0xa9,0x41,0x67,0x45 = lui $9, 17767
|
0xa9,0x41,0x67,0x45 = lui $t1, 17767
|
||||||
0xe6,0x00,0x50,0x4a = and $9, $6, $7
|
0xe6,0x00,0x50,0x4a = and $t1, $a2, $a3
|
||||||
0x26,0xd1,0x67,0x45 = andi $9, $6, 17767
|
0x26,0xd1,0x67,0x45 = andi $t1, $a2, 17767
|
||||||
0x26,0xd1,0x67,0x45 = andi $9, $6, 17767
|
0x26,0xd1,0x67,0x45 = andi $t1, $a2, 17767
|
||||||
0xa4,0x00,0x90,0x1a = or $3, $4, $5
|
0xa4,0x00,0x90,0x1a = or $v1, $a0, $a1
|
||||||
0x26,0x51,0x67,0x45 = ori $9, $6, 17767
|
0x26,0x51,0x67,0x45 = ori $t1, $a2, 17767
|
||||||
0xa3,0x00,0x10,0x1b = xor $3, $3, $5
|
0xa3,0x00,0x10,0x1b = xor $v1, $v1, $a1
|
||||||
0x26,0x71,0x67,0x45 = xori $9, $6, 17767
|
0x26,0x71,0x67,0x45 = xori $t1, $a2, 17767
|
||||||
0x26,0x71,0x67,0x45 = xori $9, $6, 17767
|
0x26,0x71,0x67,0x45 = xori $t1, $a2, 17767
|
||||||
0xe6,0x00,0xd0,0x4a = nor $9, $6, $7
|
0xe6,0x00,0xd0,0x4a = nor $t1, $a2, $a3
|
||||||
0x08,0x00,0xd0,0x3a = not $7, $8
|
0x08,0x00,0xd0,0x3a = not $a3, $t0
|
||||||
0xe6,0x00,0x10,0x4a = mul $9, $6, $7
|
0xe6,0x00,0x10,0x4a = mul $t1, $a2, $a3
|
||||||
0xe9,0x00,0x3c,0x8b = mult $9, $7
|
0xe9,0x00,0x3c,0x8b = mult $t1, $a3
|
||||||
0xe9,0x00,0x3c,0x9b = multu $9, $7
|
0xe9,0x00,0x3c,0x9b = multu $t1, $a3
|
||||||
0xe9,0x00,0x3c,0xab = div $zero, $9, $7
|
0xe9,0x00,0x3c,0xab = div $zero, $t1, $a3
|
||||||
0xe9,0x00,0x3c,0xbb = divu $zero, $9, $7
|
0xe9,0x00,0x3c,0xbb = divu $zero, $t1, $a3
|
||||||
|
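Each line of the test file above is one vector of the form "hex bytes = expected disassembly", with the engine configuration taken from the "# CS_ARCH_..., CS_MODE_..." header. Below is a minimal sketch (not part of the test corpus) of replaying one of these vectors through the Python bindings; the base address 0x1000 is an arbitrary assumption, and the expected output is simply the right-hand side of the matching line above.

    # Minimal sketch: replay one vector from the CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO file above.
    from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32, CS_MODE_MICRO

    code = bytes([0xe6, 0x00, 0x10, 0x49])                  # vector for "add $t1, $a2, $a3"
    md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_MICRO)   # little-endian microMIPS, per the header
    for insn in md.disasm(code, 0x1000):                    # 0x1000 is an arbitrary base address
        print("%s %s" % (insn.mnemonic, insn.op_str))       # should match the text after "="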
@ -1,11 +1,11 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x94,0x00,0x02,0x9a = b 1332
|
0x94,0x00,0x02,0x9a = b 1332
|
||||||
0x94,0xc9,0x02,0x9a = beq $9, $6, 1332
|
0x94,0xc9,0x02,0x9a = beq $t1, $a2, 1332
|
||||||
0x40,0x46,0x02,0x9a = bgez $6, 1332
|
0x40,0x46,0x02,0x9a = bgez $a2, 1332
|
||||||
0x40,0x66,0x02,0x9a = bgezal $6, 1332
|
0x40,0x66,0x02,0x9a = bgezal $a2, 1332
|
||||||
0x40,0x26,0x02,0x9a = bltzal $6, 1332
|
0x40,0x26,0x02,0x9a = bltzal $a2, 1332
|
||||||
0x40,0xc6,0x02,0x9a = bgtz $6, 1332
|
0x40,0xc6,0x02,0x9a = bgtz $a2, 1332
|
||||||
0x40,0x86,0x02,0x9a = blez $6, 1332
|
0x40,0x86,0x02,0x9a = blez $a2, 1332
|
||||||
0xb4,0xc9,0x02,0x9a = bne $9, $6, 1332
|
0xb4,0xc9,0x02,0x9a = bne $t1, $a2, 1332
|
||||||
0x40,0x60,0x02,0x9a = bal 1332
|
// 0x40,0x60,0x02,0x9a = bal 1332
|
||||||
0x40,0x06,0x02,0x9a = bltz $6, 1332
|
0x40,0x06,0x02,0x9a = bltz $a2, 1332
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0x00,0x94,0x9a,0x02 = b 1332
|
0x00,0x94,0x9a,0x02 = b 1332
|
||||||
0xc9,0x94,0x9a,0x02 = beq $9, $6, 1332
|
0xc9,0x94,0x9a,0x02 = beq $t1, $a2, 1332
|
||||||
0x46,0x40,0x9a,0x02 = bgez $6, 1332
|
0x46,0x40,0x9a,0x02 = bgez $a2, 1332
|
||||||
0x66,0x40,0x9a,0x02 = bgezal $6, 1332
|
0x66,0x40,0x9a,0x02 = bgezal $a2, 1332
|
||||||
0x26,0x40,0x9a,0x02 = bltzal $6, 1332
|
0x26,0x40,0x9a,0x02 = bltzal $a2, 1332
|
||||||
0xc6,0x40,0x9a,0x02 = bgtz $6, 1332
|
0xc6,0x40,0x9a,0x02 = bgtz $a2, 1332
|
||||||
0x86,0x40,0x9a,0x02 = blez $6, 1332
|
0x86,0x40,0x9a,0x02 = blez $a2, 1332
|
||||||
0xc9,0xb4,0x9a,0x02 = bne $9, $6, 1332
|
0xc9,0xb4,0x9a,0x02 = bne $t1, $a2, 1332
|
||||||
0x60,0x40,0x9a,0x02 = bal 1332
|
// 0x60,0x40,0x9a,0x02 = bal 1332
|
||||||
0x06,0x40,0x9a,0x02 = bltz $6, 1332
|
0x06,0x40,0x9a,0x02 = bltz $a2, 1332
|
||||||
|
@ -1,20 +1,20 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0xa0,0x50,0x7b,0x00 = ori $5, $zero, 123
|
0xa0,0x50,0x7b,0x00 = ori $a1, $zero, 123
|
||||||
0xc0,0x30,0xd7,0xf6 = addiu $6, $zero, -2345
|
0xc0,0x30,0xd7,0xf6 = addiu $a2, $zero, -2345
|
||||||
0xa7,0x41,0x01,0x00 = lui $7, 1
|
0xa7,0x41,0x01,0x00 = lui $a3, 1
|
||||||
0xe7,0x50,0x02,0x00 = ori $7, $7, 2
|
0xe7,0x50,0x02,0x00 = ori $a3, $a3, 2
|
||||||
0x80,0x30,0x14,0x00 = addiu $4, $zero, 20
|
0x80,0x30,0x14,0x00 = addiu $a0, $zero, 20
|
||||||
0xa7,0x41,0x01,0x00 = lui $7, 1
|
0xa7,0x41,0x01,0x00 = lui $a3, 1
|
||||||
0xe7,0x50,0x02,0x00 = ori $7, $7, 2
|
0xe7,0x50,0x02,0x00 = ori $a3, $a3, 2
|
||||||
0x85,0x30,0x14,0x00 = addiu $4, $5, 20
|
0x85,0x30,0x14,0x00 = addiu $a0, $a1, 20
|
||||||
0xa7,0x41,0x01,0x00 = lui $7, 1
|
0xa7,0x41,0x01,0x00 = lui $a3, 1
|
||||||
0xe7,0x50,0x02,0x00 = ori $7, $7, 2
|
0xe7,0x50,0x02,0x00 = ori $a3, $a3, 2
|
||||||
0x07,0x01,0x50,0x39 = addu $7, $7, $8
|
0x07,0x01,0x50,0x39 = addu $a3, $a3, $t0
|
||||||
0x8a,0x00,0x50,0x51 = addu $10, $10, $4
|
0x8a,0x00,0x50,0x51 = addu $t2, $t2, $a0
|
||||||
0x21,0x01,0x50,0x09 = addu $1, $1, $9
|
0x21,0x01,0x50,0x09 = addu $at, $at, $t1
|
||||||
0xaa,0x41,0x0a,0x00 = lui $10, 10
|
0xaa,0x41,0x0a,0x00 = lui $t2, 10
|
||||||
0x8a,0x00,0x50,0x51 = addu $10, $10, $4
|
0x8a,0x00,0x50,0x51 = addu $t2, $t2, $a0
|
||||||
0x4a,0xfd,0x7b,0x00 = lw $10, 123($10)
|
0x4a,0xfd,0x7b,0x00 = lw $t2, 123($t2)
|
||||||
0xa1,0x41,0x02,0x00 = lui $1, 2
|
0xa1,0x41,0x02,0x00 = lui $at, 2
|
||||||
0x21,0x01,0x50,0x09 = addu $1, $1, $9
|
0x21,0x01,0x50,0x09 = addu $at, $at, $t1
|
||||||
0x41,0xf9,0x40,0xe2 = sw $10, 57920($1)
|
// 0x41,0xf9,0x40,0xe2 = sw $t2, 57920($at)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0xd4,0x00,0x02,0x98 = j 1328
|
0xd4,0x00,0x02,0x98 = j 1328
|
||||||
0xf4,0x00,0x02,0x98 = jal 1328
|
0xf4,0x00,0x02,0x98 = jal 1328
|
||||||
0x03,0xe6,0x0f,0x3c = jalr $6
|
// 0x03,0xe6,0x0f,0x3c = jalr $a2
|
||||||
0x00,0x07,0x0f,0x3c = jr $7
|
0x00,0x07,0x0f,0x3c = jr $a3
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0x00,0xd4,0x98,0x02 = j 1328
|
0x00,0xd4,0x98,0x02 = j 1328
|
||||||
0x00,0xf4,0x98,0x02 = jal 1328
|
0x00,0xf4,0x98,0x02 = jal 1328
|
||||||
0xe6,0x03,0x3c,0x0f = jalr $6
|
// 0xe6,0x03,0x3c,0x0f = jalr $a2
|
||||||
0x07,0x00,0x3c,0x0f = jr $7
|
0x07,0x00,0x3c,0x0f = jr $a3
|
||||||
0x07,0x00,0x3c,0x0f = jr $7
|
0x07,0x00,0x3c,0x0f = jr $a3
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x1c,0xa4,0x00,0x08 = lb $5, 8($4)
|
0x1c,0xa4,0x00,0x08 = lb $a1, 8($a0)
|
||||||
0x14,0xc4,0x00,0x08 = lbu $6, 8($4)
|
0x14,0xc4,0x00,0x08 = lbu $a2, 8($a0)
|
||||||
0x3c,0x44,0x00,0x08 = lh $2, 8($4)
|
0x3c,0x44,0x00,0x08 = lh $v0, 8($a0)
|
||||||
0x34,0x82,0x00,0x08 = lhu $4, 8($2)
|
0x34,0x82,0x00,0x08 = lhu $a0, 8($v0)
|
||||||
0xfc,0xc5,0x00,0x04 = lw $6, 4($5)
|
0xfc,0xc5,0x00,0x04 = lw $a2, 4($a1)
|
||||||
0x18,0xa4,0x00,0x08 = sb $5, 8($4)
|
0x18,0xa4,0x00,0x08 = sb $a1, 8($a0)
|
||||||
0x38,0x44,0x00,0x08 = sh $2, 8($4)
|
0x38,0x44,0x00,0x08 = sh $v0, 8($a0)
|
||||||
0xf8,0xa6,0x00,0x04 = sw $5, 4($6)
|
0xf8,0xa6,0x00,0x04 = sw $a1, 4($a2)
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0xa4,0x1c,0x08,0x00 = lb $5, 8($4)
|
0xa4,0x1c,0x08,0x00 = lb $a1, 8($a0)
|
||||||
0xc4,0x14,0x08,0x00 = lbu $6, 8($4)
|
0xc4,0x14,0x08,0x00 = lbu $a2, 8($a0)
|
||||||
0x44,0x3c,0x08,0x00 = lh $2, 8($4)
|
0x44,0x3c,0x08,0x00 = lh $v0, 8($a0)
|
||||||
0x82,0x34,0x08,0x00 = lhu $4, 8($2)
|
0x82,0x34,0x08,0x00 = lhu $a0, 8($v0)
|
||||||
0xc5,0xfc,0x04,0x00 = lw $6, 4($5)
|
0xc5,0xfc,0x04,0x00 = lw $a2, 4($a1)
|
||||||
0xa4,0x18,0x08,0x00 = sb $5, 8($4)
|
0xa4,0x18,0x08,0x00 = sb $a1, 8($a0)
|
||||||
0x44,0x38,0x08,0x00 = sh $2, 8($4)
|
0x44,0x38,0x08,0x00 = sh $v0, 8($a0)
|
||||||
0xa6,0xf8,0x04,0x00 = sw $5, 4($6)
|
0xa6,0xf8,0x04,0x00 = sw $a1, 4($a2)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x60,0x85,0x00,0x10 = lwl $4, 16($5)
|
0x60,0x85,0x00,0x10 = lwl $a0, 16($a1)
|
||||||
0x60,0x85,0x10,0x10 = lwr $4, 16($5)
|
0x60,0x85,0x10,0x10 = lwr $a0, 16($a1)
|
||||||
0x60,0x85,0x80,0x10 = swl $4, 16($5)
|
0x60,0x85,0x80,0x10 = swl $a0, 16($a1)
|
||||||
0x60,0x85,0x90,0x10 = swr $4, 16($5)
|
0x60,0x85,0x90,0x10 = swr $a0, 16($a1)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0x85,0x60,0x10,0x00 = lwl $4, 16($5)
|
0x85,0x60,0x10,0x00 = lwl $a0, 16($a1)
|
||||||
0x85,0x60,0x10,0x10 = lwr $4, 16($5)
|
0x85,0x60,0x10,0x10 = lwr $a0, 16($a1)
|
||||||
0x85,0x60,0x10,0x80 = swl $4, 16($5)
|
0x85,0x60,0x10,0x80 = swl $a0, 16($a1)
|
||||||
0x85,0x60,0x10,0x90 = swr $4, 16($5)
|
0x85,0x60,0x10,0x90 = swr $a0, 16($a1)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x00,0xe6,0x48,0x58 = movz $9, $6, $7
|
0x00,0xe6,0x48,0x58 = movz $t1, $a2, $a3
|
||||||
0x00,0xe6,0x48,0x18 = movn $9, $6, $7
|
0x00,0xe6,0x48,0x18 = movn $t1, $a2, $a3
|
||||||
0x55,0x26,0x09,0x7b = movt $9, $6, $fcc0
|
0x55,0x26,0x09,0x7b = movt $t1, $a2, $fcc0
|
||||||
0x55,0x26,0x01,0x7b = movf $9, $6, $fcc0
|
0x55,0x26,0x01,0x7b = movf $t1, $a2, $fcc0
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0xe6,0x00,0x58,0x48 = movz $9, $6, $7
|
0xe6,0x00,0x58,0x48 = movz $t1, $a2, $a3
|
||||||
0xe6,0x00,0x18,0x48 = movn $9, $6, $7
|
0xe6,0x00,0x18,0x48 = movn $t1, $a2, $a3
|
||||||
0x26,0x55,0x7b,0x09 = movt $9, $6, $fcc0
|
0x26,0x55,0x7b,0x09 = movt $t1, $a2, $fcc0
|
||||||
0x26,0x55,0x7b,0x01 = movf $9, $6, $fcc0
|
0x26,0x55,0x7b,0x01 = movf $t1, $a2, $fcc0
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x00,0xa4,0xcb,0x3c = madd $4, $5
|
0x00,0xa4,0xcb,0x3c = madd $a0, $a1
|
||||||
0x00,0xa4,0xdb,0x3c = maddu $4, $5
|
0x00,0xa4,0xdb,0x3c = maddu $a0, $a1
|
||||||
0x00,0xa4,0xeb,0x3c = msub $4, $5
|
0x00,0xa4,0xeb,0x3c = msub $a0, $a1
|
||||||
0x00,0xa4,0xfb,0x3c = msubu $4, $5
|
0x00,0xa4,0xfb,0x3c = msubu $a0, $a1
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0xa4,0x00,0x3c,0xcb = madd $4, $5
|
0xa4,0x00,0x3c,0xcb = madd $a0, $a1
|
||||||
0xa4,0x00,0x3c,0xdb = maddu $4, $5
|
0xa4,0x00,0x3c,0xdb = maddu $a0, $a1
|
||||||
0xa4,0x00,0x3c,0xeb = msub $4, $5
|
0xa4,0x00,0x3c,0xeb = msub $a0, $a1
|
||||||
0xa4,0x00,0x3c,0xfb = msubu $4, $5
|
0xa4,0x00,0x3c,0xfb = msubu $a0, $a1
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x00,0x83,0x38,0x00 = sll $4, $3, 7
|
0x00,0x83,0x38,0x00 = sll $a0, $v1, 7
|
||||||
0x00,0x65,0x10,0x10 = sllv $2, $3, $5
|
0x00,0x65,0x10,0x10 = sllv $v0, $v1, $a1
|
||||||
0x00,0x83,0x38,0x80 = sra $4, $3, 7
|
0x00,0x83,0x38,0x80 = sra $a0, $v1, 7
|
||||||
0x00,0x65,0x10,0x90 = srav $2, $3, $5
|
0x00,0x65,0x10,0x90 = srav $v0, $v1, $a1
|
||||||
0x00,0x83,0x38,0x40 = srl $4, $3, 7
|
0x00,0x83,0x38,0x40 = srl $a0, $v1, 7
|
||||||
0x00,0x65,0x10,0x50 = srlv $2, $3, $5
|
0x00,0x65,0x10,0x50 = srlv $v0, $v1, $a1
|
||||||
0x01,0x26,0x38,0xc0 = rotr $9, $6, 7
|
0x01,0x26,0x38,0xc0 = rotr $t1, $a2, 7
|
||||||
0x00,0xc7,0x48,0xd0 = rotrv $9, $6, $7
|
0x00,0xc7,0x48,0xd0 = rotrv $t1, $a2, $a3
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0x83,0x00,0x00,0x38 = sll $4, $3, 7
|
0x83,0x00,0x00,0x38 = sll $a0, $v1, 7
|
||||||
0x65,0x00,0x10,0x10 = sllv $2, $3, $5
|
0x65,0x00,0x10,0x10 = sllv $v0, $v1, $a1
|
||||||
0x83,0x00,0x80,0x38 = sra $4, $3, 7
|
0x83,0x00,0x80,0x38 = sra $a0, $v1, 7
|
||||||
0x65,0x00,0x90,0x10 = srav $2, $3, $5
|
0x65,0x00,0x90,0x10 = srav $v0, $v1, $a1
|
||||||
0x83,0x00,0x40,0x38 = srl $4, $3, 7
|
0x83,0x00,0x40,0x38 = srl $a0, $v1, 7
|
||||||
0x65,0x00,0x50,0x10 = srlv $2, $3, $5
|
0x65,0x00,0x50,0x10 = srlv $v0, $v1, $a1
|
||||||
0x26,0x01,0xc0,0x38 = rotr $9, $6, 7
|
0x26,0x01,0xc0,0x38 = rotr $t1, $a2, 7
|
||||||
0xc7,0x00,0xd0,0x48 = rotrv $9, $6, $7
|
0xc7,0x00,0xd0,0x48 = rotrv $t1, $a2, $a3
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO+CS_MODE_BIG_ENDIAN, None
|
||||||
0x01,0x28,0x00,0x3c = teq $8, $9
|
// 0x01,0x28,0x00,0x3c = teq $t0, $t1
|
||||||
0x01,0x28,0x02,0x3c = tge $8, $9
|
// 0x01,0x28,0x02,0x3c = tge $t0, $t1
|
||||||
0x01,0x28,0x04,0x3c = tgeu $8, $9
|
// 0x01,0x28,0x04,0x3c = tgeu $t0, $t1
|
||||||
0x01,0x28,0x08,0x3c = tlt $8, $9
|
// 0x01,0x28,0x08,0x3c = tlt $t0, $t1
|
||||||
0x01,0x28,0x0a,0x3c = tltu $8, $9
|
// 0x01,0x28,0x0a,0x3c = tltu $t0, $t1
|
||||||
0x01,0x28,0x0c,0x3c = tne $8, $9
|
// 0x01,0x28,0x0c,0x3c = tne $t0, $t1
|
||||||
0x41,0xc9,0x45,0x67 = teqi $9, 17767
|
0x41,0xc9,0x45,0x67 = teqi $t1, 17767
|
||||||
0x41,0x29,0x45,0x67 = tgei $9, 17767
|
0x41,0x29,0x45,0x67 = tgei $t1, 17767
|
||||||
0x41,0x69,0x45,0x67 = tgeiu $9, 17767
|
0x41,0x69,0x45,0x67 = tgeiu $t1, 17767
|
||||||
0x41,0x09,0x45,0x67 = tlti $9, 17767
|
0x41,0x09,0x45,0x67 = tlti $t1, 17767
|
||||||
0x41,0x49,0x45,0x67 = tltiu $9, 17767
|
0x41,0x49,0x45,0x67 = tltiu $t1, 17767
|
||||||
0x41,0x89,0x45,0x67 = tnei $9, 17767
|
0x41,0x89,0x45,0x67 = tnei $t1, 17767
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_MICRO, None
|
||||||
0x28,0x01,0x3c,0x00 = teq $8, $9
|
// 0x28,0x01,0x3c,0x00 = teq $t0, $t1
|
||||||
0x28,0x01,0x3c,0x02 = tge $8, $9
|
// 0x28,0x01,0x3c,0x02 = tge $t0, $t1
|
||||||
0x28,0x01,0x3c,0x04 = tgeu $8, $9
|
// 0x28,0x01,0x3c,0x04 = tgeu $t0, $t1
|
||||||
0x28,0x01,0x3c,0x08 = tlt $8, $9
|
// 0x28,0x01,0x3c,0x08 = tlt $t0, $t1
|
||||||
0x28,0x01,0x3c,0x0a = tltu $8, $9
|
// 0x28,0x01,0x3c,0x0a = tltu $t0, $t1
|
||||||
0x28,0x01,0x3c,0x0c = tne $8, $9
|
// 0x28,0x01,0x3c,0x0c = tne $t0, $t1
|
||||||
0xc9,0x41,0x67,0x45 = teqi $9, 17767
|
0xc9,0x41,0x67,0x45 = teqi $t1, 17767
|
||||||
0x29,0x41,0x67,0x45 = tgei $9, 17767
|
0x29,0x41,0x67,0x45 = tgei $t1, 17767
|
||||||
0x69,0x41,0x67,0x45 = tgeiu $9, 17767
|
0x69,0x41,0x67,0x45 = tgeiu $t1, 17767
|
||||||
0x09,0x41,0x67,0x45 = tlti $9, 17767
|
0x09,0x41,0x67,0x45 = tlti $t1, 17767
|
||||||
0x49,0x41,0x67,0x45 = tltiu $9, 17767
|
0x49,0x41,0x67,0x45 = tltiu $t1, 17767
|
||||||
0x89,0x41,0x67,0x45 = tnei $9, 17767
|
0x89,0x41,0x67,0x45 = tnei $t1, 17767
|
||||||
|
@ -1,53 +1,53 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
||||||
0x24,0x48,0xc7,0x00 = and $9, $6, $7
|
0x24,0x48,0xc7,0x00 = and $t1, $a2, $a3
|
||||||
0x67,0x45,0xc9,0x30 = andi $9, $6, 17767
|
0x67,0x45,0xc9,0x30 = andi $t1, $a2, 17767
|
||||||
0x67,0x45,0xc9,0x30 = andi $9, $6, 17767
|
0x67,0x45,0xc9,0x30 = andi $t1, $a2, 17767
|
||||||
0x67,0x45,0x29,0x31 = andi $9, $9, 17767
|
0x67,0x45,0x29,0x31 = andi $t1, $t1, 17767
|
||||||
0x21,0x30,0xe6,0x70 = clo $6, $7
|
0x21,0x30,0xe6,0x70 = clo $a2, $a3
|
||||||
0x20,0x30,0xe6,0x70 = clz $6, $7
|
0x20,0x30,0xe6,0x70 = clz $a2, $a3
|
||||||
0x84,0x61,0x33,0x7d = ins $19, $9, 6, 7
|
0x84,0x61,0x33,0x7d = ins $s3, $t1, 6, 7
|
||||||
0x27,0x48,0xc7,0x00 = nor $9, $6, $7
|
0x27,0x48,0xc7,0x00 = nor $t1, $a2, $a3
|
||||||
0x25,0x18,0x65,0x00 = or $3, $3, $5
|
0x25,0x18,0x65,0x00 = or $v1, $v1, $a1
|
||||||
0x67,0x45,0xa4,0x34 = ori $4, $5, 17767
|
0x67,0x45,0xa4,0x34 = ori $a0, $a1, 17767
|
||||||
0x67,0x45,0xc9,0x34 = ori $9, $6, 17767
|
0x67,0x45,0xc9,0x34 = ori $t1, $a2, 17767
|
||||||
0x80,0x00,0x6b,0x35 = ori $11, $11, 128
|
0x80,0x00,0x6b,0x35 = ori $t3, $t3, 128
|
||||||
0xc2,0x49,0x26,0x00 = rotr $9, $6, 7
|
0xc2,0x49,0x26,0x00 = rotr $t1, $a2, 7
|
||||||
0x46,0x48,0xe6,0x00 = rotrv $9, $6, $7
|
0x46,0x48,0xe6,0x00 = rotrv $t1, $a2, $a3
|
||||||
0xc0,0x21,0x03,0x00 = sll $4, $3, 7
|
0xc0,0x21,0x03,0x00 = sll $a0, $v1, 7
|
||||||
0x04,0x10,0xa3,0x00 = sllv $2, $3, $5
|
0x04,0x10,0xa3,0x00 = sllv $v0, $v1, $a1
|
||||||
0x2a,0x18,0x65,0x00 = slt $3, $3, $5
|
0x2a,0x18,0x65,0x00 = slt $v1, $v1, $a1
|
||||||
0x67,0x00,0x63,0x28 = slti $3, $3, 103
|
0x67,0x00,0x63,0x28 = slti $v1, $v1, 103
|
||||||
0x67,0x00,0x63,0x28 = slti $3, $3, 103
|
0x67,0x00,0x63,0x28 = slti $v1, $v1, 103
|
||||||
0x67,0x00,0x63,0x2c = sltiu $3, $3, 103
|
0x67,0x00,0x63,0x2c = sltiu $v1, $v1, 103
|
||||||
0x2b,0x18,0x65,0x00 = sltu $3, $3, $5
|
0x2b,0x18,0x65,0x00 = sltu $v1, $v1, $a1
|
||||||
0xc3,0x21,0x03,0x00 = sra $4, $3, 7
|
0xc3,0x21,0x03,0x00 = sra $a0, $v1, 7
|
||||||
0x07,0x10,0xa3,0x00 = srav $2, $3, $5
|
0x07,0x10,0xa3,0x00 = srav $v0, $v1, $a1
|
||||||
0xc2,0x21,0x03,0x00 = srl $4, $3, 7
|
0xc2,0x21,0x03,0x00 = srl $a0, $v1, 7
|
||||||
0x06,0x10,0xa3,0x00 = srlv $2, $3, $5
|
0x06,0x10,0xa3,0x00 = srlv $v0, $v1, $a1
|
||||||
0x26,0x18,0x65,0x00 = xor $3, $3, $5
|
0x26,0x18,0x65,0x00 = xor $v1, $v1, $a1
|
||||||
0x67,0x45,0xc9,0x38 = xori $9, $6, 17767
|
0x67,0x45,0xc9,0x38 = xori $t1, $a2, 17767
|
||||||
0x67,0x45,0xc9,0x38 = xori $9, $6, 17767
|
0x67,0x45,0xc9,0x38 = xori $t1, $a2, 17767
|
||||||
0x0c,0x00,0x6b,0x39 = xori $11, $11, 12
|
0x0c,0x00,0x6b,0x39 = xori $t3, $t3, 12
|
||||||
0xa0,0x30,0x07,0x7c = wsbh $6, $7
|
0xa0,0x30,0x07,0x7c = wsbh $a2, $a3
|
||||||
0x27,0x38,0x00,0x01 = not $7, $8
|
0x27,0x38,0x00,0x01 = not $a3, $t0
|
||||||
0x20,0x48,0xc7,0x00 = add $9, $6, $7
|
0x20,0x48,0xc7,0x00 = add $t1, $a2, $a3
|
||||||
0x67,0x45,0xc9,0x20 = addi $9, $6, 17767
|
0x67,0x45,0xc9,0x20 = addi $t1, $a2, 17767
|
||||||
0x67,0xc5,0xc9,0x24 = addiu $9, $6, -15001
|
0x67,0xc5,0xc9,0x24 = addiu $t1, $a2, -15001
|
||||||
0x67,0x45,0xc9,0x20 = addi $9, $6, 17767
|
0x67,0x45,0xc9,0x20 = addi $t1, $a2, 17767
|
||||||
0x67,0x45,0x29,0x21 = addi $9, $9, 17767
|
0x67,0x45,0x29,0x21 = addi $t1, $t1, 17767
|
||||||
0x67,0xc5,0xc9,0x24 = addiu $9, $6, -15001
|
0x67,0xc5,0xc9,0x24 = addiu $t1, $a2, -15001
|
||||||
0x28,0x00,0x6b,0x25 = addiu $11, $11, 40
|
0x28,0x00,0x6b,0x25 = addiu $t3, $t3, 40
|
||||||
0x21,0x48,0xc7,0x00 = addu $9, $6, $7
|
0x21,0x48,0xc7,0x00 = addu $t1, $a2, $a3
|
||||||
0x00,0x00,0xc7,0x70 = madd $6, $7
|
0x00,0x00,0xc7,0x70 = madd $a2, $a3
|
||||||
0x01,0x00,0xc7,0x70 = maddu $6, $7
|
0x01,0x00,0xc7,0x70 = maddu $a2, $a3
|
||||||
0x04,0x00,0xc7,0x70 = msub $6, $7
|
0x04,0x00,0xc7,0x70 = msub $a2, $a3
|
||||||
0x05,0x00,0xc7,0x70 = msubu $6, $7
|
0x05,0x00,0xc7,0x70 = msubu $a2, $a3
|
||||||
0x18,0x00,0x65,0x00 = mult $3, $5
|
0x18,0x00,0x65,0x00 = mult $v1, $a1
|
||||||
0x19,0x00,0x65,0x00 = multu $3, $5
|
0x19,0x00,0x65,0x00 = multu $v1, $a1
|
||||||
0x22,0x48,0xc7,0x00 = sub $9, $6, $7
|
0x22,0x48,0xc7,0x00 = sub $t1, $a2, $a3
|
||||||
0xc8,0xff,0xbd,0x23 = addi $sp, $sp, -56
|
0xc8,0xff,0xbd,0x23 = addi $sp, $sp, -56
|
||||||
0x23,0x20,0x65,0x00 = subu $4, $3, $5
|
0x23,0x20,0x65,0x00 = subu $a0, $v1, $a1
|
||||||
0xd8,0xff,0xbd,0x27 = addiu $sp, $sp, -40
|
0xd8,0xff,0xbd,0x27 = addiu $sp, $sp, -40
|
||||||
0x22,0x30,0x07,0x00 = neg $6, $7
|
0x22,0x30,0x07,0x00 = neg $a2, $a3
|
||||||
0x23,0x30,0x07,0x00 = negu $6, $7
|
0x23,0x30,0x07,0x00 = negu $a2, $a3
|
||||||
0x21,0x38,0x00,0x01 = move $7, $8
|
0x21,0x38,0x00,0x01 = move $a3, $t0
|
||||||
|
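The register-name changes in the file above (numeric "$9" becoming symbolic "$t1" and so on) only affect the printed text; the decoded operands are unchanged. A hedged sketch of inspecting those operands for one vector of the plain CS_ARCH_MIPS, CS_MODE_MIPS32 file is given below, assuming the symbolic register printing shown in the right-hand column; again the base address 0x1000 is chosen arbitrarily.

    # Sketch: decode one vector with operand detail enabled and print its register operands.
    from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32
    from capstone.mips import MIPS_OP_REG

    code = bytes([0x24, 0x48, 0xc7, 0x00])        # vector for "and $t1, $a2, $a3"
    md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32)         # plain MIPS32, little-endian, per the header
    md.detail = True                              # request operand-level detail
    for insn in md.disasm(code, 0x1000):
        print(insn.mnemonic, insn.op_str)
        for op in insn.operands:                  # walk the decoded operands
            if op.type == MIPS_OP_REG:
                print("  register:", insn.reg_name(op.reg))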
@ -1,6 +1,6 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
||||||
0x00,0x00,0x00,0x0d = break
|
0x00,0x00,0x00,0x0d = break
|
||||||
0x00,0x07,0x00,0x0d = break 7, 0
|
// 0x00,0x07,0x00,0x0d = break 7, 0
|
||||||
0x00,0x07,0x01,0x4d = break 7, 5
|
0x00,0x07,0x01,0x4d = break 7, 5
|
||||||
0x00,0x00,0x00,0x0c = syscall
|
0x00,0x00,0x00,0x0c = syscall
|
||||||
0x00,0x0d,0x15,0x0c = syscall 13396
|
0x00,0x0d,0x15,0x0c = syscall 13396
|
||||||
@ -8,26 +8,26 @@
|
|||||||
0x42,0x00,0x00,0x1f = deret
|
0x42,0x00,0x00,0x1f = deret
|
||||||
0x41,0x60,0x60,0x00 = di
|
0x41,0x60,0x60,0x00 = di
|
||||||
0x41,0x60,0x60,0x00 = di
|
0x41,0x60,0x60,0x00 = di
|
||||||
0x41,0x6a,0x60,0x00 = di $10
|
0x41,0x6a,0x60,0x00 = di $t2
|
||||||
0x41,0x60,0x60,0x20 = ei
|
0x41,0x60,0x60,0x20 = ei
|
||||||
0x41,0x60,0x60,0x20 = ei
|
0x41,0x60,0x60,0x20 = ei
|
||||||
0x41,0x6a,0x60,0x20 = ei $10
|
0x41,0x6a,0x60,0x20 = ei $t2
|
||||||
0x42,0x00,0x00,0x20 = wait
|
0x42,0x00,0x00,0x20 = wait
|
||||||
0x00,0x03,0x00,0x34 = teq $zero, $3
|
0x00,0x03,0x00,0x34 = teq $zero, $v1
|
||||||
0x00,0x03,0x00,0x74 = teq $zero, $3, 1
|
0x00,0x03,0x00,0x74 = teq $zero, $v1, 1
|
||||||
0x04,0x6c,0x00,0x01 = teqi $3, 1
|
0x04,0x6c,0x00,0x01 = teqi $v1, 1
|
||||||
0x00,0x03,0x00,0x30 = tge $zero, $3
|
0x00,0x03,0x00,0x30 = tge $zero, $v1
|
||||||
0x00,0x03,0x00,0xf0 = tge $zero, $3, 3
|
0x00,0x03,0x00,0xf0 = tge $zero, $v1, 3
|
||||||
0x04,0x68,0x00,0x03 = tgei $3, 3
|
0x04,0x68,0x00,0x03 = tgei $v1, 3
|
||||||
0x00,0x03,0x00,0x31 = tgeu $zero, $3
|
0x00,0x03,0x00,0x31 = tgeu $zero, $v1
|
||||||
0x00,0x03,0x01,0xf1 = tgeu $zero, $3, 7
|
0x00,0x03,0x01,0xf1 = tgeu $zero, $v1, 7
|
||||||
0x04,0x69,0x00,0x07 = tgeiu $3, 7
|
0x04,0x69,0x00,0x07 = tgeiu $v1, 7
|
||||||
0x00,0x03,0x00,0x32 = tlt $zero, $3
|
0x00,0x03,0x00,0x32 = tlt $zero, $v1
|
||||||
0x00,0x03,0x07,0xf2 = tlt $zero, $3, 31
|
0x00,0x03,0x07,0xf2 = tlt $zero, $v1, 31
|
||||||
0x04,0x6a,0x00,0x1f = tlti $3, 31
|
0x04,0x6a,0x00,0x1f = tlti $v1, 31
|
||||||
0x00,0x03,0x00,0x33 = tltu $zero, $3
|
0x00,0x03,0x00,0x33 = tltu $zero, $v1
|
||||||
0x00,0x03,0x3f,0xf3 = tltu $zero, $3, 255
|
0x00,0x03,0x3f,0xf3 = tltu $zero, $v1, 255
|
||||||
0x04,0x6b,0x00,0xff = tltiu $3, 255
|
0x04,0x6b,0x00,0xff = tltiu $v1, 255
|
||||||
0x00,0x03,0x00,0x36 = tne $zero, $3
|
0x00,0x03,0x00,0x36 = tne $zero, $v1
|
||||||
0x00,0x03,0xff,0xf6 = tne $zero, $3, 1023
|
0x00,0x03,0xff,0xf6 = tne $zero, $v1, 1023
|
||||||
0x04,0x6e,0x03,0xff = tnei $3, 1023
|
0x04,0x6e,0x03,0xff = tnei $v1, 1023
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x00,0x00,0x00,0x0d = break
|
0x00,0x00,0x00,0x0d = break
|
||||||
0x00,0x07,0x00,0x0d = break 7, 0
|
// 0x00,0x07,0x00,0x0d = break 7, 0
|
||||||
0x00,0x07,0x01,0x4d = break 7, 5
|
0x00,0x07,0x01,0x4d = break 7, 5
|
||||||
0x00,0x00,0x00,0x0c = syscall
|
0x00,0x00,0x00,0x0c = syscall
|
||||||
0x00,0x0d,0x15,0x0c = syscall 13396
|
0x00,0x0d,0x15,0x0c = syscall 13396
|
||||||
@ -8,26 +8,26 @@
|
|||||||
0x42,0x00,0x00,0x1f = deret
|
0x42,0x00,0x00,0x1f = deret
|
||||||
0x41,0x60,0x60,0x00 = di
|
0x41,0x60,0x60,0x00 = di
|
||||||
0x41,0x60,0x60,0x00 = di
|
0x41,0x60,0x60,0x00 = di
|
||||||
0x41,0x6a,0x60,0x00 = di $10
|
0x41,0x6a,0x60,0x00 = di $t2
|
||||||
0x41,0x60,0x60,0x20 = ei
|
0x41,0x60,0x60,0x20 = ei
|
||||||
0x41,0x60,0x60,0x20 = ei
|
0x41,0x60,0x60,0x20 = ei
|
||||||
0x41,0x6a,0x60,0x20 = ei $10
|
0x41,0x6a,0x60,0x20 = ei $t2
|
||||||
0x42,0x00,0x00,0x20 = wait
|
0x42,0x00,0x00,0x20 = wait
|
||||||
0x00,0x03,0x00,0x34 = teq $zero, $3
|
0x00,0x03,0x00,0x34 = teq $zero, $v1
|
||||||
0x00,0x03,0x00,0x74 = teq $zero, $3, 1
|
0x00,0x03,0x00,0x74 = teq $zero, $v1, 1
|
||||||
0x04,0x6c,0x00,0x01 = teqi $3, 1
|
0x04,0x6c,0x00,0x01 = teqi $v1, 1
|
||||||
0x00,0x03,0x00,0x30 = tge $zero, $3
|
0x00,0x03,0x00,0x30 = tge $zero, $v1
|
||||||
0x00,0x03,0x00,0xf0 = tge $zero, $3, 3
|
0x00,0x03,0x00,0xf0 = tge $zero, $v1, 3
|
||||||
0x04,0x68,0x00,0x03 = tgei $3, 3
|
0x04,0x68,0x00,0x03 = tgei $v1, 3
|
||||||
0x00,0x03,0x00,0x31 = tgeu $zero, $3
|
0x00,0x03,0x00,0x31 = tgeu $zero, $v1
|
||||||
0x00,0x03,0x01,0xf1 = tgeu $zero, $3, 7
|
0x00,0x03,0x01,0xf1 = tgeu $zero, $v1, 7
|
||||||
0x04,0x69,0x00,0x07 = tgeiu $3, 7
|
0x04,0x69,0x00,0x07 = tgeiu $v1, 7
|
||||||
0x00,0x03,0x00,0x32 = tlt $zero, $3
|
0x00,0x03,0x00,0x32 = tlt $zero, $v1
|
||||||
0x00,0x03,0x07,0xf2 = tlt $zero, $3, 31
|
0x00,0x03,0x07,0xf2 = tlt $zero, $v1, 31
|
||||||
0x04,0x6a,0x00,0x1f = tlti $3, 31
|
0x04,0x6a,0x00,0x1f = tlti $v1, 31
|
||||||
0x00,0x03,0x00,0x33 = tltu $zero, $3
|
0x00,0x03,0x00,0x33 = tltu $zero, $v1
|
||||||
0x00,0x03,0x3f,0xf3 = tltu $zero, $3, 255
|
0x00,0x03,0x3f,0xf3 = tltu $zero, $v1, 255
|
||||||
0x04,0x6b,0x00,0xff = tltiu $3, 255
|
0x04,0x6b,0x00,0xff = tltiu $v1, 255
|
||||||
0x00,0x03,0x00,0x36 = tne $zero, $3
|
0x00,0x03,0x00,0x36 = tne $zero, $v1
|
||||||
0x00,0x03,0xff,0xf6 = tne $zero, $3, 1023
|
0x00,0x03,0xff,0xf6 = tne $zero, $v1, 1023
|
||||||
0x04,0x6e,0x03,0xff = tnei $3, 1023
|
0x04,0x6e,0x03,0xff = tnei $v1, 1023
|
||||||
|
@ -1,17 +1,17 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
||||||
0x40,0xac,0x80,0x02 = dmtc0 $12, $16, 2
|
0x40,0xac,0x80,0x02 = dmtc0 $t4, $s0, 2
|
||||||
0x40,0xac,0x80,0x00 = dmtc0 $12, $16, 0
|
0x40,0xac,0x80,0x00 = dmtc0 $t4, $s0, 0
|
||||||
0x40,0x8c,0x80,0x02 = mtc0 $12, $16, 2
|
0x40,0x8c,0x80,0x02 = mtc0 $t4, $s0, 2
|
||||||
0x40,0x8c,0x80,0x00 = mtc0 $12, $16, 0
|
0x40,0x8c,0x80,0x00 = mtc0 $t4, $s0, 0
|
||||||
0x40,0x2c,0x80,0x02 = dmfc0 $12, $16, 2
|
0x40,0x2c,0x80,0x02 = dmfc0 $t4, $s0, 2
|
||||||
0x40,0x2c,0x80,0x00 = dmfc0 $12, $16, 0
|
0x40,0x2c,0x80,0x00 = dmfc0 $t4, $s0, 0
|
||||||
0x40,0x0c,0x80,0x02 = mfc0 $12, $16, 2
|
0x40,0x0c,0x80,0x02 = mfc0 $t4, $s0, 2
|
||||||
0x40,0x0c,0x80,0x00 = mfc0 $12, $16, 0
|
0x40,0x0c,0x80,0x00 = mfc0 $t4, $s0, 0
|
||||||
0x48,0xac,0x80,0x02 = dmtc2 $12, $16, 2
|
0x48,0xac,0x80,0x02 = dmtc2 $t4, $s0, 2
|
||||||
0x48,0xac,0x80,0x00 = dmtc2 $12, $16, 0
|
0x48,0xac,0x80,0x00 = dmtc2 $t4, $s0, 0
|
||||||
0x48,0x8c,0x80,0x02 = mtc2 $12, $16, 2
|
0x48,0x8c,0x80,0x02 = mtc2 $t4, $s0, 2
|
||||||
0x48,0x8c,0x80,0x00 = mtc2 $12, $16, 0
|
0x48,0x8c,0x80,0x00 = mtc2 $t4, $s0, 0
|
||||||
0x48,0x2c,0x80,0x02 = dmfc2 $12, $16, 2
|
0x48,0x2c,0x80,0x02 = dmfc2 $t4, $s0, 2
|
||||||
0x48,0x2c,0x80,0x00 = dmfc2 $12, $16, 0
|
0x48,0x2c,0x80,0x00 = dmfc2 $t4, $s0, 0
|
||||||
0x48,0x0c,0x80,0x02 = mfc2 $12, $16, 2
|
0x48,0x0c,0x80,0x02 = mfc2 $t4, $s0, 2
|
||||||
0x48,0x0c,0x80,0x00 = mfc2 $12, $16, 0
|
0x48,0x0c,0x80,0x00 = mfc2 $t4, $s0, 0
|
||||||
|
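The coprocessor-move vectors above use a different configuration than the earlier files: 64-bit MIPS with big-endian byte order. A short sketch of that mode combination, using the first vector of the file above and an arbitrary base address of 0x1000, follows.

    # Sketch: the CS_MODE_MIPS64 + CS_MODE_BIG_ENDIAN combination from the file header above.
    from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS64, CS_MODE_BIG_ENDIAN

    code = bytes([0x40, 0xac, 0x80, 0x02])                      # vector for "dmtc0 $t4, $s0, 2"
    md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS64 + CS_MODE_BIG_ENDIAN)  # modes taken from the file header
    for insn in md.disasm(code, 0x1000):
        print("%s %s" % (insn.mnemonic, insn.op_str))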
@ -1,43 +1,43 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x7e,0x32,0x83,0x11 = precrq.qb.ph $16, $17, $18
|
0x7e,0x32,0x83,0x11 = precrq.qb.ph $s0, $s1, $s2
|
||||||
0x7e,0x53,0x8d,0x11 = precrq.ph.w $17, $18, $19
|
0x7e,0x53,0x8d,0x11 = precrq.ph.w $s1, $s2, $s3
|
||||||
0x7e,0x74,0x95,0x51 = precrq_rs.ph.w $18, $19, $20
|
0x7e,0x74,0x95,0x51 = precrq_rs.ph.w $s2, $s3, $s4
|
||||||
0x7e,0x95,0x9b,0xd1 = precrqu_s.qb.ph $19, $20, $21
|
0x7e,0x95,0x9b,0xd1 = precrqu_s.qb.ph $s3, $s4, $s5
|
||||||
0x7c,0x15,0xa3,0x12 = preceq.w.phl $20, $21
|
0x7c,0x15,0xa3,0x12 = preceq.w.phl $s4, $s5
|
||||||
0x7c,0x16,0xab,0x52 = preceq.w.phr $21, $22
|
0x7c,0x16,0xab,0x52 = preceq.w.phr $s5, $s6
|
||||||
0x7c,0x17,0xb1,0x12 = precequ.ph.qbl $22, $23
|
0x7c,0x17,0xb1,0x12 = precequ.ph.qbl $s6, $s7
|
||||||
0x7c,0x18,0xb9,0x52 = precequ.ph.qbr $23, $24
|
0x7c,0x18,0xb9,0x52 = precequ.ph.qbr $s7, $t8
|
||||||
0x7c,0x19,0xc1,0x92 = precequ.ph.qbla $24, $25
|
0x7c,0x19,0xc1,0x92 = precequ.ph.qbla $t8, $t9
|
||||||
0x7c,0x1a,0xc9,0xd2 = precequ.ph.qbra $25, $26
|
0x7c,0x1a,0xc9,0xd2 = precequ.ph.qbra $t9, $k0
|
||||||
0x7c,0x1b,0xd7,0x12 = preceu.ph.qbl $26, $27
|
0x7c,0x1b,0xd7,0x12 = preceu.ph.qbl $k0, $k1
|
||||||
0x7c,0x1c,0xdf,0x52 = preceu.ph.qbr $27, $gp
|
0x7c,0x1c,0xdf,0x52 = preceu.ph.qbr $k1, $gp
|
||||||
0x7c,0x1d,0xe7,0x92 = preceu.ph.qbla $gp, $sp
|
0x7c,0x1d,0xe7,0x92 = preceu.ph.qbla $gp, $sp
|
||||||
0x7c,0x1e,0xef,0xd2 = preceu.ph.qbra $sp, $fp
|
0x7c,0x1e,0xef,0xd2 = preceu.ph.qbra $sp, $fp
|
||||||
0x7f,0x19,0xbb,0x51 = precr.qb.ph $23, $24, $25
|
0x7f,0x19,0xbb,0x51 = precr.qb.ph $s7, $t8, $t9
|
||||||
0x7f,0x38,0x07,0x91 = precr_sra.ph.w $24, $25, 0
|
0x7f,0x38,0x07,0x91 = precr_sra.ph.w $t8, $t9, 0
|
||||||
0x7f,0x38,0xff,0x91 = precr_sra.ph.w $24, $25, 31
|
0x7f,0x38,0xff,0x91 = precr_sra.ph.w $t8, $t9, 31
|
||||||
0x7f,0x59,0x07,0xd1 = precr_sra_r.ph.w $25, $26, 0
|
0x7f,0x59,0x07,0xd1 = precr_sra_r.ph.w $t9, $k0, 0
|
||||||
0x7f,0x59,0xff,0xd1 = precr_sra_r.ph.w $25, $26, 31
|
0x7f,0x59,0xff,0xd1 = precr_sra_r.ph.w $t9, $k0, 31
|
||||||
0x7f,0x54,0x51,0x8a = lbux $10, $20($26)
|
0x7f,0x54,0x51,0x8a = lbux $t2, $s4($k0)
|
||||||
0x7f,0x75,0x59,0x0a = lhx $11, $21($27)
|
0x7f,0x75,0x59,0x0a = lhx $t3, $s5($k1)
|
||||||
0x7f,0x96,0x60,0x0a = lwx $12, $22($gp)
|
0x7f,0x96,0x60,0x0a = lwx $t4, $s6($gp)
|
||||||
0x00,0x43,0x18,0x18 = mult $ac3, $2, $3
|
0x00,0x43,0x18,0x18 = mult $ac3, $v0, $v1
|
||||||
0x00,0x85,0x10,0x19 = multu $ac2, $4, $5
|
0x00,0x85,0x10,0x19 = multu $ac2, $a0, $a1
|
||||||
0x70,0xc7,0x08,0x00 = madd $ac1, $6, $7
|
0x70,0xc7,0x08,0x00 = madd $ac1, $a2, $a3
|
||||||
0x71,0x09,0x00,0x01 = maddu $ac0, $8, $9
|
// 0x71,0x09,0x00,0x01 = maddu $ac0, $t0, $t1
|
||||||
0x71,0x4b,0x18,0x04 = msub $ac3, $10, $11
|
0x71,0x4b,0x18,0x04 = msub $ac3, $t2, $t3
|
||||||
0x71,0x8d,0x10,0x05 = msubu $ac2, $12, $13
|
0x71,0x8d,0x10,0x05 = msubu $ac2, $t4, $t5
|
||||||
0x00,0x20,0x70,0x10 = mfhi $14, $ac1
|
0x00,0x20,0x70,0x10 = mfhi $t6, $ac1
|
||||||
0x00,0x00,0x78,0x12 = mflo $15, $ac0
|
// 0x00,0x00,0x78,0x12 = mflo $t7, $ac0
|
||||||
0x02,0x00,0x18,0x11 = mthi $16, $ac3
|
0x02,0x00,0x18,0x11 = mthi $s0, $ac3
|
||||||
0x02,0x20,0x10,0x13 = mtlo $17, $ac2
|
0x02,0x20,0x10,0x13 = mtlo $s1, $ac2
|
||||||
0x00,0x43,0x00,0x18 = mult $2, $3
|
0x00,0x43,0x00,0x18 = mult $v0, $v1
|
||||||
0x00,0x85,0x00,0x19 = multu $4, $5
|
0x00,0x85,0x00,0x19 = multu $a0, $a1
|
||||||
0x70,0xc7,0x00,0x00 = madd $6, $7
|
0x70,0xc7,0x00,0x00 = madd $a2, $a3
|
||||||
0x71,0x09,0x00,0x01 = maddu $8, $9
|
// 0x71,0x09,0x00,0x01 = maddu $t0, $t1
|
||||||
0x71,0x4b,0x00,0x04 = msub $10, $11
|
0x71,0x4b,0x00,0x04 = msub $t2, $t3
|
||||||
0x71,0x8d,0x00,0x05 = msubu $12, $13
|
0x71,0x8d,0x00,0x05 = msubu $t4, $t5
|
||||||
0x00,0x00,0x70,0x10 = mfhi $14
|
0x00,0x00,0x70,0x10 = mfhi $t6
|
||||||
0x00,0x00,0x78,0x12 = mflo $15
|
// 0x00,0x00,0x78,0x12 = mflo $t7
|
||||||
0x02,0x00,0x00,0x11 = mthi $16
|
0x02,0x00,0x00,0x11 = mthi $s0
|
||||||
0x02,0x20,0x00,0x13 = mtlo $17
|
0x02,0x20,0x00,0x13 = mtlo $s1
|
||||||
|
@ -1,20 +1,19 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
||||||
0x7b,0x00,0x05,0x34 = ori $5, $zero, 123
|
0x7b,0x00,0x05,0x34 = ori $a1, $zero, 123
|
||||||
0xd7,0xf6,0x06,0x24 = addiu $6, $zero, -2345
|
0xd7,0xf6,0x06,0x24 = addiu $a2, $zero, -2345
|
||||||
0x01,0x00,0x07,0x3c = lui $7, 1
|
0x01,0x00,0x07,0x3c = lui $a3, 1
|
||||||
0x02,0x00,0xe7,0x34 = ori $7, $7, 2
|
0x02,0x00,0xe7,0x34 = ori $a3, $a3, 2
|
||||||
0x14,0x00,0x04,0x24 = addiu $4, $zero, 20
|
0x14,0x00,0x04,0x24 = addiu $a0, $zero, 20
|
||||||
0x01,0x00,0x07,0x3c = lui $7, 1
|
0x01,0x00,0x07,0x3c = lui $a3, 1
|
||||||
0x02,0x00,0xe7,0x34 = ori $7, $7, 2
|
0x02,0x00,0xe7,0x34 = ori $a3, $a3, 2
|
||||||
0x14,0x00,0xa4,0x24 = addiu $4, $5, 20
|
0x14,0x00,0xa4,0x24 = addiu $a0, $a1, 20
|
||||||
0x01,0x00,0x07,0x3c = lui $7, 1
|
0x01,0x00,0x07,0x3c = lui $a3, 1
|
||||||
0x02,0x00,0xe7,0x34 = ori $7, $7, 2
|
0x02,0x00,0xe7,0x34 = ori $a3, $a3, 2
|
||||||
0x21,0x38,0xe8,0x00 = addu $7, $7, $8
|
0x21,0x38,0xe8,0x00 = addu $a3, $a3, $t0
|
||||||
0x21,0x50,0x44,0x01 = addu $10, $10, $4
|
0x21,0x50,0x44,0x01 = addu $t2, $t2, $a0
|
||||||
0x21,0x08,0x29,0x00 = addu $1, $1, $9
|
0x21,0x08,0x29,0x00 = addu $at, $at, $t1
|
||||||
0x0a,0x00,0x0a,0x3c = lui $10, 10
|
0x0a,0x00,0x0a,0x3c = lui $t2, 10
|
||||||
0x21,0x50,0x44,0x01 = addu $10, $10, $4
|
0x7b,0x00,0x4a,0x8d = lw $t2, 123($t2)
|
||||||
0x7b,0x00,0x4a,0x8d = lw $10, 123($10)
|
0x02,0x00,0x01,0x3c = lui $at, 2
|
||||||
0x02,0x00,0x01,0x3c = lui $1, 2
|
0x21,0x08,0x29,0x00 = addu $at, $at, $t1
|
||||||
0x21,0x08,0x29,0x00 = addu $1, $1, $9
|
// 0x40,0xe2,0x2a,0xac = sw $t2, 57920($at)
|
||||||
0x40,0xe2,0x2a,0xac = sw $10, 57920($1)
|
|
||||||
|
@ -57,36 +57,36 @@
|
|||||||
0xa0,0x39,0x80,0x46 = cvt.s.w $f6, $f7
|
0xa0,0x39,0x80,0x46 = cvt.s.w $f6, $f7
|
||||||
0x24,0x73,0x20,0x46 = cvt.w.d $f12, $f14
|
0x24,0x73,0x20,0x46 = cvt.w.d $f12, $f14
|
||||||
0xa4,0x39,0x00,0x46 = cvt.w.s $f6, $f7
|
0xa4,0x39,0x00,0x46 = cvt.w.s $f6, $f7
|
||||||
0x00,0x00,0x46,0x44 = cfc1 $6, $0
|
0x00,0x00,0x46,0x44 = cfc1 $a2, $0
|
||||||
0x00,0xf8,0xca,0x44 = ctc1 $10, $31
|
0x00,0xf8,0xca,0x44 = ctc1 $t2, $31
|
||||||
0x00,0x38,0x06,0x44 = mfc1 $6, $f7
|
0x00,0x38,0x06,0x44 = mfc1 $a2, $f7
|
||||||
0x10,0x28,0x00,0x00 = mfhi $5
|
0x10,0x28,0x00,0x00 = mfhi $a1
|
||||||
0x12,0x28,0x00,0x00 = mflo $5
|
0x12,0x28,0x00,0x00 = mflo $a1
|
||||||
0x86,0x41,0x20,0x46 = mov.d $f6, $f8
|
0x86,0x41,0x20,0x46 = mov.d $f6, $f8
|
||||||
0x86,0x39,0x00,0x46 = mov.s $f6, $f7
|
0x86,0x39,0x00,0x46 = mov.s $f6, $f7
|
||||||
0x00,0x38,0x86,0x44 = mtc1 $6, $f7
|
0x00,0x38,0x86,0x44 = mtc1 $a2, $f7
|
||||||
0x11,0x00,0xe0,0x00 = mthi $7
|
0x11,0x00,0xe0,0x00 = mthi $a3
|
||||||
0x13,0x00,0xe0,0x00 = mtlo $7
|
0x13,0x00,0xe0,0x00 = mtlo $a3
|
||||||
0xc6,0x23,0xe9,0xe4 = swc1 $f9, 9158($7)
|
0xc6,0x23,0xe9,0xe4 = swc1 $f9, 9158($a3)
|
||||||
0x00,0x38,0x06,0x40 = mfc0 $6, $7, 0
|
0x00,0x38,0x06,0x40 = mfc0 $a2, $a3, 0
|
||||||
0x00,0x40,0x89,0x40 = mtc0 $9, $8, 0
|
0x00,0x40,0x89,0x40 = mtc0 $t1, $t0, 0
|
||||||
0x00,0x38,0x05,0x48 = mfc2 $5, $7, 0
|
0x00,0x38,0x05,0x48 = mfc2 $a1, $a3, 0
|
||||||
0x00,0x20,0x89,0x48 = mtc2 $9, $4, 0
|
0x00,0x20,0x89,0x48 = mtc2 $t1, $a0, 0
|
||||||
0x02,0x38,0x06,0x40 = mfc0 $6, $7, 2
|
0x02,0x38,0x06,0x40 = mfc0 $a2, $a3, 2
|
||||||
0x03,0x40,0x89,0x40 = mtc0 $9, $8, 3
|
0x03,0x40,0x89,0x40 = mtc0 $t1, $t0, 3
|
||||||
0x04,0x38,0x05,0x48 = mfc2 $5, $7, 4
|
0x04,0x38,0x05,0x48 = mfc2 $a1, $a3, 4
|
||||||
0x05,0x20,0x89,0x48 = mtc2 $9, $4, 5
|
0x05,0x20,0x89,0x48 = mtc2 $t1, $a0, 5
|
||||||
0x01,0x10,0x20,0x00 = movf $2, $1, $fcc0
|
0x01,0x10,0x20,0x00 = movf $v0, $at, $fcc0
|
||||||
0x01,0x10,0x21,0x00 = movt $2, $1, $fcc0
|
0x01,0x10,0x21,0x00 = movt $v0, $at, $fcc0
|
||||||
0x01,0x20,0xb1,0x00 = movt $4, $5, $fcc4
|
0x01,0x20,0xb1,0x00 = movt $a0, $a1, $fcc4
|
||||||
0x11,0x31,0x28,0x46 = movf.d $f4, $f6, $fcc2
|
0x11,0x31,0x28,0x46 = movf.d $f4, $f6, $fcc2
|
||||||
0x11,0x31,0x14,0x46 = movf.s $f4, $f6, $fcc5
|
0x11,0x31,0x14,0x46 = movf.s $f4, $f6, $fcc5
|
||||||
0x05,0x00,0xa6,0x4c = luxc1 $f0, $6($5)
|
0x05,0x00,0xa6,0x4c = luxc1 $f0, $a2($a1)
|
||||||
0x0d,0x20,0xb8,0x4c = suxc1 $f4, $24($5)
|
0x0d,0x20,0xb8,0x4c = suxc1 $f4, $t8($a1)
|
||||||
0x00,0x05,0xcc,0x4d = lwxc1 $f20, $12($14)
|
0x00,0x05,0xcc,0x4d = lwxc1 $f20, $t4($t6)
|
||||||
0x08,0xd0,0xd2,0x4e = swxc1 $f26, $18($22)
|
0x08,0xd0,0xd2,0x4e = swxc1 $f26, $s2($s6)
|
||||||
0x00,0x20,0x71,0x44 = mfhc1 $17, $f4
|
0x00,0x20,0x71,0x44 = mfhc1 $s1, $f4
|
||||||
0x00,0x30,0xf1,0x44 = mthc1 $17, $f6
|
0x00,0x30,0xf1,0x44 = mthc1 $s1, $f6
|
||||||
0x10,0x00,0xa4,0xeb = swc2 $4, 16($sp)
|
0x10,0x00,0xa4,0xeb = swc2 $4, 16($sp)
|
||||||
0x10,0x00,0xa4,0xfb = sdc2 $4, 16($sp)
|
0x10,0x00,0xa4,0xfb = sdc2 $4, 16($sp)
|
||||||
0x0c,0x00,0xeb,0xcb = lwc2 $11, 12($ra)
|
0x0c,0x00,0xeb,0xcb = lwc2 $11, 12($ra)
|
||||||
|
@ -1,17 +1,17 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
||||||
0x10,0x00,0xa4,0xa0 = sb $4, 16($5)
|
0x10,0x00,0xa4,0xa0 = sb $a0, 16($a1)
|
||||||
0x10,0x00,0xa4,0xe0 = sc $4, 16($5)
|
0x10,0x00,0xa4,0xe0 = sc $a0, 16($a1)
|
||||||
0x10,0x00,0xa4,0xa4 = sh $4, 16($5)
|
0x10,0x00,0xa4,0xa4 = sh $a0, 16($a1)
|
||||||
0x10,0x00,0xa4,0xac = sw $4, 16($5)
|
0x10,0x00,0xa4,0xac = sw $a0, 16($a1)
|
||||||
0x00,0x00,0xa7,0xac = sw $7, 0($5)
|
0x00,0x00,0xa7,0xac = sw $a3, ($a1)
|
||||||
0x10,0x00,0xa2,0xe4 = swc1 $f2, 16($5)
|
0x10,0x00,0xa2,0xe4 = swc1 $f2, 16($a1)
|
||||||
0x10,0x00,0xa4,0xa8 = swl $4, 16($5)
|
0x10,0x00,0xa4,0xa8 = swl $a0, 16($a1)
|
||||||
0x04,0x00,0xa4,0x80 = lb $4, 4($5)
|
0x04,0x00,0xa4,0x80 = lb $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0x8c = lw $4, 4($5)
|
0x04,0x00,0xa4,0x8c = lw $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0x90 = lbu $4, 4($5)
|
0x04,0x00,0xa4,0x90 = lbu $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0x84 = lh $4, 4($5)
|
0x04,0x00,0xa4,0x84 = lh $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0x94 = lhu $4, 4($5)
|
0x04,0x00,0xa4,0x94 = lhu $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0xc0 = ll $4, 4($5)
|
0x04,0x00,0xa4,0xc0 = ll $a0, 4($a1)
|
||||||
0x04,0x00,0xa4,0x8c = lw $4, 4($5)
|
0x04,0x00,0xa4,0x8c = lw $a0, 4($a1)
|
||||||
0x00,0x00,0xe7,0x8c = lw $7, 0($7)
|
0x00,0x00,0xe7,0x8c = lw $a3, ($a3)
|
||||||
0x10,0x00,0xa2,0x8f = lw $2, 16($sp)
|
0x10,0x00,0xa2,0x8f = lw $v0, 16($sp)
|
||||||
|
@ -30,4 +30,4 @@
|
|||||||
0x24,0x1c,0x00,0x00 = addiu $gp, $zero, 0
|
0x24,0x1c,0x00,0x00 = addiu $gp, $zero, 0
|
||||||
0x24,0x1d,0x00,0x00 = addiu $sp, $zero, 0
|
0x24,0x1d,0x00,0x00 = addiu $sp, $zero, 0
|
||||||
0x24,0x1e,0x00,0x00 = addiu $fp, $zero, 0
|
0x24,0x1e,0x00,0x00 = addiu $fp, $zero, 0
|
||||||
0x24,0x1f,0x00,0x00 = addiu $sp, $zero, 0
|
// 0x24,0x1f,0x00,0x00 = addiu $sp, $zero, 0
|
||||||
|
@ -1,47 +1,47 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS64, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS64, None
|
||||||
0x24,0x48,0xc7,0x00 = and $9, $6, $7
|
0x24,0x48,0xc7,0x00 = and $t1, $a2, $a3
|
||||||
0x67,0x45,0xc9,0x30 = andi $9, $6, 17767
|
0x67,0x45,0xc9,0x30 = andi $t1, $a2, 17767
|
||||||
0x67,0x45,0xc9,0x30 = andi $9, $6, 17767
|
0x67,0x45,0xc9,0x30 = andi $t1, $a2, 17767
|
||||||
0x21,0x30,0xe6,0x70 = clo $6, $7
|
0x21,0x30,0xe6,0x70 = clo $a2, $a3
|
||||||
0x20,0x30,0xe6,0x70 = clz $6, $7
|
0x20,0x30,0xe6,0x70 = clz $a2, $a3
|
||||||
0x84,0x61,0x33,0x7d = ins $19, $9, 6, 7
|
0x84,0x61,0x33,0x7d = ins $s3, $t1, 6, 7
|
||||||
0x27,0x48,0xc7,0x00 = nor $9, $6, $7
|
0x27,0x48,0xc7,0x00 = nor $t1, $a2, $a3
|
||||||
0x25,0x18,0x65,0x00 = or $3, $3, $5
|
0x25,0x18,0x65,0x00 = or $v1, $v1, $a1
|
||||||
0x67,0x45,0xa4,0x34 = ori $4, $5, 17767
|
0x67,0x45,0xa4,0x34 = ori $a0, $a1, 17767
|
||||||
0x67,0x45,0xc9,0x34 = ori $9, $6, 17767
|
0x67,0x45,0xc9,0x34 = ori $t1, $a2, 17767
|
||||||
0xc2,0x49,0x26,0x00 = rotr $9, $6, 7
|
0xc2,0x49,0x26,0x00 = rotr $t1, $a2, 7
|
||||||
0x46,0x48,0xe6,0x00 = rotrv $9, $6, $7
|
0x46,0x48,0xe6,0x00 = rotrv $t1, $a2, $a3
|
||||||
0xc0,0x21,0x03,0x00 = sll $4, $3, 7
|
0xc0,0x21,0x03,0x00 = sll $a0, $v1, 7
|
||||||
0x04,0x10,0xa3,0x00 = sllv $2, $3, $5
|
0x04,0x10,0xa3,0x00 = sllv $v0, $v1, $a1
|
||||||
0x2a,0x18,0x65,0x00 = slt $3, $3, $5
|
0x2a,0x18,0x65,0x00 = slt $v1, $v1, $a1
|
||||||
0x67,0x00,0x63,0x28 = slti $3, $3, 103
|
0x67,0x00,0x63,0x28 = slti $v1, $v1, 103
|
||||||
0x67,0x00,0x63,0x28 = slti $3, $3, 103
|
0x67,0x00,0x63,0x28 = slti $v1, $v1, 103
|
||||||
0x67,0x00,0x63,0x2c = sltiu $3, $3, 103
|
0x67,0x00,0x63,0x2c = sltiu $v1, $v1, 103
|
||||||
0x2b,0x18,0x65,0x00 = sltu $3, $3, $5
|
0x2b,0x18,0x65,0x00 = sltu $v1, $v1, $a1
|
||||||
0xc3,0x21,0x03,0x00 = sra $4, $3, 7
|
0xc3,0x21,0x03,0x00 = sra $a0, $v1, 7
|
||||||
0x07,0x10,0xa3,0x00 = srav $2, $3, $5
|
0x07,0x10,0xa3,0x00 = srav $v0, $v1, $a1
|
||||||
0xc2,0x21,0x03,0x00 = srl $4, $3, 7
|
0xc2,0x21,0x03,0x00 = srl $a0, $v1, 7
|
||||||
0x06,0x10,0xa3,0x00 = srlv $2, $3, $5
|
0x06,0x10,0xa3,0x00 = srlv $v0, $v1, $a1
|
||||||
0x26,0x18,0x65,0x00 = xor $3, $3, $5
|
0x26,0x18,0x65,0x00 = xor $v1, $v1, $a1
|
||||||
0x67,0x45,0xc9,0x38 = xori $9, $6, 17767
|
0x67,0x45,0xc9,0x38 = xori $t1, $a2, 17767
|
||||||
0x67,0x45,0xc9,0x38 = xori $9, $6, 17767
|
0x67,0x45,0xc9,0x38 = xori $t1, $a2, 17767
|
||||||
0xa0,0x30,0x07,0x7c = wsbh $6, $7
|
0xa0,0x30,0x07,0x7c = wsbh $a2, $a3
|
||||||
0x27,0x38,0x00,0x01 = not $7, $8
|
0x27,0x38,0x00,0x01 = not $a3, $t0
|
||||||
0x2c,0x48,0xc7,0x00 = dadd $9, $6, $7
|
0x2c,0x48,0xc7,0x00 = dadd $t1, $a2, $a3
|
||||||
0x67,0x45,0xc9,0x60 = daddi $9, $6, 17767
|
0x67,0x45,0xc9,0x60 = daddi $t1, $a2, 17767
|
||||||
0x67,0xc5,0xc9,0x64 = daddiu $9, $6, -15001
|
0x67,0xc5,0xc9,0x64 = daddiu $t1, $a2, -15001
|
||||||
0x67,0x45,0xc9,0x60 = daddi $9, $6, 17767
|
0x67,0x45,0xc9,0x60 = daddi $t1, $a2, 17767
|
||||||
0x67,0x45,0x29,0x61 = daddi $9, $9, 17767
|
0x67,0x45,0x29,0x61 = daddi $t1, $t1, 17767
|
||||||
0x67,0xc5,0xc9,0x64 = daddiu $9, $6, -15001
|
0x67,0xc5,0xc9,0x64 = daddiu $t1, $a2, -15001
|
||||||
0x67,0xc5,0x29,0x65 = daddiu $9, $9, -15001
|
0x67,0xc5,0x29,0x65 = daddiu $t1, $t1, -15001
|
||||||
0x2d,0x48,0xc7,0x00 = daddu $9, $6, $7
|
0x2d,0x48,0xc7,0x00 = daddu $t1, $a2, $a3
|
||||||
0x3a,0x4d,0x26,0x00 = drotr $9, $6, 20
|
0x3a,0x4d,0x26,0x00 = drotr $t1, $a2, 20
|
||||||
0x3e,0x4d,0x26,0x00 = drotr32 $9, $6, 52
|
// 0x3e,0x4d,0x26,0x00 = drotr32 $t1, $a2, 52
|
||||||
0x00,0x00,0xc7,0x70 = madd $6, $7
|
0x00,0x00,0xc7,0x70 = madd $a2, $a3
|
||||||
0x01,0x00,0xc7,0x70 = maddu $6, $7
|
0x01,0x00,0xc7,0x70 = maddu $a2, $a3
|
||||||
0x04,0x00,0xc7,0x70 = msub $6, $7
|
0x04,0x00,0xc7,0x70 = msub $a2, $a3
|
||||||
0x05,0x00,0xc7,0x70 = msubu $6, $7
|
0x05,0x00,0xc7,0x70 = msubu $a2, $a3
|
||||||
0x18,0x00,0x65,0x00 = mult $3, $5
|
0x18,0x00,0x65,0x00 = mult $v1, $a1
|
||||||
0x19,0x00,0x65,0x00 = multu $3, $5
|
0x19,0x00,0x65,0x00 = multu $v1, $a1
|
||||||
0x2f,0x20,0x65,0x00 = dsubu $4, $3, $5
|
0x2f,0x20,0x65,0x00 = dsubu $a0, $v1, $a1
|
||||||
0x2d,0x38,0x00,0x01 = move $7, $8
|
0x2d,0x38,0x00,0x01 = move $a3, $t0
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS64, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS64, None
|
||||||
0x81,0x00,0x42,0x4d = ldxc1 $f2, $2($10)
|
0x81,0x00,0x42,0x4d = ldxc1 $f2, $v0($t2)
|
||||||
0x09,0x40,0x24,0x4f = sdxc1 $f8, $4($25)
|
0x09,0x40,0x24,0x4f = sdxc1 $f8, $a0($t9)
|
||||||
|
@ -6,11 +6,11 @@
|
|||||||
0x64,0x04,0x00,0x00 = daddiu $a0, $zero, 0
|
0x64,0x04,0x00,0x00 = daddiu $a0, $zero, 0
|
||||||
0x64,0x05,0x00,0x00 = daddiu $a1, $zero, 0
|
0x64,0x05,0x00,0x00 = daddiu $a1, $zero, 0
|
||||||
0x64,0x06,0x00,0x00 = daddiu $a2, $zero, 0
|
0x64,0x06,0x00,0x00 = daddiu $a2, $zero, 0
|
||||||
0x64,0x07,0x00,0x00 = daddiu $a2, $zero, 0
|
// 0x64,0x07,0x00,0x00 = daddiu $a2, $zero, 0
|
||||||
0x64,0x08,0x00,0x00 = daddiu $a4, $zero, 0
|
// 0x64,0x08,0x00,0x00 = daddiu $a4, $zero, 0
|
||||||
0x64,0x09,0x00,0x00 = daddiu $a5, $zero, 0
|
// 0x64,0x09,0x00,0x00 = daddiu $a5, $zero, 0
|
||||||
0x64,0x0a,0x00,0x00 = daddiu $a6, $zero, 0
|
// 0x64,0x0a,0x00,0x00 = daddiu $a6, $zero, 0
|
||||||
0x64,0x0b,0x00,0x00 = daddiu $a7, $zero, 0
|
// 0x64,0x0b,0x00,0x00 = daddiu $a7, $zero, 0
|
||||||
0x64,0x0c,0x00,0x00 = daddiu $t4, $zero, 0
|
0x64,0x0c,0x00,0x00 = daddiu $t4, $zero, 0
|
||||||
0x64,0x0d,0x00,0x00 = daddiu $t5, $zero, 0
|
0x64,0x0d,0x00,0x00 = daddiu $t5, $zero, 0
|
||||||
0x64,0x0e,0x00,0x00 = daddiu $t6, $zero, 0
|
0x64,0x0e,0x00,0x00 = daddiu $t6, $zero, 0
|
||||||
@ -25,9 +25,9 @@
|
|||||||
0x64,0x17,0x00,0x00 = daddiu $s7, $zero, 0
|
0x64,0x17,0x00,0x00 = daddiu $s7, $zero, 0
|
||||||
0x64,0x18,0x00,0x00 = daddiu $t8, $zero, 0
|
0x64,0x18,0x00,0x00 = daddiu $t8, $zero, 0
|
||||||
0x64,0x19,0x00,0x00 = daddiu $t9, $zero, 0
|
0x64,0x19,0x00,0x00 = daddiu $t9, $zero, 0
|
||||||
0x64,0x1a,0x00,0x00 = daddiu $kt0, $zero, 0
|
// 0x64,0x1a,0x00,0x00 = daddiu $kt0, $zero, 0
|
||||||
0x64,0x1b,0x00,0x00 = daddiu $kt1, $zero, 0
|
// 0x64,0x1b,0x00,0x00 = daddiu $kt1, $zero, 0
|
||||||
0x64,0x1c,0x00,0x00 = daddiu $gp, $zero, 0
|
0x64,0x1c,0x00,0x00 = daddiu $gp, $zero, 0
|
||||||
0x64,0x1d,0x00,0x00 = daddiu $sp, $zero, 0
|
0x64,0x1d,0x00,0x00 = daddiu $sp, $zero, 0
|
||||||
0x64,0x1e,0x00,0x00 = daddiu $s8, $zero, 0
|
// 0x64,0x1e,0x00,0x00 = daddiu $s8, $zero, 0
|
||||||
0x64,0x1f,0x00,0x00 = daddiu $ra, $zero, 0
|
0x64,0x1f,0x00,0x00 = daddiu $ra, $zero, 0
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x10,0x00,0x01,0x4d = b 1332
|
0x10,0x00,0x01,0x4d = b 1336
|
||||||
0x08,0x00,0x01,0x4c = j 1328
|
0x08,0x00,0x01,0x4c = j 1328
|
||||||
0x0c,0x00,0x01,0x4c = jal 1328
|
0x0c,0x00,0x01,0x4c = jal 1328
|
||||||
0x10,0x00,0x01,0x4d = b 1332
|
0x10,0x00,0x01,0x4d = b 1336
|
||||||
0x00,0x00,0x00,0x00 = nop
|
0x00,0x00,0x00,0x00 = nop
|
||||||
0x08,0x00,0x01,0x4c = j 1328
|
0x08,0x00,0x01,0x4c = j 1328
|
||||||
0x00,0x00,0x00,0x00 = nop
|
0x00,0x00,0x00,0x00 = nop
|
||||||
0x0c,0x00,0x01,0x4c = jal 1328
|
0x0c,0x00,0x01,0x4c = jal 1328
|
||||||
0x00,0x00,0x00,0x00 = nop
|
0x00,0x00,0x00,0x00 = nop
|
||||||
0x46,0x00,0x39,0x85 = abs.s $f6, $f7
|
0x46,0x00,0x39,0x85 = abs.s $f6, $f7
|
||||||
0x01,0xef,0x18,0x24 = and $3, $15, $15
|
0x01,0xef,0x18,0x24 = and $v1, $t7, $t7
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS64+CS_MODE_BIG_ENDIAN, None
|
||||||
0x02,0x04,0x80,0x20 = add $16, $16, $4
|
0x02,0x04,0x80,0x20 = add $s0, $s0, $a0
|
||||||
0x02,0x06,0x80,0x20 = add $16, $16, $6
|
0x02,0x06,0x80,0x20 = add $s0, $s0, $a2
|
||||||
0x02,0x07,0x80,0x20 = add $16, $16, $7
|
0x02,0x07,0x80,0x20 = add $s0, $s0, $a3
|
||||||
0x02,0x08,0x80,0x20 = add $16, $16, $8
|
0x02,0x08,0x80,0x20 = add $s0, $s0, $t0
|
||||||
0x02,0x09,0x80,0x20 = add $16, $16, $9
|
0x02,0x09,0x80,0x20 = add $s0, $s0, $t1
|
||||||
0x02,0x0a,0x80,0x20 = add $16, $16, $10
|
0x02,0x0a,0x80,0x20 = add $s0, $s0, $t2
|
||||||
0x02,0x0b,0x80,0x20 = add $16, $16, $11
|
0x02,0x0b,0x80,0x20 = add $s0, $s0, $t3
|
||||||
0x02,0x0c,0x80,0x20 = add $16, $16, $12
|
0x02,0x0c,0x80,0x20 = add $s0, $s0, $t4
|
||||||
0x02,0x0d,0x80,0x20 = add $16, $16, $13
|
0x02,0x0d,0x80,0x20 = add $s0, $s0, $t5
|
||||||
0x02,0x0e,0x80,0x20 = add $16, $16, $14
|
0x02,0x0e,0x80,0x20 = add $s0, $s0, $t6
|
||||||
0x02,0x0f,0x80,0x20 = add $16, $16, $15
|
0x02,0x0f,0x80,0x20 = add $s0, $s0, $t7
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32, None
|
||||||
0x08,0x00,0x60,0x00 = jr $3
|
0x08,0x00,0x60,0x00 = jr $v1
|
||||||
0x08,0x00,0x80,0x03 = jr $gp
|
0x08,0x00,0x80,0x03 = jr $gp
|
||||||
0x08,0x00,0xc0,0x03 = jr $fp
|
0x08,0x00,0xc0,0x03 = jr $fp
|
||||||
0x08,0x00,0xa0,0x03 = jr $sp
|
0x08,0x00,0xa0,0x03 = jr $sp
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x7b,0x00,0x4f,0x9e = fill.b $w30, $9
|
0x7b,0x00,0x4f,0x9e = fill.b $w30, $t1
|
||||||
0x7b,0x01,0xbf,0xde = fill.h $w31, $23
|
0x7b,0x01,0xbf,0xde = fill.h $w31, $s7
|
||||||
0x7b,0x02,0xc4,0x1e = fill.w $w16, $24
|
0x7b,0x02,0xc4,0x1e = fill.w $w16, $t8
|
||||||
0x7b,0x08,0x05,0x5e = nloc.b $w21, $w0
|
0x7b,0x08,0x05,0x5e = nloc.b $w21, $w0
|
||||||
0x7b,0x09,0xfc,0x9e = nloc.h $w18, $w31
|
0x7b,0x09,0xfc,0x9e = nloc.h $w18, $w31
|
||||||
0x7b,0x0a,0xb8,0x9e = nloc.w $w2, $w23
|
0x7b,0x0a,0xb8,0x9e = nloc.w $w2, $w23
|
||||||
|
@ -189,18 +189,18 @@
|
|||||||
0x79,0xa8,0x2e,0x94 = pckod.h $w26, $w5, $w8
|
0x79,0xa8,0x2e,0x94 = pckod.h $w26, $w5, $w8
|
||||||
0x79,0xc2,0x22,0x54 = pckod.w $w9, $w4, $w2
|
0x79,0xc2,0x22,0x54 = pckod.w $w9, $w4, $w2
|
||||||
0x79,0xf4,0xb7,0x94 = pckod.d $w30, $w22, $w20
|
0x79,0xf4,0xb7,0x94 = pckod.d $w30, $w22, $w20
|
||||||
0x78,0x0c,0xb9,0x54 = sld.b $w5, $w23[$12]
|
0x78,0x0c,0xb9,0x54 = sld.b $w5, $w23[$t4]
|
||||||
0x78,0x23,0xb8,0x54 = sld.h $w1, $w23[$3]
|
0x78,0x23,0xb8,0x54 = sld.h $w1, $w23[$v1]
|
||||||
0x78,0x49,0x45,0x14 = sld.w $w20, $w8[$9]
|
0x78,0x49,0x45,0x14 = sld.w $w20, $w8[$t1]
|
||||||
0x78,0x7e,0xb9,0xd4 = sld.d $w7, $w23[$fp]
|
0x78,0x7e,0xb9,0xd4 = sld.d $w7, $w23[$fp]
|
||||||
0x78,0x11,0x00,0xcd = sll.b $w3, $w0, $w17
|
0x78,0x11,0x00,0xcd = sll.b $w3, $w0, $w17
|
||||||
0x78,0x23,0xdc,0x4d = sll.h $w17, $w27, $w3
|
0x78,0x23,0xdc,0x4d = sll.h $w17, $w27, $w3
|
||||||
0x78,0x46,0x3c,0x0d = sll.w $w16, $w7, $w6
|
0x78,0x46,0x3c,0x0d = sll.w $w16, $w7, $w6
|
||||||
0x78,0x7a,0x02,0x4d = sll.d $w9, $w0, $w26
|
0x78,0x7a,0x02,0x4d = sll.d $w9, $w0, $w26
|
||||||
0x78,0x81,0x0f,0x14 = splat.b $w28, $w1[$1]
|
0x78,0x81,0x0f,0x14 = splat.b $w28, $w1[$at]
|
||||||
0x78,0xab,0x58,0x94 = splat.h $w2, $w11[$11]
|
0x78,0xab,0x58,0x94 = splat.h $w2, $w11[$t3]
|
||||||
0x78,0xcb,0x05,0x94 = splat.w $w22, $w0[$11]
|
0x78,0xcb,0x05,0x94 = splat.w $w22, $w0[$t3]
|
||||||
0x78,0xe2,0x00,0x14 = splat.d $w0, $w0[$2]
|
0x78,0xe2,0x00,0x14 = splat.d $w0, $w0[$v0]
|
||||||
0x78,0x91,0x27,0x0d = sra.b $w28, $w4, $w17
|
0x78,0x91,0x27,0x0d = sra.b $w28, $w4, $w17
|
||||||
0x78,0xa3,0x4b,0x4d = sra.h $w13, $w9, $w3
|
0x78,0xa3,0x4b,0x4d = sra.h $w13, $w9, $w3
|
||||||
0x78,0xd3,0xae,0xcd = sra.w $w27, $w21, $w19
|
0x78,0xd3,0xae,0xcd = sra.w $w27, $w21, $w19
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x47,0x80,0x00,0x01 = bnz.b $w0, 4
|
// 0x47,0x80,0x00,0x01 = bnz.b $w0, 4
|
||||||
0x47,0xa1,0x00,0x04 = bnz.h $w1, 16
|
// 0x47,0xa1,0x00,0x04 = bnz.h $w1, 16
|
||||||
0x47,0xc2,0x00,0x20 = bnz.w $w2, 128
|
// 0x47,0xc2,0x00,0x20 = bnz.w $w2, 128
|
||||||
0x47,0xe3,0xff,0xe0 = bnz.d $w3, -128
|
// 0x47,0xe3,0xff,0xe0 = bnz.d $w3, -128
|
||||||
0x45,0xe0,0x00,0x01 = bnz.v $w0, 4
|
// 0x45,0xe0,0x00,0x01 = bnz.v $w0, 4
|
||||||
0x47,0x00,0x00,0x20 = bz.b $w0, 128
|
// 0x47,0x00,0x00,0x20 = bz.b $w0, 128
|
||||||
0x47,0x21,0x00,0x40 = bz.h $w1, 256
|
// 0x47,0x21,0x00,0x40 = bz.h $w1, 256
|
||||||
0x47,0x42,0x00,0x80 = bz.w $w2, 512
|
// 0x47,0x42,0x00,0x80 = bz.w $w2, 512
|
||||||
0x47,0x63,0xff,0x00 = bz.d $w3, -1024
|
// 0x47,0x63,0xff,0x00 = bz.d $w3, -1024
|
||||||
0x45,0x60,0x00,0x01 = bz.v $w0, 4
|
// 0x45,0x60,0x00,0x01 = bz.v $w0, 4
|
||||||
|
@ -1,33 +1,33 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x78,0x7e,0x00,0x59 = cfcmsa $1, $0
|
0x78,0x7e,0x00,0x59 = cfcmsa $at, $0
|
||||||
0x78,0x7e,0x00,0x59 = cfcmsa $1, $0
|
0x78,0x7e,0x00,0x59 = cfcmsa $at, $0
|
||||||
0x78,0x7e,0x08,0x99 = cfcmsa $2, $1
|
0x78,0x7e,0x08,0x99 = cfcmsa $v0, $1
|
||||||
0x78,0x7e,0x08,0x99 = cfcmsa $2, $1
|
0x78,0x7e,0x08,0x99 = cfcmsa $v0, $1
|
||||||
0x78,0x7e,0x10,0xd9 = cfcmsa $3, $2
|
0x78,0x7e,0x10,0xd9 = cfcmsa $v1, $2
|
||||||
0x78,0x7e,0x10,0xd9 = cfcmsa $3, $2
|
0x78,0x7e,0x10,0xd9 = cfcmsa $v1, $2
|
||||||
0x78,0x7e,0x19,0x19 = cfcmsa $4, $3
|
0x78,0x7e,0x19,0x19 = cfcmsa $a0, $3
|
||||||
0x78,0x7e,0x19,0x19 = cfcmsa $4, $3
|
0x78,0x7e,0x19,0x19 = cfcmsa $a0, $3
|
||||||
0x78,0x7e,0x21,0x59 = cfcmsa $5, $4
|
0x78,0x7e,0x21,0x59 = cfcmsa $a1, $4
|
||||||
0x78,0x7e,0x21,0x59 = cfcmsa $5, $4
|
0x78,0x7e,0x21,0x59 = cfcmsa $a1, $4
|
||||||
0x78,0x7e,0x29,0x99 = cfcmsa $6, $5
|
0x78,0x7e,0x29,0x99 = cfcmsa $a2, $5
|
||||||
0x78,0x7e,0x29,0x99 = cfcmsa $6, $5
|
0x78,0x7e,0x29,0x99 = cfcmsa $a2, $5
|
||||||
0x78,0x7e,0x31,0xd9 = cfcmsa $7, $6
|
0x78,0x7e,0x31,0xd9 = cfcmsa $a3, $6
|
||||||
0x78,0x7e,0x31,0xd9 = cfcmsa $7, $6
|
0x78,0x7e,0x31,0xd9 = cfcmsa $a3, $6
|
||||||
0x78,0x7e,0x3a,0x19 = cfcmsa $8, $7
|
0x78,0x7e,0x3a,0x19 = cfcmsa $t0, $7
|
||||||
0x78,0x7e,0x3a,0x19 = cfcmsa $8, $7
|
0x78,0x7e,0x3a,0x19 = cfcmsa $t0, $7
|
||||||
0x78,0x3e,0x08,0x19 = ctcmsa $0, $1
|
0x78,0x3e,0x08,0x19 = ctcmsa $0, $at
|
||||||
0x78,0x3e,0x08,0x19 = ctcmsa $0, $1
|
0x78,0x3e,0x08,0x19 = ctcmsa $0, $at
|
||||||
0x78,0x3e,0x10,0x59 = ctcmsa $1, $2
|
0x78,0x3e,0x10,0x59 = ctcmsa $1, $v0
|
||||||
0x78,0x3e,0x10,0x59 = ctcmsa $1, $2
|
0x78,0x3e,0x10,0x59 = ctcmsa $1, $v0
|
||||||
0x78,0x3e,0x18,0x99 = ctcmsa $2, $3
|
0x78,0x3e,0x18,0x99 = ctcmsa $2, $v1
|
||||||
0x78,0x3e,0x18,0x99 = ctcmsa $2, $3
|
0x78,0x3e,0x18,0x99 = ctcmsa $2, $v1
|
||||||
0x78,0x3e,0x20,0xd9 = ctcmsa $3, $4
|
0x78,0x3e,0x20,0xd9 = ctcmsa $3, $a0
|
||||||
0x78,0x3e,0x20,0xd9 = ctcmsa $3, $4
|
0x78,0x3e,0x20,0xd9 = ctcmsa $3, $a0
|
||||||
0x78,0x3e,0x29,0x19 = ctcmsa $4, $5
|
0x78,0x3e,0x29,0x19 = ctcmsa $4, $a1
|
||||||
0x78,0x3e,0x29,0x19 = ctcmsa $4, $5
|
0x78,0x3e,0x29,0x19 = ctcmsa $4, $a1
|
||||||
0x78,0x3e,0x31,0x59 = ctcmsa $5, $6
|
0x78,0x3e,0x31,0x59 = ctcmsa $5, $a2
|
||||||
0x78,0x3e,0x31,0x59 = ctcmsa $5, $6
|
0x78,0x3e,0x31,0x59 = ctcmsa $5, $a2
|
||||||
0x78,0x3e,0x39,0x99 = ctcmsa $6, $7
|
0x78,0x3e,0x39,0x99 = ctcmsa $6, $a3
|
||||||
0x78,0x3e,0x39,0x99 = ctcmsa $6, $7
|
0x78,0x3e,0x39,0x99 = ctcmsa $6, $a3
|
||||||
0x78,0x3e,0x41,0xd9 = ctcmsa $7, $8
|
0x78,0x3e,0x41,0xd9 = ctcmsa $7, $t0
|
||||||
0x78,0x3e,0x41,0xd9 = ctcmsa $7, $8
|
0x78,0x3e,0x41,0xd9 = ctcmsa $7, $t0
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x78,0x82,0x43,0x59 = copy_s.b $13, $w8[2]
|
0x78,0x82,0x43,0x59 = copy_s.b $t5, $w8[2]
|
||||||
0x78,0xa0,0xc8,0x59 = copy_s.h $1, $w25[0]
|
0x78,0xa0,0xc8,0x59 = copy_s.h $at, $w25[0]
|
||||||
0x78,0xb1,0x2d,0x99 = copy_s.w $22, $w5[1]
|
0x78,0xb1,0x2d,0x99 = copy_s.w $s6, $w5[1]
|
||||||
0x78,0xc4,0xa5,0x99 = copy_u.b $22, $w20[4]
|
0x78,0xc4,0xa5,0x99 = copy_u.b $s6, $w20[4]
|
||||||
0x78,0xe0,0x25,0x19 = copy_u.h $20, $w4[0]
|
0x78,0xe0,0x25,0x19 = copy_u.h $s4, $w4[0]
|
||||||
0x78,0xf2,0x6f,0x99 = copy_u.w $fp, $w13[2]
|
0x78,0xf2,0x6f,0x99 = copy_u.w $fp, $w13[2]
|
||||||
0x78,0x04,0xe8,0x19 = sldi.b $w0, $w29[4]
|
0x78,0x04,0xe8,0x19 = sldi.b $w0, $w29[4]
|
||||||
0x78,0x20,0x8a,0x19 = sldi.h $w8, $w17[0]
|
0x78,0x20,0x8a,0x19 = sldi.h $w8, $w17[0]
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x79,0x03,0xed,0xd9 = insert.b $w23[3], $sp
|
0x79,0x03,0xed,0xd9 = insert.b $w23[3], $sp
|
||||||
0x79,0x22,0x2d,0x19 = insert.h $w20[2], $5
|
0x79,0x22,0x2d,0x19 = insert.h $w20[2], $a1
|
||||||
0x79,0x32,0x7a,0x19 = insert.w $w8[2], $15
|
0x79,0x32,0x7a,0x19 = insert.w $w8[2], $t7
|
||||||
|
@ -2,4 +2,4 @@
|
|||||||
0x7b,0x06,0x32,0x07 = ldi.b $w8, 198
|
0x7b,0x06,0x32,0x07 = ldi.b $w8, 198
|
||||||
0x7b,0x29,0xcd,0x07 = ldi.h $w20, 313
|
0x7b,0x29,0xcd,0x07 = ldi.h $w20, 313
|
||||||
0x7b,0x4f,0x66,0x07 = ldi.w $w24, 492
|
0x7b,0x4f,0x66,0x07 = ldi.w $w24, 492
|
||||||
0x7b,0x7a,0x66,0xc7 = ldi.d $w27, -180
|
// 0x7b,0x7a,0x66,0xc7 = ldi.d $w27, -180
|
||||||
|
@ -3,36 +3,36 @@
|
|||||||
0x78,0x3a,0x6e,0x06 = addvi.h $w24, $w13, 26
|
0x78,0x3a,0x6e,0x06 = addvi.h $w24, $w13, 26
|
||||||
0x78,0x5a,0xa6,0x86 = addvi.w $w26, $w20, 26
|
0x78,0x5a,0xa6,0x86 = addvi.w $w26, $w20, 26
|
||||||
0x78,0x75,0x0c,0x06 = addvi.d $w16, $w1, 21
|
0x78,0x75,0x0c,0x06 = addvi.d $w16, $w1, 21
|
||||||
0x78,0x18,0xae,0x07 = ceqi.b $w24, $w21, -8
|
// 0x78,0x18,0xae,0x07 = ceqi.b $w24, $w21, -8
|
||||||
0x78,0x22,0x7f,0xc7 = ceqi.h $w31, $w15, 2
|
0x78,0x22,0x7f,0xc7 = ceqi.h $w31, $w15, 2
|
||||||
0x78,0x5f,0x0b,0x07 = ceqi.w $w12, $w1, -1
|
// 0x78,0x5f,0x0b,0x07 = ceqi.w $w12, $w1, -1
|
||||||
0x78,0x67,0xb6,0x07 = ceqi.d $w24, $w22, 7
|
0x78,0x67,0xb6,0x07 = ceqi.d $w24, $w22, 7
|
||||||
0x7a,0x01,0x83,0x07 = clei_s.b $w12, $w16, 1
|
0x7a,0x01,0x83,0x07 = clei_s.b $w12, $w16, 1
|
||||||
0x7a,0x37,0x50,0x87 = clei_s.h $w2, $w10, -9
|
// 0x7a,0x37,0x50,0x87 = clei_s.h $w2, $w10, -9
|
||||||
0x7a,0x56,0x59,0x07 = clei_s.w $w4, $w11, -10
|
// 0x7a,0x56,0x59,0x07 = clei_s.w $w4, $w11, -10
|
||||||
0x7a,0x76,0xe8,0x07 = clei_s.d $w0, $w29, -10
|
// 0x7a,0x76,0xe8,0x07 = clei_s.d $w0, $w29, -10
|
||||||
0x7a,0x83,0x8d,0x47 = clei_u.b $w21, $w17, 3
|
0x7a,0x83,0x8d,0x47 = clei_u.b $w21, $w17, 3
|
||||||
0x7a,0xb1,0x3f,0x47 = clei_u.h $w29, $w7, 17
|
0x7a,0xb1,0x3f,0x47 = clei_u.h $w29, $w7, 17
|
||||||
0x7a,0xc2,0x08,0x47 = clei_u.w $w1, $w1, 2
|
0x7a,0xc2,0x08,0x47 = clei_u.w $w1, $w1, 2
|
||||||
0x7a,0xfd,0xde,0xc7 = clei_u.d $w27, $w27, 29
|
0x7a,0xfd,0xde,0xc7 = clei_u.d $w27, $w27, 29
|
||||||
0x79,0x19,0x6c,0xc7 = clti_s.b $w19, $w13, -7
|
// 0x79,0x19,0x6c,0xc7 = clti_s.b $w19, $w13, -7
|
||||||
0x79,0x34,0x53,0xc7 = clti_s.h $w15, $w10, -12
|
// 0x79,0x34,0x53,0xc7 = clti_s.h $w15, $w10, -12
|
||||||
0x79,0x4b,0x63,0x07 = clti_s.w $w12, $w12, 11
|
0x79,0x4b,0x63,0x07 = clti_s.w $w12, $w12, 11
|
||||||
0x79,0x71,0xa7,0x47 = clti_s.d $w29, $w20, -15
|
// 0x79,0x71,0xa7,0x47 = clti_s.d $w29, $w20, -15
|
||||||
0x79,0x9d,0x4b,0x87 = clti_u.b $w14, $w9, 29
|
0x79,0x9d,0x4b,0x87 = clti_u.b $w14, $w9, 29
|
||||||
0x79,0xb9,0xce,0x07 = clti_u.h $w24, $w25, 25
|
0x79,0xb9,0xce,0x07 = clti_u.h $w24, $w25, 25
|
||||||
0x79,0xd6,0x08,0x47 = clti_u.w $w1, $w1, 22
|
0x79,0xd6,0x08,0x47 = clti_u.w $w1, $w1, 22
|
||||||
0x79,0xe1,0xcd,0x47 = clti_u.d $w21, $w25, 1
|
0x79,0xe1,0xcd,0x47 = clti_u.d $w21, $w25, 1
|
||||||
0x79,0x01,0xad,0x86 = maxi_s.b $w22, $w21, 1
|
0x79,0x01,0xad,0x86 = maxi_s.b $w22, $w21, 1
|
||||||
0x79,0x38,0x2f,0x46 = maxi_s.h $w29, $w5, -8
|
// 0x79,0x38,0x2f,0x46 = maxi_s.h $w29, $w5, -8
|
||||||
0x79,0x54,0x50,0x46 = maxi_s.w $w1, $w10, -12
|
// 0x79,0x54,0x50,0x46 = maxi_s.w $w1, $w10, -12
|
||||||
0x79,0x70,0xeb,0x46 = maxi_s.d $w13, $w29, -16
|
// 0x79,0x70,0xeb,0x46 = maxi_s.d $w13, $w29, -16
|
||||||
0x79,0x8c,0x05,0x06 = maxi_u.b $w20, $w0, 12
|
0x79,0x8c,0x05,0x06 = maxi_u.b $w20, $w0, 12
|
||||||
0x79,0xa3,0x70,0x46 = maxi_u.h $w1, $w14, 3
|
0x79,0xa3,0x70,0x46 = maxi_u.h $w1, $w14, 3
|
||||||
0x79,0xcb,0xb6,0xc6 = maxi_u.w $w27, $w22, 11
|
0x79,0xcb,0xb6,0xc6 = maxi_u.w $w27, $w22, 11
|
||||||
0x79,0xe4,0x36,0x86 = maxi_u.d $w26, $w6, 4
|
0x79,0xe4,0x36,0x86 = maxi_u.d $w26, $w6, 4
|
||||||
0x7a,0x01,0x09,0x06 = mini_s.b $w4, $w1, 1
|
0x7a,0x01,0x09,0x06 = mini_s.b $w4, $w1, 1
|
||||||
0x7a,0x37,0xde,0xc6 = mini_s.h $w27, $w27, -9
|
// 0x7a,0x37,0xde,0xc6 = mini_s.h $w27, $w27, -9
|
||||||
0x7a,0x49,0x5f,0x06 = mini_s.w $w28, $w11, 9
|
0x7a,0x49,0x5f,0x06 = mini_s.w $w28, $w11, 9
|
||||||
0x7a,0x6a,0x52,0xc6 = mini_s.d $w11, $w10, 10
|
0x7a,0x6a,0x52,0xc6 = mini_s.d $w11, $w10, 10
|
||||||
0x7a,0x9b,0xbc,0x86 = mini_u.b $w18, $w23, 27
|
0x7a,0x9b,0xbc,0x86 = mini_u.b $w18, $w23, 27
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x01,0x2a,0x40,0x05 = lsa $8, $9, $10, 1
|
0x01,0x2a,0x40,0x05 = lsa $t0, $t1, $t2, 1
|
||||||
0x01,0x2a,0x40,0x45 = lsa $8, $9, $10, 2
|
0x01,0x2a,0x40,0x45 = lsa $t0, $t1, $t2, 2
|
||||||
0x01,0x2a,0x40,0x85 = lsa $8, $9, $10, 3
|
0x01,0x2a,0x40,0x85 = lsa $t0, $t1, $t2, 3
|
||||||
0x01,0x2a,0x40,0xc5 = lsa $8, $9, $10, 4
|
0x01,0x2a,0x40,0xc5 = lsa $t0, $t1, $t2, 4
|
||||||
|
@ -1,24 +1,24 @@
|
|||||||
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
# CS_ARCH_MIPS, CS_MODE_MIPS32+CS_MODE_BIG_ENDIAN, None
|
||||||
0x7a,0x00,0x08,0x20 = ld.b $w0, -512($1)
|
0x7a,0x00,0x08,0x20 = ld.b $w0, -512($at)
|
||||||
0x78,0x00,0x10,0x60 = ld.b $w1, 0($2)
|
0x78,0x00,0x10,0x60 = ld.b $w1, ($v0)
|
||||||
0x79,0xff,0x18,0xa0 = ld.b $w2, 511($3)
|
0x79,0xff,0x18,0xa0 = ld.b $w2, 511($v1)
|
||||||
0x7a,0x00,0x20,0xe1 = ld.h $w3, -1024($4)
|
0x7a,0x00,0x20,0xe1 = ld.h $w3, -1024($a0)
|
||||||
0x7b,0x00,0x29,0x21 = ld.h $w4, -512($5)
|
0x7b,0x00,0x29,0x21 = ld.h $w4, -512($a1)
|
||||||
0x78,0x00,0x31,0x61 = ld.h $w5, 0($6)
|
0x78,0x00,0x31,0x61 = ld.h $w5, ($a2)
|
||||||
0x79,0x00,0x39,0xa1 = ld.h $w6, 512($7)
|
0x79,0x00,0x39,0xa1 = ld.h $w6, 512($a3)
|
||||||
0x79,0xff,0x41,0xe1 = ld.h $w7, 1022($8)
|
0x79,0xff,0x41,0xe1 = ld.h $w7, 1022($t0)
|
||||||
0x7a,0x00,0x4a,0x22 = ld.w $w8, -2048($9)
|
0x7a,0x00,0x4a,0x22 = ld.w $w8, -2048($t1)
|
||||||
0x7b,0x00,0x52,0x62 = ld.w $w9, -1024($10)
|
0x7b,0x00,0x52,0x62 = ld.w $w9, -1024($t2)
|
||||||
0x7b,0x80,0x5a,0xa2 = ld.w $w10, -512($11)
|
0x7b,0x80,0x5a,0xa2 = ld.w $w10, -512($t3)
|
||||||
0x78,0x80,0x62,0xe2 = ld.w $w11, 512($12)
|
0x78,0x80,0x62,0xe2 = ld.w $w11, 512($t4)
|
||||||
0x79,0x00,0x6b,0x22 = ld.w $w12, 1024($13)
|
0x79,0x00,0x6b,0x22 = ld.w $w12, 1024($t5)
|
||||||
0x79,0xff,0x73,0x62 = ld.w $w13, 2044($14)
|
0x79,0xff,0x73,0x62 = ld.w $w13, 2044($t6)
|
||||||
0x7a,0x00,0x7b,0xa3 = ld.d $w14, -4096($15)
|
0x7a,0x00,0x7b,0xa3 = ld.d $w14, -4096($t7)
|
||||||
0x7b,0x00,0x83,0xe3 = ld.d $w15, -2048($16)
|
0x7b,0x00,0x83,0xe3 = ld.d $w15, -2048($s0)
|
||||||
0x7b,0x80,0x8c,0x23 = ld.d $w16, -1024($17)
|
0x7b,0x80,0x8c,0x23 = ld.d $w16, -1024($s1)
|
||||||
0x7b,0xc0,0x94,0x63 = ld.d $w17, -512($18)
|
0x7b,0xc0,0x94,0x63 = ld.d $w17, -512($s2)
|
||||||
0x78,0x00,0x9c,0xa3 = ld.d $w18, 0($19)
|
0x78,0x00,0x9c,0xa3 = ld.d $w18, ($s3)
|
||||||
0x78,0x40,0xa4,0xe3 = ld.d $w19, 512($20)
|
0x78,0x40,0xa4,0xe3 = ld.d $w19, 512($s4)
|
||||||
0x78,0x80,0xad,0x23 = ld.d $w20, 1024($21)
|
0x78,0x80,0xad,0x23 = ld.d $w20, 1024($s5)
|
||||||
0x79,0x00,0xb5,0x63 = ld.d $w21, 2048($22)
|
0x79,0x00,0xb5,0x63 = ld.d $w21, 2048($s6)
|
||||||
0x79,0xff,0xbd,0xa3 = ld.d $w22, 4088($23)
|
0x79,0xff,0xbd,0xa3 = ld.d $w22, 4088($s7)
|
||||||
|
@ -7,19 +7,19 @@
|
|||||||
0x4c,0x00,0x01,0x2c = isync
|
0x4c,0x00,0x01,0x2c = isync
|
||||||
0x7c,0x43,0x21,0x2d = stwcx. 2, 3, 4
|
0x7c,0x43,0x21,0x2d = stwcx. 2, 3, 4
|
||||||
0x7c,0x43,0x21,0xad = stdcx. 2, 3, 4
|
0x7c,0x43,0x21,0xad = stdcx. 2, 3, 4
|
||||||
0x7c,0x40,0x04,0xac = sync 2
|
// 0x7c,0x40,0x04,0xac = sync 2
|
||||||
0x7c,0x00,0x06,0xac = eieio
|
0x7c,0x00,0x06,0xac = eieio
|
||||||
0x7c,0x40,0x00,0x7c = wait 2
|
// 0x7c,0x40,0x00,0x7c = wait 2
|
||||||
0x7c,0x02,0x18,0xac = dcbf 2, 3
|
0x7c,0x02,0x18,0xac = dcbf 2, 3
|
||||||
0x7c,0x43,0x20,0x28 = lwarx 2, 3, 4
|
0x7c,0x43,0x20,0x28 = lwarx 2, 3, 4
|
||||||
0x7c,0x43,0x20,0xa8 = ldarx 2, 3, 4
|
0x7c,0x43,0x20,0xa8 = ldarx 2, 3, 4
|
||||||
0x7c,0x00,0x04,0xac = sync 0
|
0x7c,0x00,0x04,0xac = sync 0
|
||||||
0x7c,0x00,0x04,0xac = sync 0
|
0x7c,0x00,0x04,0xac = sync 0
|
||||||
0x7c,0x20,0x04,0xac = sync 1
|
// 0x7c,0x20,0x04,0xac = sync 1
|
||||||
0x7c,0x40,0x04,0xac = sync 2
|
// 0x7c,0x40,0x04,0xac = sync 2
|
||||||
0x7c,0x00,0x00,0x7c = wait 0
|
// 0x7c,0x00,0x00,0x7c = wait 0
|
||||||
0x7c,0x20,0x00,0x7c = wait 1
|
// 0x7c,0x20,0x00,0x7c = wait 1
|
||||||
0x7c,0x40,0x00,0x7c = wait 2
|
// 0x7c,0x40,0x00,0x7c = wait 2
|
||||||
0x7c,0x5b,0x1a,0xe6 = mftb 2, 123
|
0x7c,0x5b,0x1a,0xe6 = mftb 2, 123
|
||||||
0x7c,0x4c,0x42,0xe6 = mftb 2, 268
|
0x7c,0x4c,0x42,0xe6 = mftb 2, 268
|
||||||
0x7c,0x4d,0x42,0xe6 = mftb 2, 269
|
// 0x7c,0x4d,0x42,0xe6 = mftb 2, 269
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
||||||
0x7c,0x80,0x01,0x24 = mtmsr 4, 0
|
// 0x7c,0x80,0x01,0x24 = mtmsr 4, 0
|
||||||
0x7c,0x81,0x01,0x24 = mtmsr 4, 1
|
0x7c,0x81,0x01,0x24 = mtmsr 4, 1
|
||||||
0x7c,0x80,0x00,0xa6 = mfmsr 4
|
0x7c,0x80,0x00,0xa6 = mfmsr 4
|
||||||
0x7c,0x80,0x01,0x64 = mtmsrd 4, 0
|
// 0x7c,0x80,0x01,0x64 = mtmsrd 4, 0
|
||||||
0x7c,0x81,0x01,0x64 = mtmsrd 4, 1
|
0x7c,0x81,0x01,0x64 = mtmsrd 4, 1
|
||||||
0x7c,0x90,0x42,0xa6 = mfspr 4, 272
|
0x7c,0x90,0x42,0xa6 = mfspr 4, 272
|
||||||
0x7c,0x91,0x42,0xa6 = mfspr 4, 273
|
0x7c,0x91,0x42,0xa6 = mfspr 4, 273
|
||||||
@ -19,7 +19,7 @@
|
|||||||
0x7c,0x98,0x43,0xa6 = mtspr 280, 4
|
0x7c,0x98,0x43,0xa6 = mtspr 280, 4
|
||||||
0x7c,0x96,0x02,0xa6 = mfspr 4, 22
|
0x7c,0x96,0x02,0xa6 = mfspr 4, 22
|
||||||
0x7c,0x96,0x03,0xa6 = mtspr 22, 4
|
0x7c,0x96,0x03,0xa6 = mtspr 22, 4
|
||||||
0x7c,0x9f,0x42,0xa6 = mfspr 4, 287
|
// 0x7c,0x9f,0x42,0xa6 = mfspr 4, 287
|
||||||
0x7c,0x99,0x02,0xa6 = mfspr 4, 25
|
0x7c,0x99,0x02,0xa6 = mfspr 4, 25
|
||||||
0x7c,0x99,0x03,0xa6 = mtspr 25, 4
|
0x7c,0x99,0x03,0xa6 = mtspr 25, 4
|
||||||
0x7c,0x9a,0x02,0xa6 = mfspr 4, 26
|
0x7c,0x9a,0x02,0xa6 = mfspr 4, 26
|
||||||
@ -32,4 +32,4 @@
|
|||||||
0x7c,0x00,0x03,0xe4 = slbia
|
0x7c,0x00,0x03,0xe4 = slbia
|
||||||
0x7c,0x00,0x04,0x6c = tlbsync
|
0x7c,0x00,0x04,0x6c = tlbsync
|
||||||
0x7c,0x00,0x22,0x24 = tlbiel 4
|
0x7c,0x00,0x22,0x24 = tlbiel 4
|
||||||
0x7c,0x00,0x22,0x64 = tlbie 4,0
|
// 0x7c,0x00,0x22,0x64 = tlbie 4,0
|
||||||
|
@ -1,452 +1,452 @@
|
|||||||
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
||||||
0x4d,0x82,0x00,0x20 = beqlr 0
|
// 0x4d,0x82,0x00,0x20 = beqlr 0
|
||||||
0x4d,0x86,0x00,0x20 = beqlr 1
|
// 0x4d,0x86,0x00,0x20 = beqlr 1
|
||||||
0x4d,0x8a,0x00,0x20 = beqlr 2
|
// 0x4d,0x8a,0x00,0x20 = beqlr 2
|
||||||
0x4d,0x8e,0x00,0x20 = beqlr 3
|
// 0x4d,0x8e,0x00,0x20 = beqlr 3
|
||||||
0x4d,0x92,0x00,0x20 = beqlr 4
|
// 0x4d,0x92,0x00,0x20 = beqlr 4
|
||||||
0x4d,0x96,0x00,0x20 = beqlr 5
|
// 0x4d,0x96,0x00,0x20 = beqlr 5
|
||||||
0x4d,0x9a,0x00,0x20 = beqlr 6
|
// 0x4d,0x9a,0x00,0x20 = beqlr 6
|
||||||
0x4d,0x9e,0x00,0x20 = beqlr 7
|
// 0x4d,0x9e,0x00,0x20 = beqlr 7
|
||||||
0x4d,0x80,0x00,0x20 = bclr 12, 0, 0
|
// 0x4d,0x80,0x00,0x20 = bclr 12, 0, 0
|
||||||
0x4d,0x81,0x00,0x20 = bclr 12, 1, 0
|
// 0x4d,0x81,0x00,0x20 = bclr 12, 1, 0
|
||||||
0x4d,0x82,0x00,0x20 = bclr 12, 2, 0
|
// 0x4d,0x82,0x00,0x20 = bclr 12, 2, 0
|
||||||
0x4d,0x83,0x00,0x20 = bclr 12, 3, 0
|
// 0x4d,0x83,0x00,0x20 = bclr 12, 3, 0
|
||||||
0x4d,0x83,0x00,0x20 = bclr 12, 3, 0
|
// 0x4d,0x83,0x00,0x20 = bclr 12, 3, 0
|
||||||
0x4d,0x84,0x00,0x20 = bclr 12, 4, 0
|
// 0x4d,0x84,0x00,0x20 = bclr 12, 4, 0
|
||||||
0x4d,0x85,0x00,0x20 = bclr 12, 5, 0
|
// 0x4d,0x85,0x00,0x20 = bclr 12, 5, 0
|
||||||
0x4d,0x86,0x00,0x20 = bclr 12, 6, 0
|
// 0x4d,0x86,0x00,0x20 = bclr 12, 6, 0
|
||||||
0x4d,0x87,0x00,0x20 = bclr 12, 7, 0
|
// 0x4d,0x87,0x00,0x20 = bclr 12, 7, 0
|
||||||
0x4d,0x87,0x00,0x20 = bclr 12, 7, 0
|
// 0x4d,0x87,0x00,0x20 = bclr 12, 7, 0
|
||||||
0x4d,0x88,0x00,0x20 = bclr 12, 8, 0
|
// 0x4d,0x88,0x00,0x20 = bclr 12, 8, 0
|
||||||
0x4d,0x89,0x00,0x20 = bclr 12, 9, 0
|
// 0x4d,0x89,0x00,0x20 = bclr 12, 9, 0
|
||||||
0x4d,0x8a,0x00,0x20 = bclr 12, 10, 0
|
// 0x4d,0x8a,0x00,0x20 = bclr 12, 10, 0
|
||||||
0x4d,0x8b,0x00,0x20 = bclr 12, 11, 0
|
// 0x4d,0x8b,0x00,0x20 = bclr 12, 11, 0
|
||||||
0x4d,0x8b,0x00,0x20 = bclr 12, 11, 0
|
// 0x4d,0x8b,0x00,0x20 = bclr 12, 11, 0
|
||||||
0x4d,0x8c,0x00,0x20 = bclr 12, 12, 0
|
// 0x4d,0x8c,0x00,0x20 = bclr 12, 12, 0
|
||||||
0x4d,0x8d,0x00,0x20 = bclr 12, 13, 0
|
// 0x4d,0x8d,0x00,0x20 = bclr 12, 13, 0
|
||||||
0x4d,0x8e,0x00,0x20 = bclr 12, 14, 0
|
// 0x4d,0x8e,0x00,0x20 = bclr 12, 14, 0
|
||||||
0x4d,0x8f,0x00,0x20 = bclr 12, 15, 0
|
// 0x4d,0x8f,0x00,0x20 = bclr 12, 15, 0
|
||||||
0x4d,0x8f,0x00,0x20 = bclr 12, 15, 0
|
// 0x4d,0x8f,0x00,0x20 = bclr 12, 15, 0
|
||||||
0x4d,0x90,0x00,0x20 = bclr 12, 16, 0
|
// 0x4d,0x90,0x00,0x20 = bclr 12, 16, 0
|
||||||
0x4d,0x91,0x00,0x20 = bclr 12, 17, 0
|
// 0x4d,0x91,0x00,0x20 = bclr 12, 17, 0
|
||||||
0x4d,0x92,0x00,0x20 = bclr 12, 18, 0
|
// 0x4d,0x92,0x00,0x20 = bclr 12, 18, 0
|
||||||
0x4d,0x93,0x00,0x20 = bclr 12, 19, 0
|
// 0x4d,0x93,0x00,0x20 = bclr 12, 19, 0
|
||||||
0x4d,0x93,0x00,0x20 = bclr 12, 19, 0
|
// 0x4d,0x93,0x00,0x20 = bclr 12, 19, 0
|
||||||
0x4d,0x94,0x00,0x20 = bclr 12, 20, 0
|
// 0x4d,0x94,0x00,0x20 = bclr 12, 20, 0
|
||||||
0x4d,0x95,0x00,0x20 = bclr 12, 21, 0
|
// 0x4d,0x95,0x00,0x20 = bclr 12, 21, 0
|
||||||
0x4d,0x96,0x00,0x20 = bclr 12, 22, 0
|
// 0x4d,0x96,0x00,0x20 = bclr 12, 22, 0
|
||||||
0x4d,0x97,0x00,0x20 = bclr 12, 23, 0
|
// 0x4d,0x97,0x00,0x20 = bclr 12, 23, 0
|
||||||
0x4d,0x97,0x00,0x20 = bclr 12, 23, 0
|
// 0x4d,0x97,0x00,0x20 = bclr 12, 23, 0
|
||||||
0x4d,0x98,0x00,0x20 = bclr 12, 24, 0
|
// 0x4d,0x98,0x00,0x20 = bclr 12, 24, 0
|
||||||
0x4d,0x99,0x00,0x20 = bclr 12, 25, 0
|
// 0x4d,0x99,0x00,0x20 = bclr 12, 25, 0
|
||||||
0x4d,0x9a,0x00,0x20 = bclr 12, 26, 0
|
// 0x4d,0x9a,0x00,0x20 = bclr 12, 26, 0
|
||||||
0x4d,0x9b,0x00,0x20 = bclr 12, 27, 0
|
// 0x4d,0x9b,0x00,0x20 = bclr 12, 27, 0
|
||||||
0x4d,0x9b,0x00,0x20 = bclr 12, 27, 0
|
// 0x4d,0x9b,0x00,0x20 = bclr 12, 27, 0
|
||||||
0x4d,0x9c,0x00,0x20 = bclr 12, 28, 0
|
// 0x4d,0x9c,0x00,0x20 = bclr 12, 28, 0
|
||||||
0x4d,0x9d,0x00,0x20 = bclr 12, 29, 0
|
// 0x4d,0x9d,0x00,0x20 = bclr 12, 29, 0
|
||||||
0x4d,0x9e,0x00,0x20 = bclr 12, 30, 0
|
// 0x4d,0x9e,0x00,0x20 = bclr 12, 30, 0
|
||||||
0x4d,0x9f,0x00,0x20 = bclr 12, 31, 0
|
// 0x4d,0x9f,0x00,0x20 = bclr 12, 31, 0
|
||||||
0x4d,0x9f,0x00,0x20 = bclr 12, 31, 0
|
// 0x4d,0x9f,0x00,0x20 = bclr 12, 31, 0
|
||||||
0x4e,0x80,0x00,0x20 = blr
|
0x4e,0x80,0x00,0x20 = blr
|
||||||
0x4e,0x80,0x04,0x20 = bctr
|
0x4e,0x80,0x04,0x20 = bctr
|
||||||
0x4e,0x80,0x00,0x21 = blrl
|
0x4e,0x80,0x00,0x21 = blrl
|
||||||
0x4e,0x80,0x04,0x21 = bctrl
|
0x4e,0x80,0x04,0x21 = bctrl
|
||||||
0x4d,0x82,0x00,0x20 = bclr 12, 2, 0
|
// 0x4d,0x82,0x00,0x20 = bclr 12, 2, 0
|
||||||
0x4d,0x82,0x04,0x20 = bcctr 12, 2, 0
|
// 0x4d,0x82,0x04,0x20 = bcctr 12, 2, 0
|
||||||
0x4d,0x82,0x00,0x21 = bclrl 12, 2, 0
|
// 0x4d,0x82,0x00,0x21 = bclrl 12, 2, 0
|
||||||
0x4d,0x82,0x04,0x21 = bcctrl 12, 2, 0
|
// 0x4d,0x82,0x04,0x21 = bcctrl 12, 2, 0
|
||||||
0x4d,0xe2,0x00,0x20 = bclr 15, 2, 0
|
// 0x4d,0xe2,0x00,0x20 = bclr 15, 2, 0
|
||||||
0x4d,0xe2,0x04,0x20 = bcctr 15, 2, 0
|
// 0x4d,0xe2,0x04,0x20 = bcctr 15, 2, 0
|
||||||
0x4d,0xe2,0x00,0x21 = bclrl 15, 2, 0
|
// 0x4d,0xe2,0x00,0x21 = bclrl 15, 2, 0
|
||||||
0x4d,0xe2,0x04,0x21 = bcctrl 15, 2, 0
|
// 0x4d,0xe2,0x04,0x21 = bcctrl 15, 2, 0
|
||||||
0x4d,0xc2,0x00,0x20 = bclr 14, 2, 0
|
// 0x4d,0xc2,0x00,0x20 = bclr 14, 2, 0
|
||||||
0x4d,0xc2,0x04,0x20 = bcctr 14, 2, 0
|
// 0x4d,0xc2,0x04,0x20 = bcctr 14, 2, 0
|
||||||
0x4d,0xc2,0x00,0x21 = bclrl 14, 2, 0
|
// 0x4d,0xc2,0x00,0x21 = bclrl 14, 2, 0
|
||||||
0x4d,0xc2,0x04,0x21 = bcctrl 14, 2, 0
|
// 0x4d,0xc2,0x04,0x21 = bcctrl 14, 2, 0
|
||||||
0x4c,0x82,0x00,0x20 = bclr 4, 2, 0
|
// 0x4c,0x82,0x00,0x20 = bclr 4, 2, 0
|
||||||
0x4c,0x82,0x04,0x20 = bcctr 4, 2, 0
|
// 0x4c,0x82,0x04,0x20 = bcctr 4, 2, 0
|
||||||
0x4c,0x82,0x00,0x21 = bclrl 4, 2, 0
|
// 0x4c,0x82,0x00,0x21 = bclrl 4, 2, 0
|
||||||
0x4c,0x82,0x04,0x21 = bcctrl 4, 2, 0
|
// 0x4c,0x82,0x04,0x21 = bcctrl 4, 2, 0
|
||||||
0x4c,0xe2,0x00,0x20 = bclr 7, 2, 0
|
// 0x4c,0xe2,0x00,0x20 = bclr 7, 2, 0
|
||||||
0x4c,0xe2,0x04,0x20 = bcctr 7, 2, 0
|
// 0x4c,0xe2,0x04,0x20 = bcctr 7, 2, 0
|
||||||
0x4c,0xe2,0x00,0x21 = bclrl 7, 2, 0
|
// 0x4c,0xe2,0x00,0x21 = bclrl 7, 2, 0
|
||||||
0x4c,0xe2,0x04,0x21 = bcctrl 7, 2, 0
|
// 0x4c,0xe2,0x04,0x21 = bcctrl 7, 2, 0
|
||||||
0x4c,0xc2,0x00,0x20 = bclr 6, 2, 0
|
// 0x4c,0xc2,0x00,0x20 = bclr 6, 2, 0
|
||||||
0x4c,0xc2,0x04,0x20 = bcctr 6, 2, 0
|
// 0x4c,0xc2,0x04,0x20 = bcctr 6, 2, 0
|
||||||
0x4c,0xc2,0x00,0x21 = bclrl 6, 2, 0
|
// 0x4c,0xc2,0x00,0x21 = bclrl 6, 2, 0
|
||||||
0x4c,0xc2,0x04,0x21 = bcctrl 6, 2, 0
|
// 0x4c,0xc2,0x04,0x21 = bcctrl 6, 2, 0
|
||||||
0x4e,0x00,0x00,0x20 = bdnzlr
|
0x4e,0x00,0x00,0x20 = bdnzlr
|
||||||
0x4e,0x00,0x00,0x21 = bdnzlrl
|
0x4e,0x00,0x00,0x21 = bdnzlrl
|
||||||
0x4f,0x20,0x00,0x20 = bdnzlr+
|
0x4f,0x20,0x00,0x20 = bdnzlr+
|
||||||
0x4f,0x20,0x00,0x21 = bdnzlrl+
|
0x4f,0x20,0x00,0x21 = bdnzlrl+
|
||||||
0x4f,0x00,0x00,0x20 = bdnzlr-
|
0x4f,0x00,0x00,0x20 = bdnzlr-
|
||||||
0x4f,0x00,0x00,0x21 = bdnzlrl-
|
0x4f,0x00,0x00,0x21 = bdnzlrl-
|
||||||
0x4d,0x02,0x00,0x20 = bclr 8, 2, 0
|
// 0x4d,0x02,0x00,0x20 = bclr 8, 2, 0
|
||||||
0x4d,0x02,0x00,0x21 = bclrl 8, 2, 0
|
// 0x4d,0x02,0x00,0x21 = bclrl 8, 2, 0
|
||||||
0x4c,0x02,0x00,0x20 = bclr 0, 2, 0
|
// 0x4c,0x02,0x00,0x20 = bclr 0, 2, 0
|
||||||
0x4c,0x02,0x00,0x21 = bclrl 0, 2, 0
|
// 0x4c,0x02,0x00,0x21 = bclrl 0, 2, 0
|
||||||
0x4e,0x40,0x00,0x20 = bdzlr
|
0x4e,0x40,0x00,0x20 = bdzlr
|
||||||
0x4e,0x40,0x00,0x21 = bdzlrl
|
0x4e,0x40,0x00,0x21 = bdzlrl
|
||||||
0x4f,0x60,0x00,0x20 = bdzlr+
|
0x4f,0x60,0x00,0x20 = bdzlr+
|
||||||
0x4f,0x60,0x00,0x21 = bdzlrl+
|
0x4f,0x60,0x00,0x21 = bdzlrl+
|
||||||
0x4f,0x40,0x00,0x20 = bdzlr-
|
0x4f,0x40,0x00,0x20 = bdzlr-
|
||||||
0x4f,0x40,0x00,0x21 = bdzlrl-
|
0x4f,0x40,0x00,0x21 = bdzlrl-
|
||||||
0x4d,0x42,0x00,0x20 = bclr 10, 2, 0
|
// 0x4d,0x42,0x00,0x20 = bclr 10, 2, 0
|
||||||
0x4d,0x42,0x00,0x21 = bclrl 10, 2, 0
|
// 0x4d,0x42,0x00,0x21 = bclrl 10, 2, 0
|
||||||
0x4c,0x42,0x00,0x20 = bclr 2, 2, 0
|
// 0x4c,0x42,0x00,0x20 = bclr 2, 2, 0
|
||||||
0x4c,0x42,0x00,0x21 = bclrl 2, 2, 0
|
// 0x4c,0x42,0x00,0x21 = bclrl 2, 2, 0
|
||||||
0x4d,0x88,0x00,0x20 = bltlr 2
|
// 0x4d,0x88,0x00,0x20 = bltlr 2
|
||||||
0x4d,0x80,0x00,0x20 = bltlr 0
|
// 0x4d,0x80,0x00,0x20 = bltlr 0
|
||||||
0x4d,0x88,0x04,0x20 = bltctr 2
|
// 0x4d,0x88,0x04,0x20 = bltctr 2
|
||||||
0x4d,0x80,0x04,0x20 = bltctr 0
|
// 0x4d,0x80,0x04,0x20 = bltctr 0
|
||||||
0x4d,0x88,0x00,0x21 = bltlrl 2
|
// 0x4d,0x88,0x00,0x21 = bltlrl 2
|
||||||
0x4d,0x80,0x00,0x21 = bltlrl 0
|
// 0x4d,0x80,0x00,0x21 = bltlrl 0
|
||||||
0x4d,0x88,0x04,0x21 = bltctrl 2
|
// 0x4d,0x88,0x04,0x21 = bltctrl 2
|
||||||
0x4d,0x80,0x04,0x21 = bltctrl 0
|
// 0x4d,0x80,0x04,0x21 = bltctrl 0
|
||||||
0x4d,0xe8,0x00,0x20 = bltlr+ 2
|
// 0x4d,0xe8,0x00,0x20 = bltlr+ 2
|
||||||
0x4d,0xe0,0x00,0x20 = bltlr+ 0
|
// 0x4d,0xe0,0x00,0x20 = bltlr+ 0
|
||||||
0x4d,0xe8,0x04,0x20 = bltctr+ 2
|
// 0x4d,0xe8,0x04,0x20 = bltctr+ 2
|
||||||
0x4d,0xe0,0x04,0x20 = bltctr+ 0
|
// 0x4d,0xe0,0x04,0x20 = bltctr+ 0
|
||||||
0x4d,0xe8,0x00,0x21 = bltlrl+ 2
|
// 0x4d,0xe8,0x00,0x21 = bltlrl+ 2
|
||||||
0x4d,0xe0,0x00,0x21 = bltlrl+ 0
|
// 0x4d,0xe0,0x00,0x21 = bltlrl+ 0
|
||||||
0x4d,0xe8,0x04,0x21 = bltctrl+ 2
|
// 0x4d,0xe8,0x04,0x21 = bltctrl+ 2
|
||||||
0x4d,0xe0,0x04,0x21 = bltctrl+ 0
|
// 0x4d,0xe0,0x04,0x21 = bltctrl+ 0
|
||||||
0x4d,0xc8,0x00,0x20 = bltlr- 2
|
// 0x4d,0xc8,0x00,0x20 = bltlr- 2
|
||||||
0x4d,0xc0,0x00,0x20 = bltlr- 0
|
// 0x4d,0xc0,0x00,0x20 = bltlr- 0
|
||||||
0x4d,0xc8,0x04,0x20 = bltctr- 2
|
// 0x4d,0xc8,0x04,0x20 = bltctr- 2
|
||||||
0x4d,0xc0,0x04,0x20 = bltctr- 0
|
// 0x4d,0xc0,0x04,0x20 = bltctr- 0
|
||||||
0x4d,0xc8,0x00,0x21 = bltlrl- 2
|
// 0x4d,0xc8,0x00,0x21 = bltlrl- 2
|
||||||
0x4d,0xc0,0x00,0x21 = bltlrl- 0
|
// 0x4d,0xc0,0x00,0x21 = bltlrl- 0
|
||||||
0x4d,0xc8,0x04,0x21 = bltctrl- 2
|
// 0x4d,0xc8,0x04,0x21 = bltctrl- 2
|
||||||
0x4d,0xc0,0x04,0x21 = bltctrl- 0
|
// 0x4d,0xc0,0x04,0x21 = bltctrl- 0
|
||||||
0x4c,0x89,0x00,0x20 = blelr 2
|
// 0x4c,0x89,0x00,0x20 = blelr 2
|
||||||
0x4c,0x81,0x00,0x20 = blelr 0
|
// 0x4c,0x81,0x00,0x20 = blelr 0
|
||||||
0x4c,0x89,0x04,0x20 = blectr 2
|
// 0x4c,0x89,0x04,0x20 = blectr 2
|
||||||
0x4c,0x81,0x04,0x20 = blectr 0
|
// 0x4c,0x81,0x04,0x20 = blectr 0
|
||||||
0x4c,0x89,0x00,0x21 = blelrl 2
|
// 0x4c,0x89,0x00,0x21 = blelrl 2
|
||||||
0x4c,0x81,0x00,0x21 = blelrl 0
|
// 0x4c,0x81,0x00,0x21 = blelrl 0
|
||||||
0x4c,0x89,0x04,0x21 = blectrl 2
|
// 0x4c,0x89,0x04,0x21 = blectrl 2
|
||||||
0x4c,0x81,0x04,0x21 = blectrl 0
|
// 0x4c,0x81,0x04,0x21 = blectrl 0
|
||||||
0x4c,0xe9,0x00,0x20 = blelr+ 2
|
// 0x4c,0xe9,0x00,0x20 = blelr+ 2
|
||||||
0x4c,0xe1,0x00,0x20 = blelr+ 0
|
// 0x4c,0xe1,0x00,0x20 = blelr+ 0
|
||||||
0x4c,0xe9,0x04,0x20 = blectr+ 2
|
// 0x4c,0xe9,0x04,0x20 = blectr+ 2
|
||||||
0x4c,0xe1,0x04,0x20 = blectr+ 0
|
// 0x4c,0xe1,0x04,0x20 = blectr+ 0
|
||||||
0x4c,0xe9,0x00,0x21 = blelrl+ 2
|
// 0x4c,0xe9,0x00,0x21 = blelrl+ 2
|
||||||
0x4c,0xe1,0x00,0x21 = blelrl+ 0
|
// 0x4c,0xe1,0x00,0x21 = blelrl+ 0
|
||||||
0x4c,0xe9,0x04,0x21 = blectrl+ 2
|
// 0x4c,0xe9,0x04,0x21 = blectrl+ 2
|
||||||
0x4c,0xe1,0x04,0x21 = blectrl+ 0
|
// 0x4c,0xe1,0x04,0x21 = blectrl+ 0
|
||||||
0x4c,0xc9,0x00,0x20 = blelr- 2
|
// 0x4c,0xc9,0x00,0x20 = blelr- 2
|
||||||
0x4c,0xc1,0x00,0x20 = blelr- 0
|
// 0x4c,0xc1,0x00,0x20 = blelr- 0
|
||||||
0x4c,0xc9,0x04,0x20 = blectr- 2
|
// 0x4c,0xc9,0x04,0x20 = blectr- 2
|
||||||
0x4c,0xc1,0x04,0x20 = blectr- 0
|
// 0x4c,0xc1,0x04,0x20 = blectr- 0
|
||||||
0x4c,0xc9,0x00,0x21 = blelrl- 2
|
// 0x4c,0xc9,0x00,0x21 = blelrl- 2
|
||||||
0x4c,0xc1,0x00,0x21 = blelrl- 0
|
// 0x4c,0xc1,0x00,0x21 = blelrl- 0
|
||||||
0x4c,0xc9,0x04,0x21 = blectrl- 2
|
// 0x4c,0xc9,0x04,0x21 = blectrl- 2
|
||||||
0x4c,0xc1,0x04,0x21 = blectrl- 0
|
// 0x4c,0xc1,0x04,0x21 = blectrl- 0
|
||||||
0x4d,0x8a,0x00,0x20 = beqlr 2
|
// 0x4d,0x8a,0x00,0x20 = beqlr 2
|
||||||
0x4d,0x82,0x00,0x20 = beqlr 0
|
// 0x4d,0x82,0x00,0x20 = beqlr 0
|
||||||
0x4d,0x8a,0x04,0x20 = beqctr 2
|
// 0x4d,0x8a,0x04,0x20 = beqctr 2
|
||||||
0x4d,0x82,0x04,0x20 = beqctr 0
|
// 0x4d,0x82,0x04,0x20 = beqctr 0
|
||||||
0x4d,0x8a,0x00,0x21 = beqlrl 2
|
// 0x4d,0x8a,0x00,0x21 = beqlrl 2
|
||||||
0x4d,0x82,0x00,0x21 = beqlrl 0
|
// 0x4d,0x82,0x00,0x21 = beqlrl 0
|
||||||
0x4d,0x8a,0x04,0x21 = beqctrl 2
|
// 0x4d,0x8a,0x04,0x21 = beqctrl 2
|
||||||
0x4d,0x82,0x04,0x21 = beqctrl 0
|
// 0x4d,0x82,0x04,0x21 = beqctrl 0
|
||||||
0x4d,0xea,0x00,0x20 = beqlr+ 2
|
// 0x4d,0xea,0x00,0x20 = beqlr+ 2
|
||||||
0x4d,0xe2,0x00,0x20 = beqlr+ 0
|
// 0x4d,0xe2,0x00,0x20 = beqlr+ 0
|
||||||
0x4d,0xea,0x04,0x20 = beqctr+ 2
|
// 0x4d,0xea,0x04,0x20 = beqctr+ 2
|
||||||
0x4d,0xe2,0x04,0x20 = beqctr+ 0
|
// 0x4d,0xe2,0x04,0x20 = beqctr+ 0
|
||||||
0x4d,0xea,0x00,0x21 = beqlrl+ 2
|
// 0x4d,0xea,0x00,0x21 = beqlrl+ 2
|
||||||
0x4d,0xe2,0x00,0x21 = beqlrl+ 0
|
// 0x4d,0xe2,0x00,0x21 = beqlrl+ 0
|
||||||
0x4d,0xea,0x04,0x21 = beqctrl+ 2
|
// 0x4d,0xea,0x04,0x21 = beqctrl+ 2
|
||||||
0x4d,0xe2,0x04,0x21 = beqctrl+ 0
|
// 0x4d,0xe2,0x04,0x21 = beqctrl+ 0
|
||||||
0x4d,0xca,0x00,0x20 = beqlr- 2
|
// 0x4d,0xca,0x00,0x20 = beqlr- 2
|
||||||
0x4d,0xc2,0x00,0x20 = beqlr- 0
|
// 0x4d,0xc2,0x00,0x20 = beqlr- 0
|
||||||
0x4d,0xca,0x04,0x20 = beqctr- 2
|
// 0x4d,0xca,0x04,0x20 = beqctr- 2
|
||||||
0x4d,0xc2,0x04,0x20 = beqctr- 0
|
// 0x4d,0xc2,0x04,0x20 = beqctr- 0
|
||||||
0x4d,0xca,0x00,0x21 = beqlrl- 2
|
// 0x4d,0xca,0x00,0x21 = beqlrl- 2
|
||||||
0x4d,0xc2,0x00,0x21 = beqlrl- 0
|
// 0x4d,0xc2,0x00,0x21 = beqlrl- 0
|
||||||
0x4d,0xca,0x04,0x21 = beqctrl- 2
|
// 0x4d,0xca,0x04,0x21 = beqctrl- 2
|
||||||
0x4d,0xc2,0x04,0x21 = beqctrl- 0
|
// 0x4d,0xc2,0x04,0x21 = beqctrl- 0
|
||||||
0x4c,0x88,0x00,0x20 = bgelr 2
|
// 0x4c,0x88,0x00,0x20 = bgelr 2
|
||||||
0x4c,0x80,0x00,0x20 = bgelr 0
|
// 0x4c,0x80,0x00,0x20 = bgelr 0
|
||||||
0x4c,0x88,0x04,0x20 = bgectr 2
|
// 0x4c,0x88,0x04,0x20 = bgectr 2
|
||||||
0x4c,0x80,0x04,0x20 = bgectr 0
|
// 0x4c,0x80,0x04,0x20 = bgectr 0
|
||||||
0x4c,0x88,0x00,0x21 = bgelrl 2
|
// 0x4c,0x88,0x00,0x21 = bgelrl 2
|
||||||
0x4c,0x80,0x00,0x21 = bgelrl 0
|
// 0x4c,0x80,0x00,0x21 = bgelrl 0
|
||||||
0x4c,0x88,0x04,0x21 = bgectrl 2
|
// 0x4c,0x88,0x04,0x21 = bgectrl 2
|
||||||
0x4c,0x80,0x04,0x21 = bgectrl 0
|
// 0x4c,0x80,0x04,0x21 = bgectrl 0
|
||||||
0x4c,0xe8,0x00,0x20 = bgelr+ 2
|
// 0x4c,0xe8,0x00,0x20 = bgelr+ 2
|
||||||
0x4c,0xe0,0x00,0x20 = bgelr+ 0
|
// 0x4c,0xe0,0x00,0x20 = bgelr+ 0
|
||||||
0x4c,0xe8,0x04,0x20 = bgectr+ 2
|
// 0x4c,0xe8,0x04,0x20 = bgectr+ 2
|
||||||
0x4c,0xe0,0x04,0x20 = bgectr+ 0
|
// 0x4c,0xe0,0x04,0x20 = bgectr+ 0
|
||||||
0x4c,0xe8,0x00,0x21 = bgelrl+ 2
|
// 0x4c,0xe8,0x00,0x21 = bgelrl+ 2
|
||||||
0x4c,0xe0,0x00,0x21 = bgelrl+ 0
|
// 0x4c,0xe0,0x00,0x21 = bgelrl+ 0
|
||||||
0x4c,0xe8,0x04,0x21 = bgectrl+ 2
|
// 0x4c,0xe8,0x04,0x21 = bgectrl+ 2
|
||||||
0x4c,0xe0,0x04,0x21 = bgectrl+ 0
|
// 0x4c,0xe0,0x04,0x21 = bgectrl+ 0
|
||||||
0x4c,0xc8,0x00,0x20 = bgelr- 2
|
// 0x4c,0xc8,0x00,0x20 = bgelr- 2
|
||||||
0x4c,0xc0,0x00,0x20 = bgelr- 0
|
// 0x4c,0xc0,0x00,0x20 = bgelr- 0
|
||||||
0x4c,0xc8,0x04,0x20 = bgectr- 2
|
// 0x4c,0xc8,0x04,0x20 = bgectr- 2
|
||||||
0x4c,0xc0,0x04,0x20 = bgectr- 0
|
// 0x4c,0xc0,0x04,0x20 = bgectr- 0
|
||||||
0x4c,0xc8,0x00,0x21 = bgelrl- 2
|
// 0x4c,0xc8,0x00,0x21 = bgelrl- 2
|
||||||
0x4c,0xc0,0x00,0x21 = bgelrl- 0
|
// 0x4c,0xc0,0x00,0x21 = bgelrl- 0
|
||||||
0x4c,0xc8,0x04,0x21 = bgectrl- 2
|
// 0x4c,0xc8,0x04,0x21 = bgectrl- 2
|
||||||
0x4c,0xc0,0x04,0x21 = bgectrl- 0
|
// 0x4c,0xc0,0x04,0x21 = bgectrl- 0
|
||||||
0x4d,0x89,0x00,0x20 = bgtlr 2
|
// 0x4d,0x89,0x00,0x20 = bgtlr 2
|
||||||
0x4d,0x81,0x00,0x20 = bgtlr 0
|
// 0x4d,0x81,0x00,0x20 = bgtlr 0
|
||||||
0x4d,0x89,0x04,0x20 = bgtctr 2
|
// 0x4d,0x89,0x04,0x20 = bgtctr 2
|
||||||
0x4d,0x81,0x04,0x20 = bgtctr 0
|
// 0x4d,0x81,0x04,0x20 = bgtctr 0
|
||||||
0x4d,0x89,0x00,0x21 = bgtlrl 2
|
// 0x4d,0x89,0x00,0x21 = bgtlrl 2
|
||||||
0x4d,0x81,0x00,0x21 = bgtlrl 0
|
// 0x4d,0x81,0x00,0x21 = bgtlrl 0
|
||||||
0x4d,0x89,0x04,0x21 = bgtctrl 2
|
// 0x4d,0x89,0x04,0x21 = bgtctrl 2
|
||||||
0x4d,0x81,0x04,0x21 = bgtctrl 0
|
// 0x4d,0x81,0x04,0x21 = bgtctrl 0
|
||||||
0x4d,0xe9,0x00,0x20 = bgtlr+ 2
|
// 0x4d,0xe9,0x00,0x20 = bgtlr+ 2
|
||||||
0x4d,0xe1,0x00,0x20 = bgtlr+ 0
|
// 0x4d,0xe1,0x00,0x20 = bgtlr+ 0
|
||||||
0x4d,0xe9,0x04,0x20 = bgtctr+ 2
|
// 0x4d,0xe9,0x04,0x20 = bgtctr+ 2
|
||||||
0x4d,0xe1,0x04,0x20 = bgtctr+ 0
|
// 0x4d,0xe1,0x04,0x20 = bgtctr+ 0
|
||||||
0x4d,0xe9,0x00,0x21 = bgtlrl+ 2
|
// 0x4d,0xe9,0x00,0x21 = bgtlrl+ 2
|
||||||
0x4d,0xe1,0x00,0x21 = bgtlrl+ 0
|
// 0x4d,0xe1,0x00,0x21 = bgtlrl+ 0
|
||||||
0x4d,0xe9,0x04,0x21 = bgtctrl+ 2
|
// 0x4d,0xe9,0x04,0x21 = bgtctrl+ 2
|
||||||
0x4d,0xe1,0x04,0x21 = bgtctrl+ 0
|
// 0x4d,0xe1,0x04,0x21 = bgtctrl+ 0
|
||||||
0x4d,0xc9,0x00,0x20 = bgtlr- 2
|
// 0x4d,0xc9,0x00,0x20 = bgtlr- 2
|
||||||
0x4d,0xc1,0x00,0x20 = bgtlr- 0
|
// 0x4d,0xc1,0x00,0x20 = bgtlr- 0
|
||||||
0x4d,0xc9,0x04,0x20 = bgtctr- 2
|
// 0x4d,0xc9,0x04,0x20 = bgtctr- 2
|
||||||
0x4d,0xc1,0x04,0x20 = bgtctr- 0
|
// 0x4d,0xc1,0x04,0x20 = bgtctr- 0
|
||||||
0x4d,0xc9,0x00,0x21 = bgtlrl- 2
|
// 0x4d,0xc9,0x00,0x21 = bgtlrl- 2
|
||||||
0x4d,0xc1,0x00,0x21 = bgtlrl- 0
|
// 0x4d,0xc1,0x00,0x21 = bgtlrl- 0
|
||||||
0x4d,0xc9,0x04,0x21 = bgtctrl- 2
|
// 0x4d,0xc9,0x04,0x21 = bgtctrl- 2
|
||||||
0x4d,0xc1,0x04,0x21 = bgtctrl- 0
|
// 0x4d,0xc1,0x04,0x21 = bgtctrl- 0
|
||||||
0x4c,0x88,0x00,0x20 = bgelr 2
|
// 0x4c,0x88,0x00,0x20 = bgelr 2
|
||||||
0x4c,0x80,0x00,0x20 = bgelr 0
|
// 0x4c,0x80,0x00,0x20 = bgelr 0
|
||||||
0x4c,0x88,0x04,0x20 = bgectr 2
|
// 0x4c,0x88,0x04,0x20 = bgectr 2
|
||||||
0x4c,0x80,0x04,0x20 = bgectr 0
|
// 0x4c,0x80,0x04,0x20 = bgectr 0
|
||||||
0x4c,0x88,0x00,0x21 = bgelrl 2
|
// 0x4c,0x88,0x00,0x21 = bgelrl 2
|
||||||
0x4c,0x80,0x00,0x21 = bgelrl 0
|
// 0x4c,0x80,0x00,0x21 = bgelrl 0
|
||||||
0x4c,0x88,0x04,0x21 = bgectrl 2
|
// 0x4c,0x88,0x04,0x21 = bgectrl 2
|
||||||
0x4c,0x80,0x04,0x21 = bgectrl 0
|
// 0x4c,0x80,0x04,0x21 = bgectrl 0
|
||||||
0x4c,0xe8,0x00,0x20 = bgelr+ 2
|
// 0x4c,0xe8,0x00,0x20 = bgelr+ 2
|
||||||
0x4c,0xe0,0x00,0x20 = bgelr+ 0
|
// 0x4c,0xe0,0x00,0x20 = bgelr+ 0
|
||||||
0x4c,0xe8,0x04,0x20 = bgectr+ 2
|
// 0x4c,0xe8,0x04,0x20 = bgectr+ 2
|
||||||
0x4c,0xe0,0x04,0x20 = bgectr+ 0
|
// 0x4c,0xe0,0x04,0x20 = bgectr+ 0
|
||||||
0x4c,0xe8,0x00,0x21 = bgelrl+ 2
|
// 0x4c,0xe8,0x00,0x21 = bgelrl+ 2
|
||||||
0x4c,0xe0,0x00,0x21 = bgelrl+ 0
|
// 0x4c,0xe0,0x00,0x21 = bgelrl+ 0
|
||||||
0x4c,0xe8,0x04,0x21 = bgectrl+ 2
|
// 0x4c,0xe8,0x04,0x21 = bgectrl+ 2
|
||||||
0x4c,0xe0,0x04,0x21 = bgectrl+ 0
|
// 0x4c,0xe0,0x04,0x21 = bgectrl+ 0
|
||||||
0x4c,0xc8,0x00,0x20 = bgelr- 2
|
// 0x4c,0xc8,0x00,0x20 = bgelr- 2
|
||||||
0x4c,0xc0,0x00,0x20 = bgelr- 0
|
// 0x4c,0xc0,0x00,0x20 = bgelr- 0
|
||||||
0x4c,0xc8,0x04,0x20 = bgectr- 2
|
// 0x4c,0xc8,0x04,0x20 = bgectr- 2
|
||||||
0x4c,0xc0,0x04,0x20 = bgectr- 0
|
// 0x4c,0xc0,0x04,0x20 = bgectr- 0
|
||||||
0x4c,0xc8,0x00,0x21 = bgelrl- 2
|
// 0x4c,0xc8,0x00,0x21 = bgelrl- 2
|
||||||
0x4c,0xc0,0x00,0x21 = bgelrl- 0
|
// 0x4c,0xc0,0x00,0x21 = bgelrl- 0
|
||||||
0x4c,0xc8,0x04,0x21 = bgectrl- 2
|
// 0x4c,0xc8,0x04,0x21 = bgectrl- 2
|
||||||
0x4c,0xc0,0x04,0x21 = bgectrl- 0
|
// 0x4c,0xc0,0x04,0x21 = bgectrl- 0
|
||||||
0x4c,0x8a,0x00,0x20 = bnelr 2
|
// 0x4c,0x8a,0x00,0x20 = bnelr 2
|
||||||
0x4c,0x82,0x00,0x20 = bnelr 0
|
// 0x4c,0x82,0x00,0x20 = bnelr 0
|
||||||
0x4c,0x8a,0x04,0x20 = bnectr 2
|
// 0x4c,0x8a,0x04,0x20 = bnectr 2
|
||||||
0x4c,0x82,0x04,0x20 = bnectr 0
|
// 0x4c,0x82,0x04,0x20 = bnectr 0
|
||||||
0x4c,0x8a,0x00,0x21 = bnelrl 2
|
// 0x4c,0x8a,0x00,0x21 = bnelrl 2
|
||||||
0x4c,0x82,0x00,0x21 = bnelrl 0
|
// 0x4c,0x82,0x00,0x21 = bnelrl 0
|
||||||
0x4c,0x8a,0x04,0x21 = bnectrl 2
|
// 0x4c,0x8a,0x04,0x21 = bnectrl 2
|
||||||
0x4c,0x82,0x04,0x21 = bnectrl 0
|
// 0x4c,0x82,0x04,0x21 = bnectrl 0
|
||||||
0x4c,0xea,0x00,0x20 = bnelr+ 2
|
// 0x4c,0xea,0x00,0x20 = bnelr+ 2
|
||||||
0x4c,0xe2,0x00,0x20 = bnelr+ 0
|
// 0x4c,0xe2,0x00,0x20 = bnelr+ 0
|
||||||
0x4c,0xea,0x04,0x20 = bnectr+ 2
|
// 0x4c,0xea,0x04,0x20 = bnectr+ 2
|
||||||
0x4c,0xe2,0x04,0x20 = bnectr+ 0
|
// 0x4c,0xe2,0x04,0x20 = bnectr+ 0
|
||||||
0x4c,0xea,0x00,0x21 = bnelrl+ 2
|
// 0x4c,0xea,0x00,0x21 = bnelrl+ 2
|
||||||
0x4c,0xe2,0x00,0x21 = bnelrl+ 0
|
// 0x4c,0xe2,0x00,0x21 = bnelrl+ 0
|
||||||
0x4c,0xea,0x04,0x21 = bnectrl+ 2
|
// 0x4c,0xea,0x04,0x21 = bnectrl+ 2
|
||||||
0x4c,0xe2,0x04,0x21 = bnectrl+ 0
|
// 0x4c,0xe2,0x04,0x21 = bnectrl+ 0
|
||||||
0x4c,0xca,0x00,0x20 = bnelr- 2
|
// 0x4c,0xca,0x00,0x20 = bnelr- 2
|
||||||
0x4c,0xc2,0x00,0x20 = bnelr- 0
|
// 0x4c,0xc2,0x00,0x20 = bnelr- 0
|
||||||
0x4c,0xca,0x04,0x20 = bnectr- 2
|
// 0x4c,0xca,0x04,0x20 = bnectr- 2
|
||||||
0x4c,0xc2,0x04,0x20 = bnectr- 0
|
// 0x4c,0xc2,0x04,0x20 = bnectr- 0
|
||||||
0x4c,0xca,0x00,0x21 = bnelrl- 2
|
// 0x4c,0xca,0x00,0x21 = bnelrl- 2
|
||||||
0x4c,0xc2,0x00,0x21 = bnelrl- 0
|
// 0x4c,0xc2,0x00,0x21 = bnelrl- 0
|
||||||
0x4c,0xca,0x04,0x21 = bnectrl- 2
|
// 0x4c,0xca,0x04,0x21 = bnectrl- 2
|
||||||
0x4c,0xc2,0x04,0x21 = bnectrl- 0
|
// 0x4c,0xc2,0x04,0x21 = bnectrl- 0
|
||||||
0x4c,0x89,0x00,0x20 = blelr 2
|
// 0x4c,0x89,0x00,0x20 = blelr 2
|
||||||
0x4c,0x81,0x00,0x20 = blelr 0
|
// 0x4c,0x81,0x00,0x20 = blelr 0
|
||||||
0x4c,0x89,0x04,0x20 = blectr 2
|
// 0x4c,0x89,0x04,0x20 = blectr 2
|
||||||
0x4c,0x81,0x04,0x20 = blectr 0
|
// 0x4c,0x81,0x04,0x20 = blectr 0
|
||||||
0x4c,0x89,0x00,0x21 = blelrl 2
|
// 0x4c,0x89,0x00,0x21 = blelrl 2
|
||||||
0x4c,0x81,0x00,0x21 = blelrl 0
|
// 0x4c,0x81,0x00,0x21 = blelrl 0
|
||||||
0x4c,0x89,0x04,0x21 = blectrl 2
|
// 0x4c,0x89,0x04,0x21 = blectrl 2
|
||||||
0x4c,0x81,0x04,0x21 = blectrl 0
|
// 0x4c,0x81,0x04,0x21 = blectrl 0
|
||||||
0x4c,0xe9,0x00,0x20 = blelr+ 2
|
// 0x4c,0xe9,0x00,0x20 = blelr+ 2
|
||||||
0x4c,0xe1,0x00,0x20 = blelr+ 0
|
// 0x4c,0xe1,0x00,0x20 = blelr+ 0
|
||||||
0x4c,0xe9,0x04,0x20 = blectr+ 2
|
// 0x4c,0xe9,0x04,0x20 = blectr+ 2
|
||||||
0x4c,0xe1,0x04,0x20 = blectr+ 0
|
// 0x4c,0xe1,0x04,0x20 = blectr+ 0
|
||||||
0x4c,0xe9,0x00,0x21 = blelrl+ 2
|
// 0x4c,0xe9,0x00,0x21 = blelrl+ 2
|
||||||
0x4c,0xe1,0x00,0x21 = blelrl+ 0
|
// 0x4c,0xe1,0x00,0x21 = blelrl+ 0
|
||||||
0x4c,0xe9,0x04,0x21 = blectrl+ 2
|
// 0x4c,0xe9,0x04,0x21 = blectrl+ 2
|
||||||
0x4c,0xe1,0x04,0x21 = blectrl+ 0
|
// 0x4c,0xe1,0x04,0x21 = blectrl+ 0
|
||||||
0x4c,0xc9,0x00,0x20 = blelr- 2
|
// 0x4c,0xc9,0x00,0x20 = blelr- 2
|
||||||
0x4c,0xc1,0x00,0x20 = blelr- 0
|
// 0x4c,0xc1,0x00,0x20 = blelr- 0
|
||||||
0x4c,0xc9,0x04,0x20 = blectr- 2
|
// 0x4c,0xc9,0x04,0x20 = blectr- 2
|
||||||
0x4c,0xc1,0x04,0x20 = blectr- 0
|
// 0x4c,0xc1,0x04,0x20 = blectr- 0
|
||||||
0x4c,0xc9,0x00,0x21 = blelrl- 2
|
// 0x4c,0xc9,0x00,0x21 = blelrl- 2
|
||||||
0x4c,0xc1,0x00,0x21 = blelrl- 0
|
// 0x4c,0xc1,0x00,0x21 = blelrl- 0
|
||||||
0x4c,0xc9,0x04,0x21 = blectrl- 2
|
// 0x4c,0xc9,0x04,0x21 = blectrl- 2
|
||||||
0x4c,0xc1,0x04,0x21 = blectrl- 0
|
// 0x4c,0xc1,0x04,0x21 = blectrl- 0
|
||||||
0x4d,0x8b,0x00,0x20 = bunlr 2
|
// 0x4d,0x8b,0x00,0x20 = bunlr 2
|
||||||
0x4d,0x83,0x00,0x20 = bunlr 0
|
// 0x4d,0x83,0x00,0x20 = bunlr 0
|
||||||
0x4d,0x8b,0x04,0x20 = bunctr 2
|
// 0x4d,0x8b,0x04,0x20 = bunctr 2
|
||||||
0x4d,0x83,0x04,0x20 = bunctr 0
|
// 0x4d,0x83,0x04,0x20 = bunctr 0
|
||||||
0x4d,0x8b,0x00,0x21 = bunlrl 2
|
// 0x4d,0x8b,0x00,0x21 = bunlrl 2
|
||||||
0x4d,0x83,0x00,0x21 = bunlrl 0
|
// 0x4d,0x83,0x00,0x21 = bunlrl 0
|
||||||
0x4d,0x8b,0x04,0x21 = bunctrl 2
|
// 0x4d,0x8b,0x04,0x21 = bunctrl 2
|
||||||
0x4d,0x83,0x04,0x21 = bunctrl 0
|
// 0x4d,0x83,0x04,0x21 = bunctrl 0
|
||||||
0x4d,0xeb,0x00,0x20 = bunlr+ 2
|
// 0x4d,0xeb,0x00,0x20 = bunlr+ 2
|
||||||
0x4d,0xe3,0x00,0x20 = bunlr+ 0
|
// 0x4d,0xe3,0x00,0x20 = bunlr+ 0
|
||||||
0x4d,0xeb,0x04,0x20 = bunctr+ 2
|
// 0x4d,0xeb,0x04,0x20 = bunctr+ 2
|
||||||
0x4d,0xe3,0x04,0x20 = bunctr+ 0
|
// 0x4d,0xe3,0x04,0x20 = bunctr+ 0
|
||||||
0x4d,0xeb,0x00,0x21 = bunlrl+ 2
|
// 0x4d,0xeb,0x00,0x21 = bunlrl+ 2
|
||||||
0x4d,0xe3,0x00,0x21 = bunlrl+ 0
|
// 0x4d,0xe3,0x00,0x21 = bunlrl+ 0
|
||||||
0x4d,0xeb,0x04,0x21 = bunctrl+ 2
|
// 0x4d,0xeb,0x04,0x21 = bunctrl+ 2
|
||||||
0x4d,0xe3,0x04,0x21 = bunctrl+ 0
|
// 0x4d,0xe3,0x04,0x21 = bunctrl+ 0
|
||||||
0x4d,0xcb,0x00,0x20 = bunlr- 2
|
// 0x4d,0xcb,0x00,0x20 = bunlr- 2
|
||||||
0x4d,0xc3,0x00,0x20 = bunlr- 0
|
// 0x4d,0xc3,0x00,0x20 = bunlr- 0
|
||||||
0x4d,0xcb,0x04,0x20 = bunctr- 2
|
// 0x4d,0xcb,0x04,0x20 = bunctr- 2
|
||||||
0x4d,0xc3,0x04,0x20 = bunctr- 0
|
// 0x4d,0xc3,0x04,0x20 = bunctr- 0
|
||||||
0x4d,0xcb,0x00,0x21 = bunlrl- 2
|
// 0x4d,0xcb,0x00,0x21 = bunlrl- 2
|
||||||
0x4d,0xc3,0x00,0x21 = bunlrl- 0
|
// 0x4d,0xc3,0x00,0x21 = bunlrl- 0
|
||||||
0x4d,0xcb,0x04,0x21 = bunctrl- 2
|
// 0x4d,0xcb,0x04,0x21 = bunctrl- 2
|
||||||
0x4d,0xc3,0x04,0x21 = bunctrl- 0
|
// 0x4d,0xc3,0x04,0x21 = bunctrl- 0
|
||||||
0x4c,0x8b,0x00,0x20 = bnulr 2
|
// 0x4c,0x8b,0x00,0x20 = bnulr 2
|
||||||
0x4c,0x83,0x00,0x20 = bnulr 0
|
// 0x4c,0x83,0x00,0x20 = bnulr 0
|
||||||
0x4c,0x8b,0x04,0x20 = bnuctr 2
|
// 0x4c,0x8b,0x04,0x20 = bnuctr 2
|
||||||
0x4c,0x83,0x04,0x20 = bnuctr 0
|
// 0x4c,0x83,0x04,0x20 = bnuctr 0
|
||||||
0x4c,0x8b,0x00,0x21 = bnulrl 2
|
// 0x4c,0x8b,0x00,0x21 = bnulrl 2
|
||||||
0x4c,0x83,0x00,0x21 = bnulrl 0
|
// 0x4c,0x83,0x00,0x21 = bnulrl 0
|
||||||
0x4c,0x8b,0x04,0x21 = bnuctrl 2
|
// 0x4c,0x8b,0x04,0x21 = bnuctrl 2
|
||||||
0x4c,0x83,0x04,0x21 = bnuctrl 0
|
// 0x4c,0x83,0x04,0x21 = bnuctrl 0
|
||||||
0x4c,0xeb,0x00,0x20 = bnulr+ 2
|
// 0x4c,0xeb,0x00,0x20 = bnulr+ 2
|
||||||
0x4c,0xe3,0x00,0x20 = bnulr+ 0
|
// 0x4c,0xe3,0x00,0x20 = bnulr+ 0
|
||||||
0x4c,0xeb,0x04,0x20 = bnuctr+ 2
|
// 0x4c,0xeb,0x04,0x20 = bnuctr+ 2
|
||||||
0x4c,0xe3,0x04,0x20 = bnuctr+ 0
|
// 0x4c,0xe3,0x04,0x20 = bnuctr+ 0
|
||||||
0x4c,0xeb,0x00,0x21 = bnulrl+ 2
|
// 0x4c,0xeb,0x00,0x21 = bnulrl+ 2
|
||||||
0x4c,0xe3,0x00,0x21 = bnulrl+ 0
|
// 0x4c,0xe3,0x00,0x21 = bnulrl+ 0
|
||||||
0x4c,0xeb,0x04,0x21 = bnuctrl+ 2
|
// 0x4c,0xeb,0x04,0x21 = bnuctrl+ 2
|
||||||
0x4c,0xe3,0x04,0x21 = bnuctrl+ 0
|
// 0x4c,0xe3,0x04,0x21 = bnuctrl+ 0
|
||||||
0x4c,0xcb,0x00,0x20 = bnulr- 2
|
// 0x4c,0xcb,0x00,0x20 = bnulr- 2
|
||||||
0x4c,0xc3,0x00,0x20 = bnulr- 0
|
// 0x4c,0xc3,0x00,0x20 = bnulr- 0
|
||||||
0x4c,0xcb,0x04,0x20 = bnuctr- 2
|
// 0x4c,0xcb,0x04,0x20 = bnuctr- 2
|
||||||
0x4c,0xc3,0x04,0x20 = bnuctr- 0
|
// 0x4c,0xc3,0x04,0x20 = bnuctr- 0
|
||||||
0x4c,0xcb,0x00,0x21 = bnulrl- 2
|
// 0x4c,0xcb,0x00,0x21 = bnulrl- 2
|
||||||
0x4c,0xc3,0x00,0x21 = bnulrl- 0
|
// 0x4c,0xc3,0x00,0x21 = bnulrl- 0
|
||||||
0x4c,0xcb,0x04,0x21 = bnuctrl- 2
|
// 0x4c,0xcb,0x04,0x21 = bnuctrl- 2
|
||||||
0x4c,0xc3,0x04,0x21 = bnuctrl- 0
|
// 0x4c,0xc3,0x04,0x21 = bnuctrl- 0
|
||||||
0x4d,0x8b,0x00,0x20 = bunlr 2
|
// 0x4d,0x8b,0x00,0x20 = bunlr 2
|
||||||
0x4d,0x83,0x00,0x20 = bunlr 0
|
// 0x4d,0x83,0x00,0x20 = bunlr 0
|
||||||
0x4d,0x8b,0x04,0x20 = bunctr 2
|
// 0x4d,0x8b,0x04,0x20 = bunctr 2
|
||||||
0x4d,0x83,0x04,0x20 = bunctr 0
|
// 0x4d,0x83,0x04,0x20 = bunctr 0
|
||||||
0x4d,0x8b,0x00,0x21 = bunlrl 2
|
// 0x4d,0x8b,0x00,0x21 = bunlrl 2
|
||||||
0x4d,0x83,0x00,0x21 = bunlrl 0
|
// 0x4d,0x83,0x00,0x21 = bunlrl 0
|
||||||
0x4d,0x8b,0x04,0x21 = bunctrl 2
|
// 0x4d,0x8b,0x04,0x21 = bunctrl 2
|
||||||
0x4d,0x83,0x04,0x21 = bunctrl 0
|
// 0x4d,0x83,0x04,0x21 = bunctrl 0
|
||||||
0x4d,0xeb,0x00,0x20 = bunlr+ 2
|
// 0x4d,0xeb,0x00,0x20 = bunlr+ 2
|
||||||
0x4d,0xe3,0x00,0x20 = bunlr+ 0
|
// 0x4d,0xe3,0x00,0x20 = bunlr+ 0
|
||||||
0x4d,0xeb,0x04,0x20 = bunctr+ 2
|
// 0x4d,0xeb,0x04,0x20 = bunctr+ 2
|
||||||
0x4d,0xe3,0x04,0x20 = bunctr+ 0
|
// 0x4d,0xe3,0x04,0x20 = bunctr+ 0
|
||||||
0x4d,0xeb,0x00,0x21 = bunlrl+ 2
|
// 0x4d,0xeb,0x00,0x21 = bunlrl+ 2
|
||||||
0x4d,0xe3,0x00,0x21 = bunlrl+ 0
|
// 0x4d,0xe3,0x00,0x21 = bunlrl+ 0
|
||||||
0x4d,0xeb,0x04,0x21 = bunctrl+ 2
|
// 0x4d,0xeb,0x04,0x21 = bunctrl+ 2
|
||||||
0x4d,0xe3,0x04,0x21 = bunctrl+ 0
|
// 0x4d,0xe3,0x04,0x21 = bunctrl+ 0
|
||||||
0x4d,0xcb,0x00,0x20 = bunlr- 2
|
// 0x4d,0xcb,0x00,0x20 = bunlr- 2
|
||||||
0x4d,0xc3,0x00,0x20 = bunlr- 0
|
// 0x4d,0xc3,0x00,0x20 = bunlr- 0
|
||||||
0x4d,0xcb,0x04,0x20 = bunctr- 2
|
// 0x4d,0xcb,0x04,0x20 = bunctr- 2
|
||||||
0x4d,0xc3,0x04,0x20 = bunctr- 0
|
// 0x4d,0xc3,0x04,0x20 = bunctr- 0
|
||||||
0x4d,0xcb,0x00,0x21 = bunlrl- 2
|
// 0x4d,0xcb,0x00,0x21 = bunlrl- 2
|
||||||
0x4d,0xc3,0x00,0x21 = bunlrl- 0
|
// 0x4d,0xc3,0x00,0x21 = bunlrl- 0
|
||||||
0x4d,0xcb,0x04,0x21 = bunctrl- 2
|
// 0x4d,0xcb,0x04,0x21 = bunctrl- 2
|
||||||
0x4d,0xc3,0x04,0x21 = bunctrl- 0
|
// 0x4d,0xc3,0x04,0x21 = bunctrl- 0
|
||||||
0x4c,0x8b,0x00,0x20 = bnulr 2
|
// 0x4c,0x8b,0x00,0x20 = bnulr 2
|
||||||
0x4c,0x83,0x00,0x20 = bnulr 0
|
// 0x4c,0x83,0x00,0x20 = bnulr 0
|
||||||
0x4c,0x8b,0x04,0x20 = bnuctr 2
|
// 0x4c,0x8b,0x04,0x20 = bnuctr 2
|
||||||
0x4c,0x83,0x04,0x20 = bnuctr 0
|
// 0x4c,0x83,0x04,0x20 = bnuctr 0
|
||||||
0x4c,0x8b,0x00,0x21 = bnulrl 2
|
// 0x4c,0x8b,0x00,0x21 = bnulrl 2
|
||||||
0x4c,0x83,0x00,0x21 = bnulrl 0
|
// 0x4c,0x83,0x00,0x21 = bnulrl 0
|
||||||
0x4c,0x8b,0x04,0x21 = bnuctrl 2
|
// 0x4c,0x8b,0x04,0x21 = bnuctrl 2
|
||||||
0x4c,0x83,0x04,0x21 = bnuctrl 0
|
// 0x4c,0x83,0x04,0x21 = bnuctrl 0
|
||||||
0x4c,0xeb,0x00,0x20 = bnulr+ 2
|
// 0x4c,0xeb,0x00,0x20 = bnulr+ 2
|
||||||
0x4c,0xe3,0x00,0x20 = bnulr+ 0
|
// 0x4c,0xe3,0x00,0x20 = bnulr+ 0
|
||||||
0x4c,0xeb,0x04,0x20 = bnuctr+ 2
|
// 0x4c,0xeb,0x04,0x20 = bnuctr+ 2
|
||||||
0x4c,0xe3,0x04,0x20 = bnuctr+ 0
|
// 0x4c,0xe3,0x04,0x20 = bnuctr+ 0
|
||||||
0x4c,0xeb,0x00,0x21 = bnulrl+ 2
|
// 0x4c,0xeb,0x00,0x21 = bnulrl+ 2
|
||||||
0x4c,0xe3,0x00,0x21 = bnulrl+ 0
|
// 0x4c,0xe3,0x00,0x21 = bnulrl+ 0
|
||||||
0x4c,0xeb,0x04,0x21 = bnuctrl+ 2
|
// 0x4c,0xeb,0x04,0x21 = bnuctrl+ 2
|
||||||
0x4c,0xe3,0x04,0x21 = bnuctrl+ 0
|
// 0x4c,0xe3,0x04,0x21 = bnuctrl+ 0
|
||||||
0x4c,0xcb,0x00,0x20 = bnulr- 2
|
// 0x4c,0xcb,0x00,0x20 = bnulr- 2
|
||||||
0x4c,0xc3,0x00,0x20 = bnulr- 0
|
// 0x4c,0xc3,0x00,0x20 = bnulr- 0
|
||||||
0x4c,0xcb,0x04,0x20 = bnuctr- 2
|
// 0x4c,0xcb,0x04,0x20 = bnuctr- 2
|
||||||
0x4c,0xc3,0x04,0x20 = bnuctr- 0
|
// 0x4c,0xc3,0x04,0x20 = bnuctr- 0
|
||||||
0x4c,0xcb,0x00,0x21 = bnulrl- 2
|
// 0x4c,0xcb,0x00,0x21 = bnulrl- 2
|
||||||
0x4c,0xc3,0x00,0x21 = bnulrl- 0
|
// 0x4c,0xc3,0x00,0x21 = bnulrl- 0
|
||||||
0x4c,0xcb,0x04,0x21 = bnuctrl- 2
|
// 0x4c,0xcb,0x04,0x21 = bnuctrl- 2
|
||||||
0x4c,0xc3,0x04,0x21 = bnuctrl- 0
|
// 0x4c,0xc3,0x04,0x21 = bnuctrl- 0
|
||||||
0x4c,0x42,0x12,0x42 = creqv 2, 2, 2
|
// 0x4c,0x42,0x12,0x42 = creqv 2, 2, 2
|
||||||
0x4c,0x42,0x11,0x82 = crxor 2, 2, 2
|
// 0x4c,0x42,0x11,0x82 = crxor 2, 2, 2
|
||||||
0x4c,0x43,0x1b,0x82 = cror 2, 3, 3
|
// 0x4c,0x43,0x1b,0x82 = cror 2, 3, 3
|
||||||
0x4c,0x43,0x18,0x42 = crnor 2, 3, 3
|
// 0x4c,0x43,0x18,0x42 = crnor 2, 3, 3
|
||||||
0x38,0x43,0xff,0x80 = addi 2, 3, -128
|
// 0x38,0x43,0xff,0x80 = addi 2, 3, -128
|
||||||
0x3c,0x43,0xff,0x80 = addis 2, 3, -128
|
// 0x3c,0x43,0xff,0x80 = addis 2, 3, -128
|
||||||
0x30,0x43,0xff,0x80 = addic 2, 3, -128
|
// 0x30,0x43,0xff,0x80 = addic 2, 3, -128
|
||||||
0x34,0x43,0xff,0x80 = addic. 2, 3, -128
|
// 0x34,0x43,0xff,0x80 = addic. 2, 3, -128
|
||||||
0x7c,0x44,0x18,0x50 = subf 2, 4, 3
|
0x7c,0x44,0x18,0x50 = subf 2, 4, 3
|
||||||
0x7c,0x44,0x18,0x51 = subf. 2, 4, 3
|
0x7c,0x44,0x18,0x51 = subf. 2, 4, 3
|
||||||
0x7c,0x44,0x18,0x10 = subfc 2, 4, 3
|
0x7c,0x44,0x18,0x10 = subfc 2, 4, 3
|
||||||
0x7c,0x44,0x18,0x11 = subfc. 2, 4, 3
|
0x7c,0x44,0x18,0x11 = subfc. 2, 4, 3
|
||||||
0x2d,0x23,0x00,0x80 = cmpdi 2, 3, 128
|
0x2d,0x23,0x00,0x80 = cmpdi 2, 3, 128
|
||||||
0x2c,0x23,0x00,0x80 = cmpdi 0, 3, 128
|
// 0x2c,0x23,0x00,0x80 = cmpdi 0, 3, 128
|
||||||
0x7d,0x23,0x20,0x00 = cmpd 2, 3, 4
|
0x7d,0x23,0x20,0x00 = cmpd 2, 3, 4
|
||||||
0x7c,0x23,0x20,0x00 = cmpd 0, 3, 4
|
// 0x7c,0x23,0x20,0x00 = cmpd 0, 3, 4
|
||||||
0x29,0x23,0x00,0x80 = cmpldi 2, 3, 128
|
0x29,0x23,0x00,0x80 = cmpldi 2, 3, 128
|
||||||
0x28,0x23,0x00,0x80 = cmpldi 0, 3, 128
|
// 0x28,0x23,0x00,0x80 = cmpldi 0, 3, 128
|
||||||
0x7d,0x23,0x20,0x40 = cmpld 2, 3, 4
|
0x7d,0x23,0x20,0x40 = cmpld 2, 3, 4
|
||||||
0x7c,0x23,0x20,0x40 = cmpld 0, 3, 4
|
// 0x7c,0x23,0x20,0x40 = cmpld 0, 3, 4
|
||||||
0x2d,0x03,0x00,0x80 = cmpwi 2, 3, 128
|
0x2d,0x03,0x00,0x80 = cmpwi 2, 3, 128
|
||||||
0x2c,0x03,0x00,0x80 = cmpwi 0, 3, 128
|
// 0x2c,0x03,0x00,0x80 = cmpwi 0, 3, 128
|
||||||
0x7d,0x03,0x20,0x00 = cmpw 2, 3, 4
|
0x7d,0x03,0x20,0x00 = cmpw 2, 3, 4
|
||||||
0x7c,0x03,0x20,0x00 = cmpw 0, 3, 4
|
// 0x7c,0x03,0x20,0x00 = cmpw 0, 3, 4
|
||||||
0x29,0x03,0x00,0x80 = cmplwi 2, 3, 128
|
0x29,0x03,0x00,0x80 = cmplwi 2, 3, 128
|
||||||
0x28,0x03,0x00,0x80 = cmplwi 0, 3, 128
|
// 0x28,0x03,0x00,0x80 = cmplwi 0, 3, 128
|
||||||
0x7d,0x03,0x20,0x40 = cmplw 2, 3, 4
|
0x7d,0x03,0x20,0x40 = cmplw 2, 3, 4
|
||||||
0x7c,0x03,0x20,0x40 = cmplw 0, 3, 4
|
// 0x7c,0x03,0x20,0x40 = cmplw 0, 3, 4
|
||||||
0x0e,0x03,0x00,0x04 = twi 16, 3, 4
|
// 0x0e,0x03,0x00,0x04 = twi 16, 3, 4
|
||||||
0x7e,0x03,0x20,0x08 = tw 16, 3, 4
|
// 0x7e,0x03,0x20,0x08 = tw 16, 3, 4
|
||||||
0x0a,0x03,0x00,0x04 = tdi 16, 3, 4
|
// 0x0a,0x03,0x00,0x04 = tdi 16, 3, 4
|
||||||
0x7e,0x03,0x20,0x88 = td 16, 3, 4
|
// 0x7e,0x03,0x20,0x88 = td 16, 3, 4
|
||||||
0x0e,0x83,0x00,0x04 = twi 20, 3, 4
|
0x0e,0x83,0x00,0x04 = twi 20, 3, 4
|
||||||
0x7e,0x83,0x20,0x08 = tw 20, 3, 4
|
0x7e,0x83,0x20,0x08 = tw 20, 3, 4
|
||||||
0x0a,0x83,0x00,0x04 = tdi 20, 3, 4
|
0x0a,0x83,0x00,0x04 = tdi 20, 3, 4
|
||||||
0x7e,0x83,0x20,0x88 = td 20, 3, 4
|
0x7e,0x83,0x20,0x88 = td 20, 3, 4
|
||||||
0x0c,0x83,0x00,0x04 = twi 4, 3, 4
|
// 0x0c,0x83,0x00,0x04 = twi 4, 3, 4
|
||||||
0x7c,0x83,0x20,0x08 = tw 4, 3, 4
|
// 0x7c,0x83,0x20,0x08 = tw 4, 3, 4
|
||||||
0x08,0x83,0x00,0x04 = tdi 4, 3, 4
|
// 0x08,0x83,0x00,0x04 = tdi 4, 3, 4
|
||||||
0x7c,0x83,0x20,0x88 = td 4, 3, 4
|
// 0x7c,0x83,0x20,0x88 = td 4, 3, 4
|
||||||
0x0d,0x83,0x00,0x04 = twi 12, 3, 4
|
0x0d,0x83,0x00,0x04 = twi 12, 3, 4
|
||||||
0x7d,0x83,0x20,0x08 = tw 12, 3, 4
|
0x7d,0x83,0x20,0x08 = tw 12, 3, 4
|
||||||
0x09,0x83,0x00,0x04 = tdi 12, 3, 4
|
0x09,0x83,0x00,0x04 = tdi 12, 3, 4
|
||||||
0x7d,0x83,0x20,0x88 = td 12, 3, 4
|
0x7d,0x83,0x20,0x88 = td 12, 3, 4
|
||||||
0x0d,0x03,0x00,0x04 = twi 8, 3, 4
|
// 0x0d,0x03,0x00,0x04 = twi 8, 3, 4
|
||||||
0x7d,0x03,0x20,0x08 = tw 8, 3, 4
|
// 0x7d,0x03,0x20,0x08 = tw 8, 3, 4
|
||||||
0x09,0x03,0x00,0x04 = tdi 8, 3, 4
|
// 0x09,0x03,0x00,0x04 = tdi 8, 3, 4
|
||||||
0x7d,0x03,0x20,0x88 = td 8, 3, 4
|
// 0x7d,0x03,0x20,0x88 = td 8, 3, 4
|
||||||
0x0d,0x83,0x00,0x04 = twi 12, 3, 4
|
0x0d,0x83,0x00,0x04 = twi 12, 3, 4
|
||||||
0x7d,0x83,0x20,0x08 = tw 12, 3, 4
|
0x7d,0x83,0x20,0x08 = tw 12, 3, 4
|
||||||
0x09,0x83,0x00,0x04 = tdi 12, 3, 4
|
0x09,0x83,0x00,0x04 = tdi 12, 3, 4
|
||||||
0x7d,0x83,0x20,0x88 = td 12, 3, 4
|
0x7d,0x83,0x20,0x88 = td 12, 3, 4
|
||||||
0x0f,0x03,0x00,0x04 = twi 24, 3, 4
|
// 0x0f,0x03,0x00,0x04 = twi 24, 3, 4
|
||||||
0x7f,0x03,0x20,0x08 = tw 24, 3, 4
|
// 0x7f,0x03,0x20,0x08 = tw 24, 3, 4
|
||||||
0x0b,0x03,0x00,0x04 = tdi 24, 3, 4
|
// 0x0b,0x03,0x00,0x04 = tdi 24, 3, 4
|
||||||
0x7f,0x03,0x20,0x88 = td 24, 3, 4
|
// 0x7f,0x03,0x20,0x88 = td 24, 3, 4
|
||||||
0x0e,0x83,0x00,0x04 = twi 20, 3, 4
|
0x0e,0x83,0x00,0x04 = twi 20, 3, 4
|
||||||
0x7e,0x83,0x20,0x08 = tw 20, 3, 4
|
0x7e,0x83,0x20,0x08 = tw 20, 3, 4
|
||||||
0x0a,0x83,0x00,0x04 = tdi 20, 3, 4
|
0x0a,0x83,0x00,0x04 = tdi 20, 3, 4
|
||||||
0x7e,0x83,0x20,0x88 = td 20, 3, 4
|
0x7e,0x83,0x20,0x88 = td 20, 3, 4
|
||||||
0x0c,0x43,0x00,0x04 = twi 2, 3, 4
|
// 0x0c,0x43,0x00,0x04 = twi 2, 3, 4
|
||||||
0x7c,0x43,0x20,0x08 = tw 2, 3, 4
|
// 0x7c,0x43,0x20,0x08 = tw 2, 3, 4
|
||||||
0x08,0x43,0x00,0x04 = tdi 2, 3, 4
|
// 0x08,0x43,0x00,0x04 = tdi 2, 3, 4
|
||||||
0x7c,0x43,0x20,0x88 = td 2, 3, 4
|
// 0x7c,0x43,0x20,0x88 = td 2, 3, 4
|
||||||
0x0c,0xc3,0x00,0x04 = twi 6, 3, 4
|
0x0c,0xc3,0x00,0x04 = twi 6, 3, 4
|
||||||
0x7c,0xc3,0x20,0x08 = tw 6, 3, 4
|
0x7c,0xc3,0x20,0x08 = tw 6, 3, 4
|
||||||
0x08,0xc3,0x00,0x04 = tdi 6, 3, 4
|
0x08,0xc3,0x00,0x04 = tdi 6, 3, 4
|
||||||
@ -455,10 +455,10 @@
|
|||||||
0x7c,0xa3,0x20,0x08 = tw 5, 3, 4
|
0x7c,0xa3,0x20,0x08 = tw 5, 3, 4
|
||||||
0x08,0xa3,0x00,0x04 = tdi 5, 3, 4
|
0x08,0xa3,0x00,0x04 = tdi 5, 3, 4
|
||||||
0x7c,0xa3,0x20,0x88 = td 5, 3, 4
|
0x7c,0xa3,0x20,0x88 = td 5, 3, 4
|
||||||
0x0c,0x23,0x00,0x04 = twi 1, 3, 4
|
// 0x0c,0x23,0x00,0x04 = twi 1, 3, 4
|
||||||
0x7c,0x23,0x20,0x08 = tw 1, 3, 4
|
// 0x7c,0x23,0x20,0x08 = tw 1, 3, 4
|
||||||
0x08,0x23,0x00,0x04 = tdi 1, 3, 4
|
// 0x08,0x23,0x00,0x04 = tdi 1, 3, 4
|
||||||
0x7c,0x23,0x20,0x88 = td 1, 3, 4
|
// 0x7c,0x23,0x20,0x88 = td 1, 3, 4
|
||||||
0x0c,0xa3,0x00,0x04 = twi 5, 3, 4
|
0x0c,0xa3,0x00,0x04 = twi 5, 3, 4
|
||||||
0x7c,0xa3,0x20,0x08 = tw 5, 3, 4
|
0x7c,0xa3,0x20,0x08 = tw 5, 3, 4
|
||||||
0x08,0xa3,0x00,0x04 = tdi 5, 3, 4
|
0x08,0xa3,0x00,0x04 = tdi 5, 3, 4
|
||||||
@ -467,10 +467,10 @@
|
|||||||
0x7c,0xc3,0x20,0x08 = tw 6, 3, 4
|
0x7c,0xc3,0x20,0x08 = tw 6, 3, 4
|
||||||
0x08,0xc3,0x00,0x04 = tdi 6, 3, 4
|
0x08,0xc3,0x00,0x04 = tdi 6, 3, 4
|
||||||
0x7c,0xc3,0x20,0x88 = td 6, 3, 4
|
0x7c,0xc3,0x20,0x88 = td 6, 3, 4
|
||||||
0x0f,0xe3,0x00,0x04 = twi 31, 3, 4
|
// 0x0f,0xe3,0x00,0x04 = twi 31, 3, 4
|
||||||
0x7f,0xe3,0x20,0x08 = tw 31, 3, 4
|
// 0x7f,0xe3,0x20,0x08 = tw 31, 3, 4
|
||||||
0x0b,0xe3,0x00,0x04 = tdi 31, 3, 4
|
// 0x0b,0xe3,0x00,0x04 = tdi 31, 3, 4
|
||||||
0x7f,0xe3,0x20,0x88 = td 31, 3, 4
|
// 0x7f,0xe3,0x20,0x88 = td 31, 3, 4
|
||||||
0x7f,0xe0,0x00,0x08 = trap
|
0x7f,0xe0,0x00,0x08 = trap
|
||||||
0x78,0x62,0x28,0xc4 = rldicr 2, 3, 5, 3
|
0x78,0x62,0x28,0xc4 = rldicr 2, 3, 5, 3
|
||||||
0x78,0x62,0x28,0xc5 = rldicr. 2, 3, 5, 3
|
0x78,0x62,0x28,0xc5 = rldicr. 2, 3, 5, 3
|
||||||
@ -478,18 +478,18 @@
|
|||||||
0x78,0x62,0x4f,0x21 = rldicl. 2, 3, 9, 60
|
0x78,0x62,0x4f,0x21 = rldicl. 2, 3, 9, 60
|
||||||
0x78,0x62,0xb9,0x4e = rldimi 2, 3, 55, 5
|
0x78,0x62,0xb9,0x4e = rldimi 2, 3, 55, 5
|
||||||
0x78,0x62,0xb9,0x4f = rldimi. 2, 3, 55, 5
|
0x78,0x62,0xb9,0x4f = rldimi. 2, 3, 55, 5
|
||||||
0x78,0x62,0x20,0x00 = rldicl 2, 3, 4, 0
|
// 0x78,0x62,0x20,0x00 = rldicl 2, 3, 4, 0
|
||||||
0x78,0x62,0x20,0x01 = rldicl. 2, 3, 4, 0
|
// 0x78,0x62,0x20,0x01 = rldicl. 2, 3, 4, 0
|
||||||
0x78,0x62,0xe0,0x02 = rldicl 2, 3, 60, 0
|
// 0x78,0x62,0xe0,0x02 = rldicl 2, 3, 60, 0
|
||||||
0x78,0x62,0xe0,0x03 = rldicl. 2, 3, 60, 0
|
// 0x78,0x62,0xe0,0x03 = rldicl. 2, 3, 60, 0
|
||||||
0x78,0x62,0x20,0x10 = rldcl 2, 3, 4, 0
|
// 0x78,0x62,0x20,0x10 = rldcl 2, 3, 4, 0
|
||||||
0x78,0x62,0x20,0x11 = rldcl. 2, 3, 4, 0
|
// 0x78,0x62,0x20,0x11 = rldcl. 2, 3, 4, 0
|
||||||
0x78,0x62,0x26,0xe4 = sldi 2, 3, 4
|
0x78,0x62,0x26,0xe4 = sldi 2, 3, 4
|
||||||
0x78,0x62,0x26,0xe5 = rldicr. 2, 3, 4, 59
|
0x78,0x62,0x26,0xe5 = rldicr. 2, 3, 4, 59
|
||||||
0x78,0x62,0xe1,0x02 = rldicl 2, 3, 60, 4
|
0x78,0x62,0xe1,0x02 = rldicl 2, 3, 60, 4
|
||||||
0x78,0x62,0xe1,0x03 = rldicl. 2, 3, 60, 4
|
0x78,0x62,0xe1,0x03 = rldicl. 2, 3, 60, 4
|
||||||
0x78,0x62,0x01,0x00 = rldicl 2, 3, 0, 4
|
// 0x78,0x62,0x01,0x00 = rldicl 2, 3, 0, 4
|
||||||
0x78,0x62,0x01,0x01 = rldicl. 2, 3, 0, 4
|
// 0x78,0x62,0x01,0x01 = rldicl. 2, 3, 0, 4
|
||||||
0x78,0x62,0x06,0xe4 = rldicr 2, 3, 0, 59
|
0x78,0x62,0x06,0xe4 = rldicr 2, 3, 0, 59
|
||||||
0x78,0x62,0x06,0xe5 = rldicr. 2, 3, 0, 59
|
0x78,0x62,0x06,0xe5 = rldicr. 2, 3, 0, 59
|
||||||
0x78,0x62,0x20,0x48 = rldic 2, 3, 4, 1
|
0x78,0x62,0x20,0x48 = rldic 2, 3, 4, 1
|
||||||
@ -502,30 +502,30 @@
|
|||||||
0x50,0x62,0xd9,0x51 = rlwimi. 2, 3, 27, 5, 8
|
0x50,0x62,0xd9,0x51 = rlwimi. 2, 3, 27, 5, 8
|
||||||
0x50,0x62,0xb9,0x50 = rlwimi 2, 3, 23, 5, 8
|
0x50,0x62,0xb9,0x50 = rlwimi 2, 3, 23, 5, 8
|
||||||
0x50,0x62,0xb9,0x51 = rlwimi. 2, 3, 23, 5, 8
|
0x50,0x62,0xb9,0x51 = rlwimi. 2, 3, 23, 5, 8
|
||||||
0x54,0x62,0x20,0x3e = rlwinm 2, 3, 4, 0, 31
|
// 0x54,0x62,0x20,0x3e = rlwinm 2, 3, 4, 0, 31
|
||||||
0x54,0x62,0x20,0x3f = rlwinm. 2, 3, 4, 0, 31
|
// 0x54,0x62,0x20,0x3f = rlwinm. 2, 3, 4, 0, 31
|
||||||
0x54,0x62,0xe0,0x3e = rlwinm 2, 3, 28, 0, 31
|
// 0x54,0x62,0xe0,0x3e = rlwinm 2, 3, 28, 0, 31
|
||||||
0x54,0x62,0xe0,0x3f = rlwinm. 2, 3, 28, 0, 31
|
// 0x54,0x62,0xe0,0x3f = rlwinm. 2, 3, 28, 0, 31
|
||||||
0x5c,0x62,0x20,0x3e = rlwnm 2, 3, 4, 0, 31
|
// 0x5c,0x62,0x20,0x3e = rlwnm 2, 3, 4, 0, 31
|
||||||
0x5c,0x62,0x20,0x3f = rlwnm. 2, 3, 4, 0, 31
|
// 0x5c,0x62,0x20,0x3f = rlwnm. 2, 3, 4, 0, 31
|
||||||
0x54,0x62,0x20,0x36 = slwi 2, 3, 4
|
0x54,0x62,0x20,0x36 = slwi 2, 3, 4
|
||||||
0x54,0x62,0x20,0x37 = rlwinm. 2, 3, 4, 0, 27
|
0x54,0x62,0x20,0x37 = rlwinm. 2, 3, 4, 0, 27
|
||||||
0x54,0x62,0xe1,0x3e = srwi 2, 3, 4
|
0x54,0x62,0xe1,0x3e = srwi 2, 3, 4
|
||||||
0x54,0x62,0xe1,0x3f = rlwinm. 2, 3, 28, 4, 31
|
0x54,0x62,0xe1,0x3f = rlwinm. 2, 3, 28, 4, 31
|
||||||
0x54,0x62,0x01,0x3e = rlwinm 2, 3, 0, 4, 31
|
// 0x54,0x62,0x01,0x3e = rlwinm 2, 3, 0, 4, 31
|
||||||
0x54,0x62,0x01,0x3f = rlwinm. 2, 3, 0, 4, 31
|
// 0x54,0x62,0x01,0x3f = rlwinm. 2, 3, 0, 4, 31
|
||||||
0x54,0x62,0x00,0x36 = rlwinm 2, 3, 0, 0, 27
|
0x54,0x62,0x00,0x36 = rlwinm 2, 3, 0, 0, 27
|
||||||
0x54,0x62,0x00,0x37 = rlwinm. 2, 3, 0, 0, 27
|
0x54,0x62,0x00,0x37 = rlwinm. 2, 3, 0, 0, 27
|
||||||
0x54,0x62,0x20,0x76 = rlwinm 2, 3, 4, 1, 27
|
0x54,0x62,0x20,0x76 = rlwinm 2, 3, 4, 1, 27
|
||||||
0x54,0x62,0x20,0x77 = rlwinm. 2, 3, 4, 1, 27
|
0x54,0x62,0x20,0x77 = rlwinm. 2, 3, 4, 1, 27
|
||||||
0x7c,0x41,0x03,0xa6 = mtspr 1, 2
|
// 0x7c,0x41,0x03,0xa6 = mtspr 1, 2
|
||||||
0x7c,0x41,0x02,0xa6 = mfspr 2, 1
|
// 0x7c,0x41,0x02,0xa6 = mfspr 2, 1
|
||||||
0x7c,0x48,0x03,0xa6 = mtlr 2
|
0x7c,0x48,0x03,0xa6 = mtlr 2
|
||||||
0x7c,0x48,0x02,0xa6 = mflr 2
|
0x7c,0x48,0x02,0xa6 = mflr 2
|
||||||
0x7c,0x49,0x03,0xa6 = mtctr 2
|
0x7c,0x49,0x03,0xa6 = mtctr 2
|
||||||
0x7c,0x49,0x02,0xa6 = mfctr 2
|
0x7c,0x49,0x02,0xa6 = mfctr 2
|
||||||
0x60,0x00,0x00,0x00 = nop
|
0x60,0x00,0x00,0x00 = nop
|
||||||
0x68,0x00,0x00,0x00 = xori 0, 0, 0
|
// 0x68,0x00,0x00,0x00 = xori 0, 0, 0
|
||||||
0x38,0x40,0x00,0x80 = li 2, 128
|
0x38,0x40,0x00,0x80 = li 2, 128
|
||||||
0x3c,0x40,0x00,0x80 = lis 2, 128
|
0x3c,0x40,0x00,0x80 = lis 2, 128
|
||||||
0x7c,0x62,0x1b,0x78 = mr 2, 3
|
0x7c,0x62,0x1b,0x78 = mr 2, 3
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME
|
||||||
0x4c,0x8a,0x18,0x20 = bclr 4, 10, 3
|
// 0x4c,0x8a,0x18,0x20 = bclr 4, 10, 3
|
||||||
0x4c,0x8a,0x00,0x20 = bclr 4, 10, 0
|
// 0x4c,0x8a,0x00,0x20 = bclr 4, 10, 0
|
||||||
0x4c,0x8a,0x18,0x21 = bclrl 4, 10, 3
|
// 0x4c,0x8a,0x18,0x21 = bclrl 4, 10, 3
|
||||||
0x4c,0x8a,0x00,0x21 = bclrl 4, 10, 0
|
// 0x4c,0x8a,0x00,0x21 = bclrl 4, 10, 0
|
||||||
0x4c,0x8a,0x1c,0x20 = bcctr 4, 10, 3
|
// 0x4c,0x8a,0x1c,0x20 = bcctr 4, 10, 3
|
||||||
0x4c,0x8a,0x04,0x20 = bcctr 4, 10, 0
|
// 0x4c,0x8a,0x04,0x20 = bcctr 4, 10, 0
|
||||||
0x4c,0x8a,0x1c,0x21 = bcctrl 4, 10, 3
|
// 0x4c,0x8a,0x1c,0x21 = bcctrl 4, 10, 3
|
||||||
0x4c,0x8a,0x04,0x21 = bcctrl 4, 10, 0
|
// 0x4c,0x8a,0x04,0x21 = bcctrl 4, 10, 0
|
||||||
0x4c,0x43,0x22,0x02 = crand 2, 3, 4
|
0x4c,0x43,0x22,0x02 = crand 2, 3, 4
|
||||||
0x4c,0x43,0x21,0xc2 = crnand 2, 3, 4
|
0x4c,0x43,0x21,0xc2 = crnand 2, 3, 4
|
||||||
0x4c,0x43,0x23,0x82 = cror 2, 3, 4
|
0x4c,0x43,0x23,0x82 = cror 2, 3, 4
|
||||||
@ -17,7 +17,7 @@
|
|||||||
0x4c,0x43,0x23,0x42 = crorc 2, 3, 4
|
0x4c,0x43,0x23,0x42 = crorc 2, 3, 4
|
||||||
0x4d,0x0c,0x00,0x00 = mcrf 2, 3
|
0x4d,0x0c,0x00,0x00 = mcrf 2, 3
|
||||||
0x44,0x00,0x00,0x22 = sc 1
|
0x44,0x00,0x00,0x22 = sc 1
|
||||||
0x44,0x00,0x00,0x02 = sc 0
|
// 0x44,0x00,0x00,0x02 = sc 0
|
||||||
0x88,0x44,0x00,0x80 = lbz 2, 128(4)
|
0x88,0x44,0x00,0x80 = lbz 2, 128(4)
|
||||||
0x7c,0x43,0x20,0xae = lbzx 2, 3, 4
|
0x7c,0x43,0x20,0xae = lbzx 2, 3, 4
|
||||||
0x8c,0x44,0x00,0x80 = lbzu 2, 128(4)
|
0x8c,0x44,0x00,0x80 = lbzu 2, 128(4)
|
||||||
@ -121,10 +121,10 @@
|
|||||||
0x7d,0x03,0x20,0x00 = cmpw 2, 3, 4
|
0x7d,0x03,0x20,0x00 = cmpw 2, 3, 4
|
||||||
0x29,0x03,0x00,0x80 = cmplwi 2, 3, 128
|
0x29,0x03,0x00,0x80 = cmplwi 2, 3, 128
|
||||||
0x7d,0x03,0x20,0x40 = cmplw 2, 3, 4
|
0x7d,0x03,0x20,0x40 = cmplw 2, 3, 4
|
||||||
0x0c,0x43,0x00,0x04 = twi 2, 3, 4
|
// 0x0c,0x43,0x00,0x04 = twi 2, 3, 4
|
||||||
0x7c,0x43,0x20,0x08 = tw 2, 3, 4
|
// 0x7c,0x43,0x20,0x08 = tw 2, 3, 4
|
||||||
0x08,0x43,0x00,0x04 = tdi 2, 3, 4
|
// 0x08,0x43,0x00,0x04 = tdi 2, 3, 4
|
||||||
0x7c,0x43,0x20,0x88 = td 2, 3, 4
|
// 0x7c,0x43,0x20,0x88 = td 2, 3, 4
|
||||||
0x7c,0x43,0x21,0x5e = isel 2, 3, 4, 5
|
0x7c,0x43,0x21,0x5e = isel 2, 3, 4, 5
|
||||||
0x70,0x62,0x00,0x80 = andi. 2, 3, 128
|
0x70,0x62,0x00,0x80 = andi. 2, 3, 128
|
||||||
0x74,0x62,0x00,0x80 = andis. 2, 3, 128
|
0x74,0x62,0x00,0x80 = andis. 2, 3, 128
|
||||||
@ -152,8 +152,8 @@
|
|||||||
0x7c,0x62,0x07,0x75 = extsb. 2, 3
|
0x7c,0x62,0x07,0x75 = extsb. 2, 3
|
||||||
0x7c,0x62,0x07,0x34 = extsh 2, 3
|
0x7c,0x62,0x07,0x34 = extsh 2, 3
|
||||||
0x7c,0x62,0x07,0x35 = extsh. 2, 3
|
0x7c,0x62,0x07,0x35 = extsh. 2, 3
|
||||||
0x7c,0x62,0x00,0x34 = cntlzw 2, 3
|
// 0x7c,0x62,0x00,0x34 = cntlzw 2, 3
|
||||||
0x7c,0x62,0x00,0x35 = cntlzw. 2, 3
|
// 0x7c,0x62,0x00,0x35 = cntlzw. 2, 3
|
||||||
0x7c,0x62,0x02,0xf4 = popcntw 2, 3
|
0x7c,0x62,0x02,0xf4 = popcntw 2, 3
|
||||||
0x7c,0x62,0x07,0xb4 = extsw 2, 3
|
0x7c,0x62,0x07,0xb4 = extsw 2, 3
|
||||||
0x7c,0x62,0x07,0xb5 = extsw. 2, 3
|
0x7c,0x62,0x07,0xb5 = extsw. 2, 3
|
||||||
|
@ -3,15 +3,14 @@
|
|||||||
0x7c,0x22,0x1a,0x14 = add 1, 2, 3
|
0x7c,0x22,0x1a,0x14 = add 1, 2, 3
|
||||||
0x7c,0x00,0x02,0x14 = add 0, 0, 0
|
0x7c,0x00,0x02,0x14 = add 0, 0, 0
|
||||||
0x7f,0xff,0xfa,0x14 = add 31, 31, 31
|
0x7f,0xff,0xfa,0x14 = add 31, 31, 31
|
||||||
0x38,0x20,0x00,0x00 = addi 1, 0, 0
|
0x38,0x20,0x00,0x00 = li 1, 0
|
||||||
0x38,0x20,0x00,0x00 = addi 1, 0, 0
|
|
||||||
0x38,0x22,0x00,0x00 = addi 1, 2, 0
|
0x38,0x22,0x00,0x00 = addi 1, 2, 0
|
||||||
0x38,0x20,0x80,0x00 = addi 1, 0, -32768
|
0x38,0x20,0x80,0x00 = li 1, 0x8000
|
||||||
0x38,0x20,0x7f,0xff = addi 1, 0, 32767
|
0x38,0x20,0x7f,0xff = li 1, 0x7fff
|
||||||
0x60,0x41,0x00,0x00 = ori 1, 2, 0
|
0x60,0x41,0x00,0x00 = ori 1, 2, 0
|
||||||
0x60,0x41,0xff,0xff = ori 1, 2, 65535
|
0x60,0x41,0xff,0xff = ori 1, 2, 65535
|
||||||
0x3c,0x20,0x00,0x00 = addis 1, 0, 0
|
0x3c,0x20,0x00,0x00 = lis 1, 0
|
||||||
0x3c,0x20,0xff,0xff = addis 1, 0, -1
|
0x3c,0x20,0xff,0xff = lis 1, 0xffff
|
||||||
0x80,0x20,0x00,0x00 = lwz 1, 0(0)
|
0x80,0x20,0x00,0x00 = lwz 1, 0(0)
|
||||||
0x80,0x20,0x00,0x00 = lwz 1, 0(0)
|
0x80,0x20,0x00,0x00 = lwz 1, 0(0)
|
||||||
0x80,0x3f,0x00,0x00 = lwz 1, 0(31)
|
0x80,0x3f,0x00,0x00 = lwz 1, 0(31)
|
||||||
@ -26,7 +25,7 @@
|
|||||||
0xe8,0x22,0x7f,0xfc = ld 1, 32764(2)
|
0xe8,0x22,0x7f,0xfc = ld 1, 32764(2)
|
||||||
0xe8,0x22,0x00,0x04 = ld 1, 4(2)
|
0xe8,0x22,0x00,0x04 = ld 1, 4(2)
|
||||||
0xe8,0x22,0xff,0xfc = ld 1, -4(2)
|
0xe8,0x22,0xff,0xfc = ld 1, -4(2)
|
||||||
0x48,0x00,0x04,0x00 = b .+1024
|
// 0x48,0x00,0x04,0x00 = b .+1024
|
||||||
0x48,0x00,0x04,0x02 = ba 1024
|
0x48,0x00,0x04,0x02 = ba 1024
|
||||||
0x41,0x82,0x04,0x00 = beq 0, .+1024
|
// 0x41,0x82,0x04,0x00 = beq 0, .+1024
|
||||||
0x41,0x82,0x04,0x02 = beqa 0, 1024
|
// 0x41,0x82,0x04,0x02 = beqa 0, 1024
|
||||||
|
@ -4,3 +4,7 @@ Format of input files:
|
|||||||
|
|
||||||
# ARCH, MODE, OPTION
|
# ARCH, MODE, OPTION
|
||||||
hexcode = assembly
|
hexcode = assembly
|
||||||
|
|
||||||
|
Format of issue file:
|
||||||
|
# ARCH, MODE, OPTION
|
||||||
|
hexcode = assembly | regs_read | regs_read_count | regs_write | regs_write_count | groups | groups_count
|
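The hunk above documents the extended issue-file format: besides the expected disassembly, each entry records the registers read and written and the instruction groups, which is exactly the data Capstone's detail mode exposes. The sketch below is an illustration of that correspondence only; it is not part of this commit, the file name and output layout are assumptions, and the exact field formatting accepted by the test parser may differ. It uses the public C API calls cs_regs_access(), cs_reg_name() and cs_group_name(), and the PPC test vector 0x7c,0x22,0x1a,0x14 = add 1, 2, 3 that appears earlier in this diff.

/* issue_line.c -- hypothetical helper, not part of the repository.
 * Prints one instruction in roughly the shape
 *   assembly | regs_read | regs_read_count | regs_write | regs_write_count | groups | groups_count
 * Build (assumption): gcc issue_line.c -lcapstone -o issue_line
 */
#include <stdio.h>
#include <capstone/capstone.h>

int main(void)
{
	csh handle;
	cs_insn *insn;
	cs_regs regs_read, regs_write;          /* uint16_t[64] each */
	uint8_t read_count, write_count, i;
	const uint8_t code[] = { 0x7c, 0x22, 0x1a, 0x14 };   /* "add 1, 2, 3" test vector */

	/* Corresponds to a "# CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CS_OPT_SYNTAX_NOREGNAME" header */
	if (cs_open(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, &handle) != CS_ERR_OK)
		return 1;
	cs_option(handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_NOREGNAME);
	cs_option(handle, CS_OPT_DETAIL, CS_OPT_ON);    /* detail mode is required for regs/groups */

	if (cs_disasm(handle, code, sizeof(code), 0x1000, 0, &insn) != 1)
		return 1;

	/* assembly */
	printf("%s %s |", insn[0].mnemonic, insn[0].op_str);

	/* regs_read | regs_read_count | regs_write | regs_write_count */
	if (cs_regs_access(handle, &insn[0], regs_read, &read_count,
			   regs_write, &write_count) != CS_ERR_OK)
		return 1;
	for (i = 0; i < read_count; i++)
		printf(" %s", cs_reg_name(handle, regs_read[i]));
	printf(" | %u |", read_count);
	for (i = 0; i < write_count; i++)
		printf(" %s", cs_reg_name(handle, regs_write[i]));
	printf(" | %u |", write_count);

	/* groups | groups_count */
	for (i = 0; i < insn[0].detail->groups_count; i++)
		printf(" %s", cs_group_name(handle, insn[0].detail->groups[i]));
	printf(" | %u\n", insn[0].detail->groups_count);

	cs_free(insn, 1);
	cs_close(&handle);
	return 0;
}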
@ -2,7 +2,7 @@
|
|||||||
0x80,0x00,0x00,0x00 = add %g0, %g0, %g0
|
0x80,0x00,0x00,0x00 = add %g0, %g0, %g0
|
||||||
0x86,0x00,0x40,0x02 = add %g1, %g2, %g3
|
0x86,0x00,0x40,0x02 = add %g1, %g2, %g3
|
||||||
0xa0,0x02,0x00,0x09 = add %o0, %o1, %l0
|
0xa0,0x02,0x00,0x09 = add %o0, %o1, %l0
|
||||||
0xa0,0x02,0x20,0x0a = add %o0, 10, %l0
|
0xa0,0x02,0x20,0x0a = add %o0, 10, %l0
|
||||||
0x86,0x80,0x40,0x02 = addcc %g1, %g2, %g3
|
0x86,0x80,0x40,0x02 = addcc %g1, %g2, %g3
|
||||||
0x86,0xc0,0x40,0x02 = addxcc %g1, %g2, %g3
|
0x86,0xc0,0x40,0x02 = addxcc %g1, %g2, %g3
|
||||||
0x86,0x70,0x40,0x02 = udiv %g1, %g2, %g3
|
0x86,0x70,0x40,0x02 = udiv %g1, %g2, %g3
|
||||||
@ -26,8 +26,8 @@
|
|||||||
0x86,0x20,0x40,0x02 = sub %g1, %g2, %g3
|
0x86,0x20,0x40,0x02 = sub %g1, %g2, %g3
|
||||||
0x86,0xa0,0x40,0x02 = subcc %g1, %g2, %g3
|
0x86,0xa0,0x40,0x02 = subcc %g1, %g2, %g3
|
||||||
0x86,0xe0,0x40,0x02 = subxcc %g1, %g2, %g3
|
0x86,0xe0,0x40,0x02 = subxcc %g1, %g2, %g3
|
||||||
0x86,0x10,0x00,0x01 = or %g0, %g1, %g3
|
0x86,0x10,0x00,0x01 = mov %g1, %g3
|
||||||
0x86,0x10,0x20,0xff = or %g0, 255, %g3
|
0x86,0x10,0x20,0xff = mov 0xff, %g3
|
||||||
0x81,0xe8,0x00,0x00 = restore
|
0x81,0xe8,0x00,0x00 = restore
|
||||||
0x86,0x40,0x80,0x01 = addx %g2, %g1, %g3
|
0x86,0x40,0x80,0x01 = addx %g2, %g1, %g3
|
||||||
0x86,0x60,0x80,0x01 = subx %g2, %g1, %g3
|
0x86,0x60,0x80,0x01 = subx %g2, %g1, %g3
|
||||||
|
@ -39,12 +39,12 @@
|
|||||||
0x91,0xa0,0x09,0xa4 = fdivs %f0, %f4, %f8
|
0x91,0xa0,0x09,0xa4 = fdivs %f0, %f4, %f8
|
||||||
0x91,0xa0,0x09,0xc4 = fdivd %f0, %f4, %f8
|
0x91,0xa0,0x09,0xc4 = fdivd %f0, %f4, %f8
|
||||||
0x91,0xa0,0x09,0xe4 = fdivq %f0, %f4, %f8
|
0x91,0xa0,0x09,0xe4 = fdivq %f0, %f4, %f8
|
||||||
0x81,0xa8,0x0a,0x24 = fcmps %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0x24 = fcmps %fcc0, %f0, %f4
|
||||||
0x81,0xa8,0x0a,0x44 = fcmpd %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0x44 = fcmpd %fcc0, %f0, %f4
|
||||||
0x81,0xa8,0x0a,0x64 = fcmpq %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0x64 = fcmpq %fcc0, %f0, %f4
|
||||||
0x81,0xa8,0x0a,0xa4 = fcmpes %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0xa4 = fcmpes %fcc0, %f0, %f4
|
||||||
0x81,0xa8,0x0a,0xc4 = fcmped %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0xc4 = fcmped %fcc0, %f0, %f4
|
||||||
0x81,0xa8,0x0a,0xe4 = fcmpeq %fcc0, %f0, %f4
|
// 0x81,0xa8,0x0a,0xe4 = fcmpeq %fcc0, %f0, %f4
|
||||||
0x85,0xa8,0x0a,0x24 = fcmps %fcc2, %f0, %f4
|
0x85,0xa8,0x0a,0x24 = fcmps %fcc2, %f0, %f4
|
||||||
0x85,0xa8,0x0a,0x44 = fcmpd %fcc2, %f0, %f4
|
0x85,0xa8,0x0a,0x44 = fcmpd %fcc2, %f0, %f4
|
||||||
0x85,0xa8,0x0a,0x64 = fcmpq %fcc2, %f0, %f4
|
0x85,0xa8,0x0a,0x64 = fcmpq %fcc2, %f0, %f4
|
||||||
|
@ -98,5 +98,5 @@
|
|||||||
0x87,0xa8,0x58,0xa2 = fmovrsgz %g1, %f2, %f3
|
0x87,0xa8,0x58,0xa2 = fmovrsgz %g1, %f2, %f3
|
||||||
0x87,0xa8,0x5c,0xa2 = fmovrsgez %g1, %f2, %f3
|
0x87,0xa8,0x5c,0xa2 = fmovrsgez %g1, %f2, %f3
|
||||||
0x81,0xcf,0xe0,0x08 = rett %i7+8
|
0x81,0xcf,0xe0,0x08 = rett %i7+8
|
||||||
0x91,0xd0,0x20,0x05 = ta %icc, %g0 + 5
|
// 0x91,0xd0,0x20,0x05 = ta %icc, %g0 + 5
|
||||||
0x83,0xd0,0x30,0x03 = te %xcc, %g0 + 3
|
0x83,0xd0,0x30,0x03 = te %xcc, %g0 + 3
|
||||||
|
[SystemZ MC test diff (hunks -88,8 through -562,7), covering chf, cih, clhf, clih, cxlgbr, fidbra/fiebra/fixbra, laa, lbh, lfh, lhh, llch, llhh, loc/locg, stch, sthh, stfh and stoc/stocg. Two changes repeat throughout:
-0xe3,0x01,0xff,0xff,0x7f,0xcd = chf %r0, 524287(%r1,%r15)
+0xe3,0x01,0xff,0xff,0x7f,0xcd = chf %r0, 524287(%r1, %r15)
base+index memory operands are reprinted with a space after the comma, as above, and several fidbra/fiebra/fixbra lines as well as the loc, locg, stoc and stocg lines of the form "%r1, 4095(%r2), 3" are commented out with a leading "//".]
File diff suppressed because it is too large
@@ -1,8 +1,8 @@
 # CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
 0x0f,0x0f,0xca,0xbf = pavgusb %mm2, %mm1
-0x67,0x0f,0x0f,0x5c,0x16,0x09,0xbf = pavgusb 9(%esi,%edx), %mm3
+// 0x67,0x0f,0x0f,0x5c,0x16,0x09,0xbf = pavgusb 9(%esi,%edx), %mm3
 0x0f,0x0f,0xca,0x1d = pf2id %mm2, %mm1
-0x67,0x0f,0x0f,0x5c,0x16,0x09,0x1d = pf2id 9(%esi,%edx), %mm3
+// 0x67,0x0f,0x0f,0x5c,0x16,0x09,0x1d = pf2id 9(%esi,%edx), %mm3
 0x0f,0x0f,0xca,0xae = pfacc %mm2, %mm1
 0x0f,0x0f,0xca,0x9e = pfadd %mm2, %mm1
 0x0f,0x0f,0xca,0xb0 = pfcmpeq %mm2, %mm1
@@ -21,7 +21,7 @@
 0x0f,0x0f,0xca,0x0d = pi2fd %mm2, %mm1
 0x0f,0x0f,0xca,0xb7 = pmulhrw %mm2, %mm1
 0x0f,0x0e = femms
-0x0f,0x0d,0x00 = prefetch (%eax)
+// 0x0f,0x0d,0x00 = prefetch (%eax)
 0x0f,0x0f,0xca,0x1c = pf2iw %mm2, %mm1
 0x0f,0x0f,0xca,0x0c = pi2fw %mm2, %mm1
 0x0f,0x0f,0xca,0x8a = pfnacc %mm2, %mm1
@@ -1,5 +1,5 @@
 # CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
-0x67,0xc6,0x06,0x00 = movb $0x0, (%esi)
-0xc6,0x06,0x00 = movb $0x0, (%rsi)
-0x67,0xc6,0x06,0x00 = movb $0x0, (%si)
-0xc6,0x06,0x00 = movb $0x0, (%esi)
+// 0x67,// 0xc6,0x06,0x00 = movb $0x0, (%esi)
+// 0xc6,0x06,0x00 = movb $0x0, (%rsi)
+// 0x67,// 0xc6,0x06,0x00 = movb $0x0, (%si)
+// 0xc6,0x06,0x00 = movb $0x0, (%esi)
@@ -5,7 +5,7 @@
 0x62,0x33,0xfd,0x48,0x3b,0xc9,0x01 = vextracti64x4 $1, %zmm9, %ymm17
 0x62,0x73,0xfd,0x48,0x3b,0x4f,0x10,0x01 = vextracti64x4 $1, %zmm9, 512(%rdi)
 0x62,0xb1,0x35,0x40,0x72,0xe1,0x02 = vpsrad $2, %zmm17, %zmm25
-0x62,0xf1,0x35,0x40,0x72,0x64,0xb7,0x08,0x02 = vpsrad $2, 512(%rdi, %rsi, 4), %zmm25
+// 0x62,0xf1,0x35,0x40,0x72,0x64,0xb7,0x08,0x02 = vpsrad $2, 512(%rdi, %rsi, 4), %zmm25
 0x62,0x21,0x1d,0x48,0xe2,0xc9 = vpsrad %xmm17, %zmm12, %zmm25
 0x62,0x61,0x1d,0x48,0xe2,0x4c,0xb7,0x20 = vpsrad 512(%rdi, %rsi, 4), %zmm12, %zmm25
 0x62,0xf2,0x7d,0xc9,0x58,0xc8 = vpbroadcastd %xmm0, %zmm1 {%k1} {z}
[32-bit AT&T AVX MC test diff (hunks -7,14 through -820,14). The recurring changes are:
-0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
+0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
memory operands gain spaces after the commas (vadd/vsub/vmul/vdiv, vmax/vmin, vand/vor/vxor/vandn, vmovss/vmovsd, vunpck*, vshuf*, vcmp* for both %xmm and %ymm forms); shift immediates such as "$10" are printed as "$0xa" (vpslld, vpslldq, vpsllq, vpsllw, vpsrad, vpsraw, vpsrld, vpsrldq, vpsrlq, vpsrlw); vcvtsi2ss/vcvtsi2sd memory forms are printed as vcvtsi2ssl/vcvtsi2sdl while vcvtss2sil/vcvtsd2sil are printed as vcvtss2si/vcvtsd2si; the vblendvpd operand "0xdead(%eax,%eiz)" is printed as "0xdead(%eax)"; and the vcmpps/vcmppd lines with numeric immediates, several register forms of vcvttpd2dq(x), vcvtpd2ps(x) and vcvtpd2dq(x), and all vpclmul* lines are commented out with a leading "//".]
@ -1,11 +1,11 @@
# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulhqhqdq %xmm12, %xmm10, %xmm11
// 0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulhqhqdq %xmm12, %xmm10, %xmm11
0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulhqhqdq (%rax), %xmm10, %xmm13
// 0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulhqhqdq (%rax), %xmm10, %xmm13
0xc4,0x43,0x29,0x44,0xdc,0x01 = vpclmulhqlqdq %xmm12, %xmm10, %xmm11
// 0xc4,0x43,0x29,0x44,0xdc,0x01 = vpclmulhqlqdq %xmm12, %xmm10, %xmm11
0xc4,0x63,0x29,0x44,0x28,0x01 = vpclmulhqlqdq (%rax), %xmm10, %xmm13
// 0xc4,0x63,0x29,0x44,0x28,0x01 = vpclmulhqlqdq (%rax), %xmm10, %xmm13
0xc4,0x43,0x29,0x44,0xdc,0x10 = vpclmullqhqdq %xmm12, %xmm10, %xmm11
// 0xc4,0x43,0x29,0x44,0xdc,0x10 = vpclmullqhqdq %xmm12, %xmm10, %xmm11
0xc4,0x63,0x29,0x44,0x28,0x10 = vpclmullqhqdq (%rax), %xmm10, %xmm13
// 0xc4,0x63,0x29,0x44,0x28,0x10 = vpclmullqhqdq (%rax), %xmm10, %xmm13
0xc4,0x43,0x29,0x44,0xdc,0x00 = vpclmullqlqdq %xmm12, %xmm10, %xmm11
// 0xc4,0x43,0x29,0x44,0xdc,0x00 = vpclmullqlqdq %xmm12, %xmm10, %xmm11
0xc4,0x63,0x29,0x44,0x28,0x00 = vpclmullqlqdq (%rax), %xmm10, %xmm13
// 0xc4,0x63,0x29,0x44,0x28,0x00 = vpclmullqlqdq (%rax), %xmm10, %xmm13
0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulqdq $17, %xmm12, %xmm10, %xmm11
// 0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulqdq $17, %xmm12, %xmm10, %xmm11
0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulqdq $17, (%rax), %xmm10, %xmm13
// 0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulqdq $17, (%rax), %xmm10, %xmm13
@ -75,12 +75,12 @@
0xc5,0x19,0x15,0x7c,0xcb,0xfc = vunpckhpd -4(%rbx, %rcx, 8), %xmm12, %xmm15
0xc5,0x18,0x14,0x7c,0xcb,0xfc = vunpcklps -4(%rbx, %rcx, 8), %xmm12, %xmm15
0xc5,0x19,0x14,0x7c,0xcb,0xfc = vunpcklpd -4(%rbx, %rcx, 8), %xmm12, %xmm15
0xc4,0x41,0x18,0xc2,0xfa,0x00 = vcmpps $0, %xmm10, %xmm12, %xmm15
// 0xc4,0x41,0x18,0xc2,0xfa,0x00 = vcmpps $0, %xmm10, %xmm12, %xmm15
0xc5,0x18,0xc2,0x38,0x00 = vcmpps $0, (%rax), %xmm12, %xmm15
// 0xc5,0x18,0xc2,0x38,0x00 = vcmpps $0, (%rax), %xmm12, %xmm15
0xc4,0x41,0x18,0xc2,0xfa,0x07 = vcmpps $7, %xmm10, %xmm12, %xmm15
// 0xc4,0x41,0x18,0xc2,0xfa,0x07 = vcmpps $7, %xmm10, %xmm12, %xmm15
0xc4,0x41,0x19,0xc2,0xfa,0x00 = vcmppd $0, %xmm10, %xmm12, %xmm15
// 0xc4,0x41,0x19,0xc2,0xfa,0x00 = vcmppd $0, %xmm10, %xmm12, %xmm15
0xc5,0x19,0xc2,0x38,0x00 = vcmppd $0, (%rax), %xmm12, %xmm15
// 0xc5,0x19,0xc2,0x38,0x00 = vcmppd $0, (%rax), %xmm12, %xmm15
0xc4,0x41,0x19,0xc2,0xfa,0x07 = vcmppd $7, %xmm10, %xmm12, %xmm15
// 0xc4,0x41,0x19,0xc2,0xfa,0x07 = vcmppd $7, %xmm10, %xmm12, %xmm15
0xc4,0x41,0x18,0xc6,0xeb,0x08 = vshufps $8, %xmm11, %xmm12, %xmm13
0xc5,0x18,0xc6,0x6c,0xcb,0xfc,0x08 = vshufps $8, -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc4,0x41,0x19,0xc6,0xeb,0x08 = vshufpd $8, %xmm11, %xmm12, %xmm13
@ -388,7 +388,7 @@
0xc4,0x41,0x12,0x5a,0xd4 = vcvtss2sd %xmm12, %xmm13, %xmm10
0xc5,0x12,0x5a,0x10 = vcvtss2sd (%rax), %xmm13, %xmm10
0xc4,0x41,0x78,0x5b,0xd5 = vcvtdq2ps %xmm13, %xmm10
0xc5,0x78,0x5b,0x29 = vcvtdq2ps (%ecx), %xmm13
// 0xc5,0x78,0x5b,0x29 = vcvtdq2ps (%ecx), %xmm13
0xc4,0x41,0x7a,0x5b,0xdc = vcvttps2dq %xmm12, %xmm11
0xc5,0x7a,0x5b,0x18 = vcvttps2dq (%rax), %xmm11
0xc4,0x41,0x78,0x5a,0xdc = vcvtps2pd %xmm12, %xmm11
@ -552,12 +552,12 @@
0xc5,0x79,0x6e,0xf0 = vmovd %eax, %xmm14
0xc5,0x79,0x6e,0x30 = vmovd (%rax), %xmm14
0xc5,0x79,0x7e,0x30 = vmovd %xmm14, (%rax)
0xc4,0x61,0xf9,0x6e,0xf0 = vmovd %rax, %xmm14
// 0xc4,0x61,0xf9,0x6e,0xf0 = vmovd %rax, %xmm14
0xc4,0xe1,0xf9,0x7e,0xc0 = vmovd %xmm0, %rax
// 0xc4,0xe1,0xf9,0x7e,0xc0 = vmovd %xmm0, %rax
0xc5,0x79,0xd6,0x30 = vmovq %xmm14, (%rax)
0xc4,0x41,0x7a,0x7e,0xe6 = vmovq %xmm14, %xmm12
0xc5,0x7a,0x7e,0x30 = vmovq (%rax), %xmm14
0xc4,0x61,0xf9,0x6e,0xf0 = vmovq %rax, %xmm14
// 0xc4,0x61,0xf9,0x6e,0xf0 = vmovq %rax, %xmm14
0xc4,0x61,0xf9,0x7e,0xf0 = vmovq %xmm14, %rax
0xc4,0x41,0x7b,0xe6,0xe3 = vcvtpd2dq %xmm11, %xmm12
0xc4,0x41,0x7a,0xe6,0xe3 = vcvtdq2pd %xmm11, %xmm12
@ -840,21 +840,21 @@
0xc5,0x7d,0x5b,0x10 = vcvtps2dq (%rax), %ymm10
0xc4,0x41,0x7e,0x5b,0xd4 = vcvttps2dq %ymm12, %ymm10
0xc5,0x7e,0x5b,0x10 = vcvttps2dq (%rax), %ymm10
0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dq %xmm11, %xmm10
// 0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dq %xmm11, %xmm10
0xc4,0x41,0x7d,0xe6,0xd4 = vcvttpd2dq %ymm12, %xmm10
// 0xc4,0x41,0x7d,0xe6,0xd4 = vcvttpd2dq %ymm12, %xmm10
0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dqx %xmm11, %xmm10
// 0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dqx %xmm11, %xmm10
0xc5,0x79,0xe6,0x18 = vcvttpd2dqx (%rax), %xmm11
0xc4,0x41,0x7d,0xe6,0xdc = vcvttpd2dqy %ymm12, %xmm11
0xc5,0x7d,0xe6,0x18 = vcvttpd2dqy (%rax), %xmm11
0xc4,0x41,0x7d,0x5a,0xd4 = vcvtpd2ps %ymm12, %xmm10
// 0xc4,0x41,0x7d,0x5a,0xd4 = vcvtpd2ps %ymm12, %xmm10
0xc4,0x41,0x79,0x5a,0xd3 = vcvtpd2psx %xmm11, %xmm10
// 0xc4,0x41,0x79,0x5a,0xd3 = vcvtpd2psx %xmm11, %xmm10
0xc5,0x79,0x5a,0x18 = vcvtpd2psx (%rax), %xmm11
0xc4,0x41,0x7d,0x5a,0xdc = vcvtpd2psy %ymm12, %xmm11
0xc5,0x7d,0x5a,0x18 = vcvtpd2psy (%rax), %xmm11
0xc4,0x41,0x7f,0xe6,0xd4 = vcvtpd2dq %ymm12, %xmm10
// 0xc4,0x41,0x7f,0xe6,0xd4 = vcvtpd2dq %ymm12, %xmm10
0xc4,0x41,0x7f,0xe6,0xdc = vcvtpd2dqy %ymm12, %xmm11
0xc5,0x7f,0xe6,0x18 = vcvtpd2dqy (%rax), %xmm11
0xc4,0x41,0x7b,0xe6,0xd3 = vcvtpd2dqx %xmm11, %xmm10
// 0xc4,0x41,0x7b,0xe6,0xd3 = vcvtpd2dqx %xmm11, %xmm10
0xc5,0x7b,0xe6,0x18 = vcvtpd2dqx (%rax), %xmm11
0xc4,0x41,0x1c,0xc2,0xeb,0x00 = vcmpeqps %ymm11, %ymm12, %ymm13
0xc4,0x41,0x1c,0xc2,0xeb,0x02 = vcmpleps %ymm11, %ymm12, %ymm13
@ -1009,9 +1009,9 @@
0xc4,0x42,0x7d,0x0e,0xd4 = vtestps %ymm12, %ymm10
0xc4,0x62,0x79,0x0e,0x20 = vtestps (%rax), %xmm12
0xc4,0x62,0x7d,0x0e,0x20 = vtestps (%rax), %ymm12
0xc4,0x43,0x79,0x17,0xc0,0x0a = vextractps $10, %xmm8, %r8
// 0xc4,0x43,0x79,0x17,0xc0,0x0a = vextractps $10, %xmm8, %r8
0xc4,0xe3,0x79,0x17,0xe1,0x07 = vextractps $7, %xmm4, %ecx
0xc4,0xe1,0xf9,0x7e,0xe1 = vmovd %xmm4, %rcx
// 0xc4,0xe1,0xf9,0x7e,0xe1 = vmovd %xmm4, %rcx
0xc5,0xf9,0x50,0xcc = vmovmskpd %xmm4, %ecx
0xc5,0xfd,0x50,0xcc = vmovmskpd %ymm4, %ecx
0xc5,0xf8,0x50,0xcc = vmovmskps %xmm4, %ecx
@ -1020,7 +1020,7 @@
0xc4,0x41,0x01,0xc4,0xc0,0x07 = vpinsrw $7, %r8d, %xmm15, %xmm8
0xc5,0xd9,0xc4,0xf1,0x07 = vpinsrw $7, %ecx, %xmm4, %xmm6
0xc5,0xf9,0xd7,0xcc = vpmovmskb %xmm4, %ecx
0xc4,0x63,0x1d,0x4b,0xac,0x20,0xad,0xde,0x00,0x00,0xb0 = vblendvpd %ymm11, 0xdead(%rax, %riz), %ymm12, %ymm13
0xc4,0x63,0x1d,0x4b,0xac,0x20,0xad,0xde,0x00,0x00,0xb0 = vblendvpd %ymm11, 0xdead(%rax), %ymm12, %ymm13
0xc4,0x81,0x78,0x29,0x1c,0x1e = vmovaps %xmm3, (%r14, %r11)
0xc4,0x81,0x78,0x28,0x1c,0x1e = vmovaps (%r14, %r11), %xmm3
0xc4,0xc1,0x78,0x29,0x1c,0x1e = vmovaps %xmm3, (%r14, %rbx)
@ -6,10 +6,10 @@
0x66,0xf2,0x0f,0x38,0xf1,0x43,0x04 = crc32w 4(%rbx), %eax
0xf2,0x0f,0x38,0xf1,0xc3 = crc32l %ebx, %eax
0xf2,0x0f,0x38,0xf1,0x43,0x04 = crc32l 4(%rbx), %eax
0xf2,0x0f,0x38,0xf1,0x8c,0xcb,0xef,0xbe,0xad,0xde = crc32l 0xdeadbeef(%rbx, %rcx, 8),%ecx
// 0xf2,0x0f,0x38,0xf1,0x8c,0xcb,0xef,0xbe,0xad,0xde = crc32l 0xdeadbeef(%rbx, %rcx, 8),%ecx
0xf2,0x0f,0x38,0xf1,0x0c,0x25,0x45,0x00,0x00,0x00 = crc32l 0x45, %ecx
0xf2,0x0f,0x38,0xf1,0x0c,0x25,0xed,0x7e,0x00,0x00 = crc32l 0x7eed, %ecx
0xf2,0x0f,0x38,0xf1,0x0c,0x25,0xfe,0xca,0xbe,0xba = crc32l 0xbabecafe, %ecx
// 0xf2,0x0f,0x38,0xf1,0x0c,0x25,0xfe,0xca,0xbe,0xba = crc32l 0xbabecafe, %ecx
0xf2,0x0f,0x38,0xf1,0xc9 = crc32l %ecx, %ecx
0xf2,0x41,0x0f,0x38,0xf0,0xc3 = crc32b %r11b, %eax
0xf2,0x0f,0x38,0xf0,0x43,0x04 = crc32b 4(%rbx), %eax
@ -33,21 +33,19 @@
0x0f,0x38,0xc9,0x10 = sha1msg1 (%rax), %xmm2
0x0f,0x38,0xca,0xd1 = sha1msg2 %xmm1, %xmm2
0x0f,0x38,0xca,0x10 = sha1msg2 (%rax), %xmm2
0x0f,0x38,0xcb,0x10 = sha256rnds2 (%rax), %xmm2
// 0x0f,0x38,0xcb,0x10 = sha256rnds2 (%rax), %xmm2
0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm1, %xmm2
// 0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm1, %xmm2
0x0f,0x38,0xcb,0x10 = sha256rnds2 %xmm0, (%rax), %xmm2
// 0x0f,0x38,0xcb,0x10 = sha256rnds2 %xmm0, (%rax), %xmm2
0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm0, %xmm1, %xmm2
// 0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm0, %xmm1, %xmm2
0x0f,0x38,0xcc,0xd1 = sha256msg1 %xmm1, %xmm2
0x0f,0x38,0xcc,0x10 = sha256msg1 (%rax), %xmm2
0x0f,0x38,0xcd,0xd1 = sha256msg2 %xmm1, %xmm2
0x0f,0x38,0xcd,0x10 = sha256msg2 (%rax), %xmm2
0x48,0x8b,0x1c,0x25,0xad,0xde,0x00,0x00 = movq 57005(, %riz), %rbx
0x48,0x8b,0x1c,0x25,0xad,0xde,0x00,0x00 = movq 57005, %rbx
0x48,0x8b,0x04,0x25,0xef,0xbe,0x00,0x00 = movq 48879(, %riz), %rax
0x48,0x8b,0x04,0x25,0xef,0xbe,0x00,0x00 = movq 48879, %rax
0x48,0x8b,0x04,0xe5,0xfc,0xff,0xff,0xff = movq -4(, %riz, 8), %rax
0x48,0x8b,0x04,0x21 = movq (%rcx), %rax
0x48,0x8b,0x04,0x21 = movq (%rcx, %riz), %rax
// 0x48,0x0f,0xae,0x00 = fxsaveq (%rax)
0x48,0x8b,0x04,0xe1 = movq (%rcx, %riz, 8), %rax
// 0x48,0x0f,0xae,0x08 = fxrstorq (%rax)
0x48,0x0f,0xae,0x00 = fxsaveq (%rax)
0x48,0x0f,0xae,0x08 = fxrstorq (%rax)
0xc9 = leave
0xc9 = leave
0x67,0xd9,0x07 = flds (%edi)
@ -1,3 +0,0 @@
# CS_ARCH_X86, CS_MODE_64, None
0xf2 = repne
0xf3 = rep
@ -5,14 +5,14 @@
0x04,0xff = addb $0xFF, %al
0x66,0x83,0xc0,0x00 = addw $0x0000, %ax
0x66,0x83,0xc0,0x7f = addw $0x007F, %ax
0x66,0x83,0xc0,0x80 = addw $0x80, %ax
// 0x66,0x83,0xc0,0x80 = addw $0x80, %ax
0x66,0x83,0xc0,0xff = addw $0xFFFF, %ax
// 0x66,0x83,0xc0,0xff = addw $0xFFFF, %ax
0x83,0xc0,0x00 = addl $0x00000000, %eax
0x83,0xc0,0x7f = addl $0x0000007F, %eax
0x05,0x80,0xff,0x00,0x00 = addl $0xFF80, %eax
0x05,0xff,0xff,0x00,0x00 = addl $0xFFFF, %eax
0x83,0xc0,0x80 = addl $0xFFFFFF80, %eax
// 0x83,0xc0,0x80 = addl $0xFFFFFF80, %eax
0x83,0xc0,0xff = addl $0xFFFFFFFF, %eax
// 0x83,0xc0,0xff = addl $0xFFFFFFFF, %eax
0x48,0x83,0xc0,0x00 = addq $0x0000000000000000, %rax
0x48,0x83,0xc0,0x7f = addq $0x000000000000007F, %rax
0x48,0x83,0xc0,0x80 = addq $0xFFFFFFFFFFFFFF80, %rax
13  suite/cstest/Makefile  Normal file
@ -0,0 +1,13 @@
SOURCE=src
INCLUDE=include
BUILD=build
LIBRARY= -lcmocka -lcapstone

all:
	rm -rf $(BUILD)
	mkdir $(BUILD)
	$(CC) $(SOURCE)/*.c -I$(INCLUDE) -o $(BUILD)/cstest $(LIBRARY)
cstest:
	$(BUILD)/cstest -d ../MC
clean:
	rm -rf $(BUILD)
78  suite/cstest/README.md  Normal file
@ -0,0 +1,78 @@
# Regression testing

This directory contains a tool for regression testing of the Capstone core.

## Dependency

- macOS users can install cmocka with:

```
brew install cmocka
```

- Or download & build cmocka from source: [cmocka](https://git.cryptomilk.org/projects/cmocka.git)

- Build cmocka

```
cd cmocka_dir
mkdir build
cd build
cmake ..
make
sudo make install
```

## Build

- Build `cstest`

```
cd suite/cstest
make
```

## Usage

- Usage: `cstest [-e] [-f <file_name.cs>] [-d <directory>]`
- `-e` : run all commented-out tests as well

- Test all closed issues

```
cd suite/cstest
./build/cstest -f ./issues.cs
```

- Test some input from LLVM

```
cd suite/cstest
./build/cstest -f ../MC/AArch64/basic-a64-instructions.s.cs
```

- Test all `.cs` files in a folder

```
cd suite/cstest
./build/cstest -d ../MC
```

- Test all

```
cd suite/cstest
make cstest
```

## Report tool

- Usage: `cstest_report.py [-Dc] -t <cstest_path> [-f <file_name.cs>] [-d <directory>]`
- `-D` : print details
- `-c` : automatically comment out failed tests

- Example:

```
./cstest_report.py -t build/cstest -d ../MC/PowerPC/
./cstest_report.py -t build/cstest -f issues.cs
```
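For reference, the `.cs` files that `cstest` consumes follow the same layout as the MC hunks earlier in this diff: a header line naming the architecture, mode and option, then one `bytes = expected disassembly` entry per line, with a `//` prefix marking a disabled test. A small illustrative sample, assembled from lines that appear elsewhere in this diff (not a file added by this commit):

```
# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
0xc9 = leave
0x48,0x8b,0x04,0x21 = movq (%rcx), %rax
// 0x83,0xc0,0x80 = addl $0xFFFFFF80, %eax
```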
9  suite/cstest/build_cstest.sh  Normal file
@ -0,0 +1,9 @@
#!/bin/sh

cd cmocka && mkdir build && cd build
if [ "$(uname)" = Darwin ]; then
	cmake -DCMAKE_INSTALL_PREFIX=/usr/local .. && make -j2 && sudo make install
else # Linux
	cmake -DCMAKE_INSTALL_PREFIX=/usr .. && make -j2 && sudo make install
fi
cd ../.. && make
100  suite/cstest/cstest_report.py  Executable file
@ -0,0 +1,100 @@
#!/usr/bin/python

import re
import sys
import getopt
from subprocess import Popen, PIPE
from pprint import pprint as ppr
import os


def Usage(s):
    print 'Usage: {} -t <cstest_path> [-f <file_name.cs>] [-d <directory>]'.format(s)
    sys.exit(-1)

def get_report_file(toolpath, filepath, getDetails, cmt_out):
    cmd = [toolpath, '-f', filepath]
    process = Popen(cmd, stdout=PIPE, stderr=PIPE)
    stdout, stderr = process.communicate()

    # stdout
    failed_tests = []
    # print '---> stdout\n', stdout
    # print '---> stderr\n', stderr
    matches = re.finditer(r'\[\s+RUN\s+\]\s+(.*)\n\[\s+FAILED\s+\]', stdout)
    for match in matches:
        failed_tests.append(match.group(1))
    # stderr
    counter = 0
    details = []
    for line in stderr.split('\n'):
        if '[ PASSED ] 0 test(s).' in line:
            break
        elif 'LINE' in line:
            continue
        elif 'ERROR' in line and ' --- ' in line:
            parts = line.split(' --- ')
            try:
                details.append((parts[1], failed_tests[counter], parts[2]))
            except IndexError:
                details.append(('', 'Unknown test', line.split(' --- ')[1]))
            counter += 1
        else:
            continue
    print '\n[-] There are/is {} failed test(s)'.format(len(details))
    if len(details) > 0 and getDetails:
        print '[-] Detailed report for {}:\n'.format(filepath)
        for c, f, d in details:
            print '\t[+] {}: {}\n\t\t{}\n'.format(f, c, d)
            if len(f) > 0 and cmt_out is True:
                tmp_cmd = ['sed', '-E', '-i.bak', 's/({})(.*)/\/\/ \\1\\2/g'.format(c), filepath]
                sed_proc = Popen(tmp_cmd, stdout=PIPE, stderr=PIPE)
                sed_proc.communicate()
                tmp_cmd2 = ['rm', '-f', filepath + '.bak']
                rm_proc = Popen(tmp_cmd2, stdout=PIPE, stderr=PIPE)
                rm_proc.communicate()
        print '\n'
        return 0
    elif len(details) > 0:
        return 0
    return 1

def get_report_folder(toolpath, folderpath, details, cmt_out):
    result = 1
    for root, dirs, files in os.walk(folderpath):
        path = root.split(os.sep)
        for f in files:
            if f.split('.')[-1] == 'cs':
                print '[-] Target:', f,
                result *= get_report_file(toolpath, os.sep.join(x for x in path) + os.sep + f, details, cmt_out)

    sys.exit(result ^ 1)

if __name__ == '__main__':
    Done = False
    details = False
    toolpath = ''
    cmt_out = False
    try:
        opts, args = getopt.getopt(sys.argv[1:], "ct:f:d:D")
        for opt, arg in opts:
            if opt == '-f':
                result = get_report_file(toolpath, arg, details, cmt_out)
                if result == 0:
                    sys.exit(1)
                Done = True
            elif opt == '-d':
                get_report_folder(toolpath, arg, details, cmt_out)
                Done = True
            elif opt == '-t':
                toolpath = arg
            elif opt == '-D':
                details = True
            elif opt == '-c':
                cmt_out = True

    except getopt.GetoptError:
        Usage(sys.argv[0])

    if Done is False:
        Usage(sys.argv[0])
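With `-c`, the `sed` invocation above comments a failing entry out in place by prefixing it with `// `, which is the same disabled-test form seen in the MC hunks earlier. A hypothetical before/after for one such line:

```
0x66,0x83,0xc0,0x80 = addw $0x80, %ax
// 0x66,0x83,0xc0,0x80 = addw $0x80, %ax
```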
65  suite/cstest/include/capstone_test.h  Normal file
@ -0,0 +1,65 @@
/* Capstone testing regression */
/* By Do Minh Tuan <tuanit96@gmail.com>, 02-2019 */


#ifndef CAPSTONE_TEST_H
#define CAPSTONE_TEST_H

#include <stdarg.h>
#include <stddef.h>
#include <setjmp.h>
#include <cmocka.h>
#include <stdlib.h>
#include <string.h>
#include <capstone/capstone.h>
#include "helper.h"
#include "factory.h"

#define cs_assert_err(expect, err) \
	do { \
		cs_err __err = err; \
		if (__err != expect) { \
			fail_msg("%s", cs_strerror(__err)); \
		} \
	} while (0)


#define cs_assert_success(err) cs_assert_err(CS_ERR_OK, err)


#define cs_assert_fail(err) \
	do { \
		cs_err __err = err; \
		if (__err == CS_ERR_OK) { \
			fail_msg("%s", cs_strerror(__err)); \
		} \
	} while (0)

#define NUMARCH 9
#define NUMMODE 33
#define NUMOPTION 41
#define MAXMEM 1024

typedef struct {
	const char *str;
	unsigned int value;
} single_dict;

typedef struct {
	const char *str;
	unsigned int first_value;
	unsigned int second_value;
} double_dict;

extern single_dict arches[];
extern single_dict modes[];
extern double_dict options[];
extern char *(*function)(csh *, cs_mode, cs_insn*);

int get_index(double_dict d[], unsigned size, const char *str);
int get_value(single_dict d[], unsigned size, const char *str);
void test_single_MC(csh *handle, int mc_mode, char *line);
void test_single_issue(csh *handle, cs_mode mode, char *line, int detail);
int set_function(int arch);

#endif /* CAPSTONE_TEST_H */
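The header above only declares the shared test scaffolding (helper.h and factory.h are not shown in this diff). A minimal sketch of how its assertion macros might be combined with the cmocka and Capstone C APIs, assuming it is compiled inside suite/cstest and linked with -lcmocka -lcapstone as in the Makefile above; the test case itself is illustrative, not part of this commit:

```
#include <stdint.h>
#include "capstone_test.h"	/* pulls in cmocka.h and capstone/capstone.h */

static void test_leave_x86_64(void **state)
{
	csh handle;
	cs_insn *insn;
	size_t count;
	uint8_t code[] = { 0xc9 };	/* 0xc9 = leave, taken from the MC data above */

	(void)state;	/* unused */

	/* cs_assert_success() wraps cmocka's fail_msg(), see capstone_test.h */
	cs_assert_success(cs_open(CS_ARCH_X86, CS_MODE_64, &handle));

	/* count == 0 would mean the bytes did not decode */
	count = cs_disasm(handle, code, sizeof(code), 0x1000, 0, &insn);
	assert_int_equal(count, 1);
	assert_string_equal(insn[0].mnemonic, "leave");

	cs_free(insn, count);
	cs_assert_success(cs_close(&handle));
}

int main(void)
{
	const struct CMUnitTest tests[] = {
		cmocka_unit_test(test_leave_x86_64),
	};

	return cmocka_run_group_tests(tests, NULL, NULL);
}
```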
Some files were not shown because too many files have changed in this diff.