From 79612653c3accf48c7081d314b4f4d3d4b97aad8 Mon Sep 17 00:00:00 2001 From: Rot127 Date: Thu, 24 Aug 2023 19:30:57 -0500 Subject: [PATCH] First turn of renaming to AArch64 --- librz/analysis/arch/arm/arm_accessors64.h | 28 +- librz/analysis/arch/arm/arm_cs.h | 2 +- librz/analysis/arch/arm/arm_esil64.c | 436 ++--- librz/analysis/arch/arm/arm_il64.c | 1796 ++++++++++----------- librz/analysis/p/analysis_arm_cs.c | 616 +++---- librz/asm/p/asm_arm_cs.c | 2 +- 6 files changed, 1438 insertions(+), 1442 deletions(-) diff --git a/librz/analysis/arch/arm/arm_accessors64.h b/librz/analysis/arch/arm/arm_accessors64.h index d1c1caee956..66471f9d746 100644 --- a/librz/analysis/arch/arm/arm_accessors64.h +++ b/librz/analysis/arch/arm/arm_accessors64.h @@ -9,25 +9,21 @@ #include -#define IMM64(x) (ut64)(insn->detail->arm64.operands[x].imm) -#define INSOP64(x) insn->detail->arm64.operands[x] +#define IMM64(x) (ut64)(insn->detail->aarch64.operands[x].imm) +#define INSOP64(x) insn->detail->aarch64.operands[x] -#define REGID64(x) insn->detail->arm64.operands[x].reg -#define REGBASE64(x) insn->detail->arm64.operands[x].mem.base +#define REGID64(x) insn->detail->aarch64.operands[x].reg +#define REGBASE64(x) insn->detail->aarch64.operands[x].mem.base // s/index/base|reg/ -#define HASMEMINDEX64(x) (insn->detail->arm64.operands[x].mem.index != ARM64_REG_INVALID) -#define MEMDISP64(x) (ut64) insn->detail->arm64.operands[x].mem.disp -#define ISIMM64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_IMM) -#define ISREG64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_REG) -#define ISMEM64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_MEM) +#define HASMEMINDEX64(x) (insn->detail->aarch64.operands[x].mem.index != AArch64_REG_INVALID) +#define MEMDISP64(x) (ut64) insn->detail->aarch64.operands[x].mem.disp +#define ISIMM64(x) (insn->detail->aarch64.operands[x].type & AArch64_OP_IMM) +#define ISREG64(x) (insn->detail->aarch64.operands[x].type & AArch64_OP_REG) +#define ISMEM64(x) 
(insn->detail->aarch64.operands[x].type & AArch64_OP_MEM) -#if CS_API_MAJOR > 3 -#define LSHIFT2_64(x) insn->detail->arm64.operands[x].shift.value -#else -#define LSHIFT2_64(x) 0 -#endif -#define OPCOUNT64() insn->detail->arm64.op_count +#define LSHIFT2_64(x) insn->detail->aarch64.operands[x].shift.value +#define OPCOUNT64() insn->detail->aarch64.op_count -#define ISWRITEBACK64() (insn->detail->arm64.writeback == true) +#define ISWRITEBACK64() (insn->detail->writeback == true) #define ISPREINDEX64() (((OPCOUNT64() == 2) && (ISMEM64(1)) && (ISWRITEBACK64())) || ((OPCOUNT64() == 3) && (ISMEM64(2)) && (ISWRITEBACK64()))) #define ISPOSTINDEX64() (((OPCOUNT64() == 3) && (ISIMM64(2)) && (ISWRITEBACK64())) || ((OPCOUNT64() == 4) && (ISIMM64(3)) && (ISWRITEBACK64()))) diff --git a/librz/analysis/arch/arm/arm_cs.h b/librz/analysis/arch/arm/arm_cs.h index 8bfaa543e50..8f83da607d5 100644 --- a/librz/analysis/arch/arm/arm_cs.h +++ b/librz/analysis/arch/arm/arm_cs.h @@ -13,7 +13,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a RZ_IPI bool rz_arm_cs_is_group_member(const cs_insn *insn, arm_insn_group feature); RZ_IPI const char *rz_arm32_cs_esil_prefix_cond(RzAnalysisOp *op, ARMCC_CondCodes cond_type); -RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_type); +RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, AArch64CC_CondCode cond_type); RZ_IPI RzILOpEffect *rz_arm_cs_32_il(csh *handle, cs_insn *insn, bool thumb); RZ_IPI RzAnalysisILConfig *rz_arm_cs_32_il_config(bool big_endian); diff --git a/librz/analysis/arch/arm/arm_esil64.c b/librz/analysis/arch/arm/arm_esil64.c index 30e04b51635..0b81fd6463d 100644 --- a/librz/analysis/arch/arm/arm_esil64.c +++ b/librz/analysis/arch/arm/arm_esil64.c @@ -7,75 +7,75 @@ #include "arm_cs.h" #include "arm_accessors64.h" -#define REG64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->arm64.operands[x].reg)) -#define MEMBASE64(x) 
rz_str_get_null(cs_reg_name(*handle, insn->detail->arm64.operands[x].mem.base)) -#define MEMINDEX64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->arm64.operands[x].mem.index)) +#define REG64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->aarch64.operands[x].reg)) +#define MEMBASE64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->aarch64.operands[x].mem.base)) +#define MEMINDEX64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->aarch64.operands[x].mem.index)) -RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_type) { +RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, AArch64CC_CondCode cond_type) { const char *close_cond[2]; close_cond[0] = ""; close_cond[1] = ",}"; int close_type = 0; switch (cond_type) { - case ARM64_CC_EQ: + case AArch64CC_EQ: close_type = 1; rz_strbuf_setf(&op->esil, "zf,?{,"); break; - case ARM64_CC_NE: + case AArch64CC_NE: close_type = 1; rz_strbuf_setf(&op->esil, "zf,!,?{,"); break; - case ARM64_CC_HS: + case AArch64CC_HS: close_type = 1; rz_strbuf_setf(&op->esil, "cf,?{,"); break; - case ARM64_CC_LO: + case AArch64CC_LO: close_type = 1; rz_strbuf_setf(&op->esil, "cf,!,?{,"); break; - case ARM64_CC_MI: + case AArch64CC_MI: close_type = 1; rz_strbuf_setf(&op->esil, "nf,?{,"); break; - case ARM64_CC_PL: + case AArch64CC_PL: close_type = 1; rz_strbuf_setf(&op->esil, "nf,!,?{,"); break; - case ARM64_CC_VS: + case AArch64CC_VS: close_type = 1; rz_strbuf_setf(&op->esil, "vf,?{,"); break; - case ARM64_CC_VC: + case AArch64CC_VC: close_type = 1; rz_strbuf_setf(&op->esil, "vf,!,?{,"); break; - case ARM64_CC_HI: + case AArch64CC_HI: close_type = 1; rz_strbuf_setf(&op->esil, "cf,zf,!,&,?{,"); break; - case ARM64_CC_LS: + case AArch64CC_LS: close_type = 1; rz_strbuf_setf(&op->esil, "cf,!,zf,|,?{,"); break; - case ARM64_CC_GE: + case AArch64CC_GE: close_type = 1; rz_strbuf_setf(&op->esil, "nf,vf,^,!,?{,"); break; - case ARM64_CC_LT: + case AArch64CC_LT: close_type = 1; 
rz_strbuf_setf(&op->esil, "nf,vf,^,?{,"); break; - case ARM64_CC_GT: + case AArch64CC_GT: // zf == 0 && nf == vf close_type = 1; rz_strbuf_setf(&op->esil, "zf,!,nf,vf,^,!,&,?{,"); break; - case ARM64_CC_LE: + case AArch64CC_LE: // zf == 1 || nf != vf close_type = 1; rz_strbuf_setf(&op->esil, "zf,nf,vf,^,|,?{,"); break; - case ARM64_CC_AL: + case AArch64CC_AL: // always executed break; default: @@ -84,39 +84,39 @@ RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_ return close_cond[close_type]; } -static int arm64_reg_width(int reg) { +static int aarch64_reg_width(int reg) { switch (reg) { - case ARM64_REG_W0: - case ARM64_REG_W1: - case ARM64_REG_W2: - case ARM64_REG_W3: - case ARM64_REG_W4: - case ARM64_REG_W5: - case ARM64_REG_W6: - case ARM64_REG_W7: - case ARM64_REG_W8: - case ARM64_REG_W9: - case ARM64_REG_W10: - case ARM64_REG_W11: - case ARM64_REG_W12: - case ARM64_REG_W13: - case ARM64_REG_W14: - case ARM64_REG_W15: - case ARM64_REG_W16: - case ARM64_REG_W17: - case ARM64_REG_W18: - case ARM64_REG_W19: - case ARM64_REG_W20: - case ARM64_REG_W21: - case ARM64_REG_W22: - case ARM64_REG_W23: - case ARM64_REG_W24: - case ARM64_REG_W25: - case ARM64_REG_W26: - case ARM64_REG_W27: - case ARM64_REG_W28: - case ARM64_REG_W29: - case ARM64_REG_W30: + case AArch64_REG_W0: + case AArch64_REG_W1: + case AArch64_REG_W2: + case AArch64_REG_W3: + case AArch64_REG_W4: + case AArch64_REG_W5: + case AArch64_REG_W6: + case AArch64_REG_W7: + case AArch64_REG_W8: + case AArch64_REG_W9: + case AArch64_REG_W10: + case AArch64_REG_W11: + case AArch64_REG_W12: + case AArch64_REG_W13: + case AArch64_REG_W14: + case AArch64_REG_W15: + case AArch64_REG_W16: + case AArch64_REG_W17: + case AArch64_REG_W18: + case AArch64_REG_W19: + case AArch64_REG_W20: + case AArch64_REG_W21: + case AArch64_REG_W22: + case AArch64_REG_W23: + case AArch64_REG_W24: + case AArch64_REG_W25: + case AArch64_REG_W26: + case AArch64_REG_W27: + case AArch64_REG_W28: + case 
AArch64_REG_W29: + case AArch64_REG_W30: return 32; break; default: @@ -125,20 +125,20 @@ static int arm64_reg_width(int reg) { return 64; } -static int decode_sign_ext(arm64_extender extender) { +static int decode_sign_ext(aarch64_extender extender) { switch (extender) { - case ARM64_EXT_UXTB: - case ARM64_EXT_UXTH: - case ARM64_EXT_UXTW: - case ARM64_EXT_UXTX: + case AArch64_EXT_UXTB: + case AArch64_EXT_UXTH: + case AArch64_EXT_UXTW: + case AArch64_EXT_UXTX: return 0; // nothing needs to be done for unsigned - case ARM64_EXT_SXTB: + case AArch64_EXT_SXTB: return 8; - case ARM64_EXT_SXTH: + case AArch64_EXT_SXTH: return 16; - case ARM64_EXT_SXTW: + case AArch64_EXT_SXTW: return 32; - case ARM64_EXT_SXTX: + case AArch64_EXT_SXTX: return 64; default: break; @@ -147,24 +147,24 @@ static int decode_sign_ext(arm64_extender extender) { return 0; } -#define EXT64(x) decode_sign_ext(insn->detail->arm64.operands[x].ext) +#define EXT64(x) decode_sign_ext(insn->detail->aarch64.operands[x].ext) -static const char *decode_shift_64(arm64_shifter shift) { +static const char *decode_shift_64(aarch64_shifter shift) { const char *E_OP_SR = ">>"; const char *E_OP_SL = "<<"; const char *E_OP_RR = ">>>"; const char *E_OP_VOID = ""; switch (shift) { - case ARM64_SFT_ASR: - case ARM64_SFT_LSR: + case AArch64_SFT_ASR: + case AArch64_SFT_LSR: return E_OP_SR; - case ARM64_SFT_LSL: - case ARM64_SFT_MSL: + case AArch64_SFT_LSL: + case AArch64_SFT_MSL: return E_OP_SL; - case ARM64_SFT_ROR: + case AArch64_SFT_ROR: return E_OP_RR; default: @@ -173,22 +173,22 @@ static const char *decode_shift_64(arm64_shifter shift) { return E_OP_VOID; } -#define DECODE_SHIFT64(x) decode_shift_64(insn->detail->arm64.operands[x].shift.type) +#define DECODE_SHIFT64(x) decode_shift_64(insn->detail->aarch64.operands[x].shift.type) static int regsize64(cs_insn *insn, int n) { - unsigned int reg = insn->detail->arm64.operands[n].reg; - if ((reg >= ARM64_REG_S0 && reg <= ARM64_REG_S31) || - (reg >= ARM64_REG_W0 && reg 
<= ARM64_REG_W30) || - reg == ARM64_REG_WZR) { + unsigned int reg = insn->detail->aarch64.operands[n].reg; + if ((reg >= AArch64_REG_S0 && reg <= AArch64_REG_S31) || + (reg >= AArch64_REG_W0 && reg <= AArch64_REG_W30) || + reg == AArch64_REG_WZR) { return 4; } - if (reg >= ARM64_REG_B0 && reg <= ARM64_REG_B31) { + if (reg >= AArch64_REG_B0 && reg <= AArch64_REG_B31) { return 1; } - if (reg >= ARM64_REG_H0 && reg <= ARM64_REG_H31) { + if (reg >= AArch64_REG_H0 && reg <= AArch64_REG_H31) { return 2; } - if (reg >= ARM64_REG_Q0 && reg <= ARM64_REG_Q31) { + if (reg >= AArch64_REG_Q0 && reg <= AArch64_REG_Q31) { return 16; } return 8; @@ -210,7 +210,7 @@ static void shifted_reg64_append(RzStrBuf *sb, csh *handle, cs_insn *insn, int n } if (LSHIFT2_64(n)) { - if (insn->detail->arm64.operands[n].shift.type != ARM64_SFT_ASR) { + if (insn->detail->aarch64.operands[n].shift.type != AArch64_SFT_ASR) { if (signext) { rz_strbuf_appendf(sb, "%d,%d,%s,~,%s", LSHIFT2_64(n), signext, rn, DECODE_SHIFT64(n)); } else { @@ -278,10 +278,10 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_init(&op->esil); rz_strbuf_set(&op->esil, ""); - postfix = rz_arm64_cs_esil_prefix_cond(op, insn->detail->arm64.cc); + postfix = rz_arm64_cs_esil_prefix_cond(op, insn->detail->aarch64.cc); switch (insn->id) { - case ARM64_INS_REV: + case AArch64_INS_REV: // these REV* instructions were almost right, except in the cases like rev x0, x0 // where the use of |= caused copies of the value to be erroneously present { @@ -322,7 +322,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_REV32: { + case AArch64_INS_REV32: { const char *r0 = REG64(0); const char *r1 = REG64(1); rz_strbuf_setf(&op->esil, @@ -333,7 +333,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a r1, r1, r1, r1, r0); break; } - case ARM64_INS_REV16: { + case AArch64_INS_REV16: { const char *r0 = 
REG64(0); const char *r1 = REG64(1); rz_strbuf_setf(&op->esil, @@ -342,69 +342,69 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a r1, r1, r0); break; } - case ARM64_INS_ADR: + case AArch64_INS_ADR: // TODO: must be 21bit signed rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); break; - case ARM64_INS_SMADDL: { + case AArch64_INS_SMADDL: { int size = REGSIZE64(1) * 8; rz_strbuf_setf(&op->esil, "%d,%s,~,%d,%s,~,*,%s,+,%s,=", size, REG64(2), size, REG64(1), REG64(3), REG64(0)); break; } - case ARM64_INS_UMADDL: - case ARM64_INS_FMADD: - case ARM64_INS_MADD: + case AArch64_INS_UMADDL: + case AArch64_INS_FMADD: + case AArch64_INS_MADD: rz_strbuf_setf(&op->esil, "%s,%s,*,%s,+,%s,=", REG64(2), REG64(1), REG64(3), REG64(0)); break; - case ARM64_INS_MSUB: + case AArch64_INS_MSUB: rz_strbuf_setf(&op->esil, "%s,%s,*,%s,-,%s,=", REG64(2), REG64(1), REG64(3), REG64(0)); break; - case ARM64_INS_MNEG: + case AArch64_INS_MNEG: rz_strbuf_setf(&op->esil, "%s,%s,*,0,-,%s,=", REG64(2), REG64(1), REG64(0)); break; - case ARM64_INS_ADD: - case ARM64_INS_ADC: // Add with carry. - // case ARM64_INS_ADCS: // Add with carry. + case AArch64_INS_ADD: + case AArch64_INS_ADC: // Add with carry. + // case AArch64_INS_ADCS: // Add with carry. 
OPCALL("+"); break; - case ARM64_INS_SUB: + case AArch64_INS_SUB: OPCALL("-"); break; - case ARM64_INS_SBC: + case AArch64_INS_SBC: // TODO have to check this more, VEX does not work rz_strbuf_setf(&op->esil, "%s,cf,+,%s,-,%s,=", REG64(2), REG64(1), REG64(0)); break; - case ARM64_INS_SMULL: { + case AArch64_INS_SMULL: { int size = REGSIZE64(1) * 8; rz_strbuf_setf(&op->esil, "%d,%s,~,%d,%s,~,*,%s,=", size, REG64(2), size, REG64(1), REG64(0)); break; } - case ARM64_INS_MUL: + case AArch64_INS_MUL: OPCALL("*"); break; - case ARM64_INS_AND: + case AArch64_INS_AND: OPCALL("&"); break; - case ARM64_INS_ORR: + case AArch64_INS_ORR: OPCALL("|"); break; - case ARM64_INS_EOR: + case AArch64_INS_EOR: OPCALL("^"); break; - case ARM64_INS_ORN: + case AArch64_INS_ORN: OPCALL_NEG("|"); break; - case ARM64_INS_EON: + case AArch64_INS_EON: OPCALL_NEG("^"); break; - case ARM64_INS_LSR: { + case AArch64_INS_LSR: { const char *r0 = REG64(0); const char *r1 = REG64(1); const int size = REGSIZE64(0) * 8; @@ -423,7 +423,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_LSL: { + case AArch64_INS_LSL: { const char *r0 = REG64(0); const char *r1 = REG64(1); const int size = REGSIZE64(0) * 8; @@ -442,15 +442,15 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_ROR: + case AArch64_INS_ROR: OPCALL(">>>"); break; - case ARM64_INS_NOP: + case AArch64_INS_NOP: rz_strbuf_setf(&op->esil, ","); break; - case ARM64_INS_FDIV: + case AArch64_INS_FDIV: break; - case ARM64_INS_SDIV: { + case AArch64_INS_SDIV: { /* TODO: support WZR XZR to specify 32, 64bit op */ int size = REGSIZE64(1) * 8; if (ISREG64(2)) { @@ -460,7 +460,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_UDIV: + case AArch64_INS_UDIV: /* TODO: support WZR XZR to specify 32, 64bit op */ if ISREG64 (2) { rz_strbuf_setf(&op->esil, "%s,%s,/,%s,=", 
REG64(2), REG64(1), REG64(0)); @@ -468,20 +468,20 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_setf(&op->esil, "%s,%s,/=", REG64(1), REG64(0)); } break; - case ARM64_INS_BR: + case AArch64_INS_BR: rz_strbuf_setf(&op->esil, "%s,pc,=", REG64(0)); break; - case ARM64_INS_B: + case AArch64_INS_B: /* capstone precompute resulting address, using PC + IMM */ rz_strbuf_appendf(&op->esil, "%" PFMT64d ",pc,=", IMM64(0)); break; - case ARM64_INS_BL: + case AArch64_INS_BL: rz_strbuf_setf(&op->esil, "pc,lr,=,%" PFMT64d ",pc,=", IMM64(0)); break; - case ARM64_INS_BLR: + case AArch64_INS_BLR: rz_strbuf_setf(&op->esil, "pc,lr,=,%s,pc,=", REG64(0)); break; - case ARM64_INS_CLZ:; + case AArch64_INS_CLZ:; int size = 8 * REGSIZE64(0); // expression is much more concise with GOTO, but GOTOs should be minimized @@ -528,43 +528,43 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; - case ARM64_INS_LDRH: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDR: - // case ARM64_INS_LDRSB: - // case ARM64_INS_LDRSH: - case ARM64_INS_LDRB: - // case ARM64_INS_LDRSW: - // case ARM64_INS_LDURSW: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: - case ARM64_INS_LDAXR: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDAR: - case ARM64_INS_LDARB: - case ARM64_INS_LDARH: { + case AArch64_INS_LDRH: + case AArch64_INS_LDUR: + case AArch64_INS_LDURB: + case AArch64_INS_LDURH: + case AArch64_INS_LDR: + // case AArch64_INS_LDRSB: + // case AArch64_INS_LDRSH: + case AArch64_INS_LDRB: + // case AArch64_INS_LDRSW: + // case AArch64_INS_LDURSW: + case AArch64_INS_LDXR: + case AArch64_INS_LDXRB: + case AArch64_INS_LDXRH: + case AArch64_INS_LDAXR: + case AArch64_INS_LDAXRB: + case AArch64_INS_LDAXRH: + case AArch64_INS_LDAR: + case AArch64_INS_LDARB: + case AArch64_INS_LDARH: { int size = REGSIZE64(0); switch (insn->id) { - case ARM64_INS_LDRB: 
- case ARM64_INS_LDARB: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDXRB: - case ARM64_INS_LDURB: + case AArch64_INS_LDRB: + case AArch64_INS_LDARB: + case AArch64_INS_LDAXRB: + case AArch64_INS_LDXRB: + case AArch64_INS_LDURB: size = 1; break; - case ARM64_INS_LDRH: - case ARM64_INS_LDARH: - case ARM64_INS_LDXRH: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDURH: + case AArch64_INS_LDRH: + case AArch64_INS_LDARH: + case AArch64_INS_LDXRH: + case AArch64_INS_LDAXRH: + case AArch64_INS_LDURH: size = 2; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: + case AArch64_INS_LDRSW: + case AArch64_INS_LDURSW: size = 4; break; default: @@ -623,7 +623,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as ARM64_OP_MEM type and + are not detected as AArch64_OP_MEM type and fall in this case instead. */ if (ISREG64(2)) { @@ -638,25 +638,25 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDURSW: { + case AArch64_INS_LDRSB: + case AArch64_INS_LDRSH: + case AArch64_INS_LDRSW: + case AArch64_INS_LDURSB: + case AArch64_INS_LDURSH: + case AArch64_INS_LDURSW: { // handle the sign extended instrs here int size = REGSIZE64(0); switch (insn->id) { - case ARM64_INS_LDRSB: - case ARM64_INS_LDURSB: + case AArch64_INS_LDRSB: + case AArch64_INS_LDURSB: size = 1; break; - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSH: + case AArch64_INS_LDRSH: + case AArch64_INS_LDURSH: size = 2; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: + case AArch64_INS_LDRSW: + case AArch64_INS_LDURSW: size = 4; break; default: @@ -715,7 +715,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as 
ARM64_OP_MEM type and + are not detected as AArch64_OP_MEM type and fall in this case instead. */ if (ISREG64(2)) { @@ -730,15 +730,15 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_FCMP: - case ARM64_INS_CCMP: - case ARM64_INS_CCMN: - case ARM64_INS_TST: // cmp w8, 0xd - case ARM64_INS_CMP: // cmp w8, 0xd - case ARM64_INS_CMN: // cmp w8, 0xd + case AArch64_INS_FCMP: + case AArch64_INS_CCMP: + case AArch64_INS_CCMN: + case AArch64_INS_TST: // cmp w8, 0xd + case AArch64_INS_CMP: // cmp w8, 0xd + case AArch64_INS_CMN: // cmp w8, 0xd { // update esil, cpu flags - int bits = arm64_reg_width(REGID64(0)); + int bits = aarch64_reg_width(REGID64(0)); if (ISIMM64(1)) { rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,==,$z,zf,:=,%d,$s,nf,:=,%d,$b,!,cf,:=,%d,$o,vf,:=", IMM64(1) << LSHIFT2_64(1), REG64(0), bits - 1, bits, bits - 1); } else { @@ -748,47 +748,47 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_FCSEL: - case ARM64_INS_CSEL: // csel Wd, Wn, Wm --> Wd := (cond) ? Wn : Wm + case AArch64_INS_FCSEL: + case AArch64_INS_CSEL: // csel Wd, Wn, Wm --> Wd := (cond) ? Wn : Wm rz_strbuf_appendf(&op->esil, "%s,}{,%s,},%s,=", REG64(1), REG64(2), REG64(0)); postfix = ""; break; - case ARM64_INS_CSET: // cset Wd --> Wd := (cond) ? 1 : 0 + case AArch64_INS_CSET: // cset Wd --> Wd := (cond) ? 1 : 0 rz_strbuf_appendf(&op->esil, "1,}{,0,},%s,=", REG64(0)); postfix = ""; break; - case ARM64_INS_CINC: // cinc Wd, Wn --> Wd := (cond) ? (Wn+1) : Wn + case AArch64_INS_CINC: // cinc Wd, Wn --> Wd := (cond) ? (Wn+1) : Wn rz_strbuf_appendf(&op->esil, "1,%s,+,}{,%s,},%s,=", REG64(1), REG64(1), REG64(0)); postfix = ""; break; - case ARM64_INS_CSINC: // csinc Wd, Wn, Wm --> Wd := (cond) ? Wn : (Wm+1) + case AArch64_INS_CSINC: // csinc Wd, Wn, Wm --> Wd := (cond) ? 
Wn : (Wm+1) rz_strbuf_appendf(&op->esil, "%s,}{,1,%s,+,},%s,=", REG64(1), REG64(2), REG64(0)); postfix = ""; break; - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: - case ARM64_INS_STXR: { + case AArch64_INS_STXRB: + case AArch64_INS_STXRH: + case AArch64_INS_STXR: { int size = REGSIZE64(1); - if (insn->id == ARM64_INS_STXRB) { + if (insn->id == AArch64_INS_STXRB) { size = 1; - } else if (insn->id == ARM64_INS_STXRH) { + } else if (insn->id == AArch64_INS_STXRH) { size = 2; } rz_strbuf_setf(&op->esil, "0,%s,=,%s,%s,%" PFMT64d ",+,=[%d]", REG64(0), REG64(1), MEMBASE64(1), MEMDISP64(1), size); break; } - case ARM64_INS_STRB: - case ARM64_INS_STRH: - case ARM64_INS_STUR: - case ARM64_INS_STURB: - case ARM64_INS_STURH: - case ARM64_INS_STR: // str x6, [x6,0xf90] + case AArch64_INS_STRB: + case AArch64_INS_STRH: + case AArch64_INS_STUR: + case AArch64_INS_STURB: + case AArch64_INS_STURH: + case AArch64_INS_STR: // str x6, [x6,0xf90] { int size = REGSIZE64(0); - if (insn->id == ARM64_INS_STRB || insn->id == ARM64_INS_STURB) { + if (insn->id == AArch64_INS_STRB || insn->id == AArch64_INS_STURB) { size = 1; - } else if (insn->id == ARM64_INS_STRH || insn->id == ARM64_INS_STURH) { + } else if (insn->id == AArch64_INS_STRH || insn->id == AArch64_INS_STURH) { size = 2; } if (ISMEM64(1)) { @@ -843,7 +843,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as ARM64_OP_MEM type and + are not detected as AArch64_OP_MEM type and fall in this case instead. 
*/ if (ISREG64(2)) { @@ -858,7 +858,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_BIC: + case AArch64_INS_BIC: if (OPCOUNT64() == 2) { if (REGSIZE64(0) == 4) { rz_strbuf_appendf(&op->esil, "%s,0xffffffff,^,%s,&=", REG64(1), REG64(0)); @@ -873,28 +873,28 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } } break; - case ARM64_INS_CBZ: + case AArch64_INS_CBZ: rz_strbuf_setf(&op->esil, "%s,!,?{,%" PFMT64d ",pc,=,}", REG64(0), IMM64(1)); break; - case ARM64_INS_CBNZ: + case AArch64_INS_CBNZ: rz_strbuf_setf(&op->esil, "%s,?{,%" PFMT64d ",pc,=,}", REG64(0), IMM64(1)); break; - case ARM64_INS_TBZ: + case AArch64_INS_TBZ: // tbnz x0, 4, label // if ((1<<4) & x0) goto label; rz_strbuf_setf(&op->esil, "%" PFMT64d ",1,<<,%s,&,!,?{,%" PFMT64d ",pc,=,}", IMM64(1), REG64(0), IMM64(2)); break; - case ARM64_INS_TBNZ: + case AArch64_INS_TBNZ: // tbnz x0, 4, label // if ((1<<4) & x0) goto label; rz_strbuf_setf(&op->esil, "%" PFMT64d ",1,<<,%s,&,?{,%" PFMT64d ",pc,=,}", IMM64(1), REG64(0), IMM64(2)); break; - case ARM64_INS_STNP: - case ARM64_INS_STP: // stp x6, x7, [x6,0xf90] + case AArch64_INS_STNP: + case AArch64_INS_STP: // stp x6, x7, [x6,0xf90] { int disp = (int)MEMDISP64(2); char sign = disp >= 0 ? '+' : '-'; @@ -930,7 +930,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), MEMBASE64(2), abs, sign, size, size); } } break; - case ARM64_INS_LDP: // ldp x29, x30, [sp], 0x10 + case AArch64_INS_LDP: // ldp x29, x30, [sp], 0x10 { int disp = (int)MEMDISP64(2); char sign = disp >= 0 ? 
'+' : '-'; @@ -970,18 +970,18 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a size, abs, MEMBASE64(2), sign, size, REG64(1)); } } break; - case ARM64_INS_ADRP: + case AArch64_INS_ADRP: rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); break; - case ARM64_INS_MOV: + case AArch64_INS_MOV: if (ISREG64(1)) { rz_strbuf_setf(&op->esil, "%s,%s,=", REG64(1), REG64(0)); } else { rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); } break; - case ARM64_INS_EXTR: + case AArch64_INS_EXTR: // from VEX /* 01 | t0 = GET:I64(x4) @@ -994,21 +994,21 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,>>,%" PFMT64d ",%s,<<,|,%s,=", IMM64(3), REG64(2), IMM64(3), REG64(1), REG64(0)); break; - case ARM64_INS_RBIT: + case AArch64_INS_RBIT: // this expression reverses the bits. it does. do not scroll right. // Derived from VEX rz_strbuf_setf(&op->esil, "0xffffffff00000000,0x20,0xffff0000ffff0000,0x10,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x10,0xfff
f0000ffff0000,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x20,0xffffffff00000000,0xffff0000ffff0000,0x10,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x10,0xffff0000ffff00
00,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,%2$s,=", REG64(1), REG64(0)); break; - case ARM64_INS_MVN: - case ARM64_INS_MOVN: + case AArch64_INS_MVN: + case AArch64_INS_MOVN: if (ISREG64(1)) { rz_strbuf_setf(&op->esil, "%d,%s,-1,^,<<,%s,=", LSHIFT2_64(1), REG64(1), REG64(0)); } else { rz_strbuf_setf(&op->esil, "%d,%" PFMT64d ",<<,-1,^,%s,=", LSHIFT2_64(1), IMM64(1), REG64(0)); } break; - case ARM64_INS_MOVK: // movk w8, 0x1290 + case AArch64_INS_MOVK: // movk w8, 0x1290 { ut64 shift = LSHIFT2_64(1); if (shift < 0) { @@ -1027,13 +1027,13 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a break; } - case ARM64_INS_MOVZ: + case AArch64_INS_MOVZ: rz_strbuf_setf(&op->esil, "%" PFMT64u ",%s,=", IMM64(1) << LSHIFT2_64(1), REG64(0)); break; /* ASR, SXTB, SXTH and SXTW are alias for SBFM */ - case ARM64_INS_ASR: { + case AArch64_INS_ASR: { // OPCALL(">>>>"); const char *r0 = REG64(0); const char *r1 = REG64(1); @@ -1053,8 +1053,8 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case 
ARM64_INS_SXTB: - if (arm64_reg_width(REGID64(0)) == 32) { + case AArch64_INS_SXTB: + if (aarch64_reg_width(REGID64(0)) == 32) { rz_strbuf_setf(&op->esil, "0xffffffff,8,0xff,%s,&,~,&,%s,=", REG64(1), REG64(0)); } else { @@ -1062,8 +1062,8 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), REG64(0)); } break; - case ARM64_INS_SXTH: /* halfword */ - if (arm64_reg_width(REGID64(0)) == 32) { + case AArch64_INS_SXTH: /* halfword */ + if (aarch64_reg_width(REGID64(0)) == 32) { rz_strbuf_setf(&op->esil, "0xffffffff,16,0xffff,%s,&,~,&,%s,=", REG64(1), REG64(0)); } else { @@ -1071,27 +1071,27 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), REG64(0)); } break; - case ARM64_INS_SXTW: /* word */ + case AArch64_INS_SXTW: /* word */ rz_strbuf_setf(&op->esil, "32,0xffffffff,%s,&,~,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_UXTB: + case AArch64_INS_UXTB: rz_strbuf_setf(&op->esil, "%s,0xff,&,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_UMULL: + case AArch64_INS_UMULL: rz_strbuf_setf(&op->esil, "%s,%s,*,%s,=", REG64(1), REG64(2), REG64(0)); break; - case ARM64_INS_UXTH: + case AArch64_INS_UXTH: rz_strbuf_setf(&op->esil, "%s,0xffff,&,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_RET: + case AArch64_INS_RET: rz_strbuf_setf(&op->esil, "lr,pc,="); break; - case ARM64_INS_ERET: + case AArch64_INS_ERET: rz_strbuf_setf(&op->esil, "lr,pc,="); break; - case ARM64_INS_BFI: // bfi w8, w8, 2, 1 - case ARM64_INS_BFXIL: { + case AArch64_INS_BFI: // bfi w8, w8, 2, 1 + case AArch64_INS_BFXIL: { if (OPCOUNT64() >= 3 && ISIMM64(3) && IMM64(3) > 0) { ut64 mask = rz_num_bitmask((ut8)IMM64(3)); ut64 shift = IMM64(2); @@ -1102,33 +1102,33 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_SBFIZ: + case AArch64_INS_SBFIZ: if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%" PFMT64d ",%s,%" 
PFMT64u ",&,~,<<,%s,=", IMM64(2), IMM64(3), REG64(1), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_UBFIZ: + case AArch64_INS_UBFIZ: if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%s,%" PFMT64u ",&,<<,%s,=", IMM64(2), REG64(1), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_SBFX: + case AArch64_INS_SBFX: if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%" PFMT64d ",%s,%" PFMT64d ",%" PFMT64u ",<<,&,>>,~,%s,=", IMM64(3), IMM64(2), REG64(1), IMM64(2), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_UBFX: + case AArch64_INS_UBFX: if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%s,%" PFMT64d ",%" PFMT64u ",<<,&,>>,%s,=", IMM64(2), REG64(1), IMM64(2), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_NEG: + case AArch64_INS_NEG: #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case AArch64_INS_NEGS: #endif if (LSHIFT2_64(1)) { SHIFTED_REG64_APPEND(&op->esil, 1); @@ -1137,7 +1137,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } rz_strbuf_appendf(&op->esil, ",0,-,%s,=", REG64(0)); break; - case ARM64_INS_SVC: + case AArch64_INS_SVC: rz_strbuf_setf(&op->esil, "%" PFMT64u ",$", IMM64(0)); break; } diff --git a/librz/analysis/arch/arm/arm_il64.c b/librz/analysis/arch/arm/arm_il64.c index 70a29ae230b..464d12a8934 100644 --- a/librz/analysis/arch/arm/arm_il64.c +++ b/librz/analysis/arch/arm/arm_il64.c @@ -15,7 +15,7 @@ #define ISMEM ISMEM64 #define OPCOUNT OPCOUNT64 #undef MEMDISP64 // the original one casts to ut64 which we don't want here -#define MEMDISP(x) insn->detail->arm64.operands[x].mem.disp +#define MEMDISP(x) insn->detail->aarch64.operands[x].mem.disp #include @@ -35,144 +35,144 @@ static const char *regs_bound[] = { * IL for arm64 condition * unconditional is returned as NULL (rather than true), for simpler code */ 
-static RzILOpBool *cond(arm64_cc c) { +static RzILOpBool *cond(AArch64CC_CondCode c) { switch (c) { - case ARM64_CC_EQ: + case AArch64CC_EQ: return VARG("zf"); - case ARM64_CC_NE: + case AArch64CC_NE: return INV(VARG("zf")); - case ARM64_CC_HS: + case AArch64CC_HS: return VARG("cf"); - case ARM64_CC_LO: + case AArch64CC_LO: return INV(VARG("cf")); - case ARM64_CC_MI: + case AArch64CC_MI: return VARG("nf"); - case ARM64_CC_PL: + case AArch64CC_PL: return INV(VARG("nf")); - case ARM64_CC_VS: + case AArch64CC_VS: return VARG("vf"); - case ARM64_CC_VC: + case AArch64CC_VC: return INV(VARG("vf")); - case ARM64_CC_HI: + case AArch64CC_HI: return AND(VARG("cf"), INV(VARG("zf"))); - case ARM64_CC_LS: + case AArch64CC_LS: return OR(INV(VARG("cf")), VARG("zf")); - case ARM64_CC_GE: + case AArch64CC_GE: return INV(XOR(VARG("nf"), VARG("vf"))); - case ARM64_CC_LT: + case AArch64CC_LT: return XOR(VARG("nf"), VARG("vf")); - case ARM64_CC_GT: + case AArch64CC_GT: return INV(OR(XOR(VARG("nf"), VARG("vf")), VARG("zf"))); - case ARM64_CC_LE: + case AArch64CC_LE: return OR(XOR(VARG("nf"), VARG("vf")), VARG("zf")); default: return NULL; } } -static arm64_reg xreg(ut8 idx) { - // for some reason, the ARM64_REG_X0...ARM64_REG_X30 enum values are not contiguous, +static aarch64_reg xreg(ut8 idx) { + // for some reason, the AArch64_REG_X0...AArch64_REG_X30 enum values are not contiguous, // so use switch here and let the compiler optimize: switch (idx) { - case 0: return ARM64_REG_X0; - case 1: return ARM64_REG_X1; - case 2: return ARM64_REG_X2; - case 3: return ARM64_REG_X3; - case 4: return ARM64_REG_X4; - case 5: return ARM64_REG_X5; - case 6: return ARM64_REG_X6; - case 7: return ARM64_REG_X7; - case 8: return ARM64_REG_X8; - case 9: return ARM64_REG_X9; - case 10: return ARM64_REG_X10; - case 11: return ARM64_REG_X11; - case 12: return ARM64_REG_X12; - case 13: return ARM64_REG_X13; - case 14: return ARM64_REG_X14; - case 15: return ARM64_REG_X15; - case 16: return ARM64_REG_X16; - 
case 17: return ARM64_REG_X17; - case 18: return ARM64_REG_X18; - case 19: return ARM64_REG_X19; - case 20: return ARM64_REG_X20; - case 21: return ARM64_REG_X21; - case 22: return ARM64_REG_X22; - case 23: return ARM64_REG_X23; - case 24: return ARM64_REG_X24; - case 25: return ARM64_REG_X25; - case 26: return ARM64_REG_X26; - case 27: return ARM64_REG_X27; - case 28: return ARM64_REG_X28; - case 29: return ARM64_REG_X29; - case 30: return ARM64_REG_X30; - case 31: return ARM64_REG_SP; - case 32: return ARM64_REG_XZR; + case 0: return AArch64_REG_X0; + case 1: return AArch64_REG_X1; + case 2: return AArch64_REG_X2; + case 3: return AArch64_REG_X3; + case 4: return AArch64_REG_X4; + case 5: return AArch64_REG_X5; + case 6: return AArch64_REG_X6; + case 7: return AArch64_REG_X7; + case 8: return AArch64_REG_X8; + case 9: return AArch64_REG_X9; + case 10: return AArch64_REG_X10; + case 11: return AArch64_REG_X11; + case 12: return AArch64_REG_X12; + case 13: return AArch64_REG_X13; + case 14: return AArch64_REG_X14; + case 15: return AArch64_REG_X15; + case 16: return AArch64_REG_X16; + case 17: return AArch64_REG_X17; + case 18: return AArch64_REG_X18; + case 19: return AArch64_REG_X19; + case 20: return AArch64_REG_X20; + case 21: return AArch64_REG_X21; + case 22: return AArch64_REG_X22; + case 23: return AArch64_REG_X23; + case 24: return AArch64_REG_X24; + case 25: return AArch64_REG_X25; + case 26: return AArch64_REG_X26; + case 27: return AArch64_REG_X27; + case 28: return AArch64_REG_X28; + case 29: return AArch64_REG_X29; + case 30: return AArch64_REG_X30; + case 31: return AArch64_REG_SP; + case 32: return AArch64_REG_XZR; default: rz_warn_if_reached(); - return ARM64_REG_INVALID; + return AArch64_REG_INVALID; } } -static bool is_xreg(arm64_reg reg) { +static bool is_xreg(aarch64_reg reg) { switch (reg) { - case ARM64_REG_X0: - case ARM64_REG_X1: - case ARM64_REG_X2: - case ARM64_REG_X3: - case ARM64_REG_X4: - case ARM64_REG_X5: - case ARM64_REG_X6: - case 
ARM64_REG_X7: - case ARM64_REG_X8: - case ARM64_REG_X9: - case ARM64_REG_X10: - case ARM64_REG_X11: - case ARM64_REG_X12: - case ARM64_REG_X13: - case ARM64_REG_X14: - case ARM64_REG_X15: - case ARM64_REG_X16: - case ARM64_REG_X17: - case ARM64_REG_X18: - case ARM64_REG_X19: - case ARM64_REG_X20: - case ARM64_REG_X21: - case ARM64_REG_X22: - case ARM64_REG_X23: - case ARM64_REG_X24: - case ARM64_REG_X25: - case ARM64_REG_X26: - case ARM64_REG_X27: - case ARM64_REG_X28: - case ARM64_REG_X29: - case ARM64_REG_X30: - case ARM64_REG_SP: - case ARM64_REG_XZR: + case AArch64_REG_X0: + case AArch64_REG_X1: + case AArch64_REG_X2: + case AArch64_REG_X3: + case AArch64_REG_X4: + case AArch64_REG_X5: + case AArch64_REG_X6: + case AArch64_REG_X7: + case AArch64_REG_X8: + case AArch64_REG_X9: + case AArch64_REG_X10: + case AArch64_REG_X11: + case AArch64_REG_X12: + case AArch64_REG_X13: + case AArch64_REG_X14: + case AArch64_REG_X15: + case AArch64_REG_X16: + case AArch64_REG_X17: + case AArch64_REG_X18: + case AArch64_REG_X19: + case AArch64_REG_X20: + case AArch64_REG_X21: + case AArch64_REG_X22: + case AArch64_REG_X23: + case AArch64_REG_X24: + case AArch64_REG_X25: + case AArch64_REG_X26: + case AArch64_REG_X27: + case AArch64_REG_X28: + case AArch64_REG_X29: + case AArch64_REG_X30: + case AArch64_REG_SP: + case AArch64_REG_XZR: return true; default: return false; } } -static ut8 wreg_idx(arm64_reg reg) { - if (reg >= ARM64_REG_W0 && reg <= ARM64_REG_W30) { - return reg - ARM64_REG_W0; +static ut8 wreg_idx(aarch64_reg reg) { + if (reg >= AArch64_REG_W0 && reg <= AArch64_REG_W30) { + return reg - AArch64_REG_W0; } - if (reg == ARM64_REG_WSP) { + if (reg == AArch64_REG_WSP) { return 31; } - if (reg == ARM64_REG_WZR) { + if (reg == AArch64_REG_WZR) { return 32; } rz_warn_if_reached(); return 0; } -static bool is_wreg(arm64_reg reg) { - return (reg >= ARM64_REG_W0 && reg <= ARM64_REG_W30) || reg == ARM64_REG_WSP || reg == ARM64_REG_WZR; +static bool is_wreg(aarch64_reg reg) { + 
return (reg >= AArch64_REG_W0 && reg <= AArch64_REG_W30) || reg == AArch64_REG_WSP || reg == AArch64_REG_WZR; } -static arm64_reg xreg_of_reg(arm64_reg reg) { +static aarch64_reg xreg_of_reg(aarch64_reg reg) { if (is_wreg(reg)) { return xreg(wreg_idx(reg)); } @@ -182,41 +182,41 @@ static arm64_reg xreg_of_reg(arm64_reg reg) { /** * Variable name for a register given by cs */ -static const char *reg_var_name(arm64_reg reg) { +static const char *reg_var_name(aarch64_reg reg) { reg = xreg_of_reg(reg); switch (reg) { - case ARM64_REG_X0: return "x0"; - case ARM64_REG_X1: return "x1"; - case ARM64_REG_X2: return "x2"; - case ARM64_REG_X3: return "x3"; - case ARM64_REG_X4: return "x4"; - case ARM64_REG_X5: return "x5"; - case ARM64_REG_X6: return "x6"; - case ARM64_REG_X7: return "x7"; - case ARM64_REG_X8: return "x8"; - case ARM64_REG_X9: return "x9"; - case ARM64_REG_X10: return "x10"; - case ARM64_REG_X11: return "x11"; - case ARM64_REG_X12: return "x12"; - case ARM64_REG_X13: return "x13"; - case ARM64_REG_X14: return "x14"; - case ARM64_REG_X15: return "x15"; - case ARM64_REG_X16: return "x16"; - case ARM64_REG_X17: return "x17"; - case ARM64_REG_X18: return "x18"; - case ARM64_REG_X19: return "x19"; - case ARM64_REG_X20: return "x20"; - case ARM64_REG_X21: return "x21"; - case ARM64_REG_X22: return "x22"; - case ARM64_REG_X23: return "x23"; - case ARM64_REG_X24: return "x24"; - case ARM64_REG_X25: return "x25"; - case ARM64_REG_X26: return "x26"; - case ARM64_REG_X27: return "x27"; - case ARM64_REG_X28: return "x28"; - case ARM64_REG_X29: return "x29"; - case ARM64_REG_X30: return "x30"; - case ARM64_REG_SP: return "sp"; + case AArch64_REG_X0: return "x0"; + case AArch64_REG_X1: return "x1"; + case AArch64_REG_X2: return "x2"; + case AArch64_REG_X3: return "x3"; + case AArch64_REG_X4: return "x4"; + case AArch64_REG_X5: return "x5"; + case AArch64_REG_X6: return "x6"; + case AArch64_REG_X7: return "x7"; + case AArch64_REG_X8: return "x8"; + case AArch64_REG_X9: 
return "x9"; + case AArch64_REG_X10: return "x10"; + case AArch64_REG_X11: return "x11"; + case AArch64_REG_X12: return "x12"; + case AArch64_REG_X13: return "x13"; + case AArch64_REG_X14: return "x14"; + case AArch64_REG_X15: return "x15"; + case AArch64_REG_X16: return "x16"; + case AArch64_REG_X17: return "x17"; + case AArch64_REG_X18: return "x18"; + case AArch64_REG_X19: return "x19"; + case AArch64_REG_X20: return "x20"; + case AArch64_REG_X21: return "x21"; + case AArch64_REG_X22: return "x22"; + case AArch64_REG_X23: return "x23"; + case AArch64_REG_X24: return "x24"; + case AArch64_REG_X25: return "x25"; + case AArch64_REG_X26: return "x26"; + case AArch64_REG_X27: return "x27"; + case AArch64_REG_X28: return "x28"; + case AArch64_REG_X29: return "x29"; + case AArch64_REG_X30: return "x30"; + case AArch64_REG_SP: return "sp"; default: return NULL; } } @@ -224,11 +224,11 @@ static const char *reg_var_name(arm64_reg reg) { /** * Get the bits of the given register or 0, if it is not known (e.g. 
not implemented yet) */ -static ut32 reg_bits(arm64_reg reg) { - if (is_xreg(reg) || reg == ARM64_REG_XZR) { +static ut32 reg_bits(aarch64_reg reg) { + if (is_xreg(reg) || reg == AArch64_REG_XZR) { return 64; } - if (is_wreg(reg) || reg == ARM64_REG_WZR) { + if (is_wreg(reg) || reg == AArch64_REG_WZR) { return 32; } return 0; @@ -237,11 +237,11 @@ static ut32 reg_bits(arm64_reg reg) { /** * IL to read the given capstone reg */ -static RzILOpBitVector *read_reg(arm64_reg reg) { - if (reg == ARM64_REG_XZR) { +static RzILOpBitVector *read_reg(aarch64_reg reg) { + if (reg == AArch64_REG_XZR) { return U64(0); } - if (reg == ARM64_REG_WZR) { + if (reg == AArch64_REG_WZR) { return U32(0); } const char *var = reg_var_name(reg); @@ -267,35 +267,35 @@ static RzILOpBitVector *adjust_unsigned(ut32 bits, RZ_OWN RzILOpBitVector *v) { return v; } -static RzILOpBitVector *extend(ut32 dst_bits, arm64_extender ext, RZ_OWN RzILOpBitVector *v, ut32 v_bits) { +static RzILOpBitVector *extend(ut32 dst_bits, aarch64_extender ext, RZ_OWN RzILOpBitVector *v, ut32 v_bits) { bool is_signed = false; ut32 src_bits; switch (ext) { - case ARM64_EXT_SXTB: + case AArch64_EXT_SXTB: is_signed = true; // fallthrough - case ARM64_EXT_UXTB: + case AArch64_EXT_UXTB: src_bits = 8; break; - case ARM64_EXT_SXTH: + case AArch64_EXT_SXTH: is_signed = true; // fallthrough - case ARM64_EXT_UXTH: + case AArch64_EXT_UXTH: src_bits = 16; break; - case ARM64_EXT_SXTW: + case AArch64_EXT_SXTW: is_signed = true; // fallthrough - case ARM64_EXT_UXTW: + case AArch64_EXT_UXTW: src_bits = 32; break; - case ARM64_EXT_SXTX: + case AArch64_EXT_SXTX: is_signed = true; // fallthrough - case ARM64_EXT_UXTX: + case AArch64_EXT_UXTX: src_bits = 64; break; @@ -311,16 +311,16 @@ static RzILOpBitVector *extend(ut32 dst_bits, arm64_extender ext, RZ_OWN RzILOpB return is_signed ? 
SIGNED(dst_bits, v) : UNSIGNED(dst_bits, v); } -static RzILOpBitVector *apply_shift(arm64_shifter sft, ut32 dist, RZ_OWN RzILOpBitVector *v) { +static RzILOpBitVector *apply_shift(aarch64_shifter sft, ut32 dist, RZ_OWN RzILOpBitVector *v) { if (!dist) { return v; } switch (sft) { - case ARM64_SFT_LSL: + case AArch64_SFT_LSL: return SHIFTL0(v, UN(6, dist)); - case ARM64_SFT_LSR: + case AArch64_SFT_LSR: return SHIFTR0(v, UN(6, dist)); - case ARM64_SFT_ASR: + case AArch64_SFT_ASR: return SHIFTRA(v, UN(6, dist)); default: return v; @@ -329,13 +329,13 @@ static RzILOpBitVector *apply_shift(arm64_shifter sft, ut32 dist, RZ_OWN RzILOpB #define REG(n) read_reg(REGID(n)) #define REGBITS(n) reg_bits(REGID(n)) -#define MEMBASEID(x) insn->detail->arm64.operands[x].mem.base +#define MEMBASEID(x) insn->detail->aarch64.operands[x].mem.base #define MEMBASE(x) read_reg(MEMBASEID(x)) /** * IL to write a value to the given capstone reg */ -static RzILOpEffect *write_reg(arm64_reg reg, RZ_OWN RZ_NONNULL RzILOpBitVector *v) { +static RzILOpEffect *write_reg(aarch64_reg reg, RZ_OWN RZ_NONNULL RzILOpBitVector *v) { rz_return_val_if_fail(v, NULL); const char *var = reg_var_name(reg); if (!var) { @@ -348,8 +348,8 @@ static RzILOpEffect *write_reg(arm64_reg reg, RZ_OWN RZ_NONNULL RzILOpBitVector return SETG(var, v); } -static RzILOpBitVector *arg_mem(RzILOpBitVector *base_plus_disp, cs_arm64_op *op) { - if (op->mem.index == ARM64_REG_INVALID) { +static RzILOpBitVector *arg_mem(RzILOpBitVector *base_plus_disp, cs_aarch64_op *op) { + if (op->mem.index == AArch64_REG_INVALID) { return base_plus_disp; } RzILOpBitVector *index = read_reg(op->mem.index); @@ -366,9 +366,9 @@ static RzILOpBitVector *arg_mem(RzILOpBitVector *base_plus_disp, cs_arm64_op *op */ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { ut32 bits_requested = bits_inout ? 
*bits_inout : 0; - cs_arm64_op *op = &insn->detail->arm64.operands[n]; + cs_aarch64_op *op = &insn->detail->aarch64.operands[n]; switch (op->type) { - case ARM64_OP_REG: { + case AArch64_OP_REG: { if (!bits_requested) { bits_requested = REGBITS(n); if (!bits_requested) { @@ -384,17 +384,17 @@ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { } return apply_shift(op->shift.type, op->shift.value, extend(bits_requested, op->ext, r, REGBITS(n))); } - case ARM64_OP_IMM: { + case AArch64_OP_IMM: { if (!bits_requested) { return NULL; } ut64 val = IMM(n); - if (op->shift.type == ARM64_SFT_LSL) { + if (op->shift.type == AArch64_SFT_LSL) { val <<= op->shift.value; } return UN(bits_requested, val); } - case ARM64_OP_MEM: { + case AArch64_OP_MEM: { RzILOpBitVector *addr = MEMBASE(n); st64 disp = MEMDISP(n); if (disp > 0) { @@ -402,7 +402,7 @@ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { } else if (disp < 0) { addr = SUB(addr, U64(-disp)); } - return arg_mem(addr, &insn->detail->arm64.operands[n]); + return arg_mem(addr, &insn->detail->aarch64.operands[n]); } default: break; @@ -436,16 +436,16 @@ static RzILOpEffect *update_flags_zn00(RzILOpBitVector *v) { } /** - * Capstone: ARM64_INS_ADD, ARM64_INS_ADC, ARM64_INS_SUB, ARM64_INS_SBC + * Capstone: AArch64_INS_ADD, AArch64_INS_ADC, AArch64_INS_SUB, AArch64_INS_SBC * ARM: add, adds, adc, adcs, sub, subs, sbc, sbcs */ static RzILOpEffect *add_sub(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - bool is_sub = insn->id == ARM64_INS_SUB || insn->id == ARM64_INS_SBC + bool is_sub = insn->id == AArch64_INS_SUB || insn->id == AArch64_INS_SBC #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_SUBS || insn->id == ARM64_INS_SBCS + || insn->id == AArch64_INS_SUBS || insn->id == AArch64_INS_SBCS #endif ; ut32 bits = REGBITS(0); @@ -461,23 +461,23 @@ static RzILOpEffect *add_sub(cs_insn *insn) { } RzILOpBitVector *res = is_sub ? 
SUB(a, b) : ADD(a, b); bool with_carry = false; - if (insn->id == ARM64_INS_ADC + if (insn->id == AArch64_INS_ADC #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_ADCS + || insn->id == AArch64_INS_ADCS #endif ) { res = ADD(res, ITE(VARG("cf"), UN(bits, 1), UN(bits, 0))); with_carry = true; - } else if (insn->id == ARM64_INS_SBC + } else if (insn->id == AArch64_INS_SBC #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_SBCS + || insn->id == AArch64_INS_SBCS #endif ) { res = SUB(res, ITE(VARG("cf"), UN(bits, 0), UN(bits, 1))); with_carry = true; } RzILOpEffect *set = write_reg(REGID(0), res); - bool update_flags = insn->detail->arm64.update_flags; + bool update_flags = insn->detail->aarch64.update_flags; if (update_flags) { return SEQ6( SETL("a", DUP(a)), @@ -491,7 +491,7 @@ static RzILOpEffect *add_sub(cs_insn *insn) { } /** - * Capstone: ARM64_INS_ADR, ARM64_INS_ADRP + * Capstone: AArch64_INS_ADR, AArch64_INS_ADRP * ARM: adr, adrp */ static RzILOpEffect *adr(cs_insn *insn) { @@ -502,7 +502,7 @@ static RzILOpEffect *adr(cs_insn *insn) { } /** - * Capstone: ARM64_INS_AND, ARM64_INS_EON, ARM64_INS_EOR, ARM64_INS_ORN, ARM64_INS_AORR + * Capstone: AArch64_INS_AND, AArch64_INS_EON, AArch64_INS_EOR, AArch64_INS_ORN, AArch64_INS_AORR * ARM: and, eon, eor, orn, orr */ static RzILOpEffect *bitwise(cs_insn *insn) { @@ -522,19 +522,19 @@ static RzILOpEffect *bitwise(cs_insn *insn) { } RzILOpBitVector *res; switch (insn->id) { - case ARM64_INS_EOR: + case AArch64_INS_EOR: res = LOGXOR(a, b); break; - case ARM64_INS_EON: + case AArch64_INS_EON: res = LOGXOR(a, LOGNOT(b)); break; - case ARM64_INS_ORN: + case AArch64_INS_ORN: res = LOGOR(a, LOGNOT(b)); break; - case ARM64_INS_ORR: + case AArch64_INS_ORR: res = LOGOR(a, b); break; - default: // ARM64_INS_AND + default: // AArch64_INS_AND res = LOGAND(a, b); break; } @@ -542,14 +542,14 @@ static RzILOpEffect *bitwise(cs_insn *insn) { if (!eff) { return NULL; } - if (insn->detail->arm64.update_flags) { + if 
(insn->detail->aarch64.update_flags) { return SEQ2(eff, update_flags_zn00(REG(0))); } return eff; } /** - * Capstone: ARM64_INS_ASR, ARM64_INS_LSL, ARM64_INS_LSR, ARM64_INS_ROR + * Capstone: AArch64_INS_ASR, AArch64_INS_LSL, AArch64_INS_LSR, AArch64_INS_ROR * ARM: asr, asrv, lsl, lslv, lsr, lsrv, ror, rorv */ static RzILOpEffect *shift(cs_insn *insn) { @@ -572,16 +572,16 @@ static RzILOpEffect *shift(cs_insn *insn) { } RzILOpBitVector *res; switch (insn->id) { - case ARM64_INS_ASR: + case AArch64_INS_ASR: res = SHIFTRA(a, b); break; - case ARM64_INS_LSR: + case AArch64_INS_LSR: res = SHIFTR0(a, b); break; - case ARM64_INS_ROR: + case AArch64_INS_ROR: res = LOGOR(SHIFTR0(a, b), SHIFTL0(DUP(a), NEG(DUP(b)))); break; - default: // ARM64_INS_LSL + default: // AArch64_INS_LSL res = SHIFTL0(a, b); break; } @@ -589,14 +589,14 @@ static RzILOpEffect *shift(cs_insn *insn) { } /** - * Capstone: ARM64_INS_B, ARM64_INS_RET, ARM64_INS_RETAA, ARM64_INS_RETAB + * Capstone: AArch64_INS_B, AArch64_INS_RET, AArch64_INS_RETAA, AArch64_INS_RETAB * ARM: b, b.cond, ret, retaa, retab */ static RzILOpEffect *branch(cs_insn *insn) { RzILOpBitVector *a; if (OPCOUNT() == 0) { - // for ARM64_INS_RET and similar - a = read_reg(ARM64_REG_LR); + // for AArch64_INS_RET and similar + a = read_reg(AArch64_REG_LR); } else { ut32 bits = 64; a = ARG(0, &bits); @@ -604,7 +604,7 @@ static RzILOpEffect *branch(cs_insn *insn) { if (!a) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->aarch64.cc); if (c) { return BRANCH(c, JMP(a), NOP()); } @@ -612,7 +612,7 @@ static RzILOpEffect *branch(cs_insn *insn) { } /** - * Capstone: ARM64_INS_BL, ARM64_INS_BLR, ARM64_INS_BLRAA, ARM64_INS_BLRAAZ, ARM64_INS_BLRAB, ARM64_INS_BLRABZ + * Capstone: AArch64_INS_BL, AArch64_INS_BLR, AArch64_INS_BLRAA, AArch64_INS_BLRAAZ, AArch64_INS_BLRAB, AArch64_INS_BLRABZ * ARM: bl, blr, blraa, blraaz, blrab, blrabz */ static RzILOpEffect *bl(cs_insn *insn) { @@ -627,7 +627,7 @@ 
static RzILOpEffect *bl(cs_insn *insn) { } /** - * Capstone: ARM64_INS_BFM, ARM64_INS_BFI, ARM64_INS_BFXIL + * Capstone: AArch64_INS_BFM, AArch64_INS_BFI, AArch64_INS_BFXIL * ARM: bfm, bfc, bfi, bfxil */ static RzILOpEffect *bfm(cs_insn *insn) { @@ -650,15 +650,15 @@ static RzILOpEffect *bfm(cs_insn *insn) { } ut64 mask_base = rz_num_bitmask(IMM(3)); ut64 mask = mask_base << RZ_MIN(63, IMM(2)); - if (insn->id == ARM64_INS_BFI) { + if (insn->id == AArch64_INS_BFI) { return write_reg(REGID(0), LOGOR(LOGAND(a, UN(bits, ~mask)), SHIFTL0(LOGAND(b, UN(bits, mask_base)), UN(6, IMM(2))))); } - // insn->id == ARM64_INS_BFXIL + // insn->id == AArch64_INS_BFXIL return write_reg(REGID(0), LOGOR(LOGAND(a, UN(bits, ~mask_base)), SHIFTR0(LOGAND(b, UN(bits, mask)), UN(6, IMM(2))))); } /** - * Capstone: ARM64_INS_BIC, ARM64_INS_BICS + * Capstone: AArch64_INS_BIC, AArch64_INS_BICS * ARM: bic, bics */ static RzILOpEffect *bic(cs_insn *insn) { @@ -678,14 +678,14 @@ static RzILOpEffect *bic(cs_insn *insn) { } RzILOpBitVector *res = LOGAND(a, LOGNOT(b)); RzILOpEffect *eff = NULL; - if (REGID(0) != ARM64_REG_XZR && REGID(0) != ARM64_REG_WZR) { + if (REGID(0) != AArch64_REG_XZR && REGID(0) != AArch64_REG_WZR) { eff = write_reg(REGID(0), res); if (!eff) { return NULL; } res = NULL; } - if (insn->detail->arm64.update_flags) { + if (insn->detail->aarch64.update_flags) { RzILOpEffect *eff1 = update_flags_zn00(res ? res : REG(0)); return eff ? 
SEQ2(eff, eff1) : eff1; } @@ -697,9 +697,9 @@ static RzILOpEffect *bic(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_CAS, ARM64_INS_CASA, ARM64_INS_CASAL, ARM64_INS_CASL, - * ARM64_INS_CASB, ARM64_INS_CASAB, ARM64_INS_CASALB, ARM64_INS_CASLB, - * ARM64_INS_CASH, ARM64_INS_CASAH, ARM64_INS_CASALH, ARM64_INS_CASLH: + * Capstone: AArch64_INS_CAS, AArch64_INS_CASA, AArch64_INS_CASAL, AArch64_INS_CASL, + * AArch64_INS_CASB, AArch64_INS_CASAB, AArch64_INS_CASALB, AArch64_INS_CASLB, + * AArch64_INS_CASH, AArch64_INS_CASAH, AArch64_INS_CASALH, AArch64_INS_CASLH: * ARM: cas, casa, casal, casl, casb, casab, casalb, caslb, cash, casah, casalh, caslh */ static RzILOpEffect *cas(cs_insn *insn) { @@ -711,16 +711,16 @@ static RzILOpEffect *cas(cs_insn *insn) { return NULL; } switch (insn->id) { - case ARM64_INS_CASB: - case ARM64_INS_CASAB: - case ARM64_INS_CASALB: - case ARM64_INS_CASLB: + case AArch64_INS_CASB: + case AArch64_INS_CASAB: + case AArch64_INS_CASALB: + case AArch64_INS_CASLB: bits = 8; break; - case ARM64_INS_CASH: - case ARM64_INS_CASAH: - case ARM64_INS_CASALH: - case ARM64_INS_CASLH: + case AArch64_INS_CASH: + case AArch64_INS_CASAH: + case AArch64_INS_CASALH: + case AArch64_INS_CASLH: bits = 16; break; default: @@ -744,7 +744,7 @@ static RzILOpEffect *cas(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CASP, ARM64_INS_CASPA, ARM64_INS_CASPAL, ARM64_INS_CASPL + * Capstone: AArch64_INS_CASP, AArch64_INS_CASPA, AArch64_INS_CASPAL, AArch64_INS_CASPL * ARM: casp, caspa, caspal, caspl */ static RzILOpEffect *casp(cs_insn *insn) { @@ -783,7 +783,7 @@ static RzILOpEffect *casp(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_CBZ, ARM64_INS_CBNZ + * Capstone: AArch64_INS_CBZ, AArch64_INS_CBNZ * ARM: cbz, cbnz */ static RzILOpEffect *cbz(cs_insn *insn) { @@ -795,11 +795,11 @@ static RzILOpEffect *cbz(cs_insn *insn) { rz_il_op_pure_free(tgt); return NULL; } - return BRANCH(insn->id == ARM64_INS_CBNZ ? 
INV(IS_ZERO(v)) : IS_ZERO(v), JMP(tgt), NULL); + return BRANCH(insn->id == AArch64_INS_CBNZ ? INV(IS_ZERO(v)) : IS_ZERO(v), JMP(tgt), NULL); } /** - * Capstone: ARM64_INS_CMP, ARM64_INS_CMN, ARM64_INS_CCMP, ARM64_INS_CCMN + * Capstone: AArch64_INS_CMP, AArch64_INS_CMN, AArch64_INS_CCMP, AArch64_INS_CCMN * ARM: cmp, cmn, ccmp, ccmn */ static RzILOpEffect *cmp(cs_insn *insn) { @@ -811,7 +811,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { rz_il_op_pure_free(b); return NULL; } - bool is_neg = insn->id == ARM64_INS_CMN || insn->id == ARM64_INS_CCMN; + bool is_neg = insn->id == AArch64_INS_CMN || insn->id == AArch64_INS_CCMN; RzILOpEffect *eff = SEQ6( SETL("a", a), SETL("b", b), @@ -819,7 +819,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { SETG("cf", (is_neg ? add_carry : sub_carry)(VARL("a"), VARL("b"), false, bits)), SETG("vf", (is_neg ? add_overflow : sub_overflow)(VARL("a"), VARL("b"), VARL("r"))), update_flags_zn(VARL("r"))); - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->aarch64.cc); if (c) { ut64 imm = IMM(2); return BRANCH(c, @@ -834,7 +834,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CINC, ARM64_INS_CSINC, ARM64_INS_CINV, ARM64_INS_CSINV, ARM64_INS_CNEG, ARM64_INS_CSNEG, ARM64_INS_CSEL + * Capstone: AArch64_INS_CINC, AArch64_INS_CSINC, AArch64_INS_CINV, AArch64_INS_CSINV, AArch64_INS_CNEG, AArch64_INS_CSNEG, AArch64_INS_CSEL * ARM: cinc, csinc, cinv, csinv, cneg, csneg, csel */ static RzILOpEffect *csinc(cs_insn *insn) { @@ -852,7 +852,7 @@ static RzILOpEffect *csinc(cs_insn *insn) { if (!src0) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->aarch64.cc); if (!c) { // al/nv conditions, only possible in cs(inc|inv|neg) return write_reg(REGID(dst_idx), src0); @@ -866,26 +866,26 @@ static RzILOpEffect *csinc(cs_insn *insn) { RzILOpBitVector *res; bool invert_cond = false; switch (insn->id) { - case ARM64_INS_CSEL: + case AArch64_INS_CSEL: 
invert_cond = true; res = src1; break; - case ARM64_INS_CSINV: + case AArch64_INS_CSINV: invert_cond = true; // fallthrough - case ARM64_INS_CINV: + case AArch64_INS_CINV: res = LOGNOT(src1); break; - case ARM64_INS_CSNEG: + case AArch64_INS_CSNEG: invert_cond = true; // fallthrough - case ARM64_INS_CNEG: + case AArch64_INS_CNEG: res = NEG(src1); break; - case ARM64_INS_CSINC: + case AArch64_INS_CSINC: invert_cond = true; // fallthrough - default: // ARM64_INS_CINC, ARM64_INS_CSINC + default: // AArch64_INS_CINC, AArch64_INS_CSINC res = ADD(src1, UN(bits, 1)); break; } @@ -893,23 +893,23 @@ static RzILOpEffect *csinc(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CSET, ARM64_INS_CSETM + * Capstone: AArch64_INS_CSET, AArch64_INS_CSETM * ARM: cset, csetm */ static RzILOpEffect *cset(cs_insn *insn) { if (!ISREG(0) || !REGBITS(0)) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->aarch64.cc); if (!c) { return NULL; } ut32 bits = REGBITS(0); - return write_reg(REGID(0), ITE(c, SN(bits, insn->id == ARM64_INS_CSETM ? -1 : 1), SN(bits, 0))); + return write_reg(REGID(0), ITE(c, SN(bits, insn->id == AArch64_INS_CSETM ? 
-1 : 1), SN(bits, 0))); } /** - * Capstone: ARM64_INS_CLS + * Capstone: AArch64_INS_CLS * ARM: cls */ static RzILOpEffect *cls(cs_insn *insn) { @@ -933,7 +933,7 @@ static RzILOpEffect *cls(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CLZ + * Capstone: AArch64_INS_CLZ * ARM: clz */ static RzILOpEffect *clz(cs_insn *insn) { @@ -956,7 +956,7 @@ static RzILOpEffect *clz(cs_insn *insn) { } /** - * Capstone: ARM64_INS_EXTR + * Capstone: AArch64_INS_EXTR * ARM: extr */ static RzILOpEffect *extr(cs_insn *insn) { @@ -993,7 +993,7 @@ static void label_svc(RzILVM *vm, RzILOpEffect *op) { } /** - * Capstone: ARM64_INS_HVC + * Capstone: AArch64_INS_HVC * ARM: hvc */ static RzILOpEffect *hvc(cs_insn *insn) { @@ -1004,7 +1004,7 @@ static void label_hvc(RzILVM *vm, RzILOpEffect *op) { // stub, nothing to do here } -static RzILOpEffect *load_effect(ut32 bits, bool is_signed, arm64_reg dst_reg, RZ_OWN RzILOpBitVector *addr) { +static RzILOpEffect *load_effect(ut32 bits, bool is_signed, aarch64_reg dst_reg, RZ_OWN RzILOpBitVector *addr) { RzILOpBitVector *val = bits == 8 ? 
LOAD(addr) : LOADW(bits, addr); if (bits != 64) { if (is_signed) { @@ -1022,7 +1022,7 @@ static RzILOpEffect *load_effect(ut32 bits, bool is_signed, arm64_reg dst_reg, R } static RzILOpEffect *writeback(cs_insn *insn, size_t addr_op, RZ_BORROW RzILOpBitVector *addr) { - if (!insn->detail->arm64.writeback || !is_xreg(MEMBASEID(addr_op))) { + if (!insn->detail->writeback || !is_xreg(MEMBASEID(addr_op))) { return NULL; } RzILOpBitVector *wbaddr = DUP(addr); @@ -1039,16 +1039,16 @@ static RzILOpEffect *writeback(cs_insn *insn, size_t addr_op, RZ_BORROW RzILOpBi } /** - * Capstone: ARM64_INS_LDR, ARM64_INS_LDRB, ARM64_INS_LDRH, ARM64_INS_LDRU, ARM64_INS_LDRUB, ARM64_INS_LDRUH, - * ARM64_INS_LDRSW, ARM64_INS_LDRSB, ARM64_INS_LDRSH, ARM64_INS_LDURSW, ARM64_INS_LDURSB, ARM64_INS_LDURSH, - * ARM64_INS_LDAPR, ARM64_INS_LDAPRB, ARM64_INS_LDAPRH, ARM64_INS_LDAPUR, ARM64_INS_LDAPURB, ARM64_INS_LDAPURH, - * ARM64_INS_LDAPURSB, ARM64_INS_LDAPURSH, ARM64_INS_LDAPURSW, ARM64_INS_LDAR, ARM64_INS_LDARB, ARM64_INS_LDARH, - * ARM64_INS_LDAXP, ARM64_INS_LDXP, ARM64_INS_LDAXR, ARM64_INS_LDAXRB, ARM64_INS_LDAXRH, - * ARM64_INS_LDLAR, ARM64_INS_LDLARB, ARM64_INS_LDLARH, - * ARM64_INS_LDP, ARM64_INS_LDNP, ARM64_INS_LDPSW, - * ARM64_INS_LDRAA, ARM64_INS_LDRAB, - * ARM64_INS_LDTR, ARM64_INS_LDTRB, ARM64_INS_LDTRH, ARM64_INS_LDTRSW, ARM64_INS_LDTRSB, ARM64_INS_LDTRSH, - * ARM64_INS_LDXR, ARM64_INS_LDXRB, ARM64_INS_LDXRH + * Capstone: AArch64_INS_LDR, AArch64_INS_LDRB, AArch64_INS_LDRH, AArch64_INS_LDRU, AArch64_INS_LDRUB, AArch64_INS_LDRUH, + * AArch64_INS_LDRSW, AArch64_INS_LDRSB, AArch64_INS_LDRSH, AArch64_INS_LDURSW, AArch64_INS_LDURSB, AArch64_INS_LDURSH, + * AArch64_INS_LDAPR, AArch64_INS_LDAPRB, AArch64_INS_LDAPRH, AArch64_INS_LDAPUR, AArch64_INS_LDAPURB, AArch64_INS_LDAPURH, + * AArch64_INS_LDAPURSB, AArch64_INS_LDAPURSH, AArch64_INS_LDAPURSW, AArch64_INS_LDAR, AArch64_INS_LDARB, AArch64_INS_LDARH, + * AArch64_INS_LDAXP, AArch64_INS_LDXP, AArch64_INS_LDAXR, AArch64_INS_LDAXRB, 
AArch64_INS_LDAXRH, + * AArch64_INS_LDLAR, AArch64_INS_LDLARB, AArch64_INS_LDLARH, + * AArch64_INS_LDP, AArch64_INS_LDNP, AArch64_INS_LDPSW, + * AArch64_INS_LDRAA, AArch64_INS_LDRAB, + * AArch64_INS_LDTR, AArch64_INS_LDTRB, AArch64_INS_LDTRH, AArch64_INS_LDTRSW, AArch64_INS_LDTRSB, AArch64_INS_LDTRSH, + * AArch64_INS_LDXR, AArch64_INS_LDXRB, AArch64_INS_LDXRH * ARM: ldr, ldrb, ldrh, ldru, ldrub, ldruh, ldrsw, ldrsb, ldrsh, ldursw, ldurwb, ldursh, * ldapr, ldaprb, ldaprh, ldapur, ldapurb, ldapurh, ldapursb, ldapursh, ldapursw, * ldaxp, ldxp, ldaxr, ldaxrb, ldaxrh, ldar, ldarb, ldarh, @@ -1059,8 +1059,8 @@ static RzILOpEffect *ldr(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - bool pair = insn->id == ARM64_INS_LDAXP || insn->id == ARM64_INS_LDXP || - insn->id == ARM64_INS_LDP || insn->id == ARM64_INS_LDNP || insn->id == ARM64_INS_LDPSW; + bool pair = insn->id == AArch64_INS_LDAXP || insn->id == AArch64_INS_LDXP || + insn->id == AArch64_INS_LDP || insn->id == AArch64_INS_LDNP || insn->id == AArch64_INS_LDPSW; if (pair && !ISREG(1)) { return NULL; } @@ -1070,65 +1070,65 @@ static RzILOpEffect *ldr(cs_insn *insn) { if (!addr) { return NULL; } - arm64_reg dst_reg = REGID(0); + aarch64_reg dst_reg = REGID(0); ut64 loadsz; bool is_signed = false; switch (insn->id) { - case ARM64_INS_LDRSB: - case ARM64_INS_LDURSB: - case ARM64_INS_LDTRSB: + case AArch64_INS_LDRSB: + case AArch64_INS_LDURSB: + case AArch64_INS_LDTRSB: #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSB: + case AArch64_INS_LDAPURSB: #endif is_signed = true; // fallthrough - case ARM64_INS_LDRB: - case ARM64_INS_LDURB: - case ARM64_INS_LDARB: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDTRB: - case ARM64_INS_LDXRB: + case AArch64_INS_LDRB: + case AArch64_INS_LDURB: + case AArch64_INS_LDARB: + case AArch64_INS_LDAXRB: + case AArch64_INS_LDTRB: + case AArch64_INS_LDXRB: #if CS_API_MAJOR > 4 - case ARM64_INS_LDLARB: - case ARM64_INS_LDAPRB: - case ARM64_INS_LDAPURB: + case AArch64_INS_LDLARB: + case 
AArch64_INS_LDAPRB: + case AArch64_INS_LDAPURB: #endif loadsz = 8; break; - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSH: - case ARM64_INS_LDTRSH: + case AArch64_INS_LDRSH: + case AArch64_INS_LDURSH: + case AArch64_INS_LDTRSH: #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSH: + case AArch64_INS_LDAPURSH: #endif is_signed = true; // fallthrough - case ARM64_INS_LDRH: - case ARM64_INS_LDURH: - case ARM64_INS_LDARH: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDTRH: - case ARM64_INS_LDXRH: + case AArch64_INS_LDRH: + case AArch64_INS_LDURH: + case AArch64_INS_LDARH: + case AArch64_INS_LDAXRH: + case AArch64_INS_LDTRH: + case AArch64_INS_LDXRH: #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPRH: - case ARM64_INS_LDAPURH: - case ARM64_INS_LDLARH: + case AArch64_INS_LDAPRH: + case AArch64_INS_LDAPURH: + case AArch64_INS_LDLARH: #endif loadsz = 16; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: - case ARM64_INS_LDPSW: - case ARM64_INS_LDTRSW: + case AArch64_INS_LDRSW: + case AArch64_INS_LDURSW: + case AArch64_INS_LDPSW: + case AArch64_INS_LDTRSW: #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSW: + case AArch64_INS_LDAPURSW: #endif is_signed = true; loadsz = 32; break; default: - // ARM64_INS_LDR, ARM64_INS_LDRU, ARM64_INS_LDAPR, ARM64_INS_LDAPUR, ARM64_INS_LDAR, ARM64_INS_LDAXR, ARM64_INS_LDLAR, - // ARM64_INS_LDP, ARM64_INS_LDNP, ARM64_INS_LDRAA, ARM64_INS_LDRAB, ARM64_INS_LDTR, ARM64_INS_LDXR + // AArch64_INS_LDR, AArch64_INS_LDRU, AArch64_INS_LDAPR, AArch64_INS_LDAPUR, AArch64_INS_LDAR, AArch64_INS_LDAXR, AArch64_INS_LDLAR, + // AArch64_INS_LDP, AArch64_INS_LDNP, AArch64_INS_LDRAA, AArch64_INS_LDRAB, AArch64_INS_LDTR, AArch64_INS_LDXR loadsz = is_wreg(dst_reg) ? 
32 : 64; break; } @@ -1158,11 +1158,11 @@ static RzILOpEffect *ldr(cs_insn *insn) { } /** - * Capstone: ARM64_INS_STR, ARM64_INS_STUR, ARM64_INS_STRB, ARM64_INS_STURB, ARM64_INS_STRH, ARM64_INS_STURH, - * ARM64_INS_STLLR, ARM64_INS_STLLRB, ARM64_INS_STLLRH, ARM64_INS_STLR, ARM64_INS_STLRB, ARM64_INS_STLRH, - * ARM64_INS_STLUR, ARM64_INS_STLURB, ARM64_INS_STLURH, ARM64_INS_STP, ARM64_INS_STXR, ARM64_INS_STXRB, - * ARM64_INS_STXRH, ARM64_INS_STXP, ARM64_INS_STLXR, ARM64_INS_STLXRB. ARM64_INS_STLXRH, ARM64_INS_STLXP, - * ARM64_INS_STNP, ARM64_INS_STTR, ARM64_INS_STTRB, ARM64_INS_STTRH + * Capstone: AArch64_INS_STR, AArch64_INS_STUR, AArch64_INS_STRB, AArch64_INS_STURB, AArch64_INS_STRH, AArch64_INS_STURH, + * AArch64_INS_STLLR, AArch64_INS_STLLRB, AArch64_INS_STLLRH, AArch64_INS_STLR, AArch64_INS_STLRB, AArch64_INS_STLRH, + * AArch64_INS_STLUR, AArch64_INS_STLURB, AArch64_INS_STLURH, AArch64_INS_STP, AArch64_INS_STXR, AArch64_INS_STXRB, + * AArch64_INS_STXRH, AArch64_INS_STXP, AArch64_INS_STLXR, AArch64_INS_STLXRB. AArch64_INS_STLXRH, AArch64_INS_STLXP, + * AArch64_INS_STNP, AArch64_INS_STTR, AArch64_INS_STTRB, AArch64_INS_STTRH * ARM: str, stur, strb, sturb, strh, sturh, stllr, stllrb, stllrh, stlr, stlrb, stlrh, stlur, stlurb, stlurh, stp, stxr, stxrb, * stxrh, stxp, stlxr, stlxrb. 
stlxrh, stlxp, stnp, sttr, sttrb, sttrh */ @@ -1170,9 +1170,9 @@ static RzILOpEffect *str(cs_insn *insn) { if (!ISREG(0) || !REGBITS(0)) { return NULL; } - bool result = insn->id == ARM64_INS_STXR || insn->id == ARM64_INS_STXRB || insn->id == ARM64_INS_STXRH || insn->id == ARM64_INS_STXP || - insn->id == ARM64_INS_STLXR || insn->id == ARM64_INS_STLXRB || insn->id == ARM64_INS_STLXRH || insn->id == ARM64_INS_STLXP; - bool pair = insn->id == ARM64_INS_STP || insn->id == ARM64_INS_STNP || insn->id == ARM64_INS_STXP || insn->id == ARM64_INS_STLXP; + bool result = insn->id == AArch64_INS_STXR || insn->id == AArch64_INS_STXRB || insn->id == AArch64_INS_STXRH || insn->id == AArch64_INS_STXP || + insn->id == AArch64_INS_STLXR || insn->id == AArch64_INS_STLXRB || insn->id == AArch64_INS_STLXRH || insn->id == AArch64_INS_STLXP; + bool pair = insn->id == AArch64_INS_STP || insn->id == AArch64_INS_STNP || insn->id == AArch64_INS_STXP || insn->id == AArch64_INS_STLXP; size_t src_op = result ? 1 : 0; size_t addr_op = (result ? 1 : 0) + 1 + (pair ? 
1 : 0); ut32 addr_bits = 64; @@ -1182,33 +1182,33 @@ static RzILOpEffect *str(cs_insn *insn) { } ut32 bits; switch (insn->id) { - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STLRB: - case ARM64_INS_STXRB: - case ARM64_INS_STLXRB: - case ARM64_INS_STTRB: + case AArch64_INS_STRB: + case AArch64_INS_STURB: + case AArch64_INS_STLRB: + case AArch64_INS_STXRB: + case AArch64_INS_STLXRB: + case AArch64_INS_STTRB: #if CS_API_MAJOR > 4 - case ARM64_INS_STLLRB: - case ARM64_INS_STLURB: + case AArch64_INS_STLLRB: + case AArch64_INS_STLURB: #endif bits = 8; break; - case ARM64_INS_STRH: - case ARM64_INS_STURH: - case ARM64_INS_STLRH: - case ARM64_INS_STXRH: - case ARM64_INS_STLXRH: - case ARM64_INS_STTRH: + case AArch64_INS_STRH: + case AArch64_INS_STURH: + case AArch64_INS_STLRH: + case AArch64_INS_STXRH: + case AArch64_INS_STLXRH: + case AArch64_INS_STTRH: #if CS_API_MAJOR > 4 - case ARM64_INS_STLLRH: - case ARM64_INS_STLURH: + case AArch64_INS_STLLRH: + case AArch64_INS_STLURH: #endif bits = 16; break; default: - // ARM64_INS_STR, ARM64_INS_STUR, ARM64_INS_STLLR, ARM64_INS_STLR, ARM64_INS_STLUR, ARM64_INS_STP, - // ARM64_INS_STXR, ARM64_INS_STXP, ARM64_INS_STLXR, ARM64_INS_STLXP, ARM64_INS_STNP, ARM64_INS_STTR + // AArch64_INS_STR, AArch64_INS_STUR, AArch64_INS_STLLR, AArch64_INS_STLR, AArch64_INS_STLUR, AArch64_INS_STP, + // AArch64_INS_STXR, AArch64_INS_STXP, AArch64_INS_STLXR, AArch64_INS_STLXP, AArch64_INS_STNP, AArch64_INS_STTR bits = REGBITS(src_op); if (!bits) { rz_il_op_pure_free(addr); @@ -1253,34 +1253,34 @@ static RzILOpEffect *str(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_LDADD, ARM64_INS_LDADDA, ARM64_INS_LDADDAL, ARM64_INS_LDADDL, - * ARM64_INS_LDADDB, ARM64_INS_LDADDAB, ARM64_INS_LDADDALB, ARM64_INS_LDADDLB, - * ARM64_INS_LDADDH, ARM64_INS_LDADDAH, ARM64_INS_LDADDALH, ARM64_INS_LDADDLH, - * ARM64_INS_STADD, ARM64_INS_STADDL, ARM64_INS_STADDB, ARM64_INS_STADDLB, ARM64_INS_STADDH, ARM64_INS_STADDLH, - * 
ARM64_INS_LDCLRB, ARM64_INS_LDCLRAB, ARM64_INS_LDCLRALB, ARM64_INS_LDCLRLB, - * ARM64_INS_LDCLRH, ARM64_INS_LDCLRAH, ARM64_INS_LDCLRALH, ARM64_INS_LDCLRLH - * ARM64_INS_LDCLR, ARM64_INS_LDCLRA, ARM64_INS_LDCLRAL, ARM64_INS_LDCLRL, - * ARM64_INS_STSETB, ARM64_INS_STSETLB, ARM64_INS_STSETH, ARM64_INS_STSETLH, ARM64_INS_STSET, ARM64_INS_STSETL, - * ARM64_INS_LDSETB, ARM64_INS_LDSETAB, ARM64_INS_LDSETALB, ARM64_INS_LDSETLB, - * ARM64_INS_LDSETH, ARM64_INS_LDSETAH, ARM64_INS_LDSETALH, ARM64_INS_LDSETLH - * ARM64_INS_LDSET, ARM64_INS_LDSETA, ARM64_INS_LDSETAL, ARM64_INS_LDSETL, - * ARM64_INS_STSETB, ARM64_INS_STSETLB, ARM64_INS_STSETH, ARM64_INS_STSETLH, ARM64_INS_STSET, ARM64_INS_STSETL, - * ARM64_INS_LDSMAXB, ARM64_INS_LDSMAXAB, ARM64_INS_LDSMAXALB, ARM64_INS_LDSMAXLB, - * ARM64_INS_LDSMAXH, ARM64_INS_LDSMAXAH, ARM64_INS_LDSMAXALH, ARM64_INS_LDSMAXLH - * ARM64_INS_LDSMAX, ARM64_INS_LDSMAXA, ARM64_INS_LDSMAXAL, ARM64_INS_LDSMAXL, - * ARM64_INS_STSMAXB, ARM64_INS_STSMAXLB, ARM64_INS_STSMAXH, ARM64_INS_STSMAXLH, ARM64_INS_STSMAX, ARM64_INS_STSMAXL, - * ARM64_INS_LDSMINB, ARM64_INS_LDSMINAB, ARM64_INS_LDSMINALB, ARM64_INS_LDSMINLB, - * ARM64_INS_LDSMINH, ARM64_INS_LDSMINAH, ARM64_INS_LDSMINALH, ARM64_INS_LDSMINLH - * ARM64_INS_LDSMIN, ARM64_INS_LDSMINA, ARM64_INS_LDSMINAL, ARM64_INS_LDSMINL, - * ARM64_INS_STSMINB, ARM64_INS_STSMINLB, ARM64_INS_STSMINH, ARM64_INS_STSMINLH, ARM64_INS_STSMIN, ARM64_INS_STSMINL, - * ARM64_INS_LDUMAXB, ARM64_INS_LDUMAXAB, ARM64_INS_LDUMAXALB, ARM64_INS_LDUMAXLB, - * ARM64_INS_LDUMAXH, ARM64_INS_LDUMAXAH, ARM64_INS_LDUMAXALH, ARM64_INS_LDUMAXLH - * ARM64_INS_LDUMAX, ARM64_INS_LDUMAXA, ARM64_INS_LDUMAXAL, ARM64_INS_LDUMAXL, - * ARM64_INS_STUMAXB, ARM64_INS_STUMAXLB, ARM64_INS_STUMAXH, ARM64_INS_STUMAXLH, ARM64_INS_STUMAX, ARM64_INS_STUMAXL, - * ARM64_INS_LDUMINB, ARM64_INS_LDUMINAB, ARM64_INS_LDUMINALB, ARM64_INS_LDUMINLB, - * ARM64_INS_LDUMINH, ARM64_INS_LDUMINAH, ARM64_INS_LDUMINALH, ARM64_INS_LDUMINLH - * ARM64_INS_LDUMIN, ARM64_INS_LDUMINA, 
ARM64_INS_LDUMINAL, ARM64_INS_LDUMINL, - * ARM64_INS_STUMINB, ARM64_INS_STUMINLB, ARM64_INS_STUMINH, ARM64_INS_STUMINLH, ARM64_INS_STUMIN, ARM64_INS_STUMINL + * Capstone: AArch64_INS_LDADD, AArch64_INS_LDADDA, AArch64_INS_LDADDAL, AArch64_INS_LDADDL, + * AArch64_INS_LDADDB, AArch64_INS_LDADDAB, AArch64_INS_LDADDALB, AArch64_INS_LDADDLB, + * AArch64_INS_LDADDH, AArch64_INS_LDADDAH, AArch64_INS_LDADDALH, AArch64_INS_LDADDLH, + * AArch64_INS_STADD, AArch64_INS_STADDL, AArch64_INS_STADDB, AArch64_INS_STADDLB, AArch64_INS_STADDH, AArch64_INS_STADDLH, + * AArch64_INS_LDCLRB, AArch64_INS_LDCLRAB, AArch64_INS_LDCLRALB, AArch64_INS_LDCLRLB, + * AArch64_INS_LDCLRH, AArch64_INS_LDCLRAH, AArch64_INS_LDCLRALH, AArch64_INS_LDCLRLH + * AArch64_INS_LDCLR, AArch64_INS_LDCLRA, AArch64_INS_LDCLRAL, AArch64_INS_LDCLRL, + * AArch64_INS_STSETB, AArch64_INS_STSETLB, AArch64_INS_STSETH, AArch64_INS_STSETLH, AArch64_INS_STSET, AArch64_INS_STSETL, + * AArch64_INS_LDSETB, AArch64_INS_LDSETAB, AArch64_INS_LDSETALB, AArch64_INS_LDSETLB, + * AArch64_INS_LDSETH, AArch64_INS_LDSETAH, AArch64_INS_LDSETALH, AArch64_INS_LDSETLH + * AArch64_INS_LDSET, AArch64_INS_LDSETA, AArch64_INS_LDSETAL, AArch64_INS_LDSETL, + * AArch64_INS_STSETB, AArch64_INS_STSETLB, AArch64_INS_STSETH, AArch64_INS_STSETLH, AArch64_INS_STSET, AArch64_INS_STSETL, + * AArch64_INS_LDSMAXB, AArch64_INS_LDSMAXAB, AArch64_INS_LDSMAXALB, AArch64_INS_LDSMAXLB, + * AArch64_INS_LDSMAXH, AArch64_INS_LDSMAXAH, AArch64_INS_LDSMAXALH, AArch64_INS_LDSMAXLH + * AArch64_INS_LDSMAX, AArch64_INS_LDSMAXA, AArch64_INS_LDSMAXAL, AArch64_INS_LDSMAXL, + * AArch64_INS_STSMAXB, AArch64_INS_STSMAXLB, AArch64_INS_STSMAXH, AArch64_INS_STSMAXLH, AArch64_INS_STSMAX, AArch64_INS_STSMAXL, + * AArch64_INS_LDSMINB, AArch64_INS_LDSMINAB, AArch64_INS_LDSMINALB, AArch64_INS_LDSMINLB, + * AArch64_INS_LDSMINH, AArch64_INS_LDSMINAH, AArch64_INS_LDSMINALH, AArch64_INS_LDSMINLH + * AArch64_INS_LDSMIN, AArch64_INS_LDSMINA, AArch64_INS_LDSMINAL, AArch64_INS_LDSMINL, + * 
AArch64_INS_STSMINB, AArch64_INS_STSMINLB, AArch64_INS_STSMINH, AArch64_INS_STSMINLH, AArch64_INS_STSMIN, AArch64_INS_STSMINL, + * AArch64_INS_LDUMAXB, AArch64_INS_LDUMAXAB, AArch64_INS_LDUMAXALB, AArch64_INS_LDUMAXLB, + * AArch64_INS_LDUMAXH, AArch64_INS_LDUMAXAH, AArch64_INS_LDUMAXALH, AArch64_INS_LDUMAXLH + * AArch64_INS_LDUMAX, AArch64_INS_LDUMAXA, AArch64_INS_LDUMAXAL, AArch64_INS_LDUMAXL, + * AArch64_INS_STUMAXB, AArch64_INS_STUMAXLB, AArch64_INS_STUMAXH, AArch64_INS_STUMAXLH, AArch64_INS_STUMAX, AArch64_INS_STUMAXL, + * AArch64_INS_LDUMINB, AArch64_INS_LDUMINAB, AArch64_INS_LDUMINALB, AArch64_INS_LDUMINLB, + * AArch64_INS_LDUMINH, AArch64_INS_LDUMINAH, AArch64_INS_LDUMINALH, AArch64_INS_LDUMINLH + * AArch64_INS_LDUMIN, AArch64_INS_LDUMINA, AArch64_INS_LDUMINAL, AArch64_INS_LDUMINL, + * AArch64_INS_STUMINB, AArch64_INS_STUMINLB, AArch64_INS_STUMINH, AArch64_INS_STUMINLH, AArch64_INS_STUMIN, AArch64_INS_STUMINL * ARM: ldadd, ldadda, ldaddal, ldaddl, ldaddb, ldaddab, ldaddalb, ldaddlb, ldaddh, ldaddah, ldaddalh, ldaddlh, * stadd, staddl, staddb, staddlb, stadd, * ldclr, ldclra, ldclral, ldclrl, ldclrb, ldclrab, ldclralb, ldclrlb, ldclrh, ldclrah, ldclralh, ldclrlh, @@ -1301,7 +1301,7 @@ static RzILOpEffect *ldadd(cs_insn *insn) { if (!ISMEM(addr_op)) { return NULL; } - arm64_reg addend_reg = REGID(0); + aarch64_reg addend_reg = REGID(0); ut64 loadsz; enum { OP_ADD, @@ -1314,208 +1314,208 @@ static RzILOpEffect *ldadd(cs_insn *insn) { OP_UMIN } op = OP_ADD; switch (insn->id) { - case ARM64_INS_LDCLRB: - case ARM64_INS_LDCLRAB: - case ARM64_INS_LDCLRALB: - case ARM64_INS_LDCLRLB: - case ARM64_INS_STCLRB: - case ARM64_INS_STCLRLB: + case AArch64_INS_LDCLRB: + case AArch64_INS_LDCLRAB: + case AArch64_INS_LDCLRALB: + case AArch64_INS_LDCLRLB: + case AArch64_INS_STCLRB: + case AArch64_INS_STCLRLB: op = OP_CLR; loadsz = 8; break; - case ARM64_INS_LDEORB: - case ARM64_INS_LDEORAB: - case ARM64_INS_LDEORALB: - case ARM64_INS_LDEORLB: - case ARM64_INS_STEORB: - case 
ARM64_INS_STEORLB: + case AArch64_INS_LDEORB: + case AArch64_INS_LDEORAB: + case AArch64_INS_LDEORALB: + case AArch64_INS_LDEORLB: + case AArch64_INS_STEORB: + case AArch64_INS_STEORLB: op = OP_EOR; loadsz = 8; break; - case ARM64_INS_LDSETB: - case ARM64_INS_LDSETAB: - case ARM64_INS_LDSETALB: - case ARM64_INS_LDSETLB: - case ARM64_INS_STSETB: - case ARM64_INS_STSETLB: + case AArch64_INS_LDSETB: + case AArch64_INS_LDSETAB: + case AArch64_INS_LDSETALB: + case AArch64_INS_LDSETLB: + case AArch64_INS_STSETB: + case AArch64_INS_STSETLB: op = OP_SET; loadsz = 8; break; - case ARM64_INS_LDSMAXB: - case ARM64_INS_LDSMAXAB: - case ARM64_INS_LDSMAXALB: - case ARM64_INS_LDSMAXLB: - case ARM64_INS_STSMAXB: - case ARM64_INS_STSMAXLB: + case AArch64_INS_LDSMAXB: + case AArch64_INS_LDSMAXAB: + case AArch64_INS_LDSMAXALB: + case AArch64_INS_LDSMAXLB: + case AArch64_INS_STSMAXB: + case AArch64_INS_STSMAXLB: op = OP_SMAX; loadsz = 8; break; - case ARM64_INS_LDSMINB: - case ARM64_INS_LDSMINAB: - case ARM64_INS_LDSMINALB: - case ARM64_INS_LDSMINLB: - case ARM64_INS_STSMINB: - case ARM64_INS_STSMINLB: + case AArch64_INS_LDSMINB: + case AArch64_INS_LDSMINAB: + case AArch64_INS_LDSMINALB: + case AArch64_INS_LDSMINLB: + case AArch64_INS_STSMINB: + case AArch64_INS_STSMINLB: op = OP_SMIN; loadsz = 8; break; - case ARM64_INS_LDUMAXB: - case ARM64_INS_LDUMAXAB: - case ARM64_INS_LDUMAXALB: - case ARM64_INS_LDUMAXLB: - case ARM64_INS_STUMAXB: - case ARM64_INS_STUMAXLB: + case AArch64_INS_LDUMAXB: + case AArch64_INS_LDUMAXAB: + case AArch64_INS_LDUMAXALB: + case AArch64_INS_LDUMAXLB: + case AArch64_INS_STUMAXB: + case AArch64_INS_STUMAXLB: op = OP_UMAX; loadsz = 8; break; - case ARM64_INS_LDUMINB: - case ARM64_INS_LDUMINAB: - case ARM64_INS_LDUMINALB: - case ARM64_INS_LDUMINLB: - case ARM64_INS_STUMINB: - case ARM64_INS_STUMINLB: + case AArch64_INS_LDUMINB: + case AArch64_INS_LDUMINAB: + case AArch64_INS_LDUMINALB: + case AArch64_INS_LDUMINLB: + case AArch64_INS_STUMINB: + case 
AArch64_INS_STUMINLB: op = OP_UMIN; loadsz = 8; break; - case ARM64_INS_LDADDB: - case ARM64_INS_LDADDAB: - case ARM64_INS_LDADDALB: - case ARM64_INS_LDADDLB: - case ARM64_INS_STADDB: - case ARM64_INS_STADDLB: + case AArch64_INS_LDADDB: + case AArch64_INS_LDADDAB: + case AArch64_INS_LDADDALB: + case AArch64_INS_LDADDLB: + case AArch64_INS_STADDB: + case AArch64_INS_STADDLB: loadsz = 8; break; - case ARM64_INS_LDCLRH: - case ARM64_INS_LDCLRAH: - case ARM64_INS_LDCLRALH: - case ARM64_INS_LDCLRLH: - case ARM64_INS_STCLRH: - case ARM64_INS_STCLRLH: + case AArch64_INS_LDCLRH: + case AArch64_INS_LDCLRAH: + case AArch64_INS_LDCLRALH: + case AArch64_INS_LDCLRLH: + case AArch64_INS_STCLRH: + case AArch64_INS_STCLRLH: op = OP_CLR; loadsz = 16; break; - case ARM64_INS_LDEORH: - case ARM64_INS_LDEORAH: - case ARM64_INS_LDEORALH: - case ARM64_INS_LDEORLH: - case ARM64_INS_STEORH: - case ARM64_INS_STEORLH: + case AArch64_INS_LDEORH: + case AArch64_INS_LDEORAH: + case AArch64_INS_LDEORALH: + case AArch64_INS_LDEORLH: + case AArch64_INS_STEORH: + case AArch64_INS_STEORLH: op = OP_EOR; loadsz = 16; break; - case ARM64_INS_LDSETH: - case ARM64_INS_LDSETAH: - case ARM64_INS_LDSETALH: - case ARM64_INS_LDSETLH: - case ARM64_INS_STSETH: - case ARM64_INS_STSETLH: + case AArch64_INS_LDSETH: + case AArch64_INS_LDSETAH: + case AArch64_INS_LDSETALH: + case AArch64_INS_LDSETLH: + case AArch64_INS_STSETH: + case AArch64_INS_STSETLH: op = OP_SET; loadsz = 16; break; - case ARM64_INS_LDSMAXH: - case ARM64_INS_LDSMAXAH: - case ARM64_INS_LDSMAXALH: - case ARM64_INS_LDSMAXLH: - case ARM64_INS_STSMAXH: - case ARM64_INS_STSMAXLH: + case AArch64_INS_LDSMAXH: + case AArch64_INS_LDSMAXAH: + case AArch64_INS_LDSMAXALH: + case AArch64_INS_LDSMAXLH: + case AArch64_INS_STSMAXH: + case AArch64_INS_STSMAXLH: op = OP_SMAX; loadsz = 16; break; - case ARM64_INS_LDSMINH: - case ARM64_INS_LDSMINAH: - case ARM64_INS_LDSMINALH: - case ARM64_INS_LDSMINLH: - case ARM64_INS_STSMINH: - case ARM64_INS_STSMINLH: + case 
AArch64_INS_LDSMINH: + case AArch64_INS_LDSMINAH: + case AArch64_INS_LDSMINALH: + case AArch64_INS_LDSMINLH: + case AArch64_INS_STSMINH: + case AArch64_INS_STSMINLH: op = OP_SMIN; loadsz = 16; break; - case ARM64_INS_LDUMAXH: - case ARM64_INS_LDUMAXAH: - case ARM64_INS_LDUMAXALH: - case ARM64_INS_LDUMAXLH: - case ARM64_INS_STUMAXH: - case ARM64_INS_STUMAXLH: + case AArch64_INS_LDUMAXH: + case AArch64_INS_LDUMAXAH: + case AArch64_INS_LDUMAXALH: + case AArch64_INS_LDUMAXLH: + case AArch64_INS_STUMAXH: + case AArch64_INS_STUMAXLH: op = OP_UMAX; loadsz = 16; break; - case ARM64_INS_LDUMINH: - case ARM64_INS_LDUMINAH: - case ARM64_INS_LDUMINALH: - case ARM64_INS_LDUMINLH: - case ARM64_INS_STUMINH: - case ARM64_INS_STUMINLH: + case AArch64_INS_LDUMINH: + case AArch64_INS_LDUMINAH: + case AArch64_INS_LDUMINALH: + case AArch64_INS_LDUMINLH: + case AArch64_INS_STUMINH: + case AArch64_INS_STUMINLH: op = OP_UMIN; loadsz = 16; break; - case ARM64_INS_LDADDH: - case ARM64_INS_LDADDAH: - case ARM64_INS_LDADDALH: - case ARM64_INS_LDADDLH: - case ARM64_INS_STADDH: - case ARM64_INS_STADDLH: + case AArch64_INS_LDADDH: + case AArch64_INS_LDADDAH: + case AArch64_INS_LDADDALH: + case AArch64_INS_LDADDLH: + case AArch64_INS_STADDH: + case AArch64_INS_STADDLH: loadsz = 16; break; - case ARM64_INS_LDCLR: - case ARM64_INS_LDCLRA: - case ARM64_INS_LDCLRAL: - case ARM64_INS_LDCLRL: - case ARM64_INS_STCLR: - case ARM64_INS_STCLRL: + case AArch64_INS_LDCLR: + case AArch64_INS_LDCLRA: + case AArch64_INS_LDCLRAL: + case AArch64_INS_LDCLRL: + case AArch64_INS_STCLR: + case AArch64_INS_STCLRL: op = OP_CLR; goto size_from_reg; - case ARM64_INS_LDEOR: - case ARM64_INS_LDEORA: - case ARM64_INS_LDEORAL: - case ARM64_INS_LDEORL: - case ARM64_INS_STEOR: - case ARM64_INS_STEORL: + case AArch64_INS_LDEOR: + case AArch64_INS_LDEORA: + case AArch64_INS_LDEORAL: + case AArch64_INS_LDEORL: + case AArch64_INS_STEOR: + case AArch64_INS_STEORL: op = OP_EOR; goto size_from_reg; - case ARM64_INS_LDSET: - case 
ARM64_INS_LDSETA: - case ARM64_INS_LDSETAL: - case ARM64_INS_LDSETL: - case ARM64_INS_STSET: - case ARM64_INS_STSETL: + case AArch64_INS_LDSET: + case AArch64_INS_LDSETA: + case AArch64_INS_LDSETAL: + case AArch64_INS_LDSETL: + case AArch64_INS_STSET: + case AArch64_INS_STSETL: op = OP_SET; goto size_from_reg; - case ARM64_INS_LDSMAX: - case ARM64_INS_LDSMAXA: - case ARM64_INS_LDSMAXAL: - case ARM64_INS_LDSMAXL: - case ARM64_INS_STSMAX: - case ARM64_INS_STSMAXL: + case AArch64_INS_LDSMAX: + case AArch64_INS_LDSMAXA: + case AArch64_INS_LDSMAXAL: + case AArch64_INS_LDSMAXL: + case AArch64_INS_STSMAX: + case AArch64_INS_STSMAXL: op = OP_SMAX; goto size_from_reg; - case ARM64_INS_LDSMIN: - case ARM64_INS_LDSMINA: - case ARM64_INS_LDSMINAL: - case ARM64_INS_LDSMINL: - case ARM64_INS_STSMIN: - case ARM64_INS_STSMINL: + case AArch64_INS_LDSMIN: + case AArch64_INS_LDSMINA: + case AArch64_INS_LDSMINAL: + case AArch64_INS_LDSMINL: + case AArch64_INS_STSMIN: + case AArch64_INS_STSMINL: op = OP_SMIN; goto size_from_reg; - case ARM64_INS_LDUMAX: - case ARM64_INS_LDUMAXA: - case ARM64_INS_LDUMAXAL: - case ARM64_INS_LDUMAXL: - case ARM64_INS_STUMAX: - case ARM64_INS_STUMAXL: + case AArch64_INS_LDUMAX: + case AArch64_INS_LDUMAXA: + case AArch64_INS_LDUMAXAL: + case AArch64_INS_LDUMAXL: + case AArch64_INS_STUMAX: + case AArch64_INS_STUMAXL: op = OP_UMAX; goto size_from_reg; - case ARM64_INS_LDUMIN: - case ARM64_INS_LDUMINA: - case ARM64_INS_LDUMINAL: - case ARM64_INS_LDUMINL: - case ARM64_INS_STUMIN: - case ARM64_INS_STUMINL: + case AArch64_INS_LDUMIN: + case AArch64_INS_LDUMINA: + case AArch64_INS_LDUMINAL: + case AArch64_INS_LDUMINL: + case AArch64_INS_STUMIN: + case AArch64_INS_STUMINL: op = OP_UMIN; // fallthrough size_from_reg: - default: // ARM64_INS_LDADD, ARM64_INS_LDADDA, ARM64_INS_LDADDAL, ARM64_INS_LDADDL, ARM64_INS_STADD, ARM64_INS_STADDL + default: // AArch64_INS_LDADD, AArch64_INS_LDADDA, AArch64_INS_LDADDAL, AArch64_INS_LDADDL, AArch64_INS_STADD, AArch64_INS_STADDL 
loadsz = is_wreg(addend_reg) ? 32 : 64; break; } @@ -1532,7 +1532,7 @@ static RzILOpEffect *ldadd(cs_insn *insn) { rz_il_op_pure_free(addr); return NULL; } - arm64_reg dst_reg = REGID(1); + aarch64_reg dst_reg = REGID(1); dst_reg = xreg_of_reg(dst_reg); ld_eff = write_reg(dst_reg, loadsz != 64 ? UNSIGNED(64, VARL("old")) : VARL("old")); if (!ld_eff) { @@ -1585,7 +1585,7 @@ static RzILOpEffect *ldadd(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_MADD, ARM64_INS_MSUB + * Capstone: AArch64_INS_MADD, AArch64_INS_MSUB * ARM: madd, msub */ static RzILOpEffect *madd(cs_insn *insn) { @@ -1603,7 +1603,7 @@ static RzILOpEffect *madd(cs_insn *insn) { return NULL; } RzILOpBitVector *res; - if (insn->id == ARM64_INS_MSUB) { + if (insn->id == AArch64_INS_MSUB) { res = SUB(addend, MUL(ma, mb)); } else { res = ADD(MUL(ma, mb), addend); @@ -1612,7 +1612,7 @@ static RzILOpEffect *madd(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MUL, ARM64_INS_MNEG + * Capstone: AArch64_INS_MUL, AArch64_INS_MNEG * ARM: mul, mneg */ static RzILOpEffect *mul(cs_insn *insn) { @@ -1631,7 +1631,7 @@ static RzILOpEffect *mul(cs_insn *insn) { return NULL; } RzILOpBitVector *res = MUL(ma, mb); - if (insn->id == ARM64_INS_MNEG) { + if (insn->id == AArch64_INS_MNEG) { res = NEG(res); } return write_reg(REGID(0), res); @@ -1640,7 +1640,7 @@ static RzILOpEffect *mul(cs_insn *insn) { static RzILOpEffect *movn(cs_insn *insn); /** - * Capstone: ARM64_INS_MOV, ARM64_INS_MOVZ + * Capstone: AArch64_INS_MOV, AArch64_INS_MOVZ * ARM: mov, movz */ static RzILOpEffect *mov(cs_insn *insn) { @@ -1664,7 +1664,7 @@ static RzILOpEffect *mov(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MOVK + * Capstone: AArch64_INS_MOVK * ARM: movk */ static RzILOpEffect *movk(cs_insn *insn) { @@ -1676,13 +1676,13 @@ static RzILOpEffect *movk(cs_insn *insn) { if (!src) { return NULL; } - cs_arm64_op *op = &insn->detail->arm64.operands[1]; - ut32 shift = op->shift.type == ARM64_SFT_LSL ? 
op->shift.value : 0; + cs_aarch64_op *op = &insn->detail->aarch64.operands[1]; + ut32 shift = op->shift.type == AArch64_SFT_LSL ? op->shift.value : 0; return write_reg(REGID(0), LOGOR(LOGAND(src, UN(bits, ~(0xffffull << shift))), UN(bits, ((ut64)op->imm) << shift))); } /** - * Capstone: ARM64_INS_MOVN + * Capstone: AArch64_INS_MOVN * ARM: movn */ static RzILOpEffect *movn(cs_insn *insn) { @@ -1692,8 +1692,8 @@ static RzILOpEffect *movn(cs_insn *insn) { // The only case where the movn encoding should be disassembled as "movn" is // when (IsZero(imm16) && hw != '00'), according to the "alias conditions" in the reference manual. // Unfortunately, capstone v4 seems to always disassemble as movn, so we still have to implement this. - cs_arm64_op *op = &insn->detail->arm64.operands[1]; - ut32 shift = op->shift.type == ARM64_SFT_LSL ? op->shift.value : 0; + cs_aarch64_op *op = &insn->detail->aarch64.operands[1]; + ut32 shift = op->shift.type == AArch64_SFT_LSL ? op->shift.value : 0; ut32 bits = REGBITS(0); if (!bits) { return NULL; @@ -1702,17 +1702,17 @@ static RzILOpEffect *movn(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MSR + * Capstone: AArch64_INS_MSR * ARM: msr */ static RzILOpEffect *msr(cs_insn *insn) { - cs_arm64_op *op = &insn->detail->arm64.operands[0]; + cs_aarch64_op *op = &insn->detail->aarch64.operands[0]; #if CS_API_MAJOR > 4 - if (op->type != ARM64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { + if (op->type != AArch64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { return NULL; } #else - if (op->type != ARM64_OP_REG_MSR || op->reg != 0xda10) { + if (op->type != AArch64_OP_REG_MSR || op->reg != 0xda10) { return NULL; } #endif @@ -1730,7 +1730,7 @@ static RzILOpEffect *msr(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_RMIF + * Capstone: AArch64_INS_RMIF * ARM: rmif */ static RzILOpEffect *rmif(cs_insn *insn) { @@ -1764,7 +1764,7 @@ static RzILOpEffect *rmif(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SBFX, 
ARM64_INS_SBFIZ, ARM64_INS_UBFX, ARM64_INS_UBFIZ + * Capstone: AArch64_INS_SBFX, AArch64_INS_SBFIZ, AArch64_INS_UBFX, AArch64_INS_UBFIZ * ARM: sbfx, sbfiz, ubfx, ubfiz */ static RzILOpEffect *sbfx(cs_insn *insn) { @@ -1782,32 +1782,32 @@ static RzILOpEffect *sbfx(cs_insn *insn) { ut64 lsb = IMM(2); ut64 width = IMM(3); RzILOpBitVector *res; - if (insn->id == ARM64_INS_SBFIZ || insn->id == ARM64_INS_UBFIZ) { + if (insn->id == AArch64_INS_SBFIZ || insn->id == AArch64_INS_UBFIZ) { res = SHIFTL0(UNSIGNED(width + lsb, src), UN(6, lsb)); } else { - // ARM64_INS_SBFX, ARM64_INS_UBFX + // AArch64_INS_SBFX, AArch64_INS_UBFX res = UNSIGNED(width, SHIFTR0(src, UN(6, lsb))); } - bool is_signed = insn->id == ARM64_INS_SBFX || insn->id == ARM64_INS_SBFIZ; + bool is_signed = insn->id == AArch64_INS_SBFX || insn->id == AArch64_INS_SBFIZ; res = LET("res", res, is_signed ? SIGNED(bits, VARLP("res")) : UNSIGNED(bits, VARLP("res"))); return write_reg(REGID(0), res); } /** - * Capstone: ARM64_INS_MRS + * Capstone: AArch64_INS_MRS * ARM: mrs */ static RzILOpEffect *mrs(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - cs_arm64_op *op = &insn->detail->arm64.operands[1]; + cs_aarch64_op *op = &insn->detail->aarch64.operands[1]; #if CS_API_MAJOR > 4 - if (op->type != ARM64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { + if (op->type != AArch64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { return NULL; } #else - if (op->type != ARM64_OP_REG_MRS || op->reg != 0xda10) { + if (op->type != AArch64_OP_REG_MRS || op->reg != 0xda10) { return NULL; } #endif @@ -1823,7 +1823,7 @@ static RzILOpEffect *mrs(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MVN, ARM64_INS_NEG, ARM64_INS_NEGS, ARM64_INS_NGC, ARM64_INS_NGCS + * Capstone: AArch64_INS_MVN, AArch64_INS_NEG, AArch64_INS_NEGS, AArch64_INS_NGC, AArch64_INS_NGCS * ARM: mvn, neg, negs, ngc, ngcs */ static RzILOpEffect *mvn(cs_insn *insn) { @@ -1837,19 +1837,19 @@ static RzILOpEffect *mvn(cs_insn *insn) { } RzILOpBitVector *res; 
switch (insn->id) { - case ARM64_INS_NEG: + case AArch64_INS_NEG: #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case AArch64_INS_NEGS: #endif res = NEG(val); break; - case ARM64_INS_NGC: + case AArch64_INS_NGC: #if CS_API_MAJOR > 3 - case ARM64_INS_NGCS: + case AArch64_INS_NGCS: #endif res = NEG(ADD(val, ITE(VARG("cf"), UN(bits, 0), UN(bits, 1)))); break; - default: // ARM64_INS_MVN + default: // AArch64_INS_MVN res = LOGNOT(val); break; } @@ -1857,11 +1857,11 @@ static RzILOpEffect *mvn(cs_insn *insn) { if (!set) { return NULL; } - if (insn->detail->arm64.update_flags) { + if (insn->detail->aarch64.update_flags) { return SEQ5( SETL("b", DUP(val)), set, - SETG("cf", sub_carry(UN(bits, 0), VARL("b"), insn->id == ARM64_INS_NGC, bits)), + SETG("cf", sub_carry(UN(bits, 0), VARL("b"), insn->id == AArch64_INS_NGC, bits)), SETG("vf", sub_overflow(UN(bits, 0), VARL("b"), REG(0))), update_flags_zn(REG(0))); } @@ -1869,7 +1869,7 @@ static RzILOpEffect *mvn(cs_insn *insn) { } /** - * Capstone: ARM64_INS_RBIT + * Capstone: AArch64_INS_RBIT * ARM: rbit */ static RzILOpEffect *rbit(cs_insn *insn) { @@ -1898,7 +1898,7 @@ static RzILOpEffect *rbit(cs_insn *insn) { } /** - * Capstone: ARM64_INS_REV, ARM64_INS_REV32, ARM64_INS_REV16 + * Capstone: AArch64_INS_REV, AArch64_INS_REV32, AArch64_INS_REV16 * ARM: rev, rev32, rev16 */ static RzILOpEffect *rev(cs_insn *insn) { @@ -1909,11 +1909,11 @@ static RzILOpEffect *rev(cs_insn *insn) { if (!dst_bits) { return NULL; } - arm64_reg src_reg = xreg_of_reg(REGID(1)); + aarch64_reg src_reg = xreg_of_reg(REGID(1)); ut32 container_bits = dst_bits; - if (insn->id == ARM64_INS_REV32) { + if (insn->id == AArch64_INS_REV32) { container_bits = 32; - } else if (insn->id == ARM64_INS_REV16) { + } else if (insn->id == AArch64_INS_REV16) { container_bits = 16; } RzILOpBitVector *src = read_reg(src_reg); @@ -1964,7 +1964,7 @@ static RzILOpEffect *rev(cs_insn *insn) { } /** - * Capstone: ARM64_INS_SDIV + * Capstone: AArch64_INS_SDIV * ARM: sdiv */ static 
RzILOpEffect *sdiv(cs_insn *insn) { @@ -1990,7 +1990,7 @@ static RzILOpEffect *sdiv(cs_insn *insn) { } /** - * Capstone: ARM64_INS_UDIV + * Capstone: AArch64_INS_UDIV * ARM: udiv */ static RzILOpEffect *udiv(cs_insn *insn) { @@ -2014,7 +2014,7 @@ static RzILOpEffect *udiv(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_SETF8, ARM64_INS_SETF16 + * Capstone: AArch64_INS_SETF8, AArch64_INS_SETF16 * ARM: setf8, setf16 */ static RzILOpEffect *setf(cs_insn *insn) { @@ -2025,7 +2025,7 @@ static RzILOpEffect *setf(cs_insn *insn) { if (!val) { return NULL; } - ut32 bits = insn->id == ARM64_INS_SETF16 ? 16 : 8; + ut32 bits = insn->id == AArch64_INS_SETF16 ? 16 : 8; return SEQ2( SETG("vf", XOR(MSB(UNSIGNED(bits + 1, val)), MSB(UNSIGNED(bits, DUP(val))))), update_flags_zn(UNSIGNED(bits, DUP(val)))); @@ -2033,7 +2033,7 @@ static RzILOpEffect *setf(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SMADDL, ARM64_INS_SMSUBL, ARM64_INS_UMADDL, ARM64_INS_UMSUBL + * Capstone: AArch64_INS_SMADDL, AArch64_INS_SMSUBL, AArch64_INS_UMADDL, AArch64_INS_UMSUBL * ARM: smaddl, smsubl, umaddl, umsubl */ static RzILOpEffect *smaddl(cs_insn *insn) { @@ -2051,9 +2051,9 @@ static RzILOpEffect *smaddl(cs_insn *insn) { rz_il_op_pure_free(addend); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMADDL || insn->id == ARM64_INS_SMSUBL; + bool is_signed = insn->id == AArch64_INS_SMADDL || insn->id == AArch64_INS_SMSUBL; RzILOpBitVector *res = MUL(is_signed ? SIGNED(64, x) : UNSIGNED(64, x), is_signed ? 
SIGNED(64, y) : UNSIGNED(64, y)); - if (insn->id == ARM64_INS_SMSUBL || insn->id == ARM64_INS_UMSUBL) { + if (insn->id == AArch64_INS_SMSUBL || insn->id == AArch64_INS_UMSUBL) { res = SUB(addend, res); } else { res = ADD(addend, res); @@ -2062,7 +2062,7 @@ static RzILOpEffect *smaddl(cs_insn *insn) { } /** - * Capstone: ARM64_INS_SMULL, ARM64_INS_SMNEGL, ARM64_INS_UMULL, ARM64_INS_UMNEGL + * Capstone: AArch64_INS_SMULL, AArch64_INS_SMNEGL, AArch64_INS_UMULL, AArch64_INS_UMNEGL * ARM: smull, smnegl, umull, umnegl */ static RzILOpEffect *smull(cs_insn *insn) { @@ -2077,16 +2077,16 @@ static RzILOpEffect *smull(cs_insn *insn) { rz_il_op_pure_free(y); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMULL || insn->id == ARM64_INS_SMNEGL; + bool is_signed = insn->id == AArch64_INS_SMULL || insn->id == AArch64_INS_SMNEGL; RzILOpBitVector *res = MUL(is_signed ? SIGNED(64, x) : UNSIGNED(64, x), is_signed ? SIGNED(64, y) : UNSIGNED(64, y)); - if (insn->id == ARM64_INS_SMNEGL || insn->id == ARM64_INS_UMNEGL) { + if (insn->id == AArch64_INS_SMNEGL || insn->id == AArch64_INS_UMNEGL) { res = NEG(res); } return write_reg(REGID(0), res); } /** - * Capstone: ARM64_INS_SMULH, ARM64_INS_UMULH + * Capstone: AArch64_INS_SMULH, AArch64_INS_UMULH * ARM: smulh, umulh */ static RzILOpEffect *smulh(cs_insn *insn) { @@ -2101,16 +2101,16 @@ static RzILOpEffect *smulh(cs_insn *insn) { rz_il_op_pure_free(y); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMULH; + bool is_signed = insn->id == AArch64_INS_SMULH; RzILOpBitVector *res = MUL(is_signed ? SIGNED(128, x) : UNSIGNED(128, x), is_signed ? 
SIGNED(128, y) : UNSIGNED(128, y)); return write_reg(REGID(0), UNSIGNED(64, SHIFTR0(res, UN(7, 64)))); } #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_SWP, ARM64_INS_SWPA, ARM64_INS_SWPAL, ARM64_INS_SWPL, - * ARM64_INS_SWPB, ARM64_INS_SWPAB, ARM64_INS_SWPALB, ARM64_INS_SWPLB - * ARM64_INS_SWPH, ARM64_INS_SWPAH, ARM64_INS_SWPALH, ARM64_INS_SWPLH + * Capstone: AArch64_INS_SWP, AArch64_INS_SWPA, AArch64_INS_SWPAL, AArch64_INS_SWPL, + * AArch64_INS_SWPB, AArch64_INS_SWPAB, AArch64_INS_SWPALB, AArch64_INS_SWPLB + * AArch64_INS_SWPH, AArch64_INS_SWPAH, AArch64_INS_SWPALH, AArch64_INS_SWPLH * ARM: swp, swpa, swpal, swpl, swpb, swpab, swpalb, swplb, swph, swpah, swpalh, swplh */ static RzILOpEffect *swp(cs_insn *insn) { @@ -2119,19 +2119,19 @@ static RzILOpEffect *swp(cs_insn *insn) { } ut32 bits; switch (insn->id) { - case ARM64_INS_SWPB: - case ARM64_INS_SWPAB: - case ARM64_INS_SWPALB: - case ARM64_INS_SWPLB: + case AArch64_INS_SWPB: + case AArch64_INS_SWPAB: + case AArch64_INS_SWPALB: + case AArch64_INS_SWPLB: bits = 8; break; - case ARM64_INS_SWPH: - case ARM64_INS_SWPAH: - case ARM64_INS_SWPALH: - case ARM64_INS_SWPLH: + case AArch64_INS_SWPH: + case AArch64_INS_SWPAH: + case AArch64_INS_SWPALH: + case AArch64_INS_SWPLH: bits = 16; break; - default: // ARM64_INS_SWP, ARM64_INS_SWPA, ARM64_INS_SWPAL, ARM64_INS_SWPL: + default: // AArch64_INS_SWP, AArch64_INS_SWPA, AArch64_INS_SWPAL, AArch64_INS_SWPL: bits = REGBITS(0); if (!bits) { return NULL; @@ -2150,8 +2150,8 @@ static RzILOpEffect *swp(cs_insn *insn) { return NULL; } RzILOpEffect *store_eff = bits == 8 ? STORE(addr, store_val) : STOREW(addr, store_val); - arm64_reg ret_reg = xreg_of_reg(REGID(1)); - if (ret_reg == ARM64_REG_XZR) { + aarch64_reg ret_reg = xreg_of_reg(REGID(1)); + if (ret_reg == AArch64_REG_XZR) { return store_eff; } RzILOpEffect *ret_eff = write_reg(ret_reg, bits != 64 ? 
UNSIGNED(64, VARL("ret")) : VARL("ret")); @@ -2167,7 +2167,7 @@ static RzILOpEffect *swp(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SXTB, ARM64_INS_SXTH, ARM64_INS_SXTW, ARM64_INS_UXTB, ARM64_INS_UXTH + * Capstone: AArch64_INS_SXTB, AArch64_INS_SXTH, AArch64_INS_SXTW, AArch64_INS_UXTB, AArch64_INS_UXTH * ARM: sxtb, sxth, sxtw, uxtb, uxth */ static RzILOpEffect *sxt(cs_insn *insn) { @@ -2177,19 +2177,19 @@ ut32 bits; bool is_signed = true; switch (insn->id) { - case ARM64_INS_UXTB: + case AArch64_INS_UXTB: is_signed = false; // fallthrough - case ARM64_INS_SXTB: + case AArch64_INS_SXTB: bits = 8; break; - case ARM64_INS_UXTH: + case AArch64_INS_UXTH: is_signed = false; // fallthrough - case ARM64_INS_SXTH: + case AArch64_INS_SXTH: bits = 16; break; - default: // ARM64_INS_SXTW + default: // AArch64_INS_SXTW bits = 32; break; } @@ -2201,7 +2201,7 @@ } /** - * Capstone: ARM64_INS_TBNZ, ARM64_TBZ + * Capstone: AArch64_INS_TBNZ, AArch64_INS_TBZ * ARM: tbnz, tbz */ static RzILOpEffect *tbz(cs_insn *insn) { @@ -2217,13 +2217,13 @@ return NULL; } RzILOpBool *c = LSB(SHIFTR0(src, UN(6, IMM(1)))); - return insn->id == ARM64_INS_TBNZ + return insn->id == AArch64_INS_TBNZ ? 
BRANCH(c, JMP(tgt), NULL) : BRANCH(c, NULL, JMP(tgt)); } /** - * Capstone: ARM64_INS_TST + * Capstone: AArch64_INS_TST * ARM: tst */ static RzILOpEffect *tst(cs_insn *insn) { @@ -2315,441 +2315,441 @@ static RzILOpEffect *tst(cs_insn *insn) { */ RZ_IPI RzILOpEffect *rz_arm_cs_64_il(csh *handle, cs_insn *insn) { switch (insn->id) { - case ARM64_INS_NOP: - case ARM64_INS_HINT: - case ARM64_INS_PRFM: - case ARM64_INS_PRFUM: - case ARM64_INS_SEV: - case ARM64_INS_SEVL: - case ARM64_INS_WFE: - case ARM64_INS_WFI: - case ARM64_INS_YIELD: + case AArch64_INS_NOP: + case AArch64_INS_HINT: + case AArch64_INS_PRFM: + case AArch64_INS_PRFUM: + case AArch64_INS_SEV: + case AArch64_INS_SEVL: + case AArch64_INS_WFE: + case AArch64_INS_WFI: + case AArch64_INS_YIELD: return NOP(); - case ARM64_INS_ADD: - case ARM64_INS_ADC: - case ARM64_INS_SUB: - case ARM64_INS_SBC: + case AArch64_INS_ADD: + case AArch64_INS_ADC: + case AArch64_INS_SUB: + case AArch64_INS_SBC: #if CS_API_MAJOR > 4 - case ARM64_INS_ADDS: - case ARM64_INS_SUBS: - case ARM64_INS_ADCS: - case ARM64_INS_SBCS: + case AArch64_INS_ADDS: + case AArch64_INS_SUBS: + case AArch64_INS_ADCS: + case AArch64_INS_SBCS: #endif return add_sub(insn); - case ARM64_INS_ADR: - case ARM64_INS_ADRP: + case AArch64_INS_ADR: + case AArch64_INS_ADRP: return adr(insn); - case ARM64_INS_AND: + case AArch64_INS_AND: #if CS_API_MAJOR > 4 - case ARM64_INS_ANDS: + case AArch64_INS_ANDS: #endif - case ARM64_INS_EOR: - case ARM64_INS_EON: - case ARM64_INS_ORN: - case ARM64_INS_ORR: + case AArch64_INS_EOR: + case AArch64_INS_EON: + case AArch64_INS_ORN: + case AArch64_INS_ORR: return bitwise(insn); - case ARM64_INS_ASR: - case ARM64_INS_LSL: - case ARM64_INS_LSR: - case ARM64_INS_ROR: + case AArch64_INS_ASR: + case AArch64_INS_LSL: + case AArch64_INS_LSR: + case AArch64_INS_ROR: return shift(insn); - case ARM64_INS_B: - case ARM64_INS_BR: - case ARM64_INS_RET: + case AArch64_INS_B: + case AArch64_INS_BR: + case AArch64_INS_RET: #if CS_API_MAJOR > 4 - 
case ARM64_INS_BRAA: - case ARM64_INS_BRAAZ: - case ARM64_INS_BRAB: - case ARM64_INS_BRABZ: - case ARM64_INS_RETAA: - case ARM64_INS_RETAB: + case AArch64_INS_BRAA: + case AArch64_INS_BRAAZ: + case AArch64_INS_BRAB: + case AArch64_INS_BRABZ: + case AArch64_INS_RETAA: + case AArch64_INS_RETAB: #endif return branch(insn); - case ARM64_INS_BL: - case ARM64_INS_BLR: + case AArch64_INS_BL: + case AArch64_INS_BLR: #if CS_API_MAJOR > 4 - case ARM64_INS_BLRAA: - case ARM64_INS_BLRAAZ: - case ARM64_INS_BLRAB: - case ARM64_INS_BLRABZ: + case AArch64_INS_BLRAA: + case AArch64_INS_BLRAAZ: + case AArch64_INS_BLRAB: + case AArch64_INS_BLRABZ: #endif return bl(insn); - case ARM64_INS_BFM: - case ARM64_INS_BFI: - case ARM64_INS_BFXIL: + case AArch64_INS_BFM: + case AArch64_INS_BFI: + case AArch64_INS_BFXIL: return bfm(insn); - case ARM64_INS_BIC: + case AArch64_INS_BIC: #if CS_API_MAJOR > 4 - case ARM64_INS_BICS: + case AArch64_INS_BICS: #endif return bic(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_CAS: - case ARM64_INS_CASA: - case ARM64_INS_CASAL: - case ARM64_INS_CASL: - case ARM64_INS_CASB: - case ARM64_INS_CASAB: - case ARM64_INS_CASALB: - case ARM64_INS_CASLB: - case ARM64_INS_CASH: - case ARM64_INS_CASAH: - case ARM64_INS_CASALH: - case ARM64_INS_CASLH: + case AArch64_INS_CAS: + case AArch64_INS_CASA: + case AArch64_INS_CASAL: + case AArch64_INS_CASL: + case AArch64_INS_CASB: + case AArch64_INS_CASAB: + case AArch64_INS_CASALB: + case AArch64_INS_CASLB: + case AArch64_INS_CASH: + case AArch64_INS_CASAH: + case AArch64_INS_CASALH: + case AArch64_INS_CASLH: return cas(insn); - case ARM64_INS_CASP: - case ARM64_INS_CASPA: - case ARM64_INS_CASPAL: - case ARM64_INS_CASPL: + case AArch64_INS_CASP: + case AArch64_INS_CASPA: + case AArch64_INS_CASPAL: + case AArch64_INS_CASPL: return casp(insn); #endif - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case AArch64_INS_CBZ: + case AArch64_INS_CBNZ: return cbz(insn); - case ARM64_INS_CMP: - case ARM64_INS_CMN: - case ARM64_INS_CCMP: - 
case ARM64_INS_CCMN: + case AArch64_INS_CMP: + case AArch64_INS_CMN: + case AArch64_INS_CCMP: + case AArch64_INS_CCMN: return cmp(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_CFINV: + case AArch64_INS_CFINV: return SETG("cf", INV(VARG("cf"))); #endif - case ARM64_INS_CINC: - case ARM64_INS_CSINC: - case ARM64_INS_CINV: - case ARM64_INS_CSINV: - case ARM64_INS_CNEG: - case ARM64_INS_CSNEG: - case ARM64_INS_CSEL: + case AArch64_INS_CINC: + case AArch64_INS_CSINC: + case AArch64_INS_CINV: + case AArch64_INS_CSINV: + case AArch64_INS_CNEG: + case AArch64_INS_CSNEG: + case AArch64_INS_CSEL: return csinc(insn); - case ARM64_INS_CSET: - case ARM64_INS_CSETM: + case AArch64_INS_CSET: + case AArch64_INS_CSETM: return cset(insn); - case ARM64_INS_CLS: + case AArch64_INS_CLS: return cls(insn); - case ARM64_INS_CLZ: + case AArch64_INS_CLZ: return clz(insn); - case ARM64_INS_EXTR: + case AArch64_INS_EXTR: return extr(insn); - case ARM64_INS_HVC: + case AArch64_INS_HVC: return hvc(insn); - case ARM64_INS_SVC: + case AArch64_INS_SVC: return svc(insn); - case ARM64_INS_LDR: - case ARM64_INS_LDRB: - case ARM64_INS_LDRH: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSW: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDAR: - case ARM64_INS_LDARB: - case ARM64_INS_LDARH: - case ARM64_INS_LDAXP: - case ARM64_INS_LDXP: - case ARM64_INS_LDAXR: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDP: - case ARM64_INS_LDNP: - case ARM64_INS_LDPSW: - case ARM64_INS_LDTR: - case ARM64_INS_LDTRB: - case ARM64_INS_LDTRH: - case ARM64_INS_LDTRSW: - case ARM64_INS_LDTRSB: - case ARM64_INS_LDTRSH: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: + case AArch64_INS_LDR: + case AArch64_INS_LDRB: + case AArch64_INS_LDRH: + case AArch64_INS_LDUR: + case AArch64_INS_LDURB: + case AArch64_INS_LDURH: + case AArch64_INS_LDRSW: + 
case AArch64_INS_LDRSB: + case AArch64_INS_LDRSH: + case AArch64_INS_LDURSW: + case AArch64_INS_LDURSB: + case AArch64_INS_LDURSH: + case AArch64_INS_LDAR: + case AArch64_INS_LDARB: + case AArch64_INS_LDARH: + case AArch64_INS_LDAXP: + case AArch64_INS_LDXP: + case AArch64_INS_LDAXR: + case AArch64_INS_LDAXRB: + case AArch64_INS_LDAXRH: + case AArch64_INS_LDP: + case AArch64_INS_LDNP: + case AArch64_INS_LDPSW: + case AArch64_INS_LDTR: + case AArch64_INS_LDTRB: + case AArch64_INS_LDTRH: + case AArch64_INS_LDTRSW: + case AArch64_INS_LDTRSB: + case AArch64_INS_LDTRSH: + case AArch64_INS_LDXR: + case AArch64_INS_LDXRB: + case AArch64_INS_LDXRH: #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPR: - case ARM64_INS_LDAPRB: - case ARM64_INS_LDAPRH: - case ARM64_INS_LDAPUR: - case ARM64_INS_LDAPURB: - case ARM64_INS_LDAPURH: - case ARM64_INS_LDAPURSB: - case ARM64_INS_LDAPURSH: - case ARM64_INS_LDAPURSW: - case ARM64_INS_LDLAR: - case ARM64_INS_LDLARB: - case ARM64_INS_LDLARH: - case ARM64_INS_LDRAA: - case ARM64_INS_LDRAB: + case AArch64_INS_LDAPR: + case AArch64_INS_LDAPRB: + case AArch64_INS_LDAPRH: + case AArch64_INS_LDAPUR: + case AArch64_INS_LDAPURB: + case AArch64_INS_LDAPURH: + case AArch64_INS_LDAPURSB: + case AArch64_INS_LDAPURSH: + case AArch64_INS_LDAPURSW: + case AArch64_INS_LDLAR: + case AArch64_INS_LDLARB: + case AArch64_INS_LDLARH: + case AArch64_INS_LDRAA: + case AArch64_INS_LDRAB: #endif return ldr(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_LDADD: - case ARM64_INS_LDADDA: - case ARM64_INS_LDADDAL: - case ARM64_INS_LDADDL: - case ARM64_INS_LDADDB: - case ARM64_INS_LDADDAB: - case ARM64_INS_LDADDALB: - case ARM64_INS_LDADDLB: - case ARM64_INS_LDADDH: - case ARM64_INS_LDADDAH: - case ARM64_INS_LDADDALH: - case ARM64_INS_LDADDLH: - case ARM64_INS_STADD: - case ARM64_INS_STADDL: - case ARM64_INS_STADDB: - case ARM64_INS_STADDLB: - case ARM64_INS_STADDH: - case ARM64_INS_STADDLH: - case ARM64_INS_LDCLRB: - case ARM64_INS_LDCLRAB: - case ARM64_INS_LDCLRALB: - case 
ARM64_INS_LDCLRLB: - case ARM64_INS_LDCLRH: - case ARM64_INS_LDCLRAH: - case ARM64_INS_LDCLRALH: - case ARM64_INS_LDCLRLH: - case ARM64_INS_LDCLR: - case ARM64_INS_LDCLRA: - case ARM64_INS_LDCLRAL: - case ARM64_INS_LDCLRL: - case ARM64_INS_STCLR: - case ARM64_INS_STCLRL: - case ARM64_INS_STCLRB: - case ARM64_INS_STCLRLB: - case ARM64_INS_STCLRH: - case ARM64_INS_STCLRLH: - case ARM64_INS_LDEORB: - case ARM64_INS_LDEORAB: - case ARM64_INS_LDEORALB: - case ARM64_INS_LDEORLB: - case ARM64_INS_LDEORH: - case ARM64_INS_LDEORAH: - case ARM64_INS_LDEORALH: - case ARM64_INS_LDEORLH: - case ARM64_INS_LDEOR: - case ARM64_INS_LDEORA: - case ARM64_INS_LDEORAL: - case ARM64_INS_LDEORL: - case ARM64_INS_STEOR: - case ARM64_INS_STEORL: - case ARM64_INS_STEORB: - case ARM64_INS_STEORLB: - case ARM64_INS_STEORH: - case ARM64_INS_STEORLH: - case ARM64_INS_LDSETB: - case ARM64_INS_LDSETAB: - case ARM64_INS_LDSETALB: - case ARM64_INS_LDSETLB: - case ARM64_INS_LDSETH: - case ARM64_INS_LDSETAH: - case ARM64_INS_LDSETALH: - case ARM64_INS_LDSETLH: - case ARM64_INS_LDSET: - case ARM64_INS_LDSETA: - case ARM64_INS_LDSETAL: - case ARM64_INS_LDSETL: - case ARM64_INS_STSET: - case ARM64_INS_STSETL: - case ARM64_INS_STSETB: - case ARM64_INS_STSETLB: - case ARM64_INS_STSETH: - case ARM64_INS_STSETLH: - case ARM64_INS_LDSMAXB: - case ARM64_INS_LDSMAXAB: - case ARM64_INS_LDSMAXALB: - case ARM64_INS_LDSMAXLB: - case ARM64_INS_LDSMAXH: - case ARM64_INS_LDSMAXAH: - case ARM64_INS_LDSMAXALH: - case ARM64_INS_LDSMAXLH: - case ARM64_INS_LDSMAX: - case ARM64_INS_LDSMAXA: - case ARM64_INS_LDSMAXAL: - case ARM64_INS_LDSMAXL: - case ARM64_INS_STSMAX: - case ARM64_INS_STSMAXL: - case ARM64_INS_STSMAXB: - case ARM64_INS_STSMAXLB: - case ARM64_INS_STSMAXH: - case ARM64_INS_STSMAXLH: - case ARM64_INS_LDSMINB: - case ARM64_INS_LDSMINAB: - case ARM64_INS_LDSMINALB: - case ARM64_INS_LDSMINLB: - case ARM64_INS_LDSMINH: - case ARM64_INS_LDSMINAH: - case ARM64_INS_LDSMINALH: - case ARM64_INS_LDSMINLH: - case 
ARM64_INS_LDSMIN: - case ARM64_INS_LDSMINA: - case ARM64_INS_LDSMINAL: - case ARM64_INS_LDSMINL: - case ARM64_INS_STSMIN: - case ARM64_INS_STSMINL: - case ARM64_INS_STSMINB: - case ARM64_INS_STSMINLB: - case ARM64_INS_STSMINH: - case ARM64_INS_STSMINLH: - case ARM64_INS_LDUMAXB: - case ARM64_INS_LDUMAXAB: - case ARM64_INS_LDUMAXALB: - case ARM64_INS_LDUMAXLB: - case ARM64_INS_LDUMAXH: - case ARM64_INS_LDUMAXAH: - case ARM64_INS_LDUMAXALH: - case ARM64_INS_LDUMAXLH: - case ARM64_INS_LDUMAX: - case ARM64_INS_LDUMAXA: - case ARM64_INS_LDUMAXAL: - case ARM64_INS_LDUMAXL: - case ARM64_INS_STUMAX: - case ARM64_INS_STUMAXL: - case ARM64_INS_STUMAXB: - case ARM64_INS_STUMAXLB: - case ARM64_INS_STUMAXH: - case ARM64_INS_STUMAXLH: - case ARM64_INS_LDUMINB: - case ARM64_INS_LDUMINAB: - case ARM64_INS_LDUMINALB: - case ARM64_INS_LDUMINLB: - case ARM64_INS_LDUMINH: - case ARM64_INS_LDUMINAH: - case ARM64_INS_LDUMINALH: - case ARM64_INS_LDUMINLH: - case ARM64_INS_LDUMIN: - case ARM64_INS_LDUMINA: - case ARM64_INS_LDUMINAL: - case ARM64_INS_LDUMINL: - case ARM64_INS_STUMIN: - case ARM64_INS_STUMINL: - case ARM64_INS_STUMINB: - case ARM64_INS_STUMINLB: - case ARM64_INS_STUMINH: - case ARM64_INS_STUMINLH: + case AArch64_INS_LDADD: + case AArch64_INS_LDADDA: + case AArch64_INS_LDADDAL: + case AArch64_INS_LDADDL: + case AArch64_INS_LDADDB: + case AArch64_INS_LDADDAB: + case AArch64_INS_LDADDALB: + case AArch64_INS_LDADDLB: + case AArch64_INS_LDADDH: + case AArch64_INS_LDADDAH: + case AArch64_INS_LDADDALH: + case AArch64_INS_LDADDLH: + case AArch64_INS_STADD: + case AArch64_INS_STADDL: + case AArch64_INS_STADDB: + case AArch64_INS_STADDLB: + case AArch64_INS_STADDH: + case AArch64_INS_STADDLH: + case AArch64_INS_LDCLRB: + case AArch64_INS_LDCLRAB: + case AArch64_INS_LDCLRALB: + case AArch64_INS_LDCLRLB: + case AArch64_INS_LDCLRH: + case AArch64_INS_LDCLRAH: + case AArch64_INS_LDCLRALH: + case AArch64_INS_LDCLRLH: + case AArch64_INS_LDCLR: + case AArch64_INS_LDCLRA: + case 
AArch64_INS_LDCLRAL: + case AArch64_INS_LDCLRL: + case AArch64_INS_STCLR: + case AArch64_INS_STCLRL: + case AArch64_INS_STCLRB: + case AArch64_INS_STCLRLB: + case AArch64_INS_STCLRH: + case AArch64_INS_STCLRLH: + case AArch64_INS_LDEORB: + case AArch64_INS_LDEORAB: + case AArch64_INS_LDEORALB: + case AArch64_INS_LDEORLB: + case AArch64_INS_LDEORH: + case AArch64_INS_LDEORAH: + case AArch64_INS_LDEORALH: + case AArch64_INS_LDEORLH: + case AArch64_INS_LDEOR: + case AArch64_INS_LDEORA: + case AArch64_INS_LDEORAL: + case AArch64_INS_LDEORL: + case AArch64_INS_STEOR: + case AArch64_INS_STEORL: + case AArch64_INS_STEORB: + case AArch64_INS_STEORLB: + case AArch64_INS_STEORH: + case AArch64_INS_STEORLH: + case AArch64_INS_LDSETB: + case AArch64_INS_LDSETAB: + case AArch64_INS_LDSETALB: + case AArch64_INS_LDSETLB: + case AArch64_INS_LDSETH: + case AArch64_INS_LDSETAH: + case AArch64_INS_LDSETALH: + case AArch64_INS_LDSETLH: + case AArch64_INS_LDSET: + case AArch64_INS_LDSETA: + case AArch64_INS_LDSETAL: + case AArch64_INS_LDSETL: + case AArch64_INS_STSET: + case AArch64_INS_STSETL: + case AArch64_INS_STSETB: + case AArch64_INS_STSETLB: + case AArch64_INS_STSETH: + case AArch64_INS_STSETLH: + case AArch64_INS_LDSMAXB: + case AArch64_INS_LDSMAXAB: + case AArch64_INS_LDSMAXALB: + case AArch64_INS_LDSMAXLB: + case AArch64_INS_LDSMAXH: + case AArch64_INS_LDSMAXAH: + case AArch64_INS_LDSMAXALH: + case AArch64_INS_LDSMAXLH: + case AArch64_INS_LDSMAX: + case AArch64_INS_LDSMAXA: + case AArch64_INS_LDSMAXAL: + case AArch64_INS_LDSMAXL: + case AArch64_INS_STSMAX: + case AArch64_INS_STSMAXL: + case AArch64_INS_STSMAXB: + case AArch64_INS_STSMAXLB: + case AArch64_INS_STSMAXH: + case AArch64_INS_STSMAXLH: + case AArch64_INS_LDSMINB: + case AArch64_INS_LDSMINAB: + case AArch64_INS_LDSMINALB: + case AArch64_INS_LDSMINLB: + case AArch64_INS_LDSMINH: + case AArch64_INS_LDSMINAH: + case AArch64_INS_LDSMINALH: + case AArch64_INS_LDSMINLH: + case AArch64_INS_LDSMIN: + case 
AArch64_INS_LDSMINA: + case AArch64_INS_LDSMINAL: + case AArch64_INS_LDSMINL: + case AArch64_INS_STSMIN: + case AArch64_INS_STSMINL: + case AArch64_INS_STSMINB: + case AArch64_INS_STSMINLB: + case AArch64_INS_STSMINH: + case AArch64_INS_STSMINLH: + case AArch64_INS_LDUMAXB: + case AArch64_INS_LDUMAXAB: + case AArch64_INS_LDUMAXALB: + case AArch64_INS_LDUMAXLB: + case AArch64_INS_LDUMAXH: + case AArch64_INS_LDUMAXAH: + case AArch64_INS_LDUMAXALH: + case AArch64_INS_LDUMAXLH: + case AArch64_INS_LDUMAX: + case AArch64_INS_LDUMAXA: + case AArch64_INS_LDUMAXAL: + case AArch64_INS_LDUMAXL: + case AArch64_INS_STUMAX: + case AArch64_INS_STUMAXL: + case AArch64_INS_STUMAXB: + case AArch64_INS_STUMAXLB: + case AArch64_INS_STUMAXH: + case AArch64_INS_STUMAXLH: + case AArch64_INS_LDUMINB: + case AArch64_INS_LDUMINAB: + case AArch64_INS_LDUMINALB: + case AArch64_INS_LDUMINLB: + case AArch64_INS_LDUMINH: + case AArch64_INS_LDUMINAH: + case AArch64_INS_LDUMINALH: + case AArch64_INS_LDUMINLH: + case AArch64_INS_LDUMIN: + case AArch64_INS_LDUMINA: + case AArch64_INS_LDUMINAL: + case AArch64_INS_LDUMINL: + case AArch64_INS_STUMIN: + case AArch64_INS_STUMINL: + case AArch64_INS_STUMINB: + case AArch64_INS_STUMINLB: + case AArch64_INS_STUMINH: + case AArch64_INS_STUMINLH: return ldadd(insn); #endif - case ARM64_INS_MADD: - case ARM64_INS_MSUB: + case AArch64_INS_MADD: + case AArch64_INS_MSUB: return madd(insn); - case ARM64_INS_MUL: - case ARM64_INS_MNEG: + case AArch64_INS_MUL: + case AArch64_INS_MNEG: return mul(insn); - case ARM64_INS_MOV: - case ARM64_INS_MOVZ: + case AArch64_INS_MOV: + case AArch64_INS_MOVZ: return mov(insn); - case ARM64_INS_MOVK: + case AArch64_INS_MOVK: return movk(insn); - case ARM64_INS_MOVN: + case AArch64_INS_MOVN: return movn(insn); - case ARM64_INS_MSR: + case AArch64_INS_MSR: return msr(insn); - case ARM64_INS_MRS: + case AArch64_INS_MRS: return mrs(insn); - case ARM64_INS_MVN: - case ARM64_INS_NEG: - case ARM64_INS_NGC: + case AArch64_INS_MVN: + case 
AArch64_INS_NEG: + case AArch64_INS_NGC: #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: - case ARM64_INS_NGCS: + case AArch64_INS_NEGS: + case AArch64_INS_NGCS: #endif return mvn(insn); - case ARM64_INS_RBIT: + case AArch64_INS_RBIT: return rbit(insn); - case ARM64_INS_REV: - case ARM64_INS_REV32: - case ARM64_INS_REV16: + case AArch64_INS_REV: + case AArch64_INS_REV32: + case AArch64_INS_REV16: return rev(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_RMIF: + case AArch64_INS_RMIF: return rmif(insn); #endif - case ARM64_INS_SBFIZ: - case ARM64_INS_SBFX: - case ARM64_INS_UBFIZ: - case ARM64_INS_UBFX: + case AArch64_INS_SBFIZ: + case AArch64_INS_SBFX: + case AArch64_INS_UBFIZ: + case AArch64_INS_UBFX: return sbfx(insn); - case ARM64_INS_SDIV: + case AArch64_INS_SDIV: return sdiv(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_SETF8: - case ARM64_INS_SETF16: + case AArch64_INS_SETF8: + case AArch64_INS_SETF16: return setf(insn); #endif - case ARM64_INS_SMADDL: - case ARM64_INS_SMSUBL: - case ARM64_INS_UMADDL: - case ARM64_INS_UMSUBL: + case AArch64_INS_SMADDL: + case AArch64_INS_SMSUBL: + case AArch64_INS_UMADDL: + case AArch64_INS_UMSUBL: return smaddl(insn); - case ARM64_INS_SMULL: - case ARM64_INS_SMNEGL: - case ARM64_INS_UMULL: - case ARM64_INS_UMNEGL: + case AArch64_INS_SMULL: + case AArch64_INS_SMNEGL: + case AArch64_INS_UMULL: + case AArch64_INS_UMNEGL: return smull(insn); - case ARM64_INS_SMULH: - case ARM64_INS_UMULH: + case AArch64_INS_SMULH: + case AArch64_INS_UMULH: return smulh(insn); - case ARM64_INS_STR: - case ARM64_INS_STUR: - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STRH: - case ARM64_INS_STURH: - case ARM64_INS_STLR: - case ARM64_INS_STLRB: - case ARM64_INS_STLRH: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: - case ARM64_INS_STXP: - case ARM64_INS_STLXR: - case ARM64_INS_STLXRB: - case ARM64_INS_STLXRH: - case ARM64_INS_STLXP: - case ARM64_INS_STTR: - case 
ARM64_INS_STTRB: - case ARM64_INS_STTRH: + case AArch64_INS_STR: + case AArch64_INS_STUR: + case AArch64_INS_STRB: + case AArch64_INS_STURB: + case AArch64_INS_STRH: + case AArch64_INS_STURH: + case AArch64_INS_STLR: + case AArch64_INS_STLRB: + case AArch64_INS_STLRH: + case AArch64_INS_STP: + case AArch64_INS_STNP: + case AArch64_INS_STXR: + case AArch64_INS_STXRB: + case AArch64_INS_STXRH: + case AArch64_INS_STXP: + case AArch64_INS_STLXR: + case AArch64_INS_STLXRB: + case AArch64_INS_STLXRH: + case AArch64_INS_STLXP: + case AArch64_INS_STTR: + case AArch64_INS_STTRB: + case AArch64_INS_STTRH: #if CS_API_MAJOR > 4 - case ARM64_INS_STLLR: - case ARM64_INS_STLLRB: - case ARM64_INS_STLLRH: - case ARM64_INS_STLUR: - case ARM64_INS_STLURB: - case ARM64_INS_STLURH: + case AArch64_INS_STLLR: + case AArch64_INS_STLLRB: + case AArch64_INS_STLLRH: + case AArch64_INS_STLUR: + case AArch64_INS_STLURB: + case AArch64_INS_STLURH: #endif return str(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_SWP: - case ARM64_INS_SWPA: - case ARM64_INS_SWPAL: - case ARM64_INS_SWPL: - case ARM64_INS_SWPB: - case ARM64_INS_SWPAB: - case ARM64_INS_SWPALB: - case ARM64_INS_SWPLB: - case ARM64_INS_SWPH: - case ARM64_INS_SWPAH: - case ARM64_INS_SWPALH: - case ARM64_INS_SWPLH: + case AArch64_INS_SWP: + case AArch64_INS_SWPA: + case AArch64_INS_SWPAL: + case AArch64_INS_SWPL: + case AArch64_INS_SWPB: + case AArch64_INS_SWPAB: + case AArch64_INS_SWPALB: + case AArch64_INS_SWPLB: + case AArch64_INS_SWPH: + case AArch64_INS_SWPAH: + case AArch64_INS_SWPALH: + case AArch64_INS_SWPLH: return swp(insn); #endif - case ARM64_INS_SXTB: - case ARM64_INS_SXTH: - case ARM64_INS_SXTW: - case ARM64_INS_UXTB: - case ARM64_INS_UXTH: + case AArch64_INS_SXTB: + case AArch64_INS_SXTH: + case AArch64_INS_SXTW: + case AArch64_INS_UXTB: + case AArch64_INS_UXTH: return sxt(insn); - case ARM64_INS_TBNZ: - case ARM64_INS_TBZ: + case AArch64_INS_TBNZ: + case AArch64_INS_TBZ: return tbz(insn); - case ARM64_INS_TST: + case 
AArch64_INS_TST: return tst(insn); - case ARM64_INS_UDIV: + case AArch64_INS_UDIV: return udiv(insn); default: break; diff --git a/librz/analysis/p/analysis_arm_cs.c b/librz/analysis/p/analysis_arm_cs.c index 5ee9180dc42..a9471a2a6a7 100644 --- a/librz/analysis/p/analysis_arm_cs.c +++ b/librz/analysis/p/analysis_arm_cs.c @@ -266,65 +266,65 @@ static void opex(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_free(pj); } -static const char *cc_name64(arm64_cc cc) { +static const char *cc_name64(AArch64CC_CondCode cc) { switch (cc) { - case ARM64_CC_EQ: // Equal + case AArch64CC_EQ: // Equal return "eq"; - case ARM64_CC_NE: // Not equal: Not equal, or unordered + case AArch64CC_NE: // Not equal: Not equal, or unordered return "ne"; - case ARM64_CC_HS: // Unsigned higher or same: >, ==, or unordered + case AArch64CC_HS: // Unsigned higher or same: >, ==, or unordered return "hs"; - case ARM64_CC_LO: // Unsigned lower or same: Less than + case AArch64CC_LO: // Unsigned lower or same: Less than return "lo"; - case ARM64_CC_MI: // Minus, negative: Less than + case AArch64CC_MI: // Minus, negative: Less than return "mi"; - case ARM64_CC_PL: // Plus, positive or zero: >, ==, or unordered + case AArch64CC_PL: // Plus, positive or zero: >, ==, or unordered return "pl"; - case ARM64_CC_VS: // Overflow: Unordered + case AArch64CC_VS: // Overflow: Unordered return "vs"; - case ARM64_CC_VC: // No overflow: Ordered + case AArch64CC_VC: // No overflow: Ordered return "vc"; - case ARM64_CC_HI: // Unsigned higher: Greater than, or unordered + case AArch64CC_HI: // Unsigned higher: Greater than, or unordered return "hi"; - case ARM64_CC_LS: // Unsigned lower or same: Less than or equal + case AArch64CC_LS: // Unsigned lower or same: Less than or equal return "ls"; - case ARM64_CC_GE: // Greater than or equal: Greater than or equal + case AArch64CC_GE: // Greater than or equal: Greater than or equal return "ge"; - case ARM64_CC_LT: // Less than: Less than, or unordered + case 
AArch64CC_LT: // Less than: Less than, or unordered return "lt"; - case ARM64_CC_GT: // Signed greater than: Greater than + case AArch64CC_GT: // Signed greater than: Greater than return "gt"; - case ARM64_CC_LE: // Signed less than or equal: <, ==, or unordered + case AArch64CC_LE: // Signed less than or equal: <, ==, or unordered return "le"; default: return ""; } } -static const char *extender_name(arm64_extender extender) { +static const char *extender_name(aarch64_extender extender) { switch (extender) { - case ARM64_EXT_UXTB: + case AArch64_EXT_UXTB: return "uxtb"; - case ARM64_EXT_UXTH: + case AArch64_EXT_UXTH: return "uxth"; - case ARM64_EXT_UXTW: + case AArch64_EXT_UXTW: return "uxtw"; - case ARM64_EXT_UXTX: + case AArch64_EXT_UXTX: return "uxtx"; - case ARM64_EXT_SXTB: + case AArch64_EXT_SXTB: return "sxtb"; - case ARM64_EXT_SXTH: + case AArch64_EXT_SXTH: return "sxth"; - case ARM64_EXT_SXTW: + case AArch64_EXT_SXTW: return "sxtw"; - case ARM64_EXT_SXTX: + case AArch64_EXT_SXTX: return "sxtx"; default: return ""; } } -static const char *vas_name(arm64_vas vas) { +static const char *vas_name(AArch64Layout_VectorLayout vas) { switch (vas) { case ARM64_VAS_8B: return "8b"; @@ -386,46 +386,46 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { } pj_o(pj); pj_ka(pj, "operands"); - cs_arm64 *x = &insn->detail->arm64; + cs_aarch64 *x = &insn->detail->aarch64; for (i = 0; i < x->op_count; i++) { - cs_arm64_op *op = x->operands + i; + cs_aarch64_op *op = x->operands + i; pj_o(pj); switch (op->type) { - case ARM64_OP_REG: + case AArch64_OP_REG: pj_ks(pj, "type", "reg"); pj_ks(pj, "value", cs_reg_name(handle, op->reg)); break; - case ARM64_OP_REG_MRS: + case AArch64_OP_REG_MRS: pj_ks(pj, "type", "reg_mrs"); // TODO value break; - case ARM64_OP_REG_MSR: + case AArch64_OP_REG_MSR: pj_ks(pj, "type", "reg_msr"); // TODO value break; - case ARM64_OP_IMM: + case AArch64_OP_IMM: pj_ks(pj, "type", "imm"); pj_kN(pj, "value", op->imm); break; - case 
ARM64_OP_MEM: + case AArch64_OP_MEM: pj_ks(pj, "type", "mem"); - if (op->mem.base != ARM64_REG_INVALID) { + if (op->mem.base != AArch64_REG_INVALID) { pj_ks(pj, "base", cs_reg_name(handle, op->mem.base)); } - if (op->mem.index != ARM64_REG_INVALID) { + if (op->mem.index != AArch64_REG_INVALID) { pj_ks(pj, "index", cs_reg_name(handle, op->mem.index)); } pj_ki(pj, "disp", op->mem.disp); break; - case ARM64_OP_FP: + case AArch64_OP_FP: pj_ks(pj, "type", "fp"); pj_kd(pj, "value", op->fp); break; - case ARM64_OP_CIMM: + case AArch64_OP_CIMM: pj_ks(pj, "type", "cimm"); pj_kN(pj, "value", op->imm); break; - case ARM64_OP_PSTATE: + case AArch64_OP_PSTATE: pj_ks(pj, "type", "pstate"); switch (op->pstate) { case ARM64_PSTATE_SPSEL: @@ -441,15 +441,15 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_ki(pj, "value", op->pstate); } break; - case ARM64_OP_SYS: + case AArch64_OP_SYS: pj_ks(pj, "type", "sys"); pj_kn(pj, "value", (ut64)op->sys); break; - case ARM64_OP_PREFETCH: + case AArch64_OP_PREFETCH: pj_ks(pj, "type", "prefetch"); pj_ki(pj, "value", op->prefetch - 1); break; - case ARM64_OP_BARRIER: + case AArch64_OP_BARRIER: pj_ks(pj, "type", "prefetch"); pj_ki(pj, "value", op->barrier - 1); break; @@ -457,22 +457,22 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_ks(pj, "type", "invalid"); break; } - if (op->shift.type != ARM64_SFT_INVALID) { + if (op->shift.type != AArch64_SFT_INVALID) { pj_ko(pj, "shift"); switch (op->shift.type) { - case ARM64_SFT_LSL: + case AArch64_SFT_LSL: pj_ks(pj, "type", "lsl"); break; - case ARM64_SFT_MSL: + case AArch64_SFT_MSL: pj_ks(pj, "type", "msl"); break; - case ARM64_SFT_LSR: + case AArch64_SFT_LSR: pj_ks(pj, "type", "lsr"); break; - case ARM64_SFT_ASR: + case AArch64_SFT_ASR: pj_ks(pj, "type", "asr"); break; - case ARM64_SFT_ROR: + case AArch64_SFT_ROR: pj_ks(pj, "type", "ror"); break; default: @@ -481,7 +481,7 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_kn(pj, "value", 
(ut64)op->shift.value); pj_end(pj); } - if (op->ext != ARM64_EXT_INVALID) { + if (op->ext != AArch64_EXT_INVALID) { pj_ks(pj, "ext", extender_name(op->ext)); } if (op->vector_index != -1) { @@ -504,7 +504,7 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { if (x->writeback) { pj_kb(pj, "writeback", true); } - if (x->cc != ARM64_CC_INVALID && x->cc != ARM64_CC_AL && x->cc != ARM64_CC_NV) { + if (x->cc != AArch64CC_INVALID && x->cc != AArch64CC_AL && x->cc != AArch64CC_NV) { pj_ks(pj, "cc", cc_name64(x->cc)); } pj_end(pj); @@ -543,20 +543,20 @@ static int cond_cs2r2_64(int cc) { cc = RZ_TYPE_COND_AL; } else { switch (cc) { - case ARM64_CC_EQ: cc = RZ_TYPE_COND_EQ; break; - case ARM64_CC_NE: cc = RZ_TYPE_COND_NE; break; - case ARM64_CC_HS: cc = RZ_TYPE_COND_HS; break; - case ARM64_CC_LO: cc = RZ_TYPE_COND_LO; break; - case ARM64_CC_MI: cc = RZ_TYPE_COND_MI; break; - case ARM64_CC_PL: cc = RZ_TYPE_COND_PL; break; - case ARM64_CC_VS: cc = RZ_TYPE_COND_VS; break; - case ARM64_CC_VC: cc = RZ_TYPE_COND_VC; break; - case ARM64_CC_HI: cc = RZ_TYPE_COND_HI; break; - case ARM64_CC_LS: cc = RZ_TYPE_COND_LS; break; - case ARM64_CC_GE: cc = RZ_TYPE_COND_GE; break; - case ARM64_CC_LT: cc = RZ_TYPE_COND_LT; break; - case ARM64_CC_GT: cc = RZ_TYPE_COND_GT; break; - case ARM64_CC_LE: cc = RZ_TYPE_COND_LE; break; + case AArch64CC_EQ: cc = RZ_TYPE_COND_EQ; break; + case AArch64CC_NE: cc = RZ_TYPE_COND_NE; break; + case AArch64CC_HS: cc = RZ_TYPE_COND_HS; break; + case AArch64CC_LO: cc = RZ_TYPE_COND_LO; break; + case AArch64CC_MI: cc = RZ_TYPE_COND_MI; break; + case AArch64CC_PL: cc = RZ_TYPE_COND_PL; break; + case AArch64CC_VS: cc = RZ_TYPE_COND_VS; break; + case AArch64CC_VC: cc = RZ_TYPE_COND_VC; break; + case AArch64CC_HI: cc = RZ_TYPE_COND_HI; break; + case AArch64CC_LS: cc = RZ_TYPE_COND_LS; break; + case AArch64CC_GE: cc = RZ_TYPE_COND_GE; break; + case AArch64CC_LT: cc = RZ_TYPE_COND_LT; break; + case AArch64CC_GT: cc = RZ_TYPE_COND_GT; break; + case 
AArch64CC_LE: cc = RZ_TYPE_COND_LE; break; } } return cc; @@ -583,17 +583,17 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { op->family = RZ_ANALYSIS_OP_FAMILY_CPU; } - op->cond = cond_cs2r2_64(insn->detail->arm64.cc); + op->cond = cond_cs2r2_64(insn->detail->aarch64.cc); if (op->cond == RZ_TYPE_COND_NV) { op->type = RZ_ANALYSIS_OP_TYPE_NOP; return; } - switch (insn->detail->arm64.cc) { - case ARM64_CC_GE: - case ARM64_CC_GT: - case ARM64_CC_LE: - case ARM64_CC_LT: + switch (insn->detail->aarch64.cc) { + case AArch64CC_GE: + case AArch64CC_GT: + case AArch64CC_LE: + case AArch64CC_LT: op->sign = true; break; default: @@ -602,62 +602,62 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { switch (insn->id) { #if CS_API_MAJOR > 4 - case ARM64_INS_PACDA: - case ARM64_INS_PACDB: - case ARM64_INS_PACDZA: - case ARM64_INS_PACDZB: - case ARM64_INS_PACGA: - case ARM64_INS_PACIA: - case ARM64_INS_PACIA1716: - case ARM64_INS_PACIASP: - case ARM64_INS_PACIAZ: - case ARM64_INS_PACIB: - case ARM64_INS_PACIB1716: - case ARM64_INS_PACIBSP: - case ARM64_INS_PACIBZ: - case ARM64_INS_PACIZA: - case ARM64_INS_PACIZB: - case ARM64_INS_AUTDA: - case ARM64_INS_AUTDB: - case ARM64_INS_AUTDZA: - case ARM64_INS_AUTDZB: - case ARM64_INS_AUTIA: - case ARM64_INS_AUTIA1716: - case ARM64_INS_AUTIASP: - case ARM64_INS_AUTIAZ: - case ARM64_INS_AUTIB: - case ARM64_INS_AUTIB1716: - case ARM64_INS_AUTIBSP: - case ARM64_INS_AUTIBZ: - case ARM64_INS_AUTIZA: - case ARM64_INS_AUTIZB: - case ARM64_INS_XPACD: - case ARM64_INS_XPACI: - case ARM64_INS_XPACLRI: + case AArch64_INS_PACDA: + case AArch64_INS_PACDB: + case AArch64_INS_PACDZA: + case AArch64_INS_PACDZB: + case AArch64_INS_PACGA: + case AArch64_INS_PACIA: + case AArch64_INS_PACIA1716: + case AArch64_INS_PACIASP: + case AArch64_INS_PACIAZ: + case AArch64_INS_PACIB: + case AArch64_INS_PACIB1716: + case AArch64_INS_PACIBSP: + case AArch64_INS_PACIBZ: + case AArch64_INS_PACIZA: + case 
AArch64_INS_PACIZB: + case AArch64_INS_AUTDA: + case AArch64_INS_AUTDB: + case AArch64_INS_AUTDZA: + case AArch64_INS_AUTDZB: + case AArch64_INS_AUTIA: + case AArch64_INS_AUTIA1716: + case AArch64_INS_AUTIASP: + case AArch64_INS_AUTIAZ: + case AArch64_INS_AUTIB: + case AArch64_INS_AUTIB1716: + case AArch64_INS_AUTIBSP: + case AArch64_INS_AUTIBZ: + case AArch64_INS_AUTIZA: + case AArch64_INS_AUTIZB: + case AArch64_INS_XPACD: + case AArch64_INS_XPACI: + case AArch64_INS_XPACLRI: op->type = RZ_ANALYSIS_OP_TYPE_CMP; op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; break; #endif - case ARM64_INS_SVC: + case AArch64_INS_SVC: op->type = RZ_ANALYSIS_OP_TYPE_SWI; op->val = IMM64(0); break; - case ARM64_INS_ADRP: - case ARM64_INS_ADR: + case AArch64_INS_ADRP: + case AArch64_INS_ADR: op->type = RZ_ANALYSIS_OP_TYPE_LEA; op->ptr = IMM64(1); break; - case ARM64_INS_NOP: + case AArch64_INS_NOP: op->type = RZ_ANALYSIS_OP_TYPE_NOP; op->cycles = 1; break; - case ARM64_INS_SUB: - if (ISREG64(0) && REGID64(0) == ARM64_REG_SP) { + case AArch64_INS_SUB: + if (ISREG64(0) && REGID64(0) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; if (ISIMM64(1)) { // sub sp, 0x54 op->stackptr = IMM(1); - } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == ARM64_REG_SP) { + } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == AArch64_REG_SP) { // sub sp, sp, 0x10 op->stackptr = IMM64(2); } @@ -665,31 +665,31 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_MSUB: + case AArch64_INS_MSUB: op->type = RZ_ANALYSIS_OP_TYPE_SUB; break; - case ARM64_INS_FDIV: - case ARM64_INS_SDIV: - case ARM64_INS_UDIV: + case AArch64_INS_FDIV: + case AArch64_INS_SDIV: + case AArch64_INS_UDIV: op->cycles = 4; op->type = RZ_ANALYSIS_OP_TYPE_DIV; break; - case ARM64_INS_MUL: - case ARM64_INS_SMULL: - case ARM64_INS_FMUL: - case ARM64_INS_UMULL: + case AArch64_INS_MUL: + case AArch64_INS_SMULL: + case AArch64_INS_FMUL: + case AArch64_INS_UMULL: /* 
TODO: if next instruction is also a MUL, cycles are /=2 */ /* also known as Register Indexing Addressing */ op->cycles = 4; op->type = RZ_ANALYSIS_OP_TYPE_MUL; break; - case ARM64_INS_ADD: - if (ISREG64(0) && REGID64(0) == ARM64_REG_SP) { + case AArch64_INS_ADD: + if (ISREG64(0) && REGID64(0) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; if (ISIMM64(1)) { // add sp, 0x54 op->stackptr = -(st64)IMM(1); - } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == ARM64_REG_SP) { + } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == AArch64_REG_SP) { // add sp, sp, 0x10 op->stackptr = -(st64)IMM64(2); } @@ -699,22 +699,22 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_ADC: - // case ARM64_INS_ADCS: - case ARM64_INS_UMADDL: - case ARM64_INS_SMADDL: - case ARM64_INS_FMADD: - case ARM64_INS_MADD: + case AArch64_INS_ADC: + // case AArch64_INS_ADCS: + case AArch64_INS_UMADDL: + case AArch64_INS_SMADDL: + case AArch64_INS_FMADD: + case AArch64_INS_MADD: op->type = RZ_ANALYSIS_OP_TYPE_ADD; break; - case ARM64_INS_CSEL: - case ARM64_INS_FCSEL: - case ARM64_INS_CSET: - case ARM64_INS_CINC: + case AArch64_INS_CSEL: + case AArch64_INS_FCSEL: + case AArch64_INS_CSET: + case AArch64_INS_CINC: op->type = RZ_ANALYSIS_OP_TYPE_CMOV; break; - case ARM64_INS_MOV: - if (REGID64(0) == ARM64_REG_SP) { + case AArch64_INS_MOV: + if (REGID64(0) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_RESET; op->stackptr = 0; } @@ -723,169 +723,169 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_MOVI: - case ARM64_INS_MOVK: - case ARM64_INS_MOVN: - case ARM64_INS_SMOV: - case ARM64_INS_UMOV: - case ARM64_INS_FMOV: - case ARM64_INS_SBFX: - case ARM64_INS_UBFX: - case ARM64_INS_UBFM: - case ARM64_INS_SBFIZ: - case ARM64_INS_UBFIZ: - case ARM64_INS_BIC: - case ARM64_INS_BFI: - case ARM64_INS_BFXIL: + case AArch64_INS_MOVI: + case 
AArch64_INS_MOVK: + case AArch64_INS_MOVN: + case AArch64_INS_SMOV: + case AArch64_INS_UMOV: + case AArch64_INS_FMOV: + case AArch64_INS_SBFX: + case AArch64_INS_UBFX: + case AArch64_INS_UBFM: + case AArch64_INS_SBFIZ: + case AArch64_INS_UBFIZ: + case AArch64_INS_BIC: + case AArch64_INS_BFI: + case AArch64_INS_BFXIL: op->type = RZ_ANALYSIS_OP_TYPE_MOV; break; - case ARM64_INS_MRS: - case ARM64_INS_MSR: + case AArch64_INS_MRS: + case AArch64_INS_MSR: op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->family = RZ_ANALYSIS_OP_FAMILY_PRIV; break; - case ARM64_INS_MOVZ: + case AArch64_INS_MOVZ: op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 8; op->val = IMM64(1); break; - case ARM64_INS_UXTB: - case ARM64_INS_SXTB: + case AArch64_INS_UXTB: + case AArch64_INS_SXTB: op->type = RZ_ANALYSIS_OP_TYPE_CAST; op->ptr = 0LL; op->ptrsize = 1; break; - case ARM64_INS_UXTH: - case ARM64_INS_SXTH: + case AArch64_INS_UXTH: + case AArch64_INS_SXTH: op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 2; break; - case ARM64_INS_UXTW: - case ARM64_INS_SXTW: + case AArch64_INS_UXTW: + case AArch64_INS_SXTW: op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 4; break; - case ARM64_INS_BRK: - case ARM64_INS_HLT: + case AArch64_INS_BRK: + case AArch64_INS_HLT: op->type = RZ_ANALYSIS_OP_TYPE_TRAP; // hlt stops the process, not skips some cycles like in x86 break; - case ARM64_INS_DMB: - case ARM64_INS_DSB: - case ARM64_INS_ISB: + case AArch64_INS_DMB: + case AArch64_INS_DSB: + case AArch64_INS_ISB: op->family = RZ_ANALYSIS_OP_FAMILY_THREAD; // intentional fallthrough - case ARM64_INS_IC: // instruction cache invalidate - case ARM64_INS_DC: // data cache invalidate + case AArch64_INS_IC: // instruction cache invalidate + case AArch64_INS_DC: // data cache invalidate op->type = RZ_ANALYSIS_OP_TYPE_SYNC; // or cache break; // XXX unimplemented instructions - case ARM64_INS_DUP: - case ARM64_INS_XTN: - case ARM64_INS_XTN2: - case ARM64_INS_REV64: - case 
ARM64_INS_EXT: - case ARM64_INS_INS: + case AArch64_INS_DUP: + case AArch64_INS_XTN: + case AArch64_INS_XTN2: + case AArch64_INS_REV64: + case AArch64_INS_EXT: + case AArch64_INS_INS: op->type = RZ_ANALYSIS_OP_TYPE_MOV; break; - case ARM64_INS_LSL: + case AArch64_INS_LSL: op->cycles = 1; /* fallthru */ - case ARM64_INS_SHL: - case ARM64_INS_USHLL: + case AArch64_INS_SHL: + case AArch64_INS_USHLL: op->type = RZ_ANALYSIS_OP_TYPE_SHL; break; - case ARM64_INS_LSR: + case AArch64_INS_LSR: op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_SHR; break; - case ARM64_INS_ASR: + case AArch64_INS_ASR: op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_SAR; break; - case ARM64_INS_NEG: + case AArch64_INS_NEG: #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case AArch64_INS_NEGS: #endif op->type = RZ_ANALYSIS_OP_TYPE_NOT; break; - case ARM64_INS_FCMP: - case ARM64_INS_CCMP: - case ARM64_INS_CCMN: - case ARM64_INS_CMP: - case ARM64_INS_CMN: - case ARM64_INS_TST: + case AArch64_INS_FCMP: + case AArch64_INS_CCMP: + case AArch64_INS_CCMN: + case AArch64_INS_CMP: + case AArch64_INS_CMN: + case AArch64_INS_TST: op->type = RZ_ANALYSIS_OP_TYPE_CMP; break; - case ARM64_INS_ROR: + case AArch64_INS_ROR: op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_ROR; break; - case ARM64_INS_AND: + case AArch64_INS_AND: op->type = RZ_ANALYSIS_OP_TYPE_AND; break; - case ARM64_INS_ORR: - case ARM64_INS_ORN: + case AArch64_INS_ORR: + case AArch64_INS_ORN: op->type = RZ_ANALYSIS_OP_TYPE_OR; if (ISIMM64(2)) { op->val = IMM64(2); } break; - case ARM64_INS_EOR: - case ARM64_INS_EON: + case AArch64_INS_EOR: + case AArch64_INS_EON: op->type = RZ_ANALYSIS_OP_TYPE_XOR; break; - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STUR: - case ARM64_INS_STR: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRH: - case ARM64_INS_STLXR: - case ARM64_INS_STLXRH: - case ARM64_INS_STXRB: + case AArch64_INS_STRB: + case AArch64_INS_STURB: + case AArch64_INS_STUR: + case 
AArch64_INS_STR: + case AArch64_INS_STP: + case AArch64_INS_STNP: + case AArch64_INS_STXR: + case AArch64_INS_STXRH: + case AArch64_INS_STLXR: + case AArch64_INS_STLXRH: + case AArch64_INS_STXRB: op->type = RZ_ANALYSIS_OP_TYPE_STORE; - if (ISPREINDEX64() && REGBASE64(2) == ARM64_REG_SP) { + if (ISPREINDEX64() && REGBASE64(2) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(2); - } else if (ISPOSTINDEX64() && REGID64(2) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(2) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(3); - } else if (ISPREINDEX64() && REGBASE64(1) == ARM64_REG_SP) { + } else if (ISPREINDEX64() && REGBASE64(1) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(1); - } else if (ISPOSTINDEX64() && REGID64(1) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(1) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(2); } break; - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDR: - case ARM64_INS_LDURSW: - case ARM64_INS_LDP: - case ARM64_INS_LDNP: - case ARM64_INS_LDPSW: - case ARM64_INS_LDRH: - case ARM64_INS_LDRB: - if (ISPREINDEX64() && REGBASE64(2) == ARM64_REG_SP) { + case AArch64_INS_LDUR: + case AArch64_INS_LDURB: + case AArch64_INS_LDRSW: + case AArch64_INS_LDRSB: + case AArch64_INS_LDRSH: + case AArch64_INS_LDR: + case AArch64_INS_LDURSW: + case AArch64_INS_LDP: + case AArch64_INS_LDNP: + case AArch64_INS_LDPSW: + case AArch64_INS_LDRH: + case AArch64_INS_LDRB: + if (ISPREINDEX64() && REGBASE64(2) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(2); - } else if (ISPOSTINDEX64() && REGID64(2) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(2) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(3); - } else if (ISPREINDEX64() && REGBASE64(1) == 
ARM64_REG_SP) { + } else if (ISPREINDEX64() && REGBASE64(1) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(1); - } else if (ISPOSTINDEX64() && REGID64(1) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(1) == AArch64_REG_SP) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(2); } @@ -899,14 +899,14 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { op->type = RZ_ANALYSIS_OP_TYPE_LOAD; } switch (insn->id) { - case ARM64_INS_LDPSW: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSH: - case ARM64_INS_LDRSB: + case AArch64_INS_LDPSW: + case AArch64_INS_LDRSW: + case AArch64_INS_LDRSH: + case AArch64_INS_LDRSB: op->sign = true; break; } - if (REGBASE64(1) == ARM64_REG_X29) { + if (REGBASE64(1) == AArch64_REG_X29) { op->stackop = RZ_ANALYSIS_STACK_GET; op->stackptr = 0; op->ptr = MEMDISP64(1); @@ -923,73 +923,73 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } break; #if CS_API_MAJOR > 4 - case ARM64_INS_BLRAA: - case ARM64_INS_BLRAAZ: - case ARM64_INS_BLRAB: - case ARM64_INS_BLRABZ: + case AArch64_INS_BLRAA: + case AArch64_INS_BLRAAZ: + case AArch64_INS_BLRAB: + case AArch64_INS_BLRABZ: op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RCALL; break; - case ARM64_INS_BRAA: - case ARM64_INS_BRAAZ: - case ARM64_INS_BRAB: - case ARM64_INS_BRABZ: + case AArch64_INS_BRAA: + case AArch64_INS_BRAAZ: + case AArch64_INS_BRAB: + case AArch64_INS_BRABZ: op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RJMP; break; - case ARM64_INS_LDRAA: - case ARM64_INS_LDRAB: + case AArch64_INS_LDRAA: + case AArch64_INS_LDRAB: op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_LOAD; break; - case ARM64_INS_RETAA: - case ARM64_INS_RETAB: - case ARM64_INS_ERETAA: - case ARM64_INS_ERETAB: + case AArch64_INS_RETAA: + case AArch64_INS_RETAB: + case AArch64_INS_ERETAA: + case AArch64_INS_ERETAB: op->family = 
RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RET; break; #endif - case ARM64_INS_ERET: + case AArch64_INS_ERET: op->family = RZ_ANALYSIS_OP_FAMILY_PRIV; op->type = RZ_ANALYSIS_OP_TYPE_RET; break; - case ARM64_INS_RET: + case AArch64_INS_RET: op->type = RZ_ANALYSIS_OP_TYPE_RET; break; - case ARM64_INS_BL: // bl 0x89480 + case AArch64_INS_BL: // bl 0x89480 op->type = RZ_ANALYSIS_OP_TYPE_CALL; op->jump = IMM64(0); op->fail = addr + 4; break; - case ARM64_INS_BLR: // blr x0 + case AArch64_INS_BLR: // blr x0 op->type = RZ_ANALYSIS_OP_TYPE_RCALL; op->reg = cs_reg_name(handle, REGID64(0)); op->fail = addr + 4; // op->jump = IMM64(0); break; - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case AArch64_INS_CBZ: + case AArch64_INS_CBNZ: op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(1); op->fail = addr + op->size; break; - case ARM64_INS_TBZ: - case ARM64_INS_TBNZ: + case AArch64_INS_TBZ: + case AArch64_INS_TBNZ: op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(2); op->fail = addr + op->size; break; - case ARM64_INS_BR: + case AArch64_INS_BR: op->type = RZ_ANALYSIS_OP_TYPE_RJMP; op->reg = cs_reg_name(handle, REGID64(0)); op->eob = true; break; - case ARM64_INS_B: + case AArch64_INS_B: // BX LR == RET - if (insn->detail->arm64.operands[0].reg == ARM64_REG_LR) { + if (insn->detail->aarch64.operands[0].reg == AArch64_REG_LR) { op->type = RZ_ANALYSIS_OP_TYPE_RET; - } else if (insn->detail->arm64.cc) { + } else if (insn->detail->aarch64.cc) { op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(0); op->fail = addr + op->size; @@ -1514,8 +1514,8 @@ static int parse_reg_name(RzReg *reg, RzRegItem **reg_base, RzRegItem **reg_delt return 0; } -static bool is_valid64(arm64_reg reg) { - return reg != ARM64_REG_INVALID; +static bool is_valid64(aarch64_reg reg) { + return reg != AArch64_REG_INVALID; } static char *reg_list[] = { @@ -1529,12 +1529,12 @@ static char *reg_list[] = { }; static int parse_reg64_name(RzReg *reg, RzRegItem **reg_base, RzRegItem 
**reg_delta, csh handle, cs_insn *insn, int reg_num) { - cs_arm64_op armop = INSOP64(reg_num); + cs_aarch64_op armop = INSOP64(reg_num); switch (armop.type) { - case ARM64_OP_REG: + case AArch64_OP_REG: *reg_base = rz_reg_get(reg, cs_reg_name(handle, armop.reg), RZ_REG_TYPE_ANY); break; - case ARM64_OP_MEM: + case AArch64_OP_MEM: if (is_valid64(armop.mem.base) && is_valid64(armop.mem.index)) { *reg_base = rz_reg_get(reg, cs_reg_name(handle, armop.mem.base), RZ_REG_TYPE_ANY); *reg_delta = rz_reg_get(reg, cs_reg_name(handle, armop.mem.index), RZ_REG_TYPE_ANY); @@ -1577,7 +1577,7 @@ static void set_opdir(RzAnalysisOp *op) { static void set_src_dst(RzAnalysisValue *val, RzReg *reg, csh *handle, cs_insn *insn, int x, int bits) { cs_arm_op armop = INSOP(x); - cs_arm64_op arm64op = INSOP64(x); + cs_aarch64_op arm64op = INSOP64(x); if (bits == 64) { parse_reg64_name(reg, &val->reg, &val->regdelta, *handle, insn, x); } else { @@ -1585,14 +1585,14 @@ static void set_src_dst(RzAnalysisValue *val, RzReg *reg, csh *handle, cs_insn * } if (bits == 64) { switch (arm64op.type) { - case ARM64_OP_REG: + case AArch64_OP_REG: val->type = RZ_ANALYSIS_VAL_REG; break; - case ARM64_OP_MEM: + case AArch64_OP_MEM: val->type = RZ_ANALYSIS_VAL_MEM; val->delta = arm64op.mem.disp; break; - case ARM64_OP_IMM: + case AArch64_OP_IMM: val->type = RZ_ANALYSIS_VAL_IMM; val->imm = arm64op.imm; break; @@ -1635,7 +1635,7 @@ static void create_src_dst(RzAnalysisOp *op) { static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_insn *insn, int bits) { create_src_dst(op); int i, j; - int count = bits == 64 ? insn->detail->arm64.op_count : insn->detail->arm.op_count; + int count = bits == 64 ? 
insn->detail->aarch64.op_count : insn->detail->arm.op_count; switch (op->type & RZ_ANALYSIS_OP_TYPE_MASK) { case RZ_ANALYSIS_OP_TYPE_MOV: case RZ_ANALYSIS_OP_TYPE_CMP: @@ -1660,7 +1660,7 @@ static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_in for (i = 1; i < count; i++) { #if CS_API_MAJOR > 3 if (bits == 64) { - cs_arm64_op arm64op = INSOP64(i); + cs_aarch64_op arm64op = INSOP64(i); if (arm64op.access == CS_AC_WRITE) { continue; } @@ -1682,8 +1682,8 @@ static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_in case RZ_ANALYSIS_OP_TYPE_STORE: if (count > 2) { if (bits == 64) { - cs_arm64_op arm64op = INSOP64(count - 1); - if (arm64op.type == ARM64_OP_IMM) { + cs_aarch64_op arm64op = INSOP64(count - 1); + if (arm64op.type == AArch64_OP_IMM) { count--; } } else { @@ -1748,7 +1748,7 @@ static int analysis_op(RzAnalysis *a, RzAnalysisOp *op, ut64 addr, const ut8 *bu op->size = (a->bits == 16) ? 2 : 4; op->addr = addr; if (ctx->handle == 0) { - ret = (a->bits == 64) ? cs_open(CS_ARCH_ARM64, mode, &ctx->handle) : cs_open(CS_ARCH_ARM, mode, &ctx->handle); + ret = (a->bits == 64) ? 
cs_open(CS_ARCH_AARCH64, mode, &ctx->handle) : cs_open(CS_ARCH_ARM, mode, &ctx->handle); cs_option(ctx->handle, CS_OPT_DETAIL, CS_OPT_ON); cs_option(ctx->handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_CS_REG_ALIAS); if (ret != CS_ERR_OK) { @@ -2259,47 +2259,47 @@ static ut8 *analysis_mask(RzAnalysis *analysis, int size, const ut8 *data, ut64 case 4: if (analysis->bits == 64) { switch (op->id) { - case ARM64_INS_LDP: - case ARM64_INS_LDXP: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: - case ARM64_INS_LDPSW: - case ARM64_INS_LDNP: - case ARM64_INS_LDTR: - case ARM64_INS_LDTRB: - case ARM64_INS_LDTRH: - case ARM64_INS_LDTRSB: - case ARM64_INS_LDTRSH: - case ARM64_INS_LDTRSW: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDURSW: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: + case AArch64_INS_LDP: + case AArch64_INS_LDXP: + case AArch64_INS_LDXR: + case AArch64_INS_LDXRB: + case AArch64_INS_LDXRH: + case AArch64_INS_LDPSW: + case AArch64_INS_LDNP: + case AArch64_INS_LDTR: + case AArch64_INS_LDTRB: + case AArch64_INS_LDTRH: + case AArch64_INS_LDTRSB: + case AArch64_INS_LDTRSH: + case AArch64_INS_LDTRSW: + case AArch64_INS_LDUR: + case AArch64_INS_LDURB: + case AArch64_INS_LDURH: + case AArch64_INS_LDURSB: + case AArch64_INS_LDURSH: + case AArch64_INS_LDURSW: + case AArch64_INS_STP: + case AArch64_INS_STNP: + case AArch64_INS_STXR: + case AArch64_INS_STXRB: + case AArch64_INS_STXRH: rz_write_ble(ret + idx, 0xffffffff, analysis->big_endian, 32); break; - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STURH: - case ARM64_INS_STUR: - case ARM64_INS_STR: - case ARM64_INS_STTR: - case ARM64_INS_STTRB: - case ARM64_INS_STRH: - case ARM64_INS_STTRH: - case ARM64_INS_LDR: - case ARM64_INS_LDRB: - case ARM64_INS_LDRH: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSH: { 
+ case AArch64_INS_STRB: + case AArch64_INS_STURB: + case AArch64_INS_STURH: + case AArch64_INS_STUR: + case AArch64_INS_STR: + case AArch64_INS_STTR: + case AArch64_INS_STTRB: + case AArch64_INS_STRH: + case AArch64_INS_STTRH: + case AArch64_INS_LDR: + case AArch64_INS_LDRB: + case AArch64_INS_LDRH: + case AArch64_INS_LDRSB: + case AArch64_INS_LDRSW: + case AArch64_INS_LDRSH: { bool is_literal = (opcode & 0x38000000) == 0x18000000; if (is_literal) { rz_write_ble(ret + idx, 0xff000000, analysis->big_endian, 32); @@ -2308,22 +2308,22 @@ static ut8 *analysis_mask(RzAnalysis *analysis, int size, const ut8 *data, ut64 } break; } - case ARM64_INS_B: - case ARM64_INS_BL: - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case AArch64_INS_B: + case AArch64_INS_BL: + case AArch64_INS_CBZ: + case AArch64_INS_CBNZ: if (op->type == RZ_ANALYSIS_OP_TYPE_CJMP) { rz_write_ble(ret + idx, 0xff00001f, analysis->big_endian, 32); } else { rz_write_ble(ret + idx, 0xfc000000, analysis->big_endian, 32); } break; - case ARM64_INS_TBZ: - case ARM64_INS_TBNZ: + case AArch64_INS_TBZ: + case AArch64_INS_TBNZ: rz_write_ble(ret + idx, 0xfff8001f, analysis->big_endian, 32); break; - case ARM64_INS_ADR: - case ARM64_INS_ADRP: + case AArch64_INS_ADR: + case AArch64_INS_ADRP: rz_write_ble(ret + idx, 0xff00001f, analysis->big_endian, 32); break; default: diff --git a/librz/asm/p/asm_arm_cs.c b/librz/asm/p/asm_arm_cs.c index 8aa682513d4..8e5aaf427db 100644 --- a/librz/asm/p/asm_arm_cs.c +++ b/librz/asm/p/asm_arm_cs.c @@ -85,7 +85,7 @@ static int disassemble(RzAsm *a, RzAsmOp *op, const ut8 *buf, int len) { rz_strbuf_set(&op->buf_asm, ""); } if (!ctx->cd || mode != ctx->omode) { - ret = (a->bits == 64) ? cs_open(CS_ARCH_ARM64, mode, &ctx->cd) : cs_open(CS_ARCH_ARM, mode, &ctx->cd); + ret = (a->bits == 64) ? cs_open(CS_ARCH_AARCH64, mode, &ctx->cd) : cs_open(CS_ARCH_ARM, mode, &ctx->cd); if (ret) { ret = -1; goto beach;