From 3ef7b49bd125e99cb22342561bbd842f23afac5e Mon Sep 17 00:00:00 2001 From: Rot127 Date: Sat, 26 Aug 2023 12:40:37 -0500 Subject: [PATCH] Replace ARM64 with version sensitive macros. --- librz/analysis/arch/arm/arm_accessors64.h | 24 +- librz/analysis/arch/arm/arm_cs.h | 2 +- librz/analysis/arch/arm/arm_esil64.c | 428 ++--- librz/analysis/arch/arm/arm_il64.c | 1796 ++++++++++----------- librz/analysis/p/analysis_arm_cs.c | 646 ++++---- 5 files changed, 1448 insertions(+), 1448 deletions(-) diff --git a/librz/analysis/arch/arm/arm_accessors64.h b/librz/analysis/arch/arm/arm_accessors64.h index d1c1caee956..7c63001696f 100644 --- a/librz/analysis/arch/arm/arm_accessors64.h +++ b/librz/analysis/arch/arm/arm_accessors64.h @@ -9,25 +9,25 @@ #include -#define IMM64(x) (ut64)(insn->detail->arm64.operands[x].imm) -#define INSOP64(x) insn->detail->arm64.operands[x] +#define IMM64(x) (ut64)(insn->detail->CS_aarch64().operands[x].imm) +#define INSOP64(x) insn->detail->CS_aarch64().operands[x] -#define REGID64(x) insn->detail->arm64.operands[x].reg -#define REGBASE64(x) insn->detail->arm64.operands[x].mem.base +#define REGID64(x) insn->detail->CS_aarch64().operands[x].reg +#define REGBASE64(x) insn->detail->CS_aarch64().operands[x].mem.base // s/index/base|reg/ -#define HASMEMINDEX64(x) (insn->detail->arm64.operands[x].mem.index != ARM64_REG_INVALID) -#define MEMDISP64(x) (ut64) insn->detail->arm64.operands[x].mem.disp -#define ISIMM64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_IMM) -#define ISREG64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_REG) -#define ISMEM64(x) (insn->detail->arm64.operands[x].type == ARM64_OP_MEM) +#define HASMEMINDEX64(x) (insn->detail->CS_aarch64().operands[x].mem.index != CS_AARCH64(_REG_INVALID)) +#define MEMDISP64(x) (ut64) insn->detail->CS_aarch64().operands[x].mem.disp +#define ISIMM64(x) (insn->detail->CS_aarch64().operands[x].type == CS_AARCH64(_OP_IMM)) +#define ISREG64(x) (insn->detail->CS_aarch64().operands[x].type == 
CS_AARCH64(_OP_REG)) +#define ISMEM64(x) (insn->detail->CS_aarch64().operands[x].type == CS_AARCH64(_OP_MEM)) #if CS_API_MAJOR > 3 -#define LSHIFT2_64(x) insn->detail->arm64.operands[x].shift.value +#define LSHIFT2_64(x) insn->detail->CS_aarch64().operands[x].shift.value #else #define LSHIFT2_64(x) 0 #endif -#define OPCOUNT64() insn->detail->arm64.op_count +#define OPCOUNT64() insn->detail->CS_aarch64().op_count -#define ISWRITEBACK64() (insn->detail->arm64.writeback == true) +#define ISWRITEBACK64() (insn->detail->CS_aarch64().writeback == true) #define ISPREINDEX64() (((OPCOUNT64() == 2) && (ISMEM64(1)) && (ISWRITEBACK64())) || ((OPCOUNT64() == 3) && (ISMEM64(2)) && (ISWRITEBACK64()))) #define ISPOSTINDEX64() (((OPCOUNT64() == 3) && (ISIMM64(2)) && (ISWRITEBACK64())) || ((OPCOUNT64() == 4) && (ISIMM64(3)) && (ISWRITEBACK64()))) diff --git a/librz/analysis/arch/arm/arm_cs.h b/librz/analysis/arch/arm/arm_cs.h index 8bfaa543e50..8f9852cfb71 100644 --- a/librz/analysis/arch/arm/arm_cs.h +++ b/librz/analysis/arch/arm/arm_cs.h @@ -13,7 +13,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a RZ_IPI bool rz_arm_cs_is_group_member(const cs_insn *insn, arm_insn_group feature); RZ_IPI const char *rz_arm32_cs_esil_prefix_cond(RzAnalysisOp *op, ARMCC_CondCodes cond_type); -RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_type); +RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, CS_aarch64_cc() cond_type); RZ_IPI RzILOpEffect *rz_arm_cs_32_il(csh *handle, cs_insn *insn, bool thumb); RZ_IPI RzAnalysisILConfig *rz_arm_cs_32_il_config(bool big_endian); diff --git a/librz/analysis/arch/arm/arm_esil64.c b/librz/analysis/arch/arm/arm_esil64.c index 30e04b51635..50592f27f89 100644 --- a/librz/analysis/arch/arm/arm_esil64.c +++ b/librz/analysis/arch/arm/arm_esil64.c @@ -7,75 +7,75 @@ #include "arm_cs.h" #include "arm_accessors64.h" -#define REG64(x) rz_str_get_null(cs_reg_name(*handle, 
insn->detail->arm64.operands[x].reg)) -#define MEMBASE64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->arm64.operands[x].mem.base)) -#define MEMINDEX64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->arm64.operands[x].mem.index)) +#define REG64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->CS_aarch64().operands[x].reg)) +#define MEMBASE64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->CS_aarch64().operands[x].mem.base)) +#define MEMINDEX64(x) rz_str_get_null(cs_reg_name(*handle, insn->detail->CS_aarch64().operands[x].mem.index)) -RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_type) { +RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, CS_aarch64_cc() cond_type) { const char *close_cond[2]; close_cond[0] = ""; close_cond[1] = ",}"; int close_type = 0; switch (cond_type) { - case ARM64_CC_EQ: + case CS_AARCH64CC(_EQ): close_type = 1; rz_strbuf_setf(&op->esil, "zf,?{,"); break; - case ARM64_CC_NE: + case CS_AARCH64CC(_NE): close_type = 1; rz_strbuf_setf(&op->esil, "zf,!,?{,"); break; - case ARM64_CC_HS: + case CS_AARCH64CC(_HS): close_type = 1; rz_strbuf_setf(&op->esil, "cf,?{,"); break; - case ARM64_CC_LO: + case CS_AARCH64CC(_LO): close_type = 1; rz_strbuf_setf(&op->esil, "cf,!,?{,"); break; - case ARM64_CC_MI: + case CS_AARCH64CC(_MI): close_type = 1; rz_strbuf_setf(&op->esil, "nf,?{,"); break; - case ARM64_CC_PL: + case CS_AARCH64CC(_PL): close_type = 1; rz_strbuf_setf(&op->esil, "nf,!,?{,"); break; - case ARM64_CC_VS: + case CS_AARCH64CC(_VS): close_type = 1; rz_strbuf_setf(&op->esil, "vf,?{,"); break; - case ARM64_CC_VC: + case CS_AARCH64CC(_VC): close_type = 1; rz_strbuf_setf(&op->esil, "vf,!,?{,"); break; - case ARM64_CC_HI: + case CS_AARCH64CC(_HI): close_type = 1; rz_strbuf_setf(&op->esil, "cf,zf,!,&,?{,"); break; - case ARM64_CC_LS: + case CS_AARCH64CC(_LS): close_type = 1; rz_strbuf_setf(&op->esil, "cf,!,zf,|,?{,"); break; - case ARM64_CC_GE: + case CS_AARCH64CC(_GE): close_type = 
1; rz_strbuf_setf(&op->esil, "nf,vf,^,!,?{,"); break; - case ARM64_CC_LT: + case CS_AARCH64CC(_LT): close_type = 1; rz_strbuf_setf(&op->esil, "nf,vf,^,?{,"); break; - case ARM64_CC_GT: + case CS_AARCH64CC(_GT): // zf == 0 && nf == vf close_type = 1; rz_strbuf_setf(&op->esil, "zf,!,nf,vf,^,!,&,?{,"); break; - case ARM64_CC_LE: + case CS_AARCH64CC(_LE): // zf == 1 || nf != vf close_type = 1; rz_strbuf_setf(&op->esil, "zf,nf,vf,^,|,?{,"); break; - case ARM64_CC_AL: + case CS_AARCH64CC(_AL): // always executed break; default: @@ -86,37 +86,37 @@ RZ_IPI const char *rz_arm64_cs_esil_prefix_cond(RzAnalysisOp *op, arm64_cc cond_ static int arm64_reg_width(int reg) { switch (reg) { - case ARM64_REG_W0: - case ARM64_REG_W1: - case ARM64_REG_W2: - case ARM64_REG_W3: - case ARM64_REG_W4: - case ARM64_REG_W5: - case ARM64_REG_W6: - case ARM64_REG_W7: - case ARM64_REG_W8: - case ARM64_REG_W9: - case ARM64_REG_W10: - case ARM64_REG_W11: - case ARM64_REG_W12: - case ARM64_REG_W13: - case ARM64_REG_W14: - case ARM64_REG_W15: - case ARM64_REG_W16: - case ARM64_REG_W17: - case ARM64_REG_W18: - case ARM64_REG_W19: - case ARM64_REG_W20: - case ARM64_REG_W21: - case ARM64_REG_W22: - case ARM64_REG_W23: - case ARM64_REG_W24: - case ARM64_REG_W25: - case ARM64_REG_W26: - case ARM64_REG_W27: - case ARM64_REG_W28: - case ARM64_REG_W29: - case ARM64_REG_W30: + case CS_AARCH64(_REG_W0): + case CS_AARCH64(_REG_W1): + case CS_AARCH64(_REG_W2): + case CS_AARCH64(_REG_W3): + case CS_AARCH64(_REG_W4): + case CS_AARCH64(_REG_W5): + case CS_AARCH64(_REG_W6): + case CS_AARCH64(_REG_W7): + case CS_AARCH64(_REG_W8): + case CS_AARCH64(_REG_W9): + case CS_AARCH64(_REG_W10): + case CS_AARCH64(_REG_W11): + case CS_AARCH64(_REG_W12): + case CS_AARCH64(_REG_W13): + case CS_AARCH64(_REG_W14): + case CS_AARCH64(_REG_W15): + case CS_AARCH64(_REG_W16): + case CS_AARCH64(_REG_W17): + case CS_AARCH64(_REG_W18): + case CS_AARCH64(_REG_W19): + case CS_AARCH64(_REG_W20): + case CS_AARCH64(_REG_W21): + case 
CS_AARCH64(_REG_W22): + case CS_AARCH64(_REG_W23): + case CS_AARCH64(_REG_W24): + case CS_AARCH64(_REG_W25): + case CS_AARCH64(_REG_W26): + case CS_AARCH64(_REG_W27): + case CS_AARCH64(_REG_W28): + case CS_AARCH64(_REG_W29): + case CS_AARCH64(_REG_W30): return 32; break; default: @@ -125,20 +125,20 @@ static int arm64_reg_width(int reg) { return 64; } -static int decode_sign_ext(arm64_extender extender) { +static int decode_sign_ext(CS_aarch64_extender() extender) { switch (extender) { - case ARM64_EXT_UXTB: - case ARM64_EXT_UXTH: - case ARM64_EXT_UXTW: - case ARM64_EXT_UXTX: + case CS_AARCH64(_EXT_UXTB): + case CS_AARCH64(_EXT_UXTH): + case CS_AARCH64(_EXT_UXTW): + case CS_AARCH64(_EXT_UXTX): return 0; // nothing needs to be done for unsigned - case ARM64_EXT_SXTB: + case CS_AARCH64(_EXT_SXTB): return 8; - case ARM64_EXT_SXTH: + case CS_AARCH64(_EXT_SXTH): return 16; - case ARM64_EXT_SXTW: + case CS_AARCH64(_EXT_SXTW): return 32; - case ARM64_EXT_SXTX: + case CS_AARCH64(_EXT_SXTX): return 64; default: break; @@ -147,24 +147,24 @@ static int decode_sign_ext(arm64_extender extender) { return 0; } -#define EXT64(x) decode_sign_ext(insn->detail->arm64.operands[x].ext) +#define EXT64(x) decode_sign_ext(insn->detail->CS_aarch64().operands[x].ext) -static const char *decode_shift_64(arm64_shifter shift) { +static const char *decode_shift_64(CS_aarch64_shifter() shift) { const char *E_OP_SR = ">>"; const char *E_OP_SL = "<<"; const char *E_OP_RR = ">>>"; const char *E_OP_VOID = ""; switch (shift) { - case ARM64_SFT_ASR: - case ARM64_SFT_LSR: + case CS_AARCH64(_SFT_ASR): + case CS_AARCH64(_SFT_LSR): return E_OP_SR; - case ARM64_SFT_LSL: - case ARM64_SFT_MSL: + case CS_AARCH64(_SFT_LSL): + case CS_AARCH64(_SFT_MSL): return E_OP_SL; - case ARM64_SFT_ROR: + case CS_AARCH64(_SFT_ROR): return E_OP_RR; default: @@ -173,22 +173,22 @@ static const char *decode_shift_64(arm64_shifter shift) { return E_OP_VOID; } -#define DECODE_SHIFT64(x) 
decode_shift_64(insn->detail->arm64.operands[x].shift.type) +#define DECODE_SHIFT64(x) decode_shift_64(insn->detail->CS_aarch64().operands[x].shift.type) static int regsize64(cs_insn *insn, int n) { - unsigned int reg = insn->detail->arm64.operands[n].reg; - if ((reg >= ARM64_REG_S0 && reg <= ARM64_REG_S31) || - (reg >= ARM64_REG_W0 && reg <= ARM64_REG_W30) || - reg == ARM64_REG_WZR) { + unsigned int reg = insn->detail->CS_aarch64().operands[n].reg; + if ((reg >= CS_AARCH64(_REG_S0) && reg <= CS_AARCH64(_REG_S31)) || + (reg >= CS_AARCH64(_REG_W0) && reg <= CS_AARCH64(_REG_W30)) || + reg == CS_AARCH64(_REG_WZR)) { return 4; } - if (reg >= ARM64_REG_B0 && reg <= ARM64_REG_B31) { + if (reg >= CS_AARCH64(_REG_B0) && reg <= CS_AARCH64(_REG_B31)) { return 1; } - if (reg >= ARM64_REG_H0 && reg <= ARM64_REG_H31) { + if (reg >= CS_AARCH64(_REG_H0) && reg <= CS_AARCH64(_REG_H31)) { return 2; } - if (reg >= ARM64_REG_Q0 && reg <= ARM64_REG_Q31) { + if (reg >= CS_AARCH64(_REG_Q0) && reg <= CS_AARCH64(_REG_Q31)) { return 16; } return 8; @@ -210,7 +210,7 @@ static void shifted_reg64_append(RzStrBuf *sb, csh *handle, cs_insn *insn, int n } if (LSHIFT2_64(n)) { - if (insn->detail->arm64.operands[n].shift.type != ARM64_SFT_ASR) { + if (insn->detail->CS_aarch64().operands[n].shift.type != CS_AARCH64(_SFT_ASR)) { if (signext) { rz_strbuf_appendf(sb, "%d,%d,%s,~,%s", LSHIFT2_64(n), signext, rn, DECODE_SHIFT64(n)); } else { @@ -278,10 +278,10 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_init(&op->esil); rz_strbuf_set(&op->esil, ""); - postfix = rz_arm64_cs_esil_prefix_cond(op, insn->detail->arm64.cc); + postfix = rz_arm64_cs_esil_prefix_cond(op, insn->detail->CS_aarch64().cc); switch (insn->id) { - case ARM64_INS_REV: + case CS_AARCH64(_INS_REV): // these REV* instructions were almost right, except in the cases like rev x0, x0 // where the use of |= caused copies of the value to be erroneously present { @@ -322,7 +322,7 @@ RZ_IPI int 
rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_REV32: { + case CS_AARCH64(_INS_REV32): { const char *r0 = REG64(0); const char *r1 = REG64(1); rz_strbuf_setf(&op->esil, @@ -333,7 +333,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a r1, r1, r1, r1, r0); break; } - case ARM64_INS_REV16: { + case CS_AARCH64(_INS_REV16): { const char *r0 = REG64(0); const char *r1 = REG64(1); rz_strbuf_setf(&op->esil, @@ -342,69 +342,69 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a r1, r1, r0); break; } - case ARM64_INS_ADR: + case CS_AARCH64(_INS_ADR): // TODO: must be 21bit signed rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); break; - case ARM64_INS_SMADDL: { + case CS_AARCH64(_INS_SMADDL): { int size = REGSIZE64(1) * 8; rz_strbuf_setf(&op->esil, "%d,%s,~,%d,%s,~,*,%s,+,%s,=", size, REG64(2), size, REG64(1), REG64(3), REG64(0)); break; } - case ARM64_INS_UMADDL: - case ARM64_INS_FMADD: - case ARM64_INS_MADD: + case CS_AARCH64(_INS_UMADDL): + case CS_AARCH64(_INS_FMADD): + case CS_AARCH64(_INS_MADD): rz_strbuf_setf(&op->esil, "%s,%s,*,%s,+,%s,=", REG64(2), REG64(1), REG64(3), REG64(0)); break; - case ARM64_INS_MSUB: + case CS_AARCH64(_INS_MSUB): rz_strbuf_setf(&op->esil, "%s,%s,*,%s,-,%s,=", REG64(2), REG64(1), REG64(3), REG64(0)); break; - case ARM64_INS_MNEG: + case CS_AARCH64(_INS_MNEG): rz_strbuf_setf(&op->esil, "%s,%s,*,0,-,%s,=", REG64(2), REG64(1), REG64(0)); break; - case ARM64_INS_ADD: - case ARM64_INS_ADC: // Add with carry. - // case ARM64_INS_ADCS: // Add with carry. + case CS_AARCH64(_INS_ADD): + case CS_AARCH64(_INS_ADC): // Add with carry. + // case CS_AARCH64(_INS_ADCS): // Add with carry. 
OPCALL("+"); break; - case ARM64_INS_SUB: + case CS_AARCH64(_INS_SUB): OPCALL("-"); break; - case ARM64_INS_SBC: + case CS_AARCH64(_INS_SBC): // TODO have to check this more, VEX does not work rz_strbuf_setf(&op->esil, "%s,cf,+,%s,-,%s,=", REG64(2), REG64(1), REG64(0)); break; - case ARM64_INS_SMULL: { + case CS_AARCH64(_INS_SMULL): { int size = REGSIZE64(1) * 8; rz_strbuf_setf(&op->esil, "%d,%s,~,%d,%s,~,*,%s,=", size, REG64(2), size, REG64(1), REG64(0)); break; } - case ARM64_INS_MUL: + case CS_AARCH64(_INS_MUL): OPCALL("*"); break; - case ARM64_INS_AND: + case CS_AARCH64(_INS_AND): OPCALL("&"); break; - case ARM64_INS_ORR: + case CS_AARCH64(_INS_ORR): OPCALL("|"); break; - case ARM64_INS_EOR: + case CS_AARCH64(_INS_EOR): OPCALL("^"); break; - case ARM64_INS_ORN: + case CS_AARCH64(_INS_ORN): OPCALL_NEG("|"); break; - case ARM64_INS_EON: + case CS_AARCH64(_INS_EON): OPCALL_NEG("^"); break; - case ARM64_INS_LSR: { + case CS_AARCH64(_INS_LSR): { const char *r0 = REG64(0); const char *r1 = REG64(1); const int size = REGSIZE64(0) * 8; @@ -423,7 +423,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_LSL: { + case CS_AARCH64(_INS_LSL): { const char *r0 = REG64(0); const char *r1 = REG64(1); const int size = REGSIZE64(0) * 8; @@ -442,15 +442,15 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_ROR: + case CS_AARCH64(_INS_ROR): OPCALL(">>>"); break; - case ARM64_INS_NOP: + case CS_AARCH64(_INS_NOP): rz_strbuf_setf(&op->esil, ","); break; - case ARM64_INS_FDIV: + case CS_AARCH64(_INS_FDIV): break; - case ARM64_INS_SDIV: { + case CS_AARCH64(_INS_SDIV): { /* TODO: support WZR XZR to specify 32, 64bit op */ int size = REGSIZE64(1) * 8; if (ISREG64(2)) { @@ -460,7 +460,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_UDIV: + case CS_AARCH64(_INS_UDIV): /* TODO: support WZR XZR to specify 
32, 64bit op */ if ISREG64 (2) { rz_strbuf_setf(&op->esil, "%s,%s,/,%s,=", REG64(2), REG64(1), REG64(0)); @@ -468,20 +468,20 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_setf(&op->esil, "%s,%s,/=", REG64(1), REG64(0)); } break; - case ARM64_INS_BR: + case CS_AARCH64(_INS_BR): rz_strbuf_setf(&op->esil, "%s,pc,=", REG64(0)); break; - case ARM64_INS_B: + case CS_AARCH64(_INS_B): /* capstone precompute resulting address, using PC + IMM */ rz_strbuf_appendf(&op->esil, "%" PFMT64d ",pc,=", IMM64(0)); break; - case ARM64_INS_BL: + case CS_AARCH64(_INS_BL): rz_strbuf_setf(&op->esil, "pc,lr,=,%" PFMT64d ",pc,=", IMM64(0)); break; - case ARM64_INS_BLR: + case CS_AARCH64(_INS_BLR): rz_strbuf_setf(&op->esil, "pc,lr,=,%s,pc,=", REG64(0)); break; - case ARM64_INS_CLZ:; + case CS_AARCH64(_INS_CLZ):; int size = 8 * REGSIZE64(0); // expression is much more concise with GOTO, but GOTOs should be minimized @@ -528,43 +528,43 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; - case ARM64_INS_LDRH: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDR: - // case ARM64_INS_LDRSB: - // case ARM64_INS_LDRSH: - case ARM64_INS_LDRB: - // case ARM64_INS_LDRSW: - // case ARM64_INS_LDURSW: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: - case ARM64_INS_LDAXR: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDAR: - case ARM64_INS_LDARB: - case ARM64_INS_LDARH: { + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDUR): + case CS_AARCH64(_INS_LDURB): + case CS_AARCH64(_INS_LDURH): + case CS_AARCH64(_INS_LDR): + // case CS_AARCH64(_INS_LDRSB): + // case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDRB): + // case CS_AARCH64(_INS_LDRSW): + // case CS_AARCH64(_INS_LDURSW): + case CS_AARCH64(_INS_LDXR): + case CS_AARCH64(_INS_LDXRB): + case CS_AARCH64(_INS_LDXRH): + case CS_AARCH64(_INS_LDAXR): + case 
CS_AARCH64(_INS_LDAXRB): + case CS_AARCH64(_INS_LDAXRH): + case CS_AARCH64(_INS_LDAR): + case CS_AARCH64(_INS_LDARB): + case CS_AARCH64(_INS_LDARH): { int size = REGSIZE64(0); switch (insn->id) { - case ARM64_INS_LDRB: - case ARM64_INS_LDARB: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDXRB: - case ARM64_INS_LDURB: + case CS_AARCH64(_INS_LDRB): + case CS_AARCH64(_INS_LDARB): + case CS_AARCH64(_INS_LDAXRB): + case CS_AARCH64(_INS_LDXRB): + case CS_AARCH64(_INS_LDURB): size = 1; break; - case ARM64_INS_LDRH: - case ARM64_INS_LDARH: - case ARM64_INS_LDXRH: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDURH: + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDARH): + case CS_AARCH64(_INS_LDXRH): + case CS_AARCH64(_INS_LDAXRH): + case CS_AARCH64(_INS_LDURH): size = 2; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDURSW): size = 4; break; default: @@ -623,7 +623,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as ARM64_OP_MEM type and + are not detected as CS_AARCH64(_OP_MEM) type and fall in this case instead. 
*/ if (ISREG64(2)) { @@ -638,25 +638,25 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDURSW: { + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDURSB): + case CS_AARCH64(_INS_LDURSH): + case CS_AARCH64(_INS_LDURSW): { // handle the sign extended instrs here int size = REGSIZE64(0); switch (insn->id) { - case ARM64_INS_LDRSB: - case ARM64_INS_LDURSB: + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDURSB): size = 1; break; - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSH: + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDURSH): size = 2; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDURSW): size = 4; break; default: @@ -715,7 +715,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as ARM64_OP_MEM type and + are not detected as CS_AARCH64(_OP_MEM) type and fall in this case instead. 
*/ if (ISREG64(2)) { @@ -730,12 +730,12 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_FCMP: - case ARM64_INS_CCMP: - case ARM64_INS_CCMN: - case ARM64_INS_TST: // cmp w8, 0xd - case ARM64_INS_CMP: // cmp w8, 0xd - case ARM64_INS_CMN: // cmp w8, 0xd + case CS_AARCH64(_INS_FCMP): + case CS_AARCH64(_INS_CCMP): + case CS_AARCH64(_INS_CCMN): + case CS_AARCH64(_INS_TST): // cmp w8, 0xd + case CS_AARCH64(_INS_CMP): // cmp w8, 0xd + case CS_AARCH64(_INS_CMN): // cmp w8, 0xd { // update esil, cpu flags int bits = arm64_reg_width(REGID64(0)); @@ -748,47 +748,47 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_FCSEL: - case ARM64_INS_CSEL: // csel Wd, Wn, Wm --> Wd := (cond) ? Wn : Wm + case CS_AARCH64(_INS_FCSEL): + case CS_AARCH64(_INS_CSEL): // csel Wd, Wn, Wm --> Wd := (cond) ? Wn : Wm rz_strbuf_appendf(&op->esil, "%s,}{,%s,},%s,=", REG64(1), REG64(2), REG64(0)); postfix = ""; break; - case ARM64_INS_CSET: // cset Wd --> Wd := (cond) ? 1 : 0 + case CS_AARCH64(_INS_CSET): // cset Wd --> Wd := (cond) ? 1 : 0 rz_strbuf_appendf(&op->esil, "1,}{,0,},%s,=", REG64(0)); postfix = ""; break; - case ARM64_INS_CINC: // cinc Wd, Wn --> Wd := (cond) ? (Wn+1) : Wn + case CS_AARCH64(_INS_CINC): // cinc Wd, Wn --> Wd := (cond) ? (Wn+1) : Wn rz_strbuf_appendf(&op->esil, "1,%s,+,}{,%s,},%s,=", REG64(1), REG64(1), REG64(0)); postfix = ""; break; - case ARM64_INS_CSINC: // csinc Wd, Wn, Wm --> Wd := (cond) ? Wn : (Wm+1) + case CS_AARCH64(_INS_CSINC): // csinc Wd, Wn, Wm --> Wd := (cond) ? 
Wn : (Wm+1) rz_strbuf_appendf(&op->esil, "%s,}{,1,%s,+,},%s,=", REG64(1), REG64(2), REG64(0)); postfix = ""; break; - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: - case ARM64_INS_STXR: { + case CS_AARCH64(_INS_STXRB): + case CS_AARCH64(_INS_STXRH): + case CS_AARCH64(_INS_STXR): { int size = REGSIZE64(1); - if (insn->id == ARM64_INS_STXRB) { + if (insn->id == CS_AARCH64(_INS_STXRB)) { size = 1; - } else if (insn->id == ARM64_INS_STXRH) { + } else if (insn->id == CS_AARCH64(_INS_STXRH)) { size = 2; } rz_strbuf_setf(&op->esil, "0,%s,=,%s,%s,%" PFMT64d ",+,=[%d]", REG64(0), REG64(1), MEMBASE64(1), MEMDISP64(1), size); break; } - case ARM64_INS_STRB: - case ARM64_INS_STRH: - case ARM64_INS_STUR: - case ARM64_INS_STURB: - case ARM64_INS_STURH: - case ARM64_INS_STR: // str x6, [x6,0xf90] + case CS_AARCH64(_INS_STRB): + case CS_AARCH64(_INS_STRH): + case CS_AARCH64(_INS_STUR): + case CS_AARCH64(_INS_STURB): + case CS_AARCH64(_INS_STURH): + case CS_AARCH64(_INS_STR): // str x6, [x6,0xf90] { int size = REGSIZE64(0); - if (insn->id == ARM64_INS_STRB || insn->id == ARM64_INS_STURB) { + if (insn->id == CS_AARCH64(_INS_STRB) || insn->id == CS_AARCH64(_INS_STURB)) { size = 1; - } else if (insn->id == ARM64_INS_STRH || insn->id == ARM64_INS_STURH) { + } else if (insn->id == CS_AARCH64(_INS_STRH) || insn->id == CS_AARCH64(_INS_STURH)) { size = 2; } if (ISMEM64(1)) { @@ -843,7 +843,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a instructions like ldr x16, [x13, x9] ldrb w2, [x19, x23] - are not detected as ARM64_OP_MEM type and + are not detected as CS_AARCH64(_OP_MEM) type and fall in this case instead. 
*/ if (ISREG64(2)) { @@ -858,7 +858,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_BIC: + case CS_AARCH64(_INS_BIC): if (OPCOUNT64() == 2) { if (REGSIZE64(0) == 4) { rz_strbuf_appendf(&op->esil, "%s,0xffffffff,^,%s,&=", REG64(1), REG64(0)); @@ -873,28 +873,28 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } } break; - case ARM64_INS_CBZ: + case CS_AARCH64(_INS_CBZ): rz_strbuf_setf(&op->esil, "%s,!,?{,%" PFMT64d ",pc,=,}", REG64(0), IMM64(1)); break; - case ARM64_INS_CBNZ: + case CS_AARCH64(_INS_CBNZ): rz_strbuf_setf(&op->esil, "%s,?{,%" PFMT64d ",pc,=,}", REG64(0), IMM64(1)); break; - case ARM64_INS_TBZ: + case CS_AARCH64(_INS_TBZ): // tbnz x0, 4, label // if ((1<<4) & x0) goto label; rz_strbuf_setf(&op->esil, "%" PFMT64d ",1,<<,%s,&,!,?{,%" PFMT64d ",pc,=,}", IMM64(1), REG64(0), IMM64(2)); break; - case ARM64_INS_TBNZ: + case CS_AARCH64(_INS_TBNZ): // tbnz x0, 4, label // if ((1<<4) & x0) goto label; rz_strbuf_setf(&op->esil, "%" PFMT64d ",1,<<,%s,&,?{,%" PFMT64d ",pc,=,}", IMM64(1), REG64(0), IMM64(2)); break; - case ARM64_INS_STNP: - case ARM64_INS_STP: // stp x6, x7, [x6,0xf90] + case CS_AARCH64(_INS_STNP): + case CS_AARCH64(_INS_STP): // stp x6, x7, [x6,0xf90] { int disp = (int)MEMDISP64(2); char sign = disp >= 0 ? '+' : '-'; @@ -930,7 +930,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), MEMBASE64(2), abs, sign, size, size); } } break; - case ARM64_INS_LDP: // ldp x29, x30, [sp], 0x10 + case CS_AARCH64(_INS_LDP): // ldp x29, x30, [sp], 0x10 { int disp = (int)MEMDISP64(2); char sign = disp >= 0 ? 
'+' : '-'; @@ -970,18 +970,18 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a size, abs, MEMBASE64(2), sign, size, REG64(1)); } } break; - case ARM64_INS_ADRP: + case CS_AARCH64(_INS_ADRP): rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); break; - case ARM64_INS_MOV: + case CS_AARCH64(_INS_MOV): if (ISREG64(1)) { rz_strbuf_setf(&op->esil, "%s,%s,=", REG64(1), REG64(0)); } else { rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,=", IMM64(1), REG64(0)); } break; - case ARM64_INS_EXTR: + case CS_AARCH64(_INS_EXTR): // from VEX /* 01 | t0 = GET:I64(x4) @@ -994,21 +994,21 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a rz_strbuf_setf(&op->esil, "%" PFMT64d ",%s,>>,%" PFMT64d ",%s,<<,|,%s,=", IMM64(3), REG64(2), IMM64(3), REG64(1), REG64(0)); break; - case ARM64_INS_RBIT: + case CS_AARCH64(_INS_RBIT): // this expression reverses the bits. it does. do not scroll right. // Derived from VEX rz_strbuf_setf(&op->esil, 
"0xffffffff00000000,0x20,0xffff0000ffff0000,0x10,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x10,0xffff0000ffff0000,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x20,0xfff
fffff00000000,0xffff0000ffff0000,0x10,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,<<,&,0x10,0xffff0000ffff0000,0xff00ff00ff00ff00,0x8,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,<<,&,0x8,0xff00ff00ff00ff00,0xf0f0f0f0f0f0f0f0,0x4,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,<<,&,0x4,0xf0f0f0f0f0f0f0f0,0xcccccccccccccccc,0x2,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,<<,&,0x2,0xcccccccccccccccc,0xaaaaaaaaaaaaaaaa,0x1,%1$s,<<,&,0x1,0xaaaaaaaaaaaaaaaa,%1$s,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,&,>>,|,%2$s,=", REG64(1), 
REG64(0)); break; - case ARM64_INS_MVN: - case ARM64_INS_MOVN: + case CS_AARCH64(_INS_MVN): + case CS_AARCH64(_INS_MOVN): if (ISREG64(1)) { rz_strbuf_setf(&op->esil, "%d,%s,-1,^,<<,%s,=", LSHIFT2_64(1), REG64(1), REG64(0)); } else { rz_strbuf_setf(&op->esil, "%d,%" PFMT64d ",<<,-1,^,%s,=", LSHIFT2_64(1), IMM64(1), REG64(0)); } break; - case ARM64_INS_MOVK: // movk w8, 0x1290 + case CS_AARCH64(_INS_MOVK): // movk w8, 0x1290 { ut64 shift = LSHIFT2_64(1); if (shift < 0) { @@ -1027,13 +1027,13 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a break; } - case ARM64_INS_MOVZ: + case CS_AARCH64(_INS_MOVZ): rz_strbuf_setf(&op->esil, "%" PFMT64u ",%s,=", IMM64(1) << LSHIFT2_64(1), REG64(0)); break; /* ASR, SXTB, SXTH and SXTW are alias for SBFM */ - case ARM64_INS_ASR: { + case CS_AARCH64(_INS_ASR): { // OPCALL(">>>>"); const char *r0 = REG64(0); const char *r1 = REG64(1); @@ -1053,7 +1053,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_SXTB: + case CS_AARCH64(_INS_SXTB): if (arm64_reg_width(REGID64(0)) == 32) { rz_strbuf_setf(&op->esil, "0xffffffff,8,0xff,%s,&,~,&,%s,=", REG64(1), REG64(0)); @@ -1062,7 +1062,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), REG64(0)); } break; - case ARM64_INS_SXTH: /* halfword */ + case CS_AARCH64(_INS_SXTH): /* halfword */ if (arm64_reg_width(REGID64(0)) == 32) { rz_strbuf_setf(&op->esil, "0xffffffff,16,0xffff,%s,&,~,&,%s,=", REG64(1), REG64(0)); @@ -1071,27 +1071,27 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a REG64(1), REG64(0)); } break; - case ARM64_INS_SXTW: /* word */ + case CS_AARCH64(_INS_SXTW): /* word */ rz_strbuf_setf(&op->esil, "32,0xffffffff,%s,&,~,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_UXTB: + case CS_AARCH64(_INS_UXTB): rz_strbuf_setf(&op->esil, "%s,0xff,&,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_UMULL: + case 
CS_AARCH64(_INS_UMULL): rz_strbuf_setf(&op->esil, "%s,%s,*,%s,=", REG64(1), REG64(2), REG64(0)); break; - case ARM64_INS_UXTH: + case CS_AARCH64(_INS_UXTH): rz_strbuf_setf(&op->esil, "%s,0xffff,&,%s,=", REG64(1), REG64(0)); break; - case ARM64_INS_RET: + case CS_AARCH64(_INS_RET): rz_strbuf_setf(&op->esil, "lr,pc,="); break; - case ARM64_INS_ERET: + case CS_AARCH64(_INS_ERET): rz_strbuf_setf(&op->esil, "lr,pc,="); break; - case ARM64_INS_BFI: // bfi w8, w8, 2, 1 - case ARM64_INS_BFXIL: { + case CS_AARCH64(_INS_BFI): // bfi w8, w8, 2, 1 + case CS_AARCH64(_INS_BFXIL): { if (OPCOUNT64() >= 3 && ISIMM64(3) && IMM64(3) > 0) { ut64 mask = rz_num_bitmask((ut8)IMM64(3)); ut64 shift = IMM64(2); @@ -1102,33 +1102,33 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } break; } - case ARM64_INS_SBFIZ: + case CS_AARCH64(_INS_SBFIZ): if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%" PFMT64d ",%s,%" PFMT64u ",&,~,<<,%s,=", IMM64(2), IMM64(3), REG64(1), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_UBFIZ: + case CS_AARCH64(_INS_UBFIZ): if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%s,%" PFMT64u ",&,<<,%s,=", IMM64(2), REG64(1), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_SBFX: + case CS_AARCH64(_INS_SBFX): if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%" PFMT64d ",%s,%" PFMT64d ",%" PFMT64u ",<<,&,>>,~,%s,=", IMM64(3), IMM64(2), REG64(1), IMM64(2), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_UBFX: + case CS_AARCH64(_INS_UBFX): if (IMM64(3) > 0 && IMM64(3) <= 64 - IMM64(2)) { rz_strbuf_appendf(&op->esil, "%" PFMT64d ",%s,%" PFMT64d ",%" PFMT64u ",<<,&,>>,%s,=", IMM64(2), REG64(1), IMM64(2), rz_num_bitmask((ut8)IMM64(3)), REG64(0)); } break; - case ARM64_INS_NEG: + case CS_AARCH64(_INS_NEG): #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case 
CS_AARCH64(_INS_NEGS): #endif if (LSHIFT2_64(1)) { SHIFTED_REG64_APPEND(&op->esil, 1); @@ -1137,7 +1137,7 @@ RZ_IPI int rz_arm_cs_analysis_op_64_esil(RzAnalysis *a, RzAnalysisOp *op, ut64 a } rz_strbuf_appendf(&op->esil, ",0,-,%s,=", REG64(0)); break; - case ARM64_INS_SVC: + case CS_AARCH64(_INS_SVC): rz_strbuf_setf(&op->esil, "%" PFMT64u ",$", IMM64(0)); break; } diff --git a/librz/analysis/arch/arm/arm_il64.c b/librz/analysis/arch/arm/arm_il64.c index 70a29ae230b..3973997d72c 100644 --- a/librz/analysis/arch/arm/arm_il64.c +++ b/librz/analysis/arch/arm/arm_il64.c @@ -15,7 +15,7 @@ #define ISMEM ISMEM64 #define OPCOUNT OPCOUNT64 #undef MEMDISP64 // the original one casts to ut64 which we don't want here -#define MEMDISP(x) insn->detail->arm64.operands[x].mem.disp +#define MEMDISP(x) insn->detail->CS_aarch64().operands[x].mem.disp #include @@ -35,144 +35,144 @@ static const char *regs_bound[] = { * IL for arm64 condition * unconditional is returned as NULL (rather than true), for simpler code */ -static RzILOpBool *cond(arm64_cc c) { +static RzILOpBool *cond(CS_aarch64_cc() c) { switch (c) { - case ARM64_CC_EQ: + case CS_AARCH64CC(_EQ): return VARG("zf"); - case ARM64_CC_NE: + case CS_AARCH64CC(_NE): return INV(VARG("zf")); - case ARM64_CC_HS: + case CS_AARCH64CC(_HS): return VARG("cf"); - case ARM64_CC_LO: + case CS_AARCH64CC(_LO): return INV(VARG("cf")); - case ARM64_CC_MI: + case CS_AARCH64CC(_MI): return VARG("nf"); - case ARM64_CC_PL: + case CS_AARCH64CC(_PL): return INV(VARG("nf")); - case ARM64_CC_VS: + case CS_AARCH64CC(_VS): return VARG("vf"); - case ARM64_CC_VC: + case CS_AARCH64CC(_VC): return INV(VARG("vf")); - case ARM64_CC_HI: + case CS_AARCH64CC(_HI): return AND(VARG("cf"), INV(VARG("zf"))); - case ARM64_CC_LS: + case CS_AARCH64CC(_LS): return OR(INV(VARG("cf")), VARG("zf")); - case ARM64_CC_GE: + case CS_AARCH64CC(_GE): return INV(XOR(VARG("nf"), VARG("vf"))); - case ARM64_CC_LT: + case CS_AARCH64CC(_LT): return XOR(VARG("nf"), VARG("vf")); - case 
ARM64_CC_GT: + case CS_AARCH64CC(_GT): return INV(OR(XOR(VARG("nf"), VARG("vf")), VARG("zf"))); - case ARM64_CC_LE: + case CS_AARCH64CC(_LE): return OR(XOR(VARG("nf"), VARG("vf")), VARG("zf")); default: return NULL; } } -static arm64_reg xreg(ut8 idx) { - // for some reason, the ARM64_REG_X0...ARM64_REG_X30 enum values are not contiguous, +static CS_aarch64_reg() xreg(ut8 idx) { + // for some reason, the CS_AARCH64(_REG_X0)...CS_AARCH64(_REG_X30) enum values are not contiguous, // so use switch here and let the compiler optimize: switch (idx) { - case 0: return ARM64_REG_X0; - case 1: return ARM64_REG_X1; - case 2: return ARM64_REG_X2; - case 3: return ARM64_REG_X3; - case 4: return ARM64_REG_X4; - case 5: return ARM64_REG_X5; - case 6: return ARM64_REG_X6; - case 7: return ARM64_REG_X7; - case 8: return ARM64_REG_X8; - case 9: return ARM64_REG_X9; - case 10: return ARM64_REG_X10; - case 11: return ARM64_REG_X11; - case 12: return ARM64_REG_X12; - case 13: return ARM64_REG_X13; - case 14: return ARM64_REG_X14; - case 15: return ARM64_REG_X15; - case 16: return ARM64_REG_X16; - case 17: return ARM64_REG_X17; - case 18: return ARM64_REG_X18; - case 19: return ARM64_REG_X19; - case 20: return ARM64_REG_X20; - case 21: return ARM64_REG_X21; - case 22: return ARM64_REG_X22; - case 23: return ARM64_REG_X23; - case 24: return ARM64_REG_X24; - case 25: return ARM64_REG_X25; - case 26: return ARM64_REG_X26; - case 27: return ARM64_REG_X27; - case 28: return ARM64_REG_X28; - case 29: return ARM64_REG_X29; - case 30: return ARM64_REG_X30; - case 31: return ARM64_REG_SP; - case 32: return ARM64_REG_XZR; + case 0: return CS_AARCH64(_REG_X0); + case 1: return CS_AARCH64(_REG_X1); + case 2: return CS_AARCH64(_REG_X2); + case 3: return CS_AARCH64(_REG_X3); + case 4: return CS_AARCH64(_REG_X4); + case 5: return CS_AARCH64(_REG_X5); + case 6: return CS_AARCH64(_REG_X6); + case 7: return CS_AARCH64(_REG_X7); + case 8: return CS_AARCH64(_REG_X8); + case 9: return CS_AARCH64(_REG_X9); 
+ case 10: return CS_AARCH64(_REG_X10); + case 11: return CS_AARCH64(_REG_X11); + case 12: return CS_AARCH64(_REG_X12); + case 13: return CS_AARCH64(_REG_X13); + case 14: return CS_AARCH64(_REG_X14); + case 15: return CS_AARCH64(_REG_X15); + case 16: return CS_AARCH64(_REG_X16); + case 17: return CS_AARCH64(_REG_X17); + case 18: return CS_AARCH64(_REG_X18); + case 19: return CS_AARCH64(_REG_X19); + case 20: return CS_AARCH64(_REG_X20); + case 21: return CS_AARCH64(_REG_X21); + case 22: return CS_AARCH64(_REG_X22); + case 23: return CS_AARCH64(_REG_X23); + case 24: return CS_AARCH64(_REG_X24); + case 25: return CS_AARCH64(_REG_X25); + case 26: return CS_AARCH64(_REG_X26); + case 27: return CS_AARCH64(_REG_X27); + case 28: return CS_AARCH64(_REG_X28); + case 29: return CS_AARCH64(_REG_X29); + case 30: return CS_AARCH64(_REG_X30); + case 31: return CS_AARCH64(_REG_SP); + case 32: return CS_AARCH64(_REG_XZR); default: rz_warn_if_reached(); - return ARM64_REG_INVALID; + return CS_AARCH64(_REG_INVALID); } } -static bool is_xreg(arm64_reg reg) { +static bool is_xreg(CS_aarch64_reg() reg) { switch (reg) { - case ARM64_REG_X0: - case ARM64_REG_X1: - case ARM64_REG_X2: - case ARM64_REG_X3: - case ARM64_REG_X4: - case ARM64_REG_X5: - case ARM64_REG_X6: - case ARM64_REG_X7: - case ARM64_REG_X8: - case ARM64_REG_X9: - case ARM64_REG_X10: - case ARM64_REG_X11: - case ARM64_REG_X12: - case ARM64_REG_X13: - case ARM64_REG_X14: - case ARM64_REG_X15: - case ARM64_REG_X16: - case ARM64_REG_X17: - case ARM64_REG_X18: - case ARM64_REG_X19: - case ARM64_REG_X20: - case ARM64_REG_X21: - case ARM64_REG_X22: - case ARM64_REG_X23: - case ARM64_REG_X24: - case ARM64_REG_X25: - case ARM64_REG_X26: - case ARM64_REG_X27: - case ARM64_REG_X28: - case ARM64_REG_X29: - case ARM64_REG_X30: - case ARM64_REG_SP: - case ARM64_REG_XZR: + case CS_AARCH64(_REG_X0): + case CS_AARCH64(_REG_X1): + case CS_AARCH64(_REG_X2): + case CS_AARCH64(_REG_X3): + case CS_AARCH64(_REG_X4): + case CS_AARCH64(_REG_X5): + 
case CS_AARCH64(_REG_X6): + case CS_AARCH64(_REG_X7): + case CS_AARCH64(_REG_X8): + case CS_AARCH64(_REG_X9): + case CS_AARCH64(_REG_X10): + case CS_AARCH64(_REG_X11): + case CS_AARCH64(_REG_X12): + case CS_AARCH64(_REG_X13): + case CS_AARCH64(_REG_X14): + case CS_AARCH64(_REG_X15): + case CS_AARCH64(_REG_X16): + case CS_AARCH64(_REG_X17): + case CS_AARCH64(_REG_X18): + case CS_AARCH64(_REG_X19): + case CS_AARCH64(_REG_X20): + case CS_AARCH64(_REG_X21): + case CS_AARCH64(_REG_X22): + case CS_AARCH64(_REG_X23): + case CS_AARCH64(_REG_X24): + case CS_AARCH64(_REG_X25): + case CS_AARCH64(_REG_X26): + case CS_AARCH64(_REG_X27): + case CS_AARCH64(_REG_X28): + case CS_AARCH64(_REG_X29): + case CS_AARCH64(_REG_X30): + case CS_AARCH64(_REG_SP): + case CS_AARCH64(_REG_XZR): return true; default: return false; } } -static ut8 wreg_idx(arm64_reg reg) { - if (reg >= ARM64_REG_W0 && reg <= ARM64_REG_W30) { - return reg - ARM64_REG_W0; +static ut8 wreg_idx(CS_aarch64_reg() reg) { + if (reg >= CS_AARCH64(_REG_W0) && reg <= CS_AARCH64(_REG_W30)) { + return reg - CS_AARCH64(_REG_W0); } - if (reg == ARM64_REG_WSP) { + if (reg == CS_AARCH64(_REG_WSP)) { return 31; } - if (reg == ARM64_REG_WZR) { + if (reg == CS_AARCH64(_REG_WZR)) { return 32; } rz_warn_if_reached(); return 0; } -static bool is_wreg(arm64_reg reg) { - return (reg >= ARM64_REG_W0 && reg <= ARM64_REG_W30) || reg == ARM64_REG_WSP || reg == ARM64_REG_WZR; +static bool is_wreg(CS_aarch64_reg() reg) { + return (reg >= CS_AARCH64(_REG_W0) && reg <= CS_AARCH64(_REG_W30)) || reg == CS_AARCH64(_REG_WSP) || reg == CS_AARCH64(_REG_WZR); } -static arm64_reg xreg_of_reg(arm64_reg reg) { +static CS_aarch64_reg() xreg_of_reg(CS_aarch64_reg() reg) { if (is_wreg(reg)) { return xreg(wreg_idx(reg)); } @@ -182,41 +182,41 @@ static arm64_reg xreg_of_reg(arm64_reg reg) { /** * Variable name for a register given by cs */ -static const char *reg_var_name(arm64_reg reg) { +static const char *reg_var_name(CS_aarch64_reg() reg) { reg = 
xreg_of_reg(reg); switch (reg) { - case ARM64_REG_X0: return "x0"; - case ARM64_REG_X1: return "x1"; - case ARM64_REG_X2: return "x2"; - case ARM64_REG_X3: return "x3"; - case ARM64_REG_X4: return "x4"; - case ARM64_REG_X5: return "x5"; - case ARM64_REG_X6: return "x6"; - case ARM64_REG_X7: return "x7"; - case ARM64_REG_X8: return "x8"; - case ARM64_REG_X9: return "x9"; - case ARM64_REG_X10: return "x10"; - case ARM64_REG_X11: return "x11"; - case ARM64_REG_X12: return "x12"; - case ARM64_REG_X13: return "x13"; - case ARM64_REG_X14: return "x14"; - case ARM64_REG_X15: return "x15"; - case ARM64_REG_X16: return "x16"; - case ARM64_REG_X17: return "x17"; - case ARM64_REG_X18: return "x18"; - case ARM64_REG_X19: return "x19"; - case ARM64_REG_X20: return "x20"; - case ARM64_REG_X21: return "x21"; - case ARM64_REG_X22: return "x22"; - case ARM64_REG_X23: return "x23"; - case ARM64_REG_X24: return "x24"; - case ARM64_REG_X25: return "x25"; - case ARM64_REG_X26: return "x26"; - case ARM64_REG_X27: return "x27"; - case ARM64_REG_X28: return "x28"; - case ARM64_REG_X29: return "x29"; - case ARM64_REG_X30: return "x30"; - case ARM64_REG_SP: return "sp"; + case CS_AARCH64(_REG_X0): return "x0"; + case CS_AARCH64(_REG_X1): return "x1"; + case CS_AARCH64(_REG_X2): return "x2"; + case CS_AARCH64(_REG_X3): return "x3"; + case CS_AARCH64(_REG_X4): return "x4"; + case CS_AARCH64(_REG_X5): return "x5"; + case CS_AARCH64(_REG_X6): return "x6"; + case CS_AARCH64(_REG_X7): return "x7"; + case CS_AARCH64(_REG_X8): return "x8"; + case CS_AARCH64(_REG_X9): return "x9"; + case CS_AARCH64(_REG_X10): return "x10"; + case CS_AARCH64(_REG_X11): return "x11"; + case CS_AARCH64(_REG_X12): return "x12"; + case CS_AARCH64(_REG_X13): return "x13"; + case CS_AARCH64(_REG_X14): return "x14"; + case CS_AARCH64(_REG_X15): return "x15"; + case CS_AARCH64(_REG_X16): return "x16"; + case CS_AARCH64(_REG_X17): return "x17"; + case CS_AARCH64(_REG_X18): return "x18"; + case CS_AARCH64(_REG_X19): return 
"x19"; + case CS_AARCH64(_REG_X20): return "x20"; + case CS_AARCH64(_REG_X21): return "x21"; + case CS_AARCH64(_REG_X22): return "x22"; + case CS_AARCH64(_REG_X23): return "x23"; + case CS_AARCH64(_REG_X24): return "x24"; + case CS_AARCH64(_REG_X25): return "x25"; + case CS_AARCH64(_REG_X26): return "x26"; + case CS_AARCH64(_REG_X27): return "x27"; + case CS_AARCH64(_REG_X28): return "x28"; + case CS_AARCH64(_REG_X29): return "x29"; + case CS_AARCH64(_REG_X30): return "x30"; + case CS_AARCH64(_REG_SP): return "sp"; default: return NULL; } } @@ -224,11 +224,11 @@ static const char *reg_var_name(arm64_reg reg) { /** * Get the bits of the given register or 0, if it is not known (e.g. not implemented yet) */ -static ut32 reg_bits(arm64_reg reg) { - if (is_xreg(reg) || reg == ARM64_REG_XZR) { +static ut32 reg_bits(CS_aarch64_reg() reg) { + if (is_xreg(reg) || reg == CS_AARCH64(_REG_XZR)) { return 64; } - if (is_wreg(reg) || reg == ARM64_REG_WZR) { + if (is_wreg(reg) || reg == CS_AARCH64(_REG_WZR)) { return 32; } return 0; @@ -237,11 +237,11 @@ static ut32 reg_bits(arm64_reg reg) { /** * IL to read the given capstone reg */ -static RzILOpBitVector *read_reg(arm64_reg reg) { - if (reg == ARM64_REG_XZR) { +static RzILOpBitVector *read_reg(CS_aarch64_reg() reg) { + if (reg == CS_AARCH64(_REG_XZR)) { return U64(0); } - if (reg == ARM64_REG_WZR) { + if (reg == CS_AARCH64(_REG_WZR)) { return U32(0); } const char *var = reg_var_name(reg); @@ -267,35 +267,35 @@ static RzILOpBitVector *adjust_unsigned(ut32 bits, RZ_OWN RzILOpBitVector *v) { return v; } -static RzILOpBitVector *extend(ut32 dst_bits, arm64_extender ext, RZ_OWN RzILOpBitVector *v, ut32 v_bits) { +static RzILOpBitVector *extend(ut32 dst_bits, CS_aarch64_extender() ext, RZ_OWN RzILOpBitVector *v, ut32 v_bits) { bool is_signed = false; ut32 src_bits; switch (ext) { - case ARM64_EXT_SXTB: + case CS_AARCH64(_EXT_SXTB): is_signed = true; // fallthrough - case ARM64_EXT_UXTB: + case CS_AARCH64(_EXT_UXTB): src_bits = 8; 
break; - case ARM64_EXT_SXTH: + case CS_AARCH64(_EXT_SXTH): is_signed = true; // fallthrough - case ARM64_EXT_UXTH: + case CS_AARCH64(_EXT_UXTH): src_bits = 16; break; - case ARM64_EXT_SXTW: + case CS_AARCH64(_EXT_SXTW): is_signed = true; // fallthrough - case ARM64_EXT_UXTW: + case CS_AARCH64(_EXT_UXTW): src_bits = 32; break; - case ARM64_EXT_SXTX: + case CS_AARCH64(_EXT_SXTX): is_signed = true; // fallthrough - case ARM64_EXT_UXTX: + case CS_AARCH64(_EXT_UXTX): src_bits = 64; break; @@ -311,16 +311,16 @@ static RzILOpBitVector *extend(ut32 dst_bits, arm64_extender ext, RZ_OWN RzILOpB return is_signed ? SIGNED(dst_bits, v) : UNSIGNED(dst_bits, v); } -static RzILOpBitVector *apply_shift(arm64_shifter sft, ut32 dist, RZ_OWN RzILOpBitVector *v) { +static RzILOpBitVector *apply_shift(CS_aarch64_shifter() sft, ut32 dist, RZ_OWN RzILOpBitVector *v) { if (!dist) { return v; } switch (sft) { - case ARM64_SFT_LSL: + case CS_AARCH64(_SFT_LSL): return SHIFTL0(v, UN(6, dist)); - case ARM64_SFT_LSR: + case CS_AARCH64(_SFT_LSR): return SHIFTR0(v, UN(6, dist)); - case ARM64_SFT_ASR: + case CS_AARCH64(_SFT_ASR): return SHIFTRA(v, UN(6, dist)); default: return v; @@ -329,13 +329,13 @@ static RzILOpBitVector *apply_shift(arm64_shifter sft, ut32 dist, RZ_OWN RzILOpB #define REG(n) read_reg(REGID(n)) #define REGBITS(n) reg_bits(REGID(n)) -#define MEMBASEID(x) insn->detail->arm64.operands[x].mem.base +#define MEMBASEID(x) insn->detail->CS_aarch64().operands[x].mem.base #define MEMBASE(x) read_reg(MEMBASEID(x)) /** * IL to write a value to the given capstone reg */ -static RzILOpEffect *write_reg(arm64_reg reg, RZ_OWN RZ_NONNULL RzILOpBitVector *v) { +static RzILOpEffect *write_reg(CS_aarch64_reg() reg, RZ_OWN RZ_NONNULL RzILOpBitVector *v) { rz_return_val_if_fail(v, NULL); const char *var = reg_var_name(reg); if (!var) { @@ -348,8 +348,8 @@ static RzILOpEffect *write_reg(arm64_reg reg, RZ_OWN RZ_NONNULL RzILOpBitVector return SETG(var, v); } -static RzILOpBitVector 
*arg_mem(RzILOpBitVector *base_plus_disp, cs_arm64_op *op) { - if (op->mem.index == ARM64_REG_INVALID) { +static RzILOpBitVector *arg_mem(RzILOpBitVector *base_plus_disp, CS_aarch64_op() *op) { + if (op->mem.index == CS_AARCH64(_REG_INVALID)) { return base_plus_disp; } RzILOpBitVector *index = read_reg(op->mem.index); @@ -366,9 +366,9 @@ static RzILOpBitVector *arg_mem(RzILOpBitVector *base_plus_disp, cs_arm64_op *op */ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { ut32 bits_requested = bits_inout ? *bits_inout : 0; - cs_arm64_op *op = &insn->detail->arm64.operands[n]; + CS_aarch64_op() *op = &insn->detail->CS_aarch64().operands[n]; switch (op->type) { - case ARM64_OP_REG: { + case CS_AARCH64(_OP_REG): { if (!bits_requested) { bits_requested = REGBITS(n); if (!bits_requested) { @@ -384,17 +384,17 @@ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { } return apply_shift(op->shift.type, op->shift.value, extend(bits_requested, op->ext, r, REGBITS(n))); } - case ARM64_OP_IMM: { + case CS_AARCH64(_OP_IMM): { if (!bits_requested) { return NULL; } ut64 val = IMM(n); - if (op->shift.type == ARM64_SFT_LSL) { + if (op->shift.type == CS_AARCH64(_SFT_LSL)) { val <<= op->shift.value; } return UN(bits_requested, val); } - case ARM64_OP_MEM: { + case CS_AARCH64(_OP_MEM): { RzILOpBitVector *addr = MEMBASE(n); st64 disp = MEMDISP(n); if (disp > 0) { @@ -402,7 +402,7 @@ static RzILOpBitVector *arg(cs_insn *insn, size_t n, ut32 *bits_inout) { } else if (disp < 0) { addr = SUB(addr, U64(-disp)); } - return arg_mem(addr, &insn->detail->arm64.operands[n]); + return arg_mem(addr, &insn->detail->CS_aarch64().operands[n]); } default: break; @@ -436,16 +436,16 @@ static RzILOpEffect *update_flags_zn00(RzILOpBitVector *v) { } /** - * Capstone: ARM64_INS_ADD, ARM64_INS_ADC, ARM64_INS_SUB, ARM64_INS_SBC + * Capstone: CS_AARCH64(_INS_ADD), CS_AARCH64(_INS_ADC), CS_AARCH64(_INS_SUB), CS_AARCH64(_INS_SBC) * ARM: add, adds, adc, adcs, sub, subs, 
sbc, sbcs */ static RzILOpEffect *add_sub(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - bool is_sub = insn->id == ARM64_INS_SUB || insn->id == ARM64_INS_SBC + bool is_sub = insn->id == CS_AARCH64(_INS_SUB) || insn->id == CS_AARCH64(_INS_SBC) #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_SUBS || insn->id == ARM64_INS_SBCS + || insn->id == CS_AARCH64(_INS_SUBS) || insn->id == CS_AARCH64(_INS_SBCS) #endif ; ut32 bits = REGBITS(0); @@ -461,23 +461,23 @@ static RzILOpEffect *add_sub(cs_insn *insn) { } RzILOpBitVector *res = is_sub ? SUB(a, b) : ADD(a, b); bool with_carry = false; - if (insn->id == ARM64_INS_ADC + if (insn->id == CS_AARCH64(_INS_ADC) #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_ADCS + || insn->id == CS_AARCH64(_INS_ADCS) #endif ) { res = ADD(res, ITE(VARG("cf"), UN(bits, 1), UN(bits, 0))); with_carry = true; - } else if (insn->id == ARM64_INS_SBC + } else if (insn->id == CS_AARCH64(_INS_SBC) #if CS_API_MAJOR > 4 - || insn->id == ARM64_INS_SBCS + || insn->id == CS_AARCH64(_INS_SBCS) #endif ) { res = SUB(res, ITE(VARG("cf"), UN(bits, 0), UN(bits, 1))); with_carry = true; } RzILOpEffect *set = write_reg(REGID(0), res); - bool update_flags = insn->detail->arm64.update_flags; + bool update_flags = insn->detail->CS_aarch64().update_flags; if (update_flags) { return SEQ6( SETL("a", DUP(a)), @@ -491,7 +491,7 @@ static RzILOpEffect *add_sub(cs_insn *insn) { } /** - * Capstone: ARM64_INS_ADR, ARM64_INS_ADRP + * Capstone: CS_AARCH64(_INS_ADR), CS_AARCH64(_INS_ADRP) * ARM: adr, adrp */ static RzILOpEffect *adr(cs_insn *insn) { @@ -502,7 +502,7 @@ static RzILOpEffect *adr(cs_insn *insn) { } /** - * Capstone: ARM64_INS_AND, ARM64_INS_EON, ARM64_INS_EOR, ARM64_INS_ORN, ARM64_INS_AORR + * Capstone: CS_AARCH64(_INS_AND), CS_AARCH64(_INS_EON), CS_AARCH64(_INS_EOR), CS_AARCH64(_INS_ORN), CS_AARCH64(_INS_AORR) * ARM: and, eon, eor, orn, orr */ static RzILOpEffect *bitwise(cs_insn *insn) { @@ -522,19 +522,19 @@ static RzILOpEffect *bitwise(cs_insn *insn) { } 
RzILOpBitVector *res; switch (insn->id) { - case ARM64_INS_EOR: + case CS_AARCH64(_INS_EOR): res = LOGXOR(a, b); break; - case ARM64_INS_EON: + case CS_AARCH64(_INS_EON): res = LOGXOR(a, LOGNOT(b)); break; - case ARM64_INS_ORN: + case CS_AARCH64(_INS_ORN): res = LOGOR(a, LOGNOT(b)); break; - case ARM64_INS_ORR: + case CS_AARCH64(_INS_ORR): res = LOGOR(a, b); break; - default: // ARM64_INS_AND + default: // CS_AARCH64(_INS_AND) res = LOGAND(a, b); break; } @@ -542,14 +542,14 @@ static RzILOpEffect *bitwise(cs_insn *insn) { if (!eff) { return NULL; } - if (insn->detail->arm64.update_flags) { + if (insn->detail->CS_aarch64().update_flags) { return SEQ2(eff, update_flags_zn00(REG(0))); } return eff; } /** - * Capstone: ARM64_INS_ASR, ARM64_INS_LSL, ARM64_INS_LSR, ARM64_INS_ROR + * Capstone: CS_AARCH64(_INS_ASR), CS_AARCH64(_INS_LSL), CS_AARCH64(_INS_LSR), CS_AARCH64(_INS_ROR) * ARM: asr, asrv, lsl, lslv, lsr, lsrv, ror, rorv */ static RzILOpEffect *shift(cs_insn *insn) { @@ -572,16 +572,16 @@ static RzILOpEffect *shift(cs_insn *insn) { } RzILOpBitVector *res; switch (insn->id) { - case ARM64_INS_ASR: + case CS_AARCH64(_INS_ASR): res = SHIFTRA(a, b); break; - case ARM64_INS_LSR: + case CS_AARCH64(_INS_LSR): res = SHIFTR0(a, b); break; - case ARM64_INS_ROR: + case CS_AARCH64(_INS_ROR): res = LOGOR(SHIFTR0(a, b), SHIFTL0(DUP(a), NEG(DUP(b)))); break; - default: // ARM64_INS_LSL + default: // CS_AARCH64(_INS_LSL) res = SHIFTL0(a, b); break; } @@ -589,14 +589,14 @@ static RzILOpEffect *shift(cs_insn *insn) { } /** - * Capstone: ARM64_INS_B, ARM64_INS_RET, ARM64_INS_RETAA, ARM64_INS_RETAB + * Capstone: CS_AARCH64(_INS_B), CS_AARCH64(_INS_RET), CS_AARCH64(_INS_RETAA), CS_AARCH64(_INS_RETAB) * ARM: b, b.cond, ret, retaa, retab */ static RzILOpEffect *branch(cs_insn *insn) { RzILOpBitVector *a; if (OPCOUNT() == 0) { - // for ARM64_INS_RET and similar - a = read_reg(ARM64_REG_LR); + // for CS_AARCH64(_INS_RET) and similar + a = read_reg(CS_AARCH64(_REG_LR)); } else { ut32 bits = 
64; a = ARG(0, &bits); @@ -604,7 +604,7 @@ static RzILOpEffect *branch(cs_insn *insn) { if (!a) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->CS_aarch64().cc); if (c) { return BRANCH(c, JMP(a), NOP()); } @@ -612,7 +612,7 @@ static RzILOpEffect *branch(cs_insn *insn) { } /** - * Capstone: ARM64_INS_BL, ARM64_INS_BLR, ARM64_INS_BLRAA, ARM64_INS_BLRAAZ, ARM64_INS_BLRAB, ARM64_INS_BLRABZ + * Capstone: CS_AARCH64(_INS_BL), CS_AARCH64(_INS_BLR), CS_AARCH64(_INS_BLRAA), CS_AARCH64(_INS_BLRAAZ), CS_AARCH64(_INS_BLRAB), CS_AARCH64(_INS_BLRABZ) * ARM: bl, blr, blraa, blraaz, blrab, blrabz */ static RzILOpEffect *bl(cs_insn *insn) { @@ -627,7 +627,7 @@ static RzILOpEffect *bl(cs_insn *insn) { } /** - * Capstone: ARM64_INS_BFM, ARM64_INS_BFI, ARM64_INS_BFXIL + * Capstone: CS_AARCH64(_INS_BFM), CS_AARCH64(_INS_BFI), CS_AARCH64(_INS_BFXIL) * ARM: bfm, bfc, bfi, bfxil */ static RzILOpEffect *bfm(cs_insn *insn) { @@ -650,15 +650,15 @@ static RzILOpEffect *bfm(cs_insn *insn) { } ut64 mask_base = rz_num_bitmask(IMM(3)); ut64 mask = mask_base << RZ_MIN(63, IMM(2)); - if (insn->id == ARM64_INS_BFI) { + if (insn->id == CS_AARCH64(_INS_BFI)) { return write_reg(REGID(0), LOGOR(LOGAND(a, UN(bits, ~mask)), SHIFTL0(LOGAND(b, UN(bits, mask_base)), UN(6, IMM(2))))); } - // insn->id == ARM64_INS_BFXIL + // insn->id == CS_AARCH64(_INS_BFXIL) return write_reg(REGID(0), LOGOR(LOGAND(a, UN(bits, ~mask_base)), SHIFTR0(LOGAND(b, UN(bits, mask)), UN(6, IMM(2))))); } /** - * Capstone: ARM64_INS_BIC, ARM64_INS_BICS + * Capstone: CS_AARCH64(_INS_BIC), CS_AARCH64(_INS_BICS) * ARM: bic, bics */ static RzILOpEffect *bic(cs_insn *insn) { @@ -678,14 +678,14 @@ static RzILOpEffect *bic(cs_insn *insn) { } RzILOpBitVector *res = LOGAND(a, LOGNOT(b)); RzILOpEffect *eff = NULL; - if (REGID(0) != ARM64_REG_XZR && REGID(0) != ARM64_REG_WZR) { + if (REGID(0) != CS_AARCH64(_REG_XZR) && REGID(0) != CS_AARCH64(_REG_WZR)) { eff = write_reg(REGID(0), res); if (!eff) 
{ return NULL; } res = NULL; } - if (insn->detail->arm64.update_flags) { + if (insn->detail->CS_aarch64().update_flags) { RzILOpEffect *eff1 = update_flags_zn00(res ? res : REG(0)); return eff ? SEQ2(eff, eff1) : eff1; } @@ -697,9 +697,9 @@ static RzILOpEffect *bic(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_CAS, ARM64_INS_CASA, ARM64_INS_CASAL, ARM64_INS_CASL, - * ARM64_INS_CASB, ARM64_INS_CASAB, ARM64_INS_CASALB, ARM64_INS_CASLB, - * ARM64_INS_CASH, ARM64_INS_CASAH, ARM64_INS_CASALH, ARM64_INS_CASLH: + * Capstone: CS_AARCH64(_INS_CAS), CS_AARCH64(_INS_CASA), CS_AARCH64(_INS_CASAL), CS_AARCH64(_INS_CASL), + * CS_AARCH64(_INS_CASB), CS_AARCH64(_INS_CASAB), CS_AARCH64(_INS_CASALB), CS_AARCH64(_INS_CASLB), + * CS_AARCH64(_INS_CASH), CS_AARCH64(_INS_CASAH), CS_AARCH64(_INS_CASALH), CS_AARCH64(_INS_CASLH): * ARM: cas, casa, casal, casl, casb, casab, casalb, caslb, cash, casah, casalh, caslh */ static RzILOpEffect *cas(cs_insn *insn) { @@ -711,16 +711,16 @@ static RzILOpEffect *cas(cs_insn *insn) { return NULL; } switch (insn->id) { - case ARM64_INS_CASB: - case ARM64_INS_CASAB: - case ARM64_INS_CASALB: - case ARM64_INS_CASLB: + case CS_AARCH64(_INS_CASB): + case CS_AARCH64(_INS_CASAB): + case CS_AARCH64(_INS_CASALB): + case CS_AARCH64(_INS_CASLB): bits = 8; break; - case ARM64_INS_CASH: - case ARM64_INS_CASAH: - case ARM64_INS_CASALH: - case ARM64_INS_CASLH: + case CS_AARCH64(_INS_CASH): + case CS_AARCH64(_INS_CASAH): + case CS_AARCH64(_INS_CASALH): + case CS_AARCH64(_INS_CASLH): bits = 16; break; default: @@ -744,7 +744,7 @@ static RzILOpEffect *cas(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CASP, ARM64_INS_CASPA, ARM64_INS_CASPAL, ARM64_INS_CASPL + * Capstone: CS_AARCH64(_INS_CASP), CS_AARCH64(_INS_CASPA), CS_AARCH64(_INS_CASPAL), CS_AARCH64(_INS_CASPL) * ARM: casp, caspa, caspal, caspl */ static RzILOpEffect *casp(cs_insn *insn) { @@ -783,7 +783,7 @@ static RzILOpEffect *casp(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_CBZ, 
ARM64_INS_CBNZ + * Capstone: CS_AARCH64(_INS_CBZ), CS_AARCH64(_INS_CBNZ) * ARM: cbz, cbnz */ static RzILOpEffect *cbz(cs_insn *insn) { @@ -795,11 +795,11 @@ static RzILOpEffect *cbz(cs_insn *insn) { rz_il_op_pure_free(tgt); return NULL; } - return BRANCH(insn->id == ARM64_INS_CBNZ ? INV(IS_ZERO(v)) : IS_ZERO(v), JMP(tgt), NULL); + return BRANCH(insn->id == CS_AARCH64(_INS_CBNZ) ? INV(IS_ZERO(v)) : IS_ZERO(v), JMP(tgt), NULL); } /** - * Capstone: ARM64_INS_CMP, ARM64_INS_CMN, ARM64_INS_CCMP, ARM64_INS_CCMN + * Capstone: CS_AARCH64(_INS_CMP), CS_AARCH64(_INS_CMN), CS_AARCH64(_INS_CCMP), CS_AARCH64(_INS_CCMN) * ARM: cmp, cmn, ccmp, ccmn */ static RzILOpEffect *cmp(cs_insn *insn) { @@ -811,7 +811,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { rz_il_op_pure_free(b); return NULL; } - bool is_neg = insn->id == ARM64_INS_CMN || insn->id == ARM64_INS_CCMN; + bool is_neg = insn->id == CS_AARCH64(_INS_CMN) || insn->id == CS_AARCH64(_INS_CCMN); RzILOpEffect *eff = SEQ6( SETL("a", a), SETL("b", b), @@ -819,7 +819,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { SETG("cf", (is_neg ? add_carry : sub_carry)(VARL("a"), VARL("b"), false, bits)), SETG("vf", (is_neg ? 
add_overflow : sub_overflow)(VARL("a"), VARL("b"), VARL("r"))), update_flags_zn(VARL("r"))); - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->CS_aarch64().cc); if (c) { ut64 imm = IMM(2); return BRANCH(c, @@ -834,7 +834,7 @@ static RzILOpEffect *cmp(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CINC, ARM64_INS_CSINC, ARM64_INS_CINV, ARM64_INS_CSINV, ARM64_INS_CNEG, ARM64_INS_CSNEG, ARM64_INS_CSEL + * Capstone: CS_AARCH64(_INS_CINC), CS_AARCH64(_INS_CSINC), CS_AARCH64(_INS_CINV), CS_AARCH64(_INS_CSINV), CS_AARCH64(_INS_CNEG), CS_AARCH64(_INS_CSNEG), CS_AARCH64(_INS_CSEL) * ARM: cinc, csinc, cinv, csinv, cneg, csneg, csel */ static RzILOpEffect *csinc(cs_insn *insn) { @@ -852,7 +852,7 @@ static RzILOpEffect *csinc(cs_insn *insn) { if (!src0) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->CS_aarch64().cc); if (!c) { // al/nv conditions, only possible in cs(inc|inv|neg) return write_reg(REGID(dst_idx), src0); @@ -866,26 +866,26 @@ static RzILOpEffect *csinc(cs_insn *insn) { RzILOpBitVector *res; bool invert_cond = false; switch (insn->id) { - case ARM64_INS_CSEL: + case CS_AARCH64(_INS_CSEL): invert_cond = true; res = src1; break; - case ARM64_INS_CSINV: + case CS_AARCH64(_INS_CSINV): invert_cond = true; // fallthrough - case ARM64_INS_CINV: + case CS_AARCH64(_INS_CINV): res = LOGNOT(src1); break; - case ARM64_INS_CSNEG: + case CS_AARCH64(_INS_CSNEG): invert_cond = true; // fallthrough - case ARM64_INS_CNEG: + case CS_AARCH64(_INS_CNEG): res = NEG(src1); break; - case ARM64_INS_CSINC: + case CS_AARCH64(_INS_CSINC): invert_cond = true; // fallthrough - default: // ARM64_INS_CINC, ARM64_INS_CSINC + default: // CS_AARCH64(_INS_CINC), CS_AARCH64(_INS_CSINC) res = ADD(src1, UN(bits, 1)); break; } @@ -893,23 +893,23 @@ static RzILOpEffect *csinc(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CSET, ARM64_INS_CSETM + * Capstone: CS_AARCH64(_INS_CSET), CS_AARCH64(_INS_CSETM) * ARM: cset, 
csetm */ static RzILOpEffect *cset(cs_insn *insn) { if (!ISREG(0) || !REGBITS(0)) { return NULL; } - RzILOpBool *c = cond(insn->detail->arm64.cc); + RzILOpBool *c = cond(insn->detail->CS_aarch64().cc); if (!c) { return NULL; } ut32 bits = REGBITS(0); - return write_reg(REGID(0), ITE(c, SN(bits, insn->id == ARM64_INS_CSETM ? -1 : 1), SN(bits, 0))); + return write_reg(REGID(0), ITE(c, SN(bits, insn->id == CS_AARCH64(_INS_CSETM) ? -1 : 1), SN(bits, 0))); } /** - * Capstone: ARM64_INS_CLS + * Capstone: CS_AARCH64(_INS_CLS) * ARM: cls */ static RzILOpEffect *cls(cs_insn *insn) { @@ -933,7 +933,7 @@ static RzILOpEffect *cls(cs_insn *insn) { } /** - * Capstone: ARM64_INS_CLZ + * Capstone: CS_AARCH64(_INS_CLZ) * ARM: clz */ static RzILOpEffect *clz(cs_insn *insn) { @@ -956,7 +956,7 @@ static RzILOpEffect *clz(cs_insn *insn) { } /** - * Capstone: ARM64_INS_EXTR + * Capstone: CS_AARCH64(_INS_EXTR) * ARM: extr */ static RzILOpEffect *extr(cs_insn *insn) { @@ -993,7 +993,7 @@ static void label_svc(RzILVM *vm, RzILOpEffect *op) { } /** - * Capstone: ARM64_INS_HVC + * Capstone: CS_AARCH64(_INS_HVC) * ARM: hvc */ static RzILOpEffect *hvc(cs_insn *insn) { @@ -1004,7 +1004,7 @@ static void label_hvc(RzILVM *vm, RzILOpEffect *op) { // stub, nothing to do here } -static RzILOpEffect *load_effect(ut32 bits, bool is_signed, arm64_reg dst_reg, RZ_OWN RzILOpBitVector *addr) { +static RzILOpEffect *load_effect(ut32 bits, bool is_signed, CS_aarch64_reg() dst_reg, RZ_OWN RzILOpBitVector *addr) { RzILOpBitVector *val = bits == 8 ? 
LOAD(addr) : LOADW(bits, addr); if (bits != 64) { if (is_signed) { @@ -1022,7 +1022,7 @@ static RzILOpEffect *load_effect(ut32 bits, bool is_signed, arm64_reg dst_reg, R } static RzILOpEffect *writeback(cs_insn *insn, size_t addr_op, RZ_BORROW RzILOpBitVector *addr) { - if (!insn->detail->arm64.writeback || !is_xreg(MEMBASEID(addr_op))) { + if (!insn->detail->CS_aarch64().writeback || !is_xreg(MEMBASEID(addr_op))) { return NULL; } RzILOpBitVector *wbaddr = DUP(addr); @@ -1039,16 +1039,16 @@ static RzILOpEffect *writeback(cs_insn *insn, size_t addr_op, RZ_BORROW RzILOpBi } /** - * Capstone: ARM64_INS_LDR, ARM64_INS_LDRB, ARM64_INS_LDRH, ARM64_INS_LDRU, ARM64_INS_LDRUB, ARM64_INS_LDRUH, - * ARM64_INS_LDRSW, ARM64_INS_LDRSB, ARM64_INS_LDRSH, ARM64_INS_LDURSW, ARM64_INS_LDURSB, ARM64_INS_LDURSH, - * ARM64_INS_LDAPR, ARM64_INS_LDAPRB, ARM64_INS_LDAPRH, ARM64_INS_LDAPUR, ARM64_INS_LDAPURB, ARM64_INS_LDAPURH, - * ARM64_INS_LDAPURSB, ARM64_INS_LDAPURSH, ARM64_INS_LDAPURSW, ARM64_INS_LDAR, ARM64_INS_LDARB, ARM64_INS_LDARH, - * ARM64_INS_LDAXP, ARM64_INS_LDXP, ARM64_INS_LDAXR, ARM64_INS_LDAXRB, ARM64_INS_LDAXRH, - * ARM64_INS_LDLAR, ARM64_INS_LDLARB, ARM64_INS_LDLARH, - * ARM64_INS_LDP, ARM64_INS_LDNP, ARM64_INS_LDPSW, - * ARM64_INS_LDRAA, ARM64_INS_LDRAB, - * ARM64_INS_LDTR, ARM64_INS_LDTRB, ARM64_INS_LDTRH, ARM64_INS_LDTRSW, ARM64_INS_LDTRSB, ARM64_INS_LDTRSH, - * ARM64_INS_LDXR, ARM64_INS_LDXRB, ARM64_INS_LDXRH + * Capstone: CS_AARCH64(_INS_LDR), CS_AARCH64(_INS_LDRB), CS_AARCH64(_INS_LDRH), CS_AARCH64(_INS_LDRU), CS_AARCH64(_INS_LDRUB), CS_AARCH64(_INS_LDRUH), + * CS_AARCH64(_INS_LDRSW), CS_AARCH64(_INS_LDRSB), CS_AARCH64(_INS_LDRSH), CS_AARCH64(_INS_LDURSW), CS_AARCH64(_INS_LDURSB), CS_AARCH64(_INS_LDURSH), + * CS_AARCH64(_INS_LDAPR), CS_AARCH64(_INS_LDAPRB), CS_AARCH64(_INS_LDAPRH), CS_AARCH64(_INS_LDAPUR), CS_AARCH64(_INS_LDAPURB), CS_AARCH64(_INS_LDAPURH), + * CS_AARCH64(_INS_LDAPURSB), CS_AARCH64(_INS_LDAPURSH), CS_AARCH64(_INS_LDAPURSW), CS_AARCH64(_INS_LDAR), 
CS_AARCH64(_INS_LDARB), CS_AARCH64(_INS_LDARH), + * CS_AARCH64(_INS_LDAXP), CS_AARCH64(_INS_LDXP), CS_AARCH64(_INS_LDAXR), CS_AARCH64(_INS_LDAXRB), CS_AARCH64(_INS_LDAXRH), + * CS_AARCH64(_INS_LDLAR), CS_AARCH64(_INS_LDLARB), CS_AARCH64(_INS_LDLARH), + * CS_AARCH64(_INS_LDP), CS_AARCH64(_INS_LDNP), CS_AARCH64(_INS_LDPSW), + * CS_AARCH64(_INS_LDRAA), CS_AARCH64(_INS_LDRAB), + * CS_AARCH64(_INS_LDTR), CS_AARCH64(_INS_LDTRB), CS_AARCH64(_INS_LDTRH), CS_AARCH64(_INS_LDTRSW), CS_AARCH64(_INS_LDTRSB), CS_AARCH64(_INS_LDTRSH), + * CS_AARCH64(_INS_LDXR), CS_AARCH64(_INS_LDXRB), CS_AARCH64(_INS_LDXRH) * ARM: ldr, ldrb, ldrh, ldru, ldrub, ldruh, ldrsw, ldrsb, ldrsh, ldursw, ldurwb, ldursh, * ldapr, ldaprb, ldaprh, ldapur, ldapurb, ldapurh, ldapursb, ldapursh, ldapursw, * ldaxp, ldxp, ldaxr, ldaxrb, ldaxrh, ldar, ldarb, ldarh, @@ -1059,8 +1059,8 @@ static RzILOpEffect *ldr(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - bool pair = insn->id == ARM64_INS_LDAXP || insn->id == ARM64_INS_LDXP || - insn->id == ARM64_INS_LDP || insn->id == ARM64_INS_LDNP || insn->id == ARM64_INS_LDPSW; + bool pair = insn->id == CS_AARCH64(_INS_LDAXP) || insn->id == CS_AARCH64(_INS_LDXP) || + insn->id == CS_AARCH64(_INS_LDP) || insn->id == CS_AARCH64(_INS_LDNP) || insn->id == CS_AARCH64(_INS_LDPSW); if (pair && !ISREG(1)) { return NULL; } @@ -1070,65 +1070,65 @@ static RzILOpEffect *ldr(cs_insn *insn) { if (!addr) { return NULL; } - arm64_reg dst_reg = REGID(0); + CS_aarch64_reg() dst_reg = REGID(0); ut64 loadsz; bool is_signed = false; switch (insn->id) { - case ARM64_INS_LDRSB: - case ARM64_INS_LDURSB: - case ARM64_INS_LDTRSB: + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDURSB): + case CS_AARCH64(_INS_LDTRSB): #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSB: + case CS_AARCH64(_INS_LDAPURSB): #endif is_signed = true; // fallthrough - case ARM64_INS_LDRB: - case ARM64_INS_LDURB: - case ARM64_INS_LDARB: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDTRB: - case ARM64_INS_LDXRB: + case 
CS_AARCH64(_INS_LDRB): + case CS_AARCH64(_INS_LDURB): + case CS_AARCH64(_INS_LDARB): + case CS_AARCH64(_INS_LDAXRB): + case CS_AARCH64(_INS_LDTRB): + case CS_AARCH64(_INS_LDXRB): #if CS_API_MAJOR > 4 - case ARM64_INS_LDLARB: - case ARM64_INS_LDAPRB: - case ARM64_INS_LDAPURB: + case CS_AARCH64(_INS_LDLARB): + case CS_AARCH64(_INS_LDAPRB): + case CS_AARCH64(_INS_LDAPURB): #endif loadsz = 8; break; - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSH: - case ARM64_INS_LDTRSH: + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDURSH): + case CS_AARCH64(_INS_LDTRSH): #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSH: + case CS_AARCH64(_INS_LDAPURSH): #endif is_signed = true; // fallthrough - case ARM64_INS_LDRH: - case ARM64_INS_LDURH: - case ARM64_INS_LDARH: - case ARM64_INS_LDAXRH: - case ARM64_INS_LDTRH: - case ARM64_INS_LDXRH: + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDURH): + case CS_AARCH64(_INS_LDARH): + case CS_AARCH64(_INS_LDAXRH): + case CS_AARCH64(_INS_LDTRH): + case CS_AARCH64(_INS_LDXRH): #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPRH: - case ARM64_INS_LDAPURH: - case ARM64_INS_LDLARH: + case CS_AARCH64(_INS_LDAPRH): + case CS_AARCH64(_INS_LDAPURH): + case CS_AARCH64(_INS_LDLARH): #endif loadsz = 16; break; - case ARM64_INS_LDRSW: - case ARM64_INS_LDURSW: - case ARM64_INS_LDPSW: - case ARM64_INS_LDTRSW: + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDURSW): + case CS_AARCH64(_INS_LDPSW): + case CS_AARCH64(_INS_LDTRSW): #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPURSW: + case CS_AARCH64(_INS_LDAPURSW): #endif is_signed = true; loadsz = 32; break; default: - // ARM64_INS_LDR, ARM64_INS_LDRU, ARM64_INS_LDAPR, ARM64_INS_LDAPUR, ARM64_INS_LDAR, ARM64_INS_LDAXR, ARM64_INS_LDLAR, - // ARM64_INS_LDP, ARM64_INS_LDNP, ARM64_INS_LDRAA, ARM64_INS_LDRAB, ARM64_INS_LDTR, ARM64_INS_LDXR + // CS_AARCH64(_INS_LDR), CS_AARCH64(_INS_LDRU), CS_AARCH64(_INS_LDAPR), CS_AARCH64(_INS_LDAPUR), CS_AARCH64(_INS_LDAR), CS_AARCH64(_INS_LDAXR), CS_AARCH64(_INS_LDLAR), + // 
CS_AARCH64(_INS_LDP), CS_AARCH64(_INS_LDNP), CS_AARCH64(_INS_LDRAA), CS_AARCH64(_INS_LDRAB), CS_AARCH64(_INS_LDTR), CS_AARCH64(_INS_LDXR) loadsz = is_wreg(dst_reg) ? 32 : 64; break; } @@ -1158,11 +1158,11 @@ static RzILOpEffect *ldr(cs_insn *insn) { } /** - * Capstone: ARM64_INS_STR, ARM64_INS_STUR, ARM64_INS_STRB, ARM64_INS_STURB, ARM64_INS_STRH, ARM64_INS_STURH, - * ARM64_INS_STLLR, ARM64_INS_STLLRB, ARM64_INS_STLLRH, ARM64_INS_STLR, ARM64_INS_STLRB, ARM64_INS_STLRH, - * ARM64_INS_STLUR, ARM64_INS_STLURB, ARM64_INS_STLURH, ARM64_INS_STP, ARM64_INS_STXR, ARM64_INS_STXRB, - * ARM64_INS_STXRH, ARM64_INS_STXP, ARM64_INS_STLXR, ARM64_INS_STLXRB. ARM64_INS_STLXRH, ARM64_INS_STLXP, - * ARM64_INS_STNP, ARM64_INS_STTR, ARM64_INS_STTRB, ARM64_INS_STTRH + * Capstone: CS_AARCH64(_INS_STR), CS_AARCH64(_INS_STUR), CS_AARCH64(_INS_STRB), CS_AARCH64(_INS_STURB), CS_AARCH64(_INS_STRH), CS_AARCH64(_INS_STURH), + * CS_AARCH64(_INS_STLLR), CS_AARCH64(_INS_STLLRB), CS_AARCH64(_INS_STLLRH), CS_AARCH64(_INS_STLR), CS_AARCH64(_INS_STLRB), CS_AARCH64(_INS_STLRH), + * CS_AARCH64(_INS_STLUR), CS_AARCH64(_INS_STLURB), CS_AARCH64(_INS_STLURH), CS_AARCH64(_INS_STP), CS_AARCH64(_INS_STXR), CS_AARCH64(_INS_STXRB), + * CS_AARCH64(_INS_STXRH), CS_AARCH64(_INS_STXP), CS_AARCH64(_INS_STLXR), CS_AARCH64(_INS_STLXRB). CS_AARCH64(_INS_STLXRH), CS_AARCH64(_INS_STLXP), + * CS_AARCH64(_INS_STNP), CS_AARCH64(_INS_STTR), CS_AARCH64(_INS_STTRB), CS_AARCH64(_INS_STTRH) * ARM: str, stur, strb, sturb, strh, sturh, stllr, stllrb, stllrh, stlr, stlrb, stlrh, stlur, stlurb, stlurh, stp, stxr, stxrb, * stxrh, stxp, stlxr, stlxrb. 
stlxrh, stlxp, stnp, sttr, sttrb, sttrh */ @@ -1170,9 +1170,9 @@ static RzILOpEffect *str(cs_insn *insn) { if (!ISREG(0) || !REGBITS(0)) { return NULL; } - bool result = insn->id == ARM64_INS_STXR || insn->id == ARM64_INS_STXRB || insn->id == ARM64_INS_STXRH || insn->id == ARM64_INS_STXP || - insn->id == ARM64_INS_STLXR || insn->id == ARM64_INS_STLXRB || insn->id == ARM64_INS_STLXRH || insn->id == ARM64_INS_STLXP; - bool pair = insn->id == ARM64_INS_STP || insn->id == ARM64_INS_STNP || insn->id == ARM64_INS_STXP || insn->id == ARM64_INS_STLXP; + bool result = insn->id == CS_AARCH64(_INS_STXR) || insn->id == CS_AARCH64(_INS_STXRB) || insn->id == CS_AARCH64(_INS_STXRH) || insn->id == CS_AARCH64(_INS_STXP) || + insn->id == CS_AARCH64(_INS_STLXR) || insn->id == CS_AARCH64(_INS_STLXRB) || insn->id == CS_AARCH64(_INS_STLXRH) || insn->id == CS_AARCH64(_INS_STLXP); + bool pair = insn->id == CS_AARCH64(_INS_STP) || insn->id == CS_AARCH64(_INS_STNP) || insn->id == CS_AARCH64(_INS_STXP) || insn->id == CS_AARCH64(_INS_STLXP); size_t src_op = result ? 1 : 0; size_t addr_op = (result ? 1 : 0) + 1 + (pair ? 
1 : 0); ut32 addr_bits = 64; @@ -1182,33 +1182,33 @@ static RzILOpEffect *str(cs_insn *insn) { } ut32 bits; switch (insn->id) { - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STLRB: - case ARM64_INS_STXRB: - case ARM64_INS_STLXRB: - case ARM64_INS_STTRB: + case CS_AARCH64(_INS_STRB): + case CS_AARCH64(_INS_STURB): + case CS_AARCH64(_INS_STLRB): + case CS_AARCH64(_INS_STXRB): + case CS_AARCH64(_INS_STLXRB): + case CS_AARCH64(_INS_STTRB): #if CS_API_MAJOR > 4 - case ARM64_INS_STLLRB: - case ARM64_INS_STLURB: + case CS_AARCH64(_INS_STLLRB): + case CS_AARCH64(_INS_STLURB): #endif bits = 8; break; - case ARM64_INS_STRH: - case ARM64_INS_STURH: - case ARM64_INS_STLRH: - case ARM64_INS_STXRH: - case ARM64_INS_STLXRH: - case ARM64_INS_STTRH: + case CS_AARCH64(_INS_STRH): + case CS_AARCH64(_INS_STURH): + case CS_AARCH64(_INS_STLRH): + case CS_AARCH64(_INS_STXRH): + case CS_AARCH64(_INS_STLXRH): + case CS_AARCH64(_INS_STTRH): #if CS_API_MAJOR > 4 - case ARM64_INS_STLLRH: - case ARM64_INS_STLURH: + case CS_AARCH64(_INS_STLLRH): + case CS_AARCH64(_INS_STLURH): #endif bits = 16; break; default: - // ARM64_INS_STR, ARM64_INS_STUR, ARM64_INS_STLLR, ARM64_INS_STLR, ARM64_INS_STLUR, ARM64_INS_STP, - // ARM64_INS_STXR, ARM64_INS_STXP, ARM64_INS_STLXR, ARM64_INS_STLXP, ARM64_INS_STNP, ARM64_INS_STTR + // CS_AARCH64(_INS_STR), CS_AARCH64(_INS_STUR), CS_AARCH64(_INS_STLLR), CS_AARCH64(_INS_STLR), CS_AARCH64(_INS_STLUR), CS_AARCH64(_INS_STP), + // CS_AARCH64(_INS_STXR), CS_AARCH64(_INS_STXP), CS_AARCH64(_INS_STLXR), CS_AARCH64(_INS_STLXP), CS_AARCH64(_INS_STNP), CS_AARCH64(_INS_STTR) bits = REGBITS(src_op); if (!bits) { rz_il_op_pure_free(addr); @@ -1253,34 +1253,34 @@ static RzILOpEffect *str(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_LDADD, ARM64_INS_LDADDA, ARM64_INS_LDADDAL, ARM64_INS_LDADDL, - * ARM64_INS_LDADDB, ARM64_INS_LDADDAB, ARM64_INS_LDADDALB, ARM64_INS_LDADDLB, - * ARM64_INS_LDADDH, ARM64_INS_LDADDAH, ARM64_INS_LDADDALH, 
ARM64_INS_LDADDLH, - * ARM64_INS_STADD, ARM64_INS_STADDL, ARM64_INS_STADDB, ARM64_INS_STADDLB, ARM64_INS_STADDH, ARM64_INS_STADDLH, - * ARM64_INS_LDCLRB, ARM64_INS_LDCLRAB, ARM64_INS_LDCLRALB, ARM64_INS_LDCLRLB, - * ARM64_INS_LDCLRH, ARM64_INS_LDCLRAH, ARM64_INS_LDCLRALH, ARM64_INS_LDCLRLH - * ARM64_INS_LDCLR, ARM64_INS_LDCLRA, ARM64_INS_LDCLRAL, ARM64_INS_LDCLRL, - * ARM64_INS_STSETB, ARM64_INS_STSETLB, ARM64_INS_STSETH, ARM64_INS_STSETLH, ARM64_INS_STSET, ARM64_INS_STSETL, - * ARM64_INS_LDSETB, ARM64_INS_LDSETAB, ARM64_INS_LDSETALB, ARM64_INS_LDSETLB, - * ARM64_INS_LDSETH, ARM64_INS_LDSETAH, ARM64_INS_LDSETALH, ARM64_INS_LDSETLH - * ARM64_INS_LDSET, ARM64_INS_LDSETA, ARM64_INS_LDSETAL, ARM64_INS_LDSETL, - * ARM64_INS_STSETB, ARM64_INS_STSETLB, ARM64_INS_STSETH, ARM64_INS_STSETLH, ARM64_INS_STSET, ARM64_INS_STSETL, - * ARM64_INS_LDSMAXB, ARM64_INS_LDSMAXAB, ARM64_INS_LDSMAXALB, ARM64_INS_LDSMAXLB, - * ARM64_INS_LDSMAXH, ARM64_INS_LDSMAXAH, ARM64_INS_LDSMAXALH, ARM64_INS_LDSMAXLH - * ARM64_INS_LDSMAX, ARM64_INS_LDSMAXA, ARM64_INS_LDSMAXAL, ARM64_INS_LDSMAXL, - * ARM64_INS_STSMAXB, ARM64_INS_STSMAXLB, ARM64_INS_STSMAXH, ARM64_INS_STSMAXLH, ARM64_INS_STSMAX, ARM64_INS_STSMAXL, - * ARM64_INS_LDSMINB, ARM64_INS_LDSMINAB, ARM64_INS_LDSMINALB, ARM64_INS_LDSMINLB, - * ARM64_INS_LDSMINH, ARM64_INS_LDSMINAH, ARM64_INS_LDSMINALH, ARM64_INS_LDSMINLH - * ARM64_INS_LDSMIN, ARM64_INS_LDSMINA, ARM64_INS_LDSMINAL, ARM64_INS_LDSMINL, - * ARM64_INS_STSMINB, ARM64_INS_STSMINLB, ARM64_INS_STSMINH, ARM64_INS_STSMINLH, ARM64_INS_STSMIN, ARM64_INS_STSMINL, - * ARM64_INS_LDUMAXB, ARM64_INS_LDUMAXAB, ARM64_INS_LDUMAXALB, ARM64_INS_LDUMAXLB, - * ARM64_INS_LDUMAXH, ARM64_INS_LDUMAXAH, ARM64_INS_LDUMAXALH, ARM64_INS_LDUMAXLH - * ARM64_INS_LDUMAX, ARM64_INS_LDUMAXA, ARM64_INS_LDUMAXAL, ARM64_INS_LDUMAXL, - * ARM64_INS_STUMAXB, ARM64_INS_STUMAXLB, ARM64_INS_STUMAXH, ARM64_INS_STUMAXLH, ARM64_INS_STUMAX, ARM64_INS_STUMAXL, - * ARM64_INS_LDUMINB, ARM64_INS_LDUMINAB, ARM64_INS_LDUMINALB, 
ARM64_INS_LDUMINLB, - * ARM64_INS_LDUMINH, ARM64_INS_LDUMINAH, ARM64_INS_LDUMINALH, ARM64_INS_LDUMINLH - * ARM64_INS_LDUMIN, ARM64_INS_LDUMINA, ARM64_INS_LDUMINAL, ARM64_INS_LDUMINL, - * ARM64_INS_STUMINB, ARM64_INS_STUMINLB, ARM64_INS_STUMINH, ARM64_INS_STUMINLH, ARM64_INS_STUMIN, ARM64_INS_STUMINL + * Capstone: CS_AARCH64(_INS_LDADD), CS_AARCH64(_INS_LDADDA), CS_AARCH64(_INS_LDADDAL), CS_AARCH64(_INS_LDADDL), + * CS_AARCH64(_INS_LDADDB), CS_AARCH64(_INS_LDADDAB), CS_AARCH64(_INS_LDADDALB), CS_AARCH64(_INS_LDADDLB), + * CS_AARCH64(_INS_LDADDH), CS_AARCH64(_INS_LDADDAH), CS_AARCH64(_INS_LDADDALH), CS_AARCH64(_INS_LDADDLH), + * CS_AARCH64(_INS_STADD), CS_AARCH64(_INS_STADDL), CS_AARCH64(_INS_STADDB), CS_AARCH64(_INS_STADDLB), CS_AARCH64(_INS_STADDH), CS_AARCH64(_INS_STADDLH), + * CS_AARCH64(_INS_LDCLRB), CS_AARCH64(_INS_LDCLRAB), CS_AARCH64(_INS_LDCLRALB), CS_AARCH64(_INS_LDCLRLB), + * CS_AARCH64(_INS_LDCLRH), CS_AARCH64(_INS_LDCLRAH), CS_AARCH64(_INS_LDCLRALH), CS_AARCH64(_INS_LDCLRLH) + * CS_AARCH64(_INS_LDCLR), CS_AARCH64(_INS_LDCLRA), CS_AARCH64(_INS_LDCLRAL), CS_AARCH64(_INS_LDCLRL), + * CS_AARCH64(_INS_STSETB), CS_AARCH64(_INS_STSETLB), CS_AARCH64(_INS_STSETH), CS_AARCH64(_INS_STSETLH), CS_AARCH64(_INS_STSET), CS_AARCH64(_INS_STSETL), + * CS_AARCH64(_INS_LDSETB), CS_AARCH64(_INS_LDSETAB), CS_AARCH64(_INS_LDSETALB), CS_AARCH64(_INS_LDSETLB), + * CS_AARCH64(_INS_LDSETH), CS_AARCH64(_INS_LDSETAH), CS_AARCH64(_INS_LDSETALH), CS_AARCH64(_INS_LDSETLH) + * CS_AARCH64(_INS_LDSET), CS_AARCH64(_INS_LDSETA), CS_AARCH64(_INS_LDSETAL), CS_AARCH64(_INS_LDSETL), + * CS_AARCH64(_INS_STSETB), CS_AARCH64(_INS_STSETLB), CS_AARCH64(_INS_STSETH), CS_AARCH64(_INS_STSETLH), CS_AARCH64(_INS_STSET), CS_AARCH64(_INS_STSETL), + * CS_AARCH64(_INS_LDSMAXB), CS_AARCH64(_INS_LDSMAXAB), CS_AARCH64(_INS_LDSMAXALB), CS_AARCH64(_INS_LDSMAXLB), + * CS_AARCH64(_INS_LDSMAXH), CS_AARCH64(_INS_LDSMAXAH), CS_AARCH64(_INS_LDSMAXALH), CS_AARCH64(_INS_LDSMAXLH) + * CS_AARCH64(_INS_LDSMAX), 
CS_AARCH64(_INS_LDSMAXA), CS_AARCH64(_INS_LDSMAXAL), CS_AARCH64(_INS_LDSMAXL), + * CS_AARCH64(_INS_STSMAXB), CS_AARCH64(_INS_STSMAXLB), CS_AARCH64(_INS_STSMAXH), CS_AARCH64(_INS_STSMAXLH), CS_AARCH64(_INS_STSMAX), CS_AARCH64(_INS_STSMAXL), + * CS_AARCH64(_INS_LDSMINB), CS_AARCH64(_INS_LDSMINAB), CS_AARCH64(_INS_LDSMINALB), CS_AARCH64(_INS_LDSMINLB), + * CS_AARCH64(_INS_LDSMINH), CS_AARCH64(_INS_LDSMINAH), CS_AARCH64(_INS_LDSMINALH), CS_AARCH64(_INS_LDSMINLH) + * CS_AARCH64(_INS_LDSMIN), CS_AARCH64(_INS_LDSMINA), CS_AARCH64(_INS_LDSMINAL), CS_AARCH64(_INS_LDSMINL), + * CS_AARCH64(_INS_STSMINB), CS_AARCH64(_INS_STSMINLB), CS_AARCH64(_INS_STSMINH), CS_AARCH64(_INS_STSMINLH), CS_AARCH64(_INS_STSMIN), CS_AARCH64(_INS_STSMINL), + * CS_AARCH64(_INS_LDUMAXB), CS_AARCH64(_INS_LDUMAXAB), CS_AARCH64(_INS_LDUMAXALB), CS_AARCH64(_INS_LDUMAXLB), + * CS_AARCH64(_INS_LDUMAXH), CS_AARCH64(_INS_LDUMAXAH), CS_AARCH64(_INS_LDUMAXALH), CS_AARCH64(_INS_LDUMAXLH) + * CS_AARCH64(_INS_LDUMAX), CS_AARCH64(_INS_LDUMAXA), CS_AARCH64(_INS_LDUMAXAL), CS_AARCH64(_INS_LDUMAXL), + * CS_AARCH64(_INS_STUMAXB), CS_AARCH64(_INS_STUMAXLB), CS_AARCH64(_INS_STUMAXH), CS_AARCH64(_INS_STUMAXLH), CS_AARCH64(_INS_STUMAX), CS_AARCH64(_INS_STUMAXL), + * CS_AARCH64(_INS_LDUMINB), CS_AARCH64(_INS_LDUMINAB), CS_AARCH64(_INS_LDUMINALB), CS_AARCH64(_INS_LDUMINLB), + * CS_AARCH64(_INS_LDUMINH), CS_AARCH64(_INS_LDUMINAH), CS_AARCH64(_INS_LDUMINALH), CS_AARCH64(_INS_LDUMINLH) + * CS_AARCH64(_INS_LDUMIN), CS_AARCH64(_INS_LDUMINA), CS_AARCH64(_INS_LDUMINAL), CS_AARCH64(_INS_LDUMINL), + * CS_AARCH64(_INS_STUMINB), CS_AARCH64(_INS_STUMINLB), CS_AARCH64(_INS_STUMINH), CS_AARCH64(_INS_STUMINLH), CS_AARCH64(_INS_STUMIN), CS_AARCH64(_INS_STUMINL) * ARM: ldadd, ldadda, ldaddal, ldaddl, ldaddb, ldaddab, ldaddalb, ldaddlb, ldaddh, ldaddah, ldaddalh, ldaddlh, * stadd, staddl, staddb, staddlb, stadd, * ldclr, ldclra, ldclral, ldclrl, ldclrb, ldclrab, ldclralb, ldclrlb, ldclrh, ldclrah, ldclralh, ldclrlh, @@ -1301,7 +1301,7 @@ 
static RzILOpEffect *ldadd(cs_insn *insn) { if (!ISMEM(addr_op)) { return NULL; } - arm64_reg addend_reg = REGID(0); + CS_aarch64_reg() addend_reg = REGID(0); ut64 loadsz; enum { OP_ADD, @@ -1314,208 +1314,208 @@ static RzILOpEffect *ldadd(cs_insn *insn) { OP_UMIN } op = OP_ADD; switch (insn->id) { - case ARM64_INS_LDCLRB: - case ARM64_INS_LDCLRAB: - case ARM64_INS_LDCLRALB: - case ARM64_INS_LDCLRLB: - case ARM64_INS_STCLRB: - case ARM64_INS_STCLRLB: + case CS_AARCH64(_INS_LDCLRB): + case CS_AARCH64(_INS_LDCLRAB): + case CS_AARCH64(_INS_LDCLRALB): + case CS_AARCH64(_INS_LDCLRLB): + case CS_AARCH64(_INS_STCLRB): + case CS_AARCH64(_INS_STCLRLB): op = OP_CLR; loadsz = 8; break; - case ARM64_INS_LDEORB: - case ARM64_INS_LDEORAB: - case ARM64_INS_LDEORALB: - case ARM64_INS_LDEORLB: - case ARM64_INS_STEORB: - case ARM64_INS_STEORLB: + case CS_AARCH64(_INS_LDEORB): + case CS_AARCH64(_INS_LDEORAB): + case CS_AARCH64(_INS_LDEORALB): + case CS_AARCH64(_INS_LDEORLB): + case CS_AARCH64(_INS_STEORB): + case CS_AARCH64(_INS_STEORLB): op = OP_EOR; loadsz = 8; break; - case ARM64_INS_LDSETB: - case ARM64_INS_LDSETAB: - case ARM64_INS_LDSETALB: - case ARM64_INS_LDSETLB: - case ARM64_INS_STSETB: - case ARM64_INS_STSETLB: + case CS_AARCH64(_INS_LDSETB): + case CS_AARCH64(_INS_LDSETAB): + case CS_AARCH64(_INS_LDSETALB): + case CS_AARCH64(_INS_LDSETLB): + case CS_AARCH64(_INS_STSETB): + case CS_AARCH64(_INS_STSETLB): op = OP_SET; loadsz = 8; break; - case ARM64_INS_LDSMAXB: - case ARM64_INS_LDSMAXAB: - case ARM64_INS_LDSMAXALB: - case ARM64_INS_LDSMAXLB: - case ARM64_INS_STSMAXB: - case ARM64_INS_STSMAXLB: + case CS_AARCH64(_INS_LDSMAXB): + case CS_AARCH64(_INS_LDSMAXAB): + case CS_AARCH64(_INS_LDSMAXALB): + case CS_AARCH64(_INS_LDSMAXLB): + case CS_AARCH64(_INS_STSMAXB): + case CS_AARCH64(_INS_STSMAXLB): op = OP_SMAX; loadsz = 8; break; - case ARM64_INS_LDSMINB: - case ARM64_INS_LDSMINAB: - case ARM64_INS_LDSMINALB: - case ARM64_INS_LDSMINLB: - case ARM64_INS_STSMINB: - case 
ARM64_INS_STSMINLB: + case CS_AARCH64(_INS_LDSMINB): + case CS_AARCH64(_INS_LDSMINAB): + case CS_AARCH64(_INS_LDSMINALB): + case CS_AARCH64(_INS_LDSMINLB): + case CS_AARCH64(_INS_STSMINB): + case CS_AARCH64(_INS_STSMINLB): op = OP_SMIN; loadsz = 8; break; - case ARM64_INS_LDUMAXB: - case ARM64_INS_LDUMAXAB: - case ARM64_INS_LDUMAXALB: - case ARM64_INS_LDUMAXLB: - case ARM64_INS_STUMAXB: - case ARM64_INS_STUMAXLB: + case CS_AARCH64(_INS_LDUMAXB): + case CS_AARCH64(_INS_LDUMAXAB): + case CS_AARCH64(_INS_LDUMAXALB): + case CS_AARCH64(_INS_LDUMAXLB): + case CS_AARCH64(_INS_STUMAXB): + case CS_AARCH64(_INS_STUMAXLB): op = OP_UMAX; loadsz = 8; break; - case ARM64_INS_LDUMINB: - case ARM64_INS_LDUMINAB: - case ARM64_INS_LDUMINALB: - case ARM64_INS_LDUMINLB: - case ARM64_INS_STUMINB: - case ARM64_INS_STUMINLB: + case CS_AARCH64(_INS_LDUMINB): + case CS_AARCH64(_INS_LDUMINAB): + case CS_AARCH64(_INS_LDUMINALB): + case CS_AARCH64(_INS_LDUMINLB): + case CS_AARCH64(_INS_STUMINB): + case CS_AARCH64(_INS_STUMINLB): op = OP_UMIN; loadsz = 8; break; - case ARM64_INS_LDADDB: - case ARM64_INS_LDADDAB: - case ARM64_INS_LDADDALB: - case ARM64_INS_LDADDLB: - case ARM64_INS_STADDB: - case ARM64_INS_STADDLB: + case CS_AARCH64(_INS_LDADDB): + case CS_AARCH64(_INS_LDADDAB): + case CS_AARCH64(_INS_LDADDALB): + case CS_AARCH64(_INS_LDADDLB): + case CS_AARCH64(_INS_STADDB): + case CS_AARCH64(_INS_STADDLB): loadsz = 8; break; - case ARM64_INS_LDCLRH: - case ARM64_INS_LDCLRAH: - case ARM64_INS_LDCLRALH: - case ARM64_INS_LDCLRLH: - case ARM64_INS_STCLRH: - case ARM64_INS_STCLRLH: + case CS_AARCH64(_INS_LDCLRH): + case CS_AARCH64(_INS_LDCLRAH): + case CS_AARCH64(_INS_LDCLRALH): + case CS_AARCH64(_INS_LDCLRLH): + case CS_AARCH64(_INS_STCLRH): + case CS_AARCH64(_INS_STCLRLH): op = OP_CLR; loadsz = 16; break; - case ARM64_INS_LDEORH: - case ARM64_INS_LDEORAH: - case ARM64_INS_LDEORALH: - case ARM64_INS_LDEORLH: - case ARM64_INS_STEORH: - case ARM64_INS_STEORLH: + case CS_AARCH64(_INS_LDEORH): + case 
CS_AARCH64(_INS_LDEORAH): + case CS_AARCH64(_INS_LDEORALH): + case CS_AARCH64(_INS_LDEORLH): + case CS_AARCH64(_INS_STEORH): + case CS_AARCH64(_INS_STEORLH): op = OP_EOR; loadsz = 16; break; - case ARM64_INS_LDSETH: - case ARM64_INS_LDSETAH: - case ARM64_INS_LDSETALH: - case ARM64_INS_LDSETLH: - case ARM64_INS_STSETH: - case ARM64_INS_STSETLH: + case CS_AARCH64(_INS_LDSETH): + case CS_AARCH64(_INS_LDSETAH): + case CS_AARCH64(_INS_LDSETALH): + case CS_AARCH64(_INS_LDSETLH): + case CS_AARCH64(_INS_STSETH): + case CS_AARCH64(_INS_STSETLH): op = OP_SET; loadsz = 16; break; - case ARM64_INS_LDSMAXH: - case ARM64_INS_LDSMAXAH: - case ARM64_INS_LDSMAXALH: - case ARM64_INS_LDSMAXLH: - case ARM64_INS_STSMAXH: - case ARM64_INS_STSMAXLH: + case CS_AARCH64(_INS_LDSMAXH): + case CS_AARCH64(_INS_LDSMAXAH): + case CS_AARCH64(_INS_LDSMAXALH): + case CS_AARCH64(_INS_LDSMAXLH): + case CS_AARCH64(_INS_STSMAXH): + case CS_AARCH64(_INS_STSMAXLH): op = OP_SMAX; loadsz = 16; break; - case ARM64_INS_LDSMINH: - case ARM64_INS_LDSMINAH: - case ARM64_INS_LDSMINALH: - case ARM64_INS_LDSMINLH: - case ARM64_INS_STSMINH: - case ARM64_INS_STSMINLH: + case CS_AARCH64(_INS_LDSMINH): + case CS_AARCH64(_INS_LDSMINAH): + case CS_AARCH64(_INS_LDSMINALH): + case CS_AARCH64(_INS_LDSMINLH): + case CS_AARCH64(_INS_STSMINH): + case CS_AARCH64(_INS_STSMINLH): op = OP_SMIN; loadsz = 16; break; - case ARM64_INS_LDUMAXH: - case ARM64_INS_LDUMAXAH: - case ARM64_INS_LDUMAXALH: - case ARM64_INS_LDUMAXLH: - case ARM64_INS_STUMAXH: - case ARM64_INS_STUMAXLH: + case CS_AARCH64(_INS_LDUMAXH): + case CS_AARCH64(_INS_LDUMAXAH): + case CS_AARCH64(_INS_LDUMAXALH): + case CS_AARCH64(_INS_LDUMAXLH): + case CS_AARCH64(_INS_STUMAXH): + case CS_AARCH64(_INS_STUMAXLH): op = OP_UMAX; loadsz = 16; break; - case ARM64_INS_LDUMINH: - case ARM64_INS_LDUMINAH: - case ARM64_INS_LDUMINALH: - case ARM64_INS_LDUMINLH: - case ARM64_INS_STUMINH: - case ARM64_INS_STUMINLH: + case CS_AARCH64(_INS_LDUMINH): + case CS_AARCH64(_INS_LDUMINAH): + 
case CS_AARCH64(_INS_LDUMINALH): + case CS_AARCH64(_INS_LDUMINLH): + case CS_AARCH64(_INS_STUMINH): + case CS_AARCH64(_INS_STUMINLH): op = OP_UMIN; loadsz = 16; break; - case ARM64_INS_LDADDH: - case ARM64_INS_LDADDAH: - case ARM64_INS_LDADDALH: - case ARM64_INS_LDADDLH: - case ARM64_INS_STADDH: - case ARM64_INS_STADDLH: + case CS_AARCH64(_INS_LDADDH): + case CS_AARCH64(_INS_LDADDAH): + case CS_AARCH64(_INS_LDADDALH): + case CS_AARCH64(_INS_LDADDLH): + case CS_AARCH64(_INS_STADDH): + case CS_AARCH64(_INS_STADDLH): loadsz = 16; break; - case ARM64_INS_LDCLR: - case ARM64_INS_LDCLRA: - case ARM64_INS_LDCLRAL: - case ARM64_INS_LDCLRL: - case ARM64_INS_STCLR: - case ARM64_INS_STCLRL: + case CS_AARCH64(_INS_LDCLR): + case CS_AARCH64(_INS_LDCLRA): + case CS_AARCH64(_INS_LDCLRAL): + case CS_AARCH64(_INS_LDCLRL): + case CS_AARCH64(_INS_STCLR): + case CS_AARCH64(_INS_STCLRL): op = OP_CLR; goto size_from_reg; - case ARM64_INS_LDEOR: - case ARM64_INS_LDEORA: - case ARM64_INS_LDEORAL: - case ARM64_INS_LDEORL: - case ARM64_INS_STEOR: - case ARM64_INS_STEORL: + case CS_AARCH64(_INS_LDEOR): + case CS_AARCH64(_INS_LDEORA): + case CS_AARCH64(_INS_LDEORAL): + case CS_AARCH64(_INS_LDEORL): + case CS_AARCH64(_INS_STEOR): + case CS_AARCH64(_INS_STEORL): op = OP_EOR; goto size_from_reg; - case ARM64_INS_LDSET: - case ARM64_INS_LDSETA: - case ARM64_INS_LDSETAL: - case ARM64_INS_LDSETL: - case ARM64_INS_STSET: - case ARM64_INS_STSETL: + case CS_AARCH64(_INS_LDSET): + case CS_AARCH64(_INS_LDSETA): + case CS_AARCH64(_INS_LDSETAL): + case CS_AARCH64(_INS_LDSETL): + case CS_AARCH64(_INS_STSET): + case CS_AARCH64(_INS_STSETL): op = OP_SET; goto size_from_reg; - case ARM64_INS_LDSMAX: - case ARM64_INS_LDSMAXA: - case ARM64_INS_LDSMAXAL: - case ARM64_INS_LDSMAXL: - case ARM64_INS_STSMAX: - case ARM64_INS_STSMAXL: + case CS_AARCH64(_INS_LDSMAX): + case CS_AARCH64(_INS_LDSMAXA): + case CS_AARCH64(_INS_LDSMAXAL): + case CS_AARCH64(_INS_LDSMAXL): + case CS_AARCH64(_INS_STSMAX): + case 
CS_AARCH64(_INS_STSMAXL): op = OP_SMAX; goto size_from_reg; - case ARM64_INS_LDSMIN: - case ARM64_INS_LDSMINA: - case ARM64_INS_LDSMINAL: - case ARM64_INS_LDSMINL: - case ARM64_INS_STSMIN: - case ARM64_INS_STSMINL: + case CS_AARCH64(_INS_LDSMIN): + case CS_AARCH64(_INS_LDSMINA): + case CS_AARCH64(_INS_LDSMINAL): + case CS_AARCH64(_INS_LDSMINL): + case CS_AARCH64(_INS_STSMIN): + case CS_AARCH64(_INS_STSMINL): op = OP_SMIN; goto size_from_reg; - case ARM64_INS_LDUMAX: - case ARM64_INS_LDUMAXA: - case ARM64_INS_LDUMAXAL: - case ARM64_INS_LDUMAXL: - case ARM64_INS_STUMAX: - case ARM64_INS_STUMAXL: + case CS_AARCH64(_INS_LDUMAX): + case CS_AARCH64(_INS_LDUMAXA): + case CS_AARCH64(_INS_LDUMAXAL): + case CS_AARCH64(_INS_LDUMAXL): + case CS_AARCH64(_INS_STUMAX): + case CS_AARCH64(_INS_STUMAXL): op = OP_UMAX; goto size_from_reg; - case ARM64_INS_LDUMIN: - case ARM64_INS_LDUMINA: - case ARM64_INS_LDUMINAL: - case ARM64_INS_LDUMINL: - case ARM64_INS_STUMIN: - case ARM64_INS_STUMINL: + case CS_AARCH64(_INS_LDUMIN): + case CS_AARCH64(_INS_LDUMINA): + case CS_AARCH64(_INS_LDUMINAL): + case CS_AARCH64(_INS_LDUMINL): + case CS_AARCH64(_INS_STUMIN): + case CS_AARCH64(_INS_STUMINL): op = OP_UMIN; // fallthrough size_from_reg: - default: // ARM64_INS_LDADD, ARM64_INS_LDADDA, ARM64_INS_LDADDAL, ARM64_INS_LDADDL, ARM64_INS_STADD, ARM64_INS_STADDL + default: // CS_AARCH64(_INS_LDADD), CS_AARCH64(_INS_LDADDA), CS_AARCH64(_INS_LDADDAL), CS_AARCH64(_INS_LDADDL), CS_AARCH64(_INS_STADD), CS_AARCH64(_INS_STADDL) loadsz = is_wreg(addend_reg) ? 32 : 64; break; } @@ -1532,7 +1532,7 @@ static RzILOpEffect *ldadd(cs_insn *insn) { rz_il_op_pure_free(addr); return NULL; } - arm64_reg dst_reg = REGID(1); + CS_aarch64_reg() dst_reg = REGID(1); dst_reg = xreg_of_reg(dst_reg); ld_eff = write_reg(dst_reg, loadsz != 64 ? 
UNSIGNED(64, VARL("old")) : VARL("old")); if (!ld_eff) { @@ -1585,7 +1585,7 @@ static RzILOpEffect *ldadd(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_MADD, ARM64_INS_MSUB + * Capstone: CS_AARCH64(_INS_MADD), CS_AARCH64(_INS_MSUB) * ARM: madd, msub */ static RzILOpEffect *madd(cs_insn *insn) { @@ -1603,7 +1603,7 @@ static RzILOpEffect *madd(cs_insn *insn) { return NULL; } RzILOpBitVector *res; - if (insn->id == ARM64_INS_MSUB) { + if (insn->id == CS_AARCH64(_INS_MSUB)) { res = SUB(addend, MUL(ma, mb)); } else { res = ADD(MUL(ma, mb), addend); @@ -1612,7 +1612,7 @@ static RzILOpEffect *madd(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MUL, ARM64_INS_MNEG + * Capstone: CS_AARCH64(_INS_MUL), CS_AARCH64(_INS_MNEG) * ARM: mul, mneg */ static RzILOpEffect *mul(cs_insn *insn) { @@ -1631,7 +1631,7 @@ static RzILOpEffect *mul(cs_insn *insn) { return NULL; } RzILOpBitVector *res = MUL(ma, mb); - if (insn->id == ARM64_INS_MNEG) { + if (insn->id == CS_AARCH64(_INS_MNEG)) { res = NEG(res); } return write_reg(REGID(0), res); @@ -1640,7 +1640,7 @@ static RzILOpEffect *mul(cs_insn *insn) { static RzILOpEffect *movn(cs_insn *insn); /** - * Capstone: ARM64_INS_MOV, ARM64_INS_MOVZ + * Capstone: CS_AARCH64(_INS_MOV), CS_AARCH64(_INS_MOVZ) * ARM: mov, movz */ static RzILOpEffect *mov(cs_insn *insn) { @@ -1664,7 +1664,7 @@ static RzILOpEffect *mov(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MOVK + * Capstone: CS_AARCH64(_INS_MOVK) * ARM: movk */ static RzILOpEffect *movk(cs_insn *insn) { @@ -1676,13 +1676,13 @@ static RzILOpEffect *movk(cs_insn *insn) { if (!src) { return NULL; } - cs_arm64_op *op = &insn->detail->arm64.operands[1]; - ut32 shift = op->shift.type == ARM64_SFT_LSL ? op->shift.value : 0; + CS_aarch64_op() *op = &insn->detail->CS_aarch64().operands[1]; + ut32 shift = op->shift.type == CS_AARCH64(_SFT_LSL) ? 
op->shift.value : 0; return write_reg(REGID(0), LOGOR(LOGAND(src, UN(bits, ~(0xffffull << shift))), UN(bits, ((ut64)op->imm) << shift))); } /** - * Capstone: ARM64_INS_MOVN + * Capstone: CS_AARCH64(_INS_MOVN) * ARM: movn */ static RzILOpEffect *movn(cs_insn *insn) { @@ -1692,8 +1692,8 @@ static RzILOpEffect *movn(cs_insn *insn) { // The only case where the movn encoding should be disassembled as "movn" is // when (IsZero(imm16) && hw != '00'), according to the "alias conditions" in the reference manual. // Unfortunately, capstone v4 seems to always disassemble as movn, so we still have to implement this. - cs_arm64_op *op = &insn->detail->arm64.operands[1]; - ut32 shift = op->shift.type == ARM64_SFT_LSL ? op->shift.value : 0; + CS_aarch64_op() *op = &insn->detail->CS_aarch64().operands[1]; + ut32 shift = op->shift.type == CS_AARCH64(_SFT_LSL) ? op->shift.value : 0; ut32 bits = REGBITS(0); if (!bits) { return NULL; @@ -1702,17 +1702,17 @@ static RzILOpEffect *movn(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MSR + * Capstone: CS_AARCH64(_INS_MSR) * ARM: msr */ static RzILOpEffect *msr(cs_insn *insn) { - cs_arm64_op *op = &insn->detail->arm64.operands[0]; + CS_aarch64_op() *op = &insn->detail->CS_aarch64().operands[0]; #if CS_API_MAJOR > 4 - if (op->type != ARM64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { + if (op->type != CS_AARCH64(_OP_SYS) || (ut64)op->sys != (ut64)CS_AARCH64(_SYSREG_NZCV)) { return NULL; } #else - if (op->type != ARM64_OP_REG_MSR || op->reg != 0xda10) { + if (op->type != CS_AARCH64(_OP_REG_MSR) || op->reg != 0xda10) { return NULL; } #endif @@ -1730,7 +1730,7 @@ static RzILOpEffect *msr(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_RMIF + * Capstone: CS_AARCH64(_INS_RMIF) * ARM: rmif */ static RzILOpEffect *rmif(cs_insn *insn) { @@ -1764,7 +1764,7 @@ static RzILOpEffect *rmif(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SBFX, ARM64_INS_SBFIZ, ARM64_INS_UBFX, ARM64_INS_UBFIZ + * Capstone: CS_AARCH64(_INS_SBFX), 
CS_AARCH64(_INS_SBFIZ), CS_AARCH64(_INS_UBFX), CS_AARCH64(_INS_UBFIZ) * ARM: sbfx, sbfiz, ubfx, ubfiz */ static RzILOpEffect *sbfx(cs_insn *insn) { @@ -1782,32 +1782,32 @@ static RzILOpEffect *sbfx(cs_insn *insn) { ut64 lsb = IMM(2); ut64 width = IMM(3); RzILOpBitVector *res; - if (insn->id == ARM64_INS_SBFIZ || insn->id == ARM64_INS_UBFIZ) { + if (insn->id == CS_AARCH64(_INS_SBFIZ) || insn->id == CS_AARCH64(_INS_UBFIZ)) { res = SHIFTL0(UNSIGNED(width + lsb, src), UN(6, lsb)); } else { - // ARM64_INS_SBFX, ARM64_INS_UBFX + // CS_AARCH64(_INS_SBFX), CS_AARCH64(_INS_UBFX) res = UNSIGNED(width, SHIFTR0(src, UN(6, lsb))); } - bool is_signed = insn->id == ARM64_INS_SBFX || insn->id == ARM64_INS_SBFIZ; + bool is_signed = insn->id == CS_AARCH64(_INS_SBFX) || insn->id == CS_AARCH64(_INS_SBFIZ); res = LET("res", res, is_signed ? SIGNED(bits, VARLP("res")) : UNSIGNED(bits, VARLP("res"))); return write_reg(REGID(0), res); } /** - * Capstone: ARM64_INS_MRS + * Capstone: CS_AARCH64(_INS_MRS) * ARM: mrs */ static RzILOpEffect *mrs(cs_insn *insn) { if (!ISREG(0)) { return NULL; } - cs_arm64_op *op = &insn->detail->arm64.operands[1]; + CS_aarch64_op() *op = &insn->detail->CS_aarch64().operands[1]; #if CS_API_MAJOR > 4 - if (op->type != ARM64_OP_SYS || (ut64)op->sys != (ut64)ARM64_SYSREG_NZCV) { + if (op->type != CS_AARCH64(_OP_SYS) || (ut64)op->sys != (ut64)CS_AARCH64(_SYSREG_NZCV)) { return NULL; } #else - if (op->type != ARM64_OP_REG_MRS || op->reg != 0xda10) { + if (op->type != CS_AARCH64(_OP_REG_MRS) || op->reg != 0xda10) { return NULL; } #endif @@ -1823,7 +1823,7 @@ static RzILOpEffect *mrs(cs_insn *insn) { } /** - * Capstone: ARM64_INS_MVN, ARM64_INS_NEG, ARM64_INS_NEGS, ARM64_INS_NGC, ARM64_INS_NGCS + * Capstone: CS_AARCH64(_INS_MVN), CS_AARCH64(_INS_NEG), CS_AARCH64(_INS_NEGS), CS_AARCH64(_INS_NGC), CS_AARCH64(_INS_NGCS) * ARM: mvn, neg, negs, ngc, ngcs */ static RzILOpEffect *mvn(cs_insn *insn) { @@ -1837,19 +1837,19 @@ static RzILOpEffect *mvn(cs_insn *insn) { RzILOpBitVector 
*res; switch (insn->id) { - case ARM64_INS_NEG: + case CS_AARCH64(_INS_NEG): #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case CS_AARCH64(_INS_NEGS): #endif res = NEG(val); break; - case ARM64_INS_NGC: + case CS_AARCH64(_INS_NGC): #if CS_API_MAJOR > 3 - case ARM64_INS_NGCS: + case CS_AARCH64(_INS_NGCS): #endif res = NEG(ADD(val, ITE(VARG("cf"), UN(bits, 0), UN(bits, 1)))); break; - default: // ARM64_INS_MVN + default: // CS_AARCH64(_INS_MVN) res = LOGNOT(val); break; } @@ -1857,11 +1857,11 @@ static RzILOpEffect *mvn(cs_insn *insn) { if (!set) { return NULL; } - if (insn->detail->arm64.update_flags) { + if (insn->detail->CS_aarch64().update_flags) { return SEQ5( SETL("b", DUP(val)), set, - SETG("cf", sub_carry(UN(bits, 0), VARL("b"), insn->id == ARM64_INS_NGC, bits)), + SETG("cf", sub_carry(UN(bits, 0), VARL("b"), insn->id == CS_AARCH64(_INS_NGC), bits)), SETG("vf", sub_overflow(UN(bits, 0), VARL("b"), REG(0))), update_flags_zn(REG(0))); } @@ -1869,7 +1869,7 @@ static RzILOpEffect *mvn(cs_insn *insn) { } /** - * Capstone: ARM64_INS_RBIT + * Capstone: CS_AARCH64(_INS_RBIT) * ARM: rbit */ static RzILOpEffect *rbit(cs_insn *insn) { @@ -1898,7 +1898,7 @@ static RzILOpEffect *rbit(cs_insn *insn) { } /** - * Capstone: ARM64_INS_REV, ARM64_INS_REV32, ARM64_INS_REV16 + * Capstone: CS_AARCH64(_INS_REV), CS_AARCH64(_INS_REV32), CS_AARCH64(_INS_REV16) * ARM: rev, rev32, rev16 */ static RzILOpEffect *rev(cs_insn *insn) { @@ -1909,11 +1909,11 @@ static RzILOpEffect *rev(cs_insn *insn) { if (!dst_bits) { return NULL; } - arm64_reg src_reg = xreg_of_reg(REGID(1)); + CS_aarch64_reg() src_reg = xreg_of_reg(REGID(1)); ut32 container_bits = dst_bits; - if (insn->id == ARM64_INS_REV32) { + if (insn->id == CS_AARCH64(_INS_REV32)) { container_bits = 32; - } else if (insn->id == ARM64_INS_REV16) { + } else if (insn->id == CS_AARCH64(_INS_REV16)) { container_bits = 16; } RzILOpBitVector *src = read_reg(src_reg); @@ -1964,7 +1964,7 @@ static RzILOpEffect *rev(cs_insn *insn) { } /** - * 
Capstone: ARM64_INS_SDIV + * Capstone: CS_AARCH64(_INS_SDIV) * ARM: sdiv */ static RzILOpEffect *sdiv(cs_insn *insn) { @@ -1990,7 +1990,7 @@ static RzILOpEffect *sdiv(cs_insn *insn) { } /** - * Capstone: ARM64_INS_UDIV + * Capstone: CS_AARCH64(_INS_UDIV) * ARM: udiv */ static RzILOpEffect *udiv(cs_insn *insn) { @@ -2014,7 +2014,7 @@ static RzILOpEffect *udiv(cs_insn *insn) { #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_SETF8, ARM64_INS_SETF16 + * Capstone: CS_AARCH64(_INS_SETF8), CS_AARCH64(_INS_SETF16) * ARM: setf8, setf16 */ static RzILOpEffect *setf(cs_insn *insn) { @@ -2025,7 +2025,7 @@ static RzILOpEffect *setf(cs_insn *insn) { if (!val) { return NULL; } - ut32 bits = insn->id == ARM64_INS_SETF16 ? 16 : 8; + ut32 bits = insn->id == CS_AARCH64(_INS_SETF16) ? 16 : 8; return SEQ2( SETG("vf", XOR(MSB(UNSIGNED(bits + 1, val)), MSB(UNSIGNED(bits, DUP(val))))), update_flags_zn(UNSIGNED(bits, DUP(val)))); @@ -2033,7 +2033,7 @@ static RzILOpEffect *setf(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SMADDL, ARM64_INS_SMSUBL, ARM64_INS_UMADDL, ARM64_INS_UMSUBL + * Capstone: CS_AARCH64(_INS_SMADDL), CS_AARCH64(_INS_SMSUBL), CS_AARCH64(_INS_UMADDL), CS_AARCH64(_INS_UMSUBL) * ARM: smaddl, smsubl, umaddl, umsubl */ static RzILOpEffect *smaddl(cs_insn *insn) { @@ -2051,9 +2051,9 @@ static RzILOpEffect *smaddl(cs_insn *insn) { rz_il_op_pure_free(addend); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMADDL || insn->id == ARM64_INS_SMSUBL; + bool is_signed = insn->id == CS_AARCH64(_INS_SMADDL) || insn->id == CS_AARCH64(_INS_SMSUBL); RzILOpBitVector *res = MUL(is_signed ? SIGNED(64, x) : UNSIGNED(64, x), is_signed ? 
SIGNED(64, y) : UNSIGNED(64, y)); - if (insn->id == ARM64_INS_SMSUBL || insn->id == ARM64_INS_UMSUBL) { + if (insn->id == CS_AARCH64(_INS_SMSUBL) || insn->id == CS_AARCH64(_INS_UMSUBL)) { res = SUB(addend, res); } else { res = ADD(addend, res); @@ -2062,7 +2062,7 @@ static RzILOpEffect *smaddl(cs_insn *insn) { } /** - * Capstone: ARM64_INS_SMULL, ARM64_INS_SMNEGL, ARM64_INS_UMULL, ARM64_INS_UMNEGL + * Capstone: CS_AARCH64(_INS_SMULL), CS_AARCH64(_INS_SMNEGL), CS_AARCH64(_INS_UMULL), CS_AARCH64(_INS_UMNEGL) * ARM: smull, smnegl, umull, umnegl */ static RzILOpEffect *smull(cs_insn *insn) { @@ -2077,16 +2077,16 @@ static RzILOpEffect *smull(cs_insn *insn) { rz_il_op_pure_free(y); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMULL || insn->id == ARM64_INS_SMNEGL; + bool is_signed = insn->id == CS_AARCH64(_INS_SMULL) || insn->id == CS_AARCH64(_INS_SMNEGL); RzILOpBitVector *res = MUL(is_signed ? SIGNED(64, x) : UNSIGNED(64, x), is_signed ? SIGNED(64, y) : UNSIGNED(64, y)); - if (insn->id == ARM64_INS_SMNEGL || insn->id == ARM64_INS_UMNEGL) { + if (insn->id == CS_AARCH64(_INS_SMNEGL) || insn->id == CS_AARCH64(_INS_UMNEGL)) { res = NEG(res); } return write_reg(REGID(0), res); } /** - * Capstone: ARM64_INS_SMULH, ARM64_INS_UMULH + * Capstone: CS_AARCH64(_INS_SMULH), CS_AARCH64(_INS_UMULH) * ARM: smulh, umulh */ static RzILOpEffect *smulh(cs_insn *insn) { @@ -2101,16 +2101,16 @@ static RzILOpEffect *smulh(cs_insn *insn) { rz_il_op_pure_free(y); return NULL; } - bool is_signed = insn->id == ARM64_INS_SMULH; + bool is_signed = insn->id == CS_AARCH64(_INS_SMULH); RzILOpBitVector *res = MUL(is_signed ? SIGNED(128, x) : UNSIGNED(128, x), is_signed ? 
SIGNED(128, y) : UNSIGNED(128, y)); return write_reg(REGID(0), UNSIGNED(64, SHIFTR0(res, UN(7, 64)))); } #if CS_API_MAJOR > 4 /** - * Capstone: ARM64_INS_SWP, ARM64_INS_SWPA, ARM64_INS_SWPAL, ARM64_INS_SWPL, - * ARM64_INS_SWPB, ARM64_INS_SWPAB, ARM64_INS_SWPALB, ARM64_INS_SWPLB - * ARM64_INS_SWPH, ARM64_INS_SWPAH, ARM64_INS_SWPALH, ARM64_INS_SWPLH + * Capstone: CS_AARCH64(_INS_SWP), CS_AARCH64(_INS_SWPA), CS_AARCH64(_INS_SWPAL), CS_AARCH64(_INS_SWPL), + * CS_AARCH64(_INS_SWPB), CS_AARCH64(_INS_SWPAB), CS_AARCH64(_INS_SWPALB), CS_AARCH64(_INS_SWPLB) + * CS_AARCH64(_INS_SWPH), CS_AARCH64(_INS_SWPAH), CS_AARCH64(_INS_SWPALH), CS_AARCH64(_INS_SWPLH) * ARM: swp, swpa, swpal, swpl, swpb, swpab, swpalb, swplb, swph, swpah, swpalh, swplh */ static RzILOpEffect *swp(cs_insn *insn) { @@ -2119,19 +2119,19 @@ static RzILOpEffect *swp(cs_insn *insn) { } ut32 bits; switch (insn->id) { - case ARM64_INS_SWPB: - case ARM64_INS_SWPAB: - case ARM64_INS_SWPALB: - case ARM64_INS_SWPLB: + case CS_AARCH64(_INS_SWPB): + case CS_AARCH64(_INS_SWPAB): + case CS_AARCH64(_INS_SWPALB): + case CS_AARCH64(_INS_SWPLB): bits = 8; break; - case ARM64_INS_SWPH: - case ARM64_INS_SWPAH: - case ARM64_INS_SWPALH: - case ARM64_INS_SWPLH: + case CS_AARCH64(_INS_SWPH): + case CS_AARCH64(_INS_SWPAH): + case CS_AARCH64(_INS_SWPALH): + case CS_AARCH64(_INS_SWPLH): bits = 16; break; - default: // ARM64_INS_SWP, ARM64_INS_SWPA, ARM64_INS_SWPAL, ARM64_INS_SWPL: + default: // CS_AARCH64(_INS_SWP), CS_AARCH64(_INS_SWPA), CS_AARCH64(_INS_SWPAL), CS_AARCH64(_INS_SWPL): bits = REGBITS(0); if (!bits) { return NULL; @@ -2150,8 +2150,8 @@ static RzILOpEffect *swp(cs_insn *insn) { return NULL; } RzILOpEffect *store_eff = bits == 8 ? 
STORE(addr, store_val) : STOREW(addr, store_val); - arm64_reg ret_reg = xreg_of_reg(REGID(1)); - if (ret_reg == ARM64_REG_XZR) { + CS_aarch64_reg() ret_reg = xreg_of_reg(REGID(1)); + if (ret_reg == CS_AARCH64(_REG_XZR)) { return store_eff; } RzILOpEffect *ret_eff = write_reg(ret_reg, bits != 64 ? UNSIGNED(64, VARL("ret")) : VARL("ret")); @@ -2167,7 +2167,7 @@ static RzILOpEffect *swp(cs_insn *insn) { #endif /** - * Capstone: ARM64_INS_SXTB, ARM64_INS_SXTH, ARM64_INS_SXTW, ARM64_INS_UXTB, ARM64_INS_UXTH + * Capstone: CS_AARCH64(_INS_SXTB), CS_AARCH64(_INS_SXTH), CS_AARCH64(_INS_SXTW), CS_AARCH64(_INS_UXTB), CS_AARCH64(_INS_UXTH) * ARM: sxtb, sxth, sxtw, uxtb, uxth */ static RzILOpEffect *sxt(cs_insn *insn) { @@ -2177,19 +2177,19 @@ static RzILOpEffect *sxt(cs_insn *insn) { ut32 bits; bool is_signed = true; switch (insn->id) { - case ARM64_INS_UXTB: + case CS_AARCH64(_INS_UXTB): is_signed = false; // fallthrough - case ARM64_INS_SXTB: + case CS_AARCH64(_INS_SXTB): bits = 8; break; - case ARM64_INS_UXTH: + case CS_AARCH64(_INS_UXTH): is_signed = false; // fallthrough - case ARM64_INS_SXTH: + case CS_AARCH64(_INS_SXTH): bits = 16; break; - default: // ARM64_INS_SXTW + default: // CS_AARCH64(_INS_SXTW) bits = 32; break; } @@ -2201,7 +2201,7 @@ static RzILOpEffect *sxt(cs_insn *insn) { } /** - * Capstone: ARM64_INS_TBNZ, ARM64_TBZ + * Capstone: CS_AARCH64(_INS_TBNZ), ARM64_TBZ * ARM: tbnz, tbz */ static RzILOpEffect *tbz(cs_insn *insn) { @@ -2217,13 +2217,13 @@ static RzILOpEffect *tbz(cs_insn *insn) { return NULL; } RzILOpBool *c = LSB(SHIFTR0(src, UN(6, IMM(1)))); - return insn->id == ARM64_INS_TBNZ + return insn->id == CS_AARCH64(_INS_TBNZ) ? 
BRANCH(c, JMP(tgt), NULL) : BRANCH(c, NULL, JMP(tgt)); } /** - * Capstone: ARM64_INS_TST + * Capstone: CS_AARCH64(_INS_TST) * ARM: tst */ static RzILOpEffect *tst(cs_insn *insn) { @@ -2315,441 +2315,441 @@ static RzILOpEffect *tst(cs_insn *insn) { */ RZ_IPI RzILOpEffect *rz_arm_cs_64_il(csh *handle, cs_insn *insn) { switch (insn->id) { - case ARM64_INS_NOP: - case ARM64_INS_HINT: - case ARM64_INS_PRFM: - case ARM64_INS_PRFUM: - case ARM64_INS_SEV: - case ARM64_INS_SEVL: - case ARM64_INS_WFE: - case ARM64_INS_WFI: - case ARM64_INS_YIELD: + case CS_AARCH64(_INS_NOP): + case CS_AARCH64(_INS_HINT): + case CS_AARCH64(_INS_PRFM): + case CS_AARCH64(_INS_PRFUM): + case CS_AARCH64(_INS_SEV): + case CS_AARCH64(_INS_SEVL): + case CS_AARCH64(_INS_WFE): + case CS_AARCH64(_INS_WFI): + case CS_AARCH64(_INS_YIELD): return NOP(); - case ARM64_INS_ADD: - case ARM64_INS_ADC: - case ARM64_INS_SUB: - case ARM64_INS_SBC: + case CS_AARCH64(_INS_ADD): + case CS_AARCH64(_INS_ADC): + case CS_AARCH64(_INS_SUB): + case CS_AARCH64(_INS_SBC): #if CS_API_MAJOR > 4 - case ARM64_INS_ADDS: - case ARM64_INS_SUBS: - case ARM64_INS_ADCS: - case ARM64_INS_SBCS: + case CS_AARCH64(_INS_ADDS): + case CS_AARCH64(_INS_SUBS): + case CS_AARCH64(_INS_ADCS): + case CS_AARCH64(_INS_SBCS): #endif return add_sub(insn); - case ARM64_INS_ADR: - case ARM64_INS_ADRP: + case CS_AARCH64(_INS_ADR): + case CS_AARCH64(_INS_ADRP): return adr(insn); - case ARM64_INS_AND: + case CS_AARCH64(_INS_AND): #if CS_API_MAJOR > 4 - case ARM64_INS_ANDS: + case CS_AARCH64(_INS_ANDS): #endif - case ARM64_INS_EOR: - case ARM64_INS_EON: - case ARM64_INS_ORN: - case ARM64_INS_ORR: + case CS_AARCH64(_INS_EOR): + case CS_AARCH64(_INS_EON): + case CS_AARCH64(_INS_ORN): + case CS_AARCH64(_INS_ORR): return bitwise(insn); - case ARM64_INS_ASR: - case ARM64_INS_LSL: - case ARM64_INS_LSR: - case ARM64_INS_ROR: + case CS_AARCH64(_INS_ASR): + case CS_AARCH64(_INS_LSL): + case CS_AARCH64(_INS_LSR): + case CS_AARCH64(_INS_ROR): return shift(insn); - 
case ARM64_INS_B: - case ARM64_INS_BR: - case ARM64_INS_RET: + case CS_AARCH64(_INS_B): + case CS_AARCH64(_INS_BR): + case CS_AARCH64(_INS_RET): #if CS_API_MAJOR > 4 - case ARM64_INS_BRAA: - case ARM64_INS_BRAAZ: - case ARM64_INS_BRAB: - case ARM64_INS_BRABZ: - case ARM64_INS_RETAA: - case ARM64_INS_RETAB: + case CS_AARCH64(_INS_BRAA): + case CS_AARCH64(_INS_BRAAZ): + case CS_AARCH64(_INS_BRAB): + case CS_AARCH64(_INS_BRABZ): + case CS_AARCH64(_INS_RETAA): + case CS_AARCH64(_INS_RETAB): #endif return branch(insn); - case ARM64_INS_BL: - case ARM64_INS_BLR: + case CS_AARCH64(_INS_BL): + case CS_AARCH64(_INS_BLR): #if CS_API_MAJOR > 4 - case ARM64_INS_BLRAA: - case ARM64_INS_BLRAAZ: - case ARM64_INS_BLRAB: - case ARM64_INS_BLRABZ: + case CS_AARCH64(_INS_BLRAA): + case CS_AARCH64(_INS_BLRAAZ): + case CS_AARCH64(_INS_BLRAB): + case CS_AARCH64(_INS_BLRABZ): #endif return bl(insn); - case ARM64_INS_BFM: - case ARM64_INS_BFI: - case ARM64_INS_BFXIL: + case CS_AARCH64(_INS_BFM): + case CS_AARCH64(_INS_BFI): + case CS_AARCH64(_INS_BFXIL): return bfm(insn); - case ARM64_INS_BIC: + case CS_AARCH64(_INS_BIC): #if CS_API_MAJOR > 4 - case ARM64_INS_BICS: + case CS_AARCH64(_INS_BICS): #endif return bic(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_CAS: - case ARM64_INS_CASA: - case ARM64_INS_CASAL: - case ARM64_INS_CASL: - case ARM64_INS_CASB: - case ARM64_INS_CASAB: - case ARM64_INS_CASALB: - case ARM64_INS_CASLB: - case ARM64_INS_CASH: - case ARM64_INS_CASAH: - case ARM64_INS_CASALH: - case ARM64_INS_CASLH: + case CS_AARCH64(_INS_CAS): + case CS_AARCH64(_INS_CASA): + case CS_AARCH64(_INS_CASAL): + case CS_AARCH64(_INS_CASL): + case CS_AARCH64(_INS_CASB): + case CS_AARCH64(_INS_CASAB): + case CS_AARCH64(_INS_CASALB): + case CS_AARCH64(_INS_CASLB): + case CS_AARCH64(_INS_CASH): + case CS_AARCH64(_INS_CASAH): + case CS_AARCH64(_INS_CASALH): + case CS_AARCH64(_INS_CASLH): return cas(insn); - case ARM64_INS_CASP: - case ARM64_INS_CASPA: - case ARM64_INS_CASPAL: - case 
ARM64_INS_CASPL: + case CS_AARCH64(_INS_CASP): + case CS_AARCH64(_INS_CASPA): + case CS_AARCH64(_INS_CASPAL): + case CS_AARCH64(_INS_CASPL): return casp(insn); #endif - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case CS_AARCH64(_INS_CBZ): + case CS_AARCH64(_INS_CBNZ): return cbz(insn); - case ARM64_INS_CMP: - case ARM64_INS_CMN: - case ARM64_INS_CCMP: - case ARM64_INS_CCMN: + case CS_AARCH64(_INS_CMP): + case CS_AARCH64(_INS_CMN): + case CS_AARCH64(_INS_CCMP): + case CS_AARCH64(_INS_CCMN): return cmp(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_CFINV: + case CS_AARCH64(_INS_CFINV): return SETG("cf", INV(VARG("cf"))); #endif - case ARM64_INS_CINC: - case ARM64_INS_CSINC: - case ARM64_INS_CINV: - case ARM64_INS_CSINV: - case ARM64_INS_CNEG: - case ARM64_INS_CSNEG: - case ARM64_INS_CSEL: + case CS_AARCH64(_INS_CINC): + case CS_AARCH64(_INS_CSINC): + case CS_AARCH64(_INS_CINV): + case CS_AARCH64(_INS_CSINV): + case CS_AARCH64(_INS_CNEG): + case CS_AARCH64(_INS_CSNEG): + case CS_AARCH64(_INS_CSEL): return csinc(insn); - case ARM64_INS_CSET: - case ARM64_INS_CSETM: + case CS_AARCH64(_INS_CSET): + case CS_AARCH64(_INS_CSETM): return cset(insn); - case ARM64_INS_CLS: + case CS_AARCH64(_INS_CLS): return cls(insn); - case ARM64_INS_CLZ: + case CS_AARCH64(_INS_CLZ): return clz(insn); - case ARM64_INS_EXTR: + case CS_AARCH64(_INS_EXTR): return extr(insn); - case ARM64_INS_HVC: + case CS_AARCH64(_INS_HVC): return hvc(insn); - case ARM64_INS_SVC: + case CS_AARCH64(_INS_SVC): return svc(insn); - case ARM64_INS_LDR: - case ARM64_INS_LDRB: - case ARM64_INS_LDRH: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDURSW: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDAR: - case ARM64_INS_LDARB: - case ARM64_INS_LDARH: - case ARM64_INS_LDAXP: - case ARM64_INS_LDXP: - case ARM64_INS_LDAXR: - case ARM64_INS_LDAXRB: - case ARM64_INS_LDAXRH: - case 
ARM64_INS_LDP: - case ARM64_INS_LDNP: - case ARM64_INS_LDPSW: - case ARM64_INS_LDTR: - case ARM64_INS_LDTRB: - case ARM64_INS_LDTRH: - case ARM64_INS_LDTRSW: - case ARM64_INS_LDTRSB: - case ARM64_INS_LDTRSH: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: + case CS_AARCH64(_INS_LDR): + case CS_AARCH64(_INS_LDRB): + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDUR): + case CS_AARCH64(_INS_LDURB): + case CS_AARCH64(_INS_LDURH): + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDURSW): + case CS_AARCH64(_INS_LDURSB): + case CS_AARCH64(_INS_LDURSH): + case CS_AARCH64(_INS_LDAR): + case CS_AARCH64(_INS_LDARB): + case CS_AARCH64(_INS_LDARH): + case CS_AARCH64(_INS_LDAXP): + case CS_AARCH64(_INS_LDXP): + case CS_AARCH64(_INS_LDAXR): + case CS_AARCH64(_INS_LDAXRB): + case CS_AARCH64(_INS_LDAXRH): + case CS_AARCH64(_INS_LDP): + case CS_AARCH64(_INS_LDNP): + case CS_AARCH64(_INS_LDPSW): + case CS_AARCH64(_INS_LDTR): + case CS_AARCH64(_INS_LDTRB): + case CS_AARCH64(_INS_LDTRH): + case CS_AARCH64(_INS_LDTRSW): + case CS_AARCH64(_INS_LDTRSB): + case CS_AARCH64(_INS_LDTRSH): + case CS_AARCH64(_INS_LDXR): + case CS_AARCH64(_INS_LDXRB): + case CS_AARCH64(_INS_LDXRH): #if CS_API_MAJOR > 4 - case ARM64_INS_LDAPR: - case ARM64_INS_LDAPRB: - case ARM64_INS_LDAPRH: - case ARM64_INS_LDAPUR: - case ARM64_INS_LDAPURB: - case ARM64_INS_LDAPURH: - case ARM64_INS_LDAPURSB: - case ARM64_INS_LDAPURSH: - case ARM64_INS_LDAPURSW: - case ARM64_INS_LDLAR: - case ARM64_INS_LDLARB: - case ARM64_INS_LDLARH: - case ARM64_INS_LDRAA: - case ARM64_INS_LDRAB: + case CS_AARCH64(_INS_LDAPR): + case CS_AARCH64(_INS_LDAPRB): + case CS_AARCH64(_INS_LDAPRH): + case CS_AARCH64(_INS_LDAPUR): + case CS_AARCH64(_INS_LDAPURB): + case CS_AARCH64(_INS_LDAPURH): + case CS_AARCH64(_INS_LDAPURSB): + case CS_AARCH64(_INS_LDAPURSH): + case CS_AARCH64(_INS_LDAPURSW): + case CS_AARCH64(_INS_LDLAR): + case CS_AARCH64(_INS_LDLARB): 
+ case CS_AARCH64(_INS_LDLARH): + case CS_AARCH64(_INS_LDRAA): + case CS_AARCH64(_INS_LDRAB): #endif return ldr(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_LDADD: - case ARM64_INS_LDADDA: - case ARM64_INS_LDADDAL: - case ARM64_INS_LDADDL: - case ARM64_INS_LDADDB: - case ARM64_INS_LDADDAB: - case ARM64_INS_LDADDALB: - case ARM64_INS_LDADDLB: - case ARM64_INS_LDADDH: - case ARM64_INS_LDADDAH: - case ARM64_INS_LDADDALH: - case ARM64_INS_LDADDLH: - case ARM64_INS_STADD: - case ARM64_INS_STADDL: - case ARM64_INS_STADDB: - case ARM64_INS_STADDLB: - case ARM64_INS_STADDH: - case ARM64_INS_STADDLH: - case ARM64_INS_LDCLRB: - case ARM64_INS_LDCLRAB: - case ARM64_INS_LDCLRALB: - case ARM64_INS_LDCLRLB: - case ARM64_INS_LDCLRH: - case ARM64_INS_LDCLRAH: - case ARM64_INS_LDCLRALH: - case ARM64_INS_LDCLRLH: - case ARM64_INS_LDCLR: - case ARM64_INS_LDCLRA: - case ARM64_INS_LDCLRAL: - case ARM64_INS_LDCLRL: - case ARM64_INS_STCLR: - case ARM64_INS_STCLRL: - case ARM64_INS_STCLRB: - case ARM64_INS_STCLRLB: - case ARM64_INS_STCLRH: - case ARM64_INS_STCLRLH: - case ARM64_INS_LDEORB: - case ARM64_INS_LDEORAB: - case ARM64_INS_LDEORALB: - case ARM64_INS_LDEORLB: - case ARM64_INS_LDEORH: - case ARM64_INS_LDEORAH: - case ARM64_INS_LDEORALH: - case ARM64_INS_LDEORLH: - case ARM64_INS_LDEOR: - case ARM64_INS_LDEORA: - case ARM64_INS_LDEORAL: - case ARM64_INS_LDEORL: - case ARM64_INS_STEOR: - case ARM64_INS_STEORL: - case ARM64_INS_STEORB: - case ARM64_INS_STEORLB: - case ARM64_INS_STEORH: - case ARM64_INS_STEORLH: - case ARM64_INS_LDSETB: - case ARM64_INS_LDSETAB: - case ARM64_INS_LDSETALB: - case ARM64_INS_LDSETLB: - case ARM64_INS_LDSETH: - case ARM64_INS_LDSETAH: - case ARM64_INS_LDSETALH: - case ARM64_INS_LDSETLH: - case ARM64_INS_LDSET: - case ARM64_INS_LDSETA: - case ARM64_INS_LDSETAL: - case ARM64_INS_LDSETL: - case ARM64_INS_STSET: - case ARM64_INS_STSETL: - case ARM64_INS_STSETB: - case ARM64_INS_STSETLB: - case ARM64_INS_STSETH: - case ARM64_INS_STSETLH: - case 
ARM64_INS_LDSMAXB: - case ARM64_INS_LDSMAXAB: - case ARM64_INS_LDSMAXALB: - case ARM64_INS_LDSMAXLB: - case ARM64_INS_LDSMAXH: - case ARM64_INS_LDSMAXAH: - case ARM64_INS_LDSMAXALH: - case ARM64_INS_LDSMAXLH: - case ARM64_INS_LDSMAX: - case ARM64_INS_LDSMAXA: - case ARM64_INS_LDSMAXAL: - case ARM64_INS_LDSMAXL: - case ARM64_INS_STSMAX: - case ARM64_INS_STSMAXL: - case ARM64_INS_STSMAXB: - case ARM64_INS_STSMAXLB: - case ARM64_INS_STSMAXH: - case ARM64_INS_STSMAXLH: - case ARM64_INS_LDSMINB: - case ARM64_INS_LDSMINAB: - case ARM64_INS_LDSMINALB: - case ARM64_INS_LDSMINLB: - case ARM64_INS_LDSMINH: - case ARM64_INS_LDSMINAH: - case ARM64_INS_LDSMINALH: - case ARM64_INS_LDSMINLH: - case ARM64_INS_LDSMIN: - case ARM64_INS_LDSMINA: - case ARM64_INS_LDSMINAL: - case ARM64_INS_LDSMINL: - case ARM64_INS_STSMIN: - case ARM64_INS_STSMINL: - case ARM64_INS_STSMINB: - case ARM64_INS_STSMINLB: - case ARM64_INS_STSMINH: - case ARM64_INS_STSMINLH: - case ARM64_INS_LDUMAXB: - case ARM64_INS_LDUMAXAB: - case ARM64_INS_LDUMAXALB: - case ARM64_INS_LDUMAXLB: - case ARM64_INS_LDUMAXH: - case ARM64_INS_LDUMAXAH: - case ARM64_INS_LDUMAXALH: - case ARM64_INS_LDUMAXLH: - case ARM64_INS_LDUMAX: - case ARM64_INS_LDUMAXA: - case ARM64_INS_LDUMAXAL: - case ARM64_INS_LDUMAXL: - case ARM64_INS_STUMAX: - case ARM64_INS_STUMAXL: - case ARM64_INS_STUMAXB: - case ARM64_INS_STUMAXLB: - case ARM64_INS_STUMAXH: - case ARM64_INS_STUMAXLH: - case ARM64_INS_LDUMINB: - case ARM64_INS_LDUMINAB: - case ARM64_INS_LDUMINALB: - case ARM64_INS_LDUMINLB: - case ARM64_INS_LDUMINH: - case ARM64_INS_LDUMINAH: - case ARM64_INS_LDUMINALH: - case ARM64_INS_LDUMINLH: - case ARM64_INS_LDUMIN: - case ARM64_INS_LDUMINA: - case ARM64_INS_LDUMINAL: - case ARM64_INS_LDUMINL: - case ARM64_INS_STUMIN: - case ARM64_INS_STUMINL: - case ARM64_INS_STUMINB: - case ARM64_INS_STUMINLB: - case ARM64_INS_STUMINH: - case ARM64_INS_STUMINLH: + case CS_AARCH64(_INS_LDADD): + case CS_AARCH64(_INS_LDADDA): + case CS_AARCH64(_INS_LDADDAL): + 
case CS_AARCH64(_INS_LDADDL): + case CS_AARCH64(_INS_LDADDB): + case CS_AARCH64(_INS_LDADDAB): + case CS_AARCH64(_INS_LDADDALB): + case CS_AARCH64(_INS_LDADDLB): + case CS_AARCH64(_INS_LDADDH): + case CS_AARCH64(_INS_LDADDAH): + case CS_AARCH64(_INS_LDADDALH): + case CS_AARCH64(_INS_LDADDLH): + case CS_AARCH64(_INS_STADD): + case CS_AARCH64(_INS_STADDL): + case CS_AARCH64(_INS_STADDB): + case CS_AARCH64(_INS_STADDLB): + case CS_AARCH64(_INS_STADDH): + case CS_AARCH64(_INS_STADDLH): + case CS_AARCH64(_INS_LDCLRB): + case CS_AARCH64(_INS_LDCLRAB): + case CS_AARCH64(_INS_LDCLRALB): + case CS_AARCH64(_INS_LDCLRLB): + case CS_AARCH64(_INS_LDCLRH): + case CS_AARCH64(_INS_LDCLRAH): + case CS_AARCH64(_INS_LDCLRALH): + case CS_AARCH64(_INS_LDCLRLH): + case CS_AARCH64(_INS_LDCLR): + case CS_AARCH64(_INS_LDCLRA): + case CS_AARCH64(_INS_LDCLRAL): + case CS_AARCH64(_INS_LDCLRL): + case CS_AARCH64(_INS_STCLR): + case CS_AARCH64(_INS_STCLRL): + case CS_AARCH64(_INS_STCLRB): + case CS_AARCH64(_INS_STCLRLB): + case CS_AARCH64(_INS_STCLRH): + case CS_AARCH64(_INS_STCLRLH): + case CS_AARCH64(_INS_LDEORB): + case CS_AARCH64(_INS_LDEORAB): + case CS_AARCH64(_INS_LDEORALB): + case CS_AARCH64(_INS_LDEORLB): + case CS_AARCH64(_INS_LDEORH): + case CS_AARCH64(_INS_LDEORAH): + case CS_AARCH64(_INS_LDEORALH): + case CS_AARCH64(_INS_LDEORLH): + case CS_AARCH64(_INS_LDEOR): + case CS_AARCH64(_INS_LDEORA): + case CS_AARCH64(_INS_LDEORAL): + case CS_AARCH64(_INS_LDEORL): + case CS_AARCH64(_INS_STEOR): + case CS_AARCH64(_INS_STEORL): + case CS_AARCH64(_INS_STEORB): + case CS_AARCH64(_INS_STEORLB): + case CS_AARCH64(_INS_STEORH): + case CS_AARCH64(_INS_STEORLH): + case CS_AARCH64(_INS_LDSETB): + case CS_AARCH64(_INS_LDSETAB): + case CS_AARCH64(_INS_LDSETALB): + case CS_AARCH64(_INS_LDSETLB): + case CS_AARCH64(_INS_LDSETH): + case CS_AARCH64(_INS_LDSETAH): + case CS_AARCH64(_INS_LDSETALH): + case CS_AARCH64(_INS_LDSETLH): + case CS_AARCH64(_INS_LDSET): + case CS_AARCH64(_INS_LDSETA): + case 
CS_AARCH64(_INS_LDSETAL): + case CS_AARCH64(_INS_LDSETL): + case CS_AARCH64(_INS_STSET): + case CS_AARCH64(_INS_STSETL): + case CS_AARCH64(_INS_STSETB): + case CS_AARCH64(_INS_STSETLB): + case CS_AARCH64(_INS_STSETH): + case CS_AARCH64(_INS_STSETLH): + case CS_AARCH64(_INS_LDSMAXB): + case CS_AARCH64(_INS_LDSMAXAB): + case CS_AARCH64(_INS_LDSMAXALB): + case CS_AARCH64(_INS_LDSMAXLB): + case CS_AARCH64(_INS_LDSMAXH): + case CS_AARCH64(_INS_LDSMAXAH): + case CS_AARCH64(_INS_LDSMAXALH): + case CS_AARCH64(_INS_LDSMAXLH): + case CS_AARCH64(_INS_LDSMAX): + case CS_AARCH64(_INS_LDSMAXA): + case CS_AARCH64(_INS_LDSMAXAL): + case CS_AARCH64(_INS_LDSMAXL): + case CS_AARCH64(_INS_STSMAX): + case CS_AARCH64(_INS_STSMAXL): + case CS_AARCH64(_INS_STSMAXB): + case CS_AARCH64(_INS_STSMAXLB): + case CS_AARCH64(_INS_STSMAXH): + case CS_AARCH64(_INS_STSMAXLH): + case CS_AARCH64(_INS_LDSMINB): + case CS_AARCH64(_INS_LDSMINAB): + case CS_AARCH64(_INS_LDSMINALB): + case CS_AARCH64(_INS_LDSMINLB): + case CS_AARCH64(_INS_LDSMINH): + case CS_AARCH64(_INS_LDSMINAH): + case CS_AARCH64(_INS_LDSMINALH): + case CS_AARCH64(_INS_LDSMINLH): + case CS_AARCH64(_INS_LDSMIN): + case CS_AARCH64(_INS_LDSMINA): + case CS_AARCH64(_INS_LDSMINAL): + case CS_AARCH64(_INS_LDSMINL): + case CS_AARCH64(_INS_STSMIN): + case CS_AARCH64(_INS_STSMINL): + case CS_AARCH64(_INS_STSMINB): + case CS_AARCH64(_INS_STSMINLB): + case CS_AARCH64(_INS_STSMINH): + case CS_AARCH64(_INS_STSMINLH): + case CS_AARCH64(_INS_LDUMAXB): + case CS_AARCH64(_INS_LDUMAXAB): + case CS_AARCH64(_INS_LDUMAXALB): + case CS_AARCH64(_INS_LDUMAXLB): + case CS_AARCH64(_INS_LDUMAXH): + case CS_AARCH64(_INS_LDUMAXAH): + case CS_AARCH64(_INS_LDUMAXALH): + case CS_AARCH64(_INS_LDUMAXLH): + case CS_AARCH64(_INS_LDUMAX): + case CS_AARCH64(_INS_LDUMAXA): + case CS_AARCH64(_INS_LDUMAXAL): + case CS_AARCH64(_INS_LDUMAXL): + case CS_AARCH64(_INS_STUMAX): + case CS_AARCH64(_INS_STUMAXL): + case CS_AARCH64(_INS_STUMAXB): + case CS_AARCH64(_INS_STUMAXLB): + case 
CS_AARCH64(_INS_STUMAXH): + case CS_AARCH64(_INS_STUMAXLH): + case CS_AARCH64(_INS_LDUMINB): + case CS_AARCH64(_INS_LDUMINAB): + case CS_AARCH64(_INS_LDUMINALB): + case CS_AARCH64(_INS_LDUMINLB): + case CS_AARCH64(_INS_LDUMINH): + case CS_AARCH64(_INS_LDUMINAH): + case CS_AARCH64(_INS_LDUMINALH): + case CS_AARCH64(_INS_LDUMINLH): + case CS_AARCH64(_INS_LDUMIN): + case CS_AARCH64(_INS_LDUMINA): + case CS_AARCH64(_INS_LDUMINAL): + case CS_AARCH64(_INS_LDUMINL): + case CS_AARCH64(_INS_STUMIN): + case CS_AARCH64(_INS_STUMINL): + case CS_AARCH64(_INS_STUMINB): + case CS_AARCH64(_INS_STUMINLB): + case CS_AARCH64(_INS_STUMINH): + case CS_AARCH64(_INS_STUMINLH): return ldadd(insn); #endif - case ARM64_INS_MADD: - case ARM64_INS_MSUB: + case CS_AARCH64(_INS_MADD): + case CS_AARCH64(_INS_MSUB): return madd(insn); - case ARM64_INS_MUL: - case ARM64_INS_MNEG: + case CS_AARCH64(_INS_MUL): + case CS_AARCH64(_INS_MNEG): return mul(insn); - case ARM64_INS_MOV: - case ARM64_INS_MOVZ: + case CS_AARCH64(_INS_MOV): + case CS_AARCH64(_INS_MOVZ): return mov(insn); - case ARM64_INS_MOVK: + case CS_AARCH64(_INS_MOVK): return movk(insn); - case ARM64_INS_MOVN: + case CS_AARCH64(_INS_MOVN): return movn(insn); - case ARM64_INS_MSR: + case CS_AARCH64(_INS_MSR): return msr(insn); - case ARM64_INS_MRS: + case CS_AARCH64(_INS_MRS): return mrs(insn); - case ARM64_INS_MVN: - case ARM64_INS_NEG: - case ARM64_INS_NGC: + case CS_AARCH64(_INS_MVN): + case CS_AARCH64(_INS_NEG): + case CS_AARCH64(_INS_NGC): #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: - case ARM64_INS_NGCS: + case CS_AARCH64(_INS_NEGS): + case CS_AARCH64(_INS_NGCS): #endif return mvn(insn); - case ARM64_INS_RBIT: + case CS_AARCH64(_INS_RBIT): return rbit(insn); - case ARM64_INS_REV: - case ARM64_INS_REV32: - case ARM64_INS_REV16: + case CS_AARCH64(_INS_REV): + case CS_AARCH64(_INS_REV32): + case CS_AARCH64(_INS_REV16): return rev(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_RMIF: + case CS_AARCH64(_INS_RMIF): return rmif(insn); #endif - 
case ARM64_INS_SBFIZ: - case ARM64_INS_SBFX: - case ARM64_INS_UBFIZ: - case ARM64_INS_UBFX: + case CS_AARCH64(_INS_SBFIZ): + case CS_AARCH64(_INS_SBFX): + case CS_AARCH64(_INS_UBFIZ): + case CS_AARCH64(_INS_UBFX): return sbfx(insn); - case ARM64_INS_SDIV: + case CS_AARCH64(_INS_SDIV): return sdiv(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_SETF8: - case ARM64_INS_SETF16: + case CS_AARCH64(_INS_SETF8): + case CS_AARCH64(_INS_SETF16): return setf(insn); #endif - case ARM64_INS_SMADDL: - case ARM64_INS_SMSUBL: - case ARM64_INS_UMADDL: - case ARM64_INS_UMSUBL: + case CS_AARCH64(_INS_SMADDL): + case CS_AARCH64(_INS_SMSUBL): + case CS_AARCH64(_INS_UMADDL): + case CS_AARCH64(_INS_UMSUBL): return smaddl(insn); - case ARM64_INS_SMULL: - case ARM64_INS_SMNEGL: - case ARM64_INS_UMULL: - case ARM64_INS_UMNEGL: + case CS_AARCH64(_INS_SMULL): + case CS_AARCH64(_INS_SMNEGL): + case CS_AARCH64(_INS_UMULL): + case CS_AARCH64(_INS_UMNEGL): return smull(insn); - case ARM64_INS_SMULH: - case ARM64_INS_UMULH: + case CS_AARCH64(_INS_SMULH): + case CS_AARCH64(_INS_UMULH): return smulh(insn); - case ARM64_INS_STR: - case ARM64_INS_STUR: - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STRH: - case ARM64_INS_STURH: - case ARM64_INS_STLR: - case ARM64_INS_STLRB: - case ARM64_INS_STLRH: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: - case ARM64_INS_STXP: - case ARM64_INS_STLXR: - case ARM64_INS_STLXRB: - case ARM64_INS_STLXRH: - case ARM64_INS_STLXP: - case ARM64_INS_STTR: - case ARM64_INS_STTRB: - case ARM64_INS_STTRH: + case CS_AARCH64(_INS_STR): + case CS_AARCH64(_INS_STUR): + case CS_AARCH64(_INS_STRB): + case CS_AARCH64(_INS_STURB): + case CS_AARCH64(_INS_STRH): + case CS_AARCH64(_INS_STURH): + case CS_AARCH64(_INS_STLR): + case CS_AARCH64(_INS_STLRB): + case CS_AARCH64(_INS_STLRH): + case CS_AARCH64(_INS_STP): + case CS_AARCH64(_INS_STNP): + case CS_AARCH64(_INS_STXR): + case CS_AARCH64(_INS_STXRB): + 
case CS_AARCH64(_INS_STXRH): + case CS_AARCH64(_INS_STXP): + case CS_AARCH64(_INS_STLXR): + case CS_AARCH64(_INS_STLXRB): + case CS_AARCH64(_INS_STLXRH): + case CS_AARCH64(_INS_STLXP): + case CS_AARCH64(_INS_STTR): + case CS_AARCH64(_INS_STTRB): + case CS_AARCH64(_INS_STTRH): #if CS_API_MAJOR > 4 - case ARM64_INS_STLLR: - case ARM64_INS_STLLRB: - case ARM64_INS_STLLRH: - case ARM64_INS_STLUR: - case ARM64_INS_STLURB: - case ARM64_INS_STLURH: + case CS_AARCH64(_INS_STLLR): + case CS_AARCH64(_INS_STLLRB): + case CS_AARCH64(_INS_STLLRH): + case CS_AARCH64(_INS_STLUR): + case CS_AARCH64(_INS_STLURB): + case CS_AARCH64(_INS_STLURH): #endif return str(insn); #if CS_API_MAJOR > 4 - case ARM64_INS_SWP: - case ARM64_INS_SWPA: - case ARM64_INS_SWPAL: - case ARM64_INS_SWPL: - case ARM64_INS_SWPB: - case ARM64_INS_SWPAB: - case ARM64_INS_SWPALB: - case ARM64_INS_SWPLB: - case ARM64_INS_SWPH: - case ARM64_INS_SWPAH: - case ARM64_INS_SWPALH: - case ARM64_INS_SWPLH: + case CS_AARCH64(_INS_SWP): + case CS_AARCH64(_INS_SWPA): + case CS_AARCH64(_INS_SWPAL): + case CS_AARCH64(_INS_SWPL): + case CS_AARCH64(_INS_SWPB): + case CS_AARCH64(_INS_SWPAB): + case CS_AARCH64(_INS_SWPALB): + case CS_AARCH64(_INS_SWPLB): + case CS_AARCH64(_INS_SWPH): + case CS_AARCH64(_INS_SWPAH): + case CS_AARCH64(_INS_SWPALH): + case CS_AARCH64(_INS_SWPLH): return swp(insn); #endif - case ARM64_INS_SXTB: - case ARM64_INS_SXTH: - case ARM64_INS_SXTW: - case ARM64_INS_UXTB: - case ARM64_INS_UXTH: + case CS_AARCH64(_INS_SXTB): + case CS_AARCH64(_INS_SXTH): + case CS_AARCH64(_INS_SXTW): + case CS_AARCH64(_INS_UXTB): + case CS_AARCH64(_INS_UXTH): return sxt(insn); - case ARM64_INS_TBNZ: - case ARM64_INS_TBZ: + case CS_AARCH64(_INS_TBNZ): + case CS_AARCH64(_INS_TBZ): return tbz(insn); - case ARM64_INS_TST: + case CS_AARCH64(_INS_TST): return tst(insn); - case ARM64_INS_UDIV: + case CS_AARCH64(_INS_UDIV): return udiv(insn); default: break; diff --git a/librz/analysis/p/analysis_arm_cs.c 
b/librz/analysis/p/analysis_arm_cs.c index 5ee9180dc42..da0c4fb2d40 100644 --- a/librz/analysis/p/analysis_arm_cs.c +++ b/librz/analysis/p/analysis_arm_cs.c @@ -266,94 +266,94 @@ static void opex(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_free(pj); } -static const char *cc_name64(arm64_cc cc) { +static const char *cc_name64(CS_aarch64_cc() cc) { switch (cc) { - case ARM64_CC_EQ: // Equal + case CS_AARCH64CC(_EQ): // Equal return "eq"; - case ARM64_CC_NE: // Not equal: Not equal, or unordered + case CS_AARCH64CC(_NE): // Not equal: Not equal, or unordered return "ne"; - case ARM64_CC_HS: // Unsigned higher or same: >, ==, or unordered + case CS_AARCH64CC(_HS): // Unsigned higher or same: >, ==, or unordered return "hs"; - case ARM64_CC_LO: // Unsigned lower or same: Less than + case CS_AARCH64CC(_LO): // Unsigned lower or same: Less than return "lo"; - case ARM64_CC_MI: // Minus, negative: Less than + case CS_AARCH64CC(_MI): // Minus, negative: Less than return "mi"; - case ARM64_CC_PL: // Plus, positive or zero: >, ==, or unordered + case CS_AARCH64CC(_PL): // Plus, positive or zero: >, ==, or unordered return "pl"; - case ARM64_CC_VS: // Overflow: Unordered + case CS_AARCH64CC(_VS): // Overflow: Unordered return "vs"; - case ARM64_CC_VC: // No overflow: Ordered + case CS_AARCH64CC(_VC): // No overflow: Ordered return "vc"; - case ARM64_CC_HI: // Unsigned higher: Greater than, or unordered + case CS_AARCH64CC(_HI): // Unsigned higher: Greater than, or unordered return "hi"; - case ARM64_CC_LS: // Unsigned lower or same: Less than or equal + case CS_AARCH64CC(_LS): // Unsigned lower or same: Less than or equal return "ls"; - case ARM64_CC_GE: // Greater than or equal: Greater than or equal + case CS_AARCH64CC(_GE): // Greater than or equal: Greater than or equal return "ge"; - case ARM64_CC_LT: // Less than: Less than, or unordered + case CS_AARCH64CC(_LT): // Less than: Less than, or unordered return "lt"; - case ARM64_CC_GT: // Signed greater than: Greater 
than + case CS_AARCH64CC(_GT): // Signed greater than: Greater than return "gt"; - case ARM64_CC_LE: // Signed less than or equal: <, ==, or unordered + case CS_AARCH64CC(_LE): // Signed less than or equal: <, ==, or unordered return "le"; default: return ""; } } -static const char *extender_name(arm64_extender extender) { +static const char *extender_name(CS_aarch64_extender() extender) { switch (extender) { - case ARM64_EXT_UXTB: + case CS_AARCH64(_EXT_UXTB): return "uxtb"; - case ARM64_EXT_UXTH: + case CS_AARCH64(_EXT_UXTH): return "uxth"; - case ARM64_EXT_UXTW: + case CS_AARCH64(_EXT_UXTW): return "uxtw"; - case ARM64_EXT_UXTX: + case CS_AARCH64(_EXT_UXTX): return "uxtx"; - case ARM64_EXT_SXTB: + case CS_AARCH64(_EXT_SXTB): return "sxtb"; - case ARM64_EXT_SXTH: + case CS_AARCH64(_EXT_SXTH): return "sxth"; - case ARM64_EXT_SXTW: + case CS_AARCH64(_EXT_SXTW): return "sxtw"; - case ARM64_EXT_SXTX: + case CS_AARCH64(_EXT_SXTX): return "sxtx"; default: return ""; } } -static const char *vas_name(arm64_vas vas) { +static const char *vas_name(CS_aarch64_vas() vas) { switch (vas) { - case ARM64_VAS_8B: + case CS_AARCH64_VL_(8B): return "8b"; - case ARM64_VAS_16B: + case CS_AARCH64_VL_(16B): return "16b"; - case ARM64_VAS_4H: + case CS_AARCH64_VL_(4H): return "4h"; - case ARM64_VAS_8H: + case CS_AARCH64_VL_(8H): return "8h"; - case ARM64_VAS_2S: + case CS_AARCH64_VL_(2S): return "2s"; - case ARM64_VAS_4S: + case CS_AARCH64_VL_(4S): return "4s"; - case ARM64_VAS_2D: + case CS_AARCH64_VL_(2D): return "2d"; - case ARM64_VAS_1D: + case CS_AARCH64_VL_(1D): return "1d"; - case ARM64_VAS_1Q: + case CS_AARCH64_VL_(1Q): return "1q"; #if CS_API_MAJOR > 4 - case ARM64_VAS_1B: + case CS_AARCH64_VL_(1B): return "8b"; - case ARM64_VAS_4B: + case CS_AARCH64_VL_(4B): return "8b"; - case ARM64_VAS_2H: + case CS_AARCH64_VL_(2H): return "2h"; - case ARM64_VAS_1H: + case CS_AARCH64_VL_(1H): return "1h"; - case ARM64_VAS_1S: + case CS_AARCH64_VL_(1S): return "1s"; #endif default: @@ -386,46 
+386,46 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { } pj_o(pj); pj_ka(pj, "operands"); - cs_arm64 *x = &insn->detail->arm64; + CS_cs_aarch64() *x = &insn->detail->CS_aarch64(); for (i = 0; i < x->op_count; i++) { - cs_arm64_op *op = x->operands + i; + CS_aarch64_op() *op = x->operands + i; pj_o(pj); switch (op->type) { - case ARM64_OP_REG: + case CS_AARCH64(_OP_REG): pj_ks(pj, "type", "reg"); pj_ks(pj, "value", cs_reg_name(handle, op->reg)); break; - case ARM64_OP_REG_MRS: + case CS_AARCH64(_OP_REG_MRS): pj_ks(pj, "type", "reg_mrs"); // TODO value break; - case ARM64_OP_REG_MSR: + case CS_AARCH64(_OP_REG_MSR): pj_ks(pj, "type", "reg_msr"); // TODO value break; - case ARM64_OP_IMM: + case CS_AARCH64(_OP_IMM): pj_ks(pj, "type", "imm"); pj_kN(pj, "value", op->imm); break; - case ARM64_OP_MEM: + case CS_AARCH64(_OP_MEM): pj_ks(pj, "type", "mem"); - if (op->mem.base != ARM64_REG_INVALID) { + if (op->mem.base != CS_AARCH64(_REG_INVALID)) { pj_ks(pj, "base", cs_reg_name(handle, op->mem.base)); } - if (op->mem.index != ARM64_REG_INVALID) { + if (op->mem.index != CS_AARCH64(_REG_INVALID)) { pj_ks(pj, "index", cs_reg_name(handle, op->mem.index)); } pj_ki(pj, "disp", op->mem.disp); break; - case ARM64_OP_FP: + case CS_AARCH64(_OP_FP): pj_ks(pj, "type", "fp"); pj_kd(pj, "value", op->fp); break; - case ARM64_OP_CIMM: + case CS_AARCH64(_OP_CIMM): pj_ks(pj, "type", "cimm"); pj_kN(pj, "value", op->imm); break; - case ARM64_OP_PSTATE: + case CS_AARCH64(_OP_PSTATE): pj_ks(pj, "type", "pstate"); switch (op->pstate) { case ARM64_PSTATE_SPSEL: @@ -441,15 +441,15 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_ki(pj, "value", op->pstate); } break; - case ARM64_OP_SYS: + case CS_AARCH64(_OP_SYS): pj_ks(pj, "type", "sys"); pj_kn(pj, "value", (ut64)op->sys); break; - case ARM64_OP_PREFETCH: + case CS_AARCH64(_OP_PREFETCH): pj_ks(pj, "type", "prefetch"); pj_ki(pj, "value", op->prefetch - 1); break; - case ARM64_OP_BARRIER: + case 
CS_AARCH64(_OP_BARRIER): pj_ks(pj, "type", "prefetch"); pj_ki(pj, "value", op->barrier - 1); break; @@ -457,22 +457,22 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_ks(pj, "type", "invalid"); break; } - if (op->shift.type != ARM64_SFT_INVALID) { + if (op->shift.type != CS_AARCH64(_SFT_INVALID)) { pj_ko(pj, "shift"); switch (op->shift.type) { - case ARM64_SFT_LSL: + case CS_AARCH64(_SFT_LSL): pj_ks(pj, "type", "lsl"); break; - case ARM64_SFT_MSL: + case CS_AARCH64(_SFT_MSL): pj_ks(pj, "type", "msl"); break; - case ARM64_SFT_LSR: + case CS_AARCH64(_SFT_LSR): pj_ks(pj, "type", "lsr"); break; - case ARM64_SFT_ASR: + case CS_AARCH64(_SFT_ASR): pj_ks(pj, "type", "asr"); break; - case ARM64_SFT_ROR: + case CS_AARCH64(_SFT_ROR): pj_ks(pj, "type", "ror"); break; default: @@ -481,13 +481,13 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { pj_kn(pj, "value", (ut64)op->shift.value); pj_end(pj); } - if (op->ext != ARM64_EXT_INVALID) { + if (op->ext != CS_AARCH64(_EXT_INVALID)) { pj_ks(pj, "ext", extender_name(op->ext)); } if (op->vector_index != -1) { pj_ki(pj, "vector_index", op->vector_index); } - if (op->vas != ARM64_VAS_INVALID) { + if (op->vas != CS_AARCH64_VL_(INVALID)) { pj_ks(pj, "vas", vas_name(op->vas)); } #if CS_API_MAJOR == 4 @@ -504,7 +504,7 @@ static void opex64(RzStrBuf *buf, csh handle, cs_insn *insn) { if (x->writeback) { pj_kb(pj, "writeback", true); } - if (x->cc != ARM64_CC_INVALID && x->cc != ARM64_CC_AL && x->cc != ARM64_CC_NV) { + if (x->cc != CS_AARCH64CC(_INVALID) && x->cc != CS_AARCH64CC(_AL) && x->cc != CS_AARCH64CC(_NV)) { pj_ks(pj, "cc", cc_name64(x->cc)); } pj_end(pj); @@ -543,20 +543,20 @@ static int cond_cs2r2_64(int cc) { cc = RZ_TYPE_COND_AL; } else { switch (cc) { - case ARM64_CC_EQ: cc = RZ_TYPE_COND_EQ; break; - case ARM64_CC_NE: cc = RZ_TYPE_COND_NE; break; - case ARM64_CC_HS: cc = RZ_TYPE_COND_HS; break; - case ARM64_CC_LO: cc = RZ_TYPE_COND_LO; break; - case ARM64_CC_MI: cc = RZ_TYPE_COND_MI; break; 
- case ARM64_CC_PL: cc = RZ_TYPE_COND_PL; break; - case ARM64_CC_VS: cc = RZ_TYPE_COND_VS; break; - case ARM64_CC_VC: cc = RZ_TYPE_COND_VC; break; - case ARM64_CC_HI: cc = RZ_TYPE_COND_HI; break; - case ARM64_CC_LS: cc = RZ_TYPE_COND_LS; break; - case ARM64_CC_GE: cc = RZ_TYPE_COND_GE; break; - case ARM64_CC_LT: cc = RZ_TYPE_COND_LT; break; - case ARM64_CC_GT: cc = RZ_TYPE_COND_GT; break; - case ARM64_CC_LE: cc = RZ_TYPE_COND_LE; break; + case CS_AARCH64CC(_EQ): cc = RZ_TYPE_COND_EQ; break; + case CS_AARCH64CC(_NE): cc = RZ_TYPE_COND_NE; break; + case CS_AARCH64CC(_HS): cc = RZ_TYPE_COND_HS; break; + case CS_AARCH64CC(_LO): cc = RZ_TYPE_COND_LO; break; + case CS_AARCH64CC(_MI): cc = RZ_TYPE_COND_MI; break; + case CS_AARCH64CC(_PL): cc = RZ_TYPE_COND_PL; break; + case CS_AARCH64CC(_VS): cc = RZ_TYPE_COND_VS; break; + case CS_AARCH64CC(_VC): cc = RZ_TYPE_COND_VC; break; + case CS_AARCH64CC(_HI): cc = RZ_TYPE_COND_HI; break; + case CS_AARCH64CC(_LS): cc = RZ_TYPE_COND_LS; break; + case CS_AARCH64CC(_GE): cc = RZ_TYPE_COND_GE; break; + case CS_AARCH64CC(_LT): cc = RZ_TYPE_COND_LT; break; + case CS_AARCH64CC(_GT): cc = RZ_TYPE_COND_GT; break; + case CS_AARCH64CC(_LE): cc = RZ_TYPE_COND_LE; break; } } return cc; @@ -583,17 +583,17 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { op->family = RZ_ANALYSIS_OP_FAMILY_CPU; } - op->cond = cond_cs2r2_64(insn->detail->arm64.cc); + op->cond = cond_cs2r2_64(insn->detail->CS_aarch64().cc); if (op->cond == RZ_TYPE_COND_NV) { op->type = RZ_ANALYSIS_OP_TYPE_NOP; return; } - switch (insn->detail->arm64.cc) { - case ARM64_CC_GE: - case ARM64_CC_GT: - case ARM64_CC_LE: - case ARM64_CC_LT: + switch (insn->detail->CS_aarch64().cc) { + case CS_AARCH64CC(_GE): + case CS_AARCH64CC(_GT): + case CS_AARCH64CC(_LE): + case CS_AARCH64CC(_LT): op->sign = true; break; default: @@ -602,62 +602,62 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { switch (insn->id) { #if CS_API_MAJOR > 4 - case 
ARM64_INS_PACDA: - case ARM64_INS_PACDB: - case ARM64_INS_PACDZA: - case ARM64_INS_PACDZB: - case ARM64_INS_PACGA: - case ARM64_INS_PACIA: - case ARM64_INS_PACIA1716: - case ARM64_INS_PACIASP: - case ARM64_INS_PACIAZ: - case ARM64_INS_PACIB: - case ARM64_INS_PACIB1716: - case ARM64_INS_PACIBSP: - case ARM64_INS_PACIBZ: - case ARM64_INS_PACIZA: - case ARM64_INS_PACIZB: - case ARM64_INS_AUTDA: - case ARM64_INS_AUTDB: - case ARM64_INS_AUTDZA: - case ARM64_INS_AUTDZB: - case ARM64_INS_AUTIA: - case ARM64_INS_AUTIA1716: - case ARM64_INS_AUTIASP: - case ARM64_INS_AUTIAZ: - case ARM64_INS_AUTIB: - case ARM64_INS_AUTIB1716: - case ARM64_INS_AUTIBSP: - case ARM64_INS_AUTIBZ: - case ARM64_INS_AUTIZA: - case ARM64_INS_AUTIZB: - case ARM64_INS_XPACD: - case ARM64_INS_XPACI: - case ARM64_INS_XPACLRI: + case CS_AARCH64(_INS_PACDA): + case CS_AARCH64(_INS_PACDB): + case CS_AARCH64(_INS_PACDZA): + case CS_AARCH64(_INS_PACDZB): + case CS_AARCH64(_INS_PACGA): + case CS_AARCH64(_INS_PACIA): + case CS_AARCH64(_INS_PACIA1716): + case CS_AARCH64(_INS_PACIASP): + case CS_AARCH64(_INS_PACIAZ): + case CS_AARCH64(_INS_PACIB): + case CS_AARCH64(_INS_PACIB1716): + case CS_AARCH64(_INS_PACIBSP): + case CS_AARCH64(_INS_PACIBZ): + case CS_AARCH64(_INS_PACIZA): + case CS_AARCH64(_INS_PACIZB): + case CS_AARCH64(_INS_AUTDA): + case CS_AARCH64(_INS_AUTDB): + case CS_AARCH64(_INS_AUTDZA): + case CS_AARCH64(_INS_AUTDZB): + case CS_AARCH64(_INS_AUTIA): + case CS_AARCH64(_INS_AUTIA1716): + case CS_AARCH64(_INS_AUTIASP): + case CS_AARCH64(_INS_AUTIAZ): + case CS_AARCH64(_INS_AUTIB): + case CS_AARCH64(_INS_AUTIB1716): + case CS_AARCH64(_INS_AUTIBSP): + case CS_AARCH64(_INS_AUTIBZ): + case CS_AARCH64(_INS_AUTIZA): + case CS_AARCH64(_INS_AUTIZB): + case CS_AARCH64(_INS_XPACD): + case CS_AARCH64(_INS_XPACI): + case CS_AARCH64(_INS_XPACLRI): op->type = RZ_ANALYSIS_OP_TYPE_CMP; op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; break; #endif - case ARM64_INS_SVC: + case CS_AARCH64(_INS_SVC): op->type = 
RZ_ANALYSIS_OP_TYPE_SWI; op->val = IMM64(0); break; - case ARM64_INS_ADRP: - case ARM64_INS_ADR: + case CS_AARCH64(_INS_ADRP): + case CS_AARCH64(_INS_ADR): op->type = RZ_ANALYSIS_OP_TYPE_LEA; op->ptr = IMM64(1); break; - case ARM64_INS_NOP: + case CS_AARCH64(_INS_NOP): op->type = RZ_ANALYSIS_OP_TYPE_NOP; op->cycles = 1; break; - case ARM64_INS_SUB: - if (ISREG64(0) && REGID64(0) == ARM64_REG_SP) { + case CS_AARCH64(_INS_SUB): + if (ISREG64(0) && REGID64(0) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; if (ISIMM64(1)) { // sub sp, 0x54 op->stackptr = IMM(1); - } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == ARM64_REG_SP) { + } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == CS_AARCH64(_REG_SP)) { // sub sp, sp, 0x10 op->stackptr = IMM64(2); } @@ -665,31 +665,31 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_MSUB: + case CS_AARCH64(_INS_MSUB): op->type = RZ_ANALYSIS_OP_TYPE_SUB; break; - case ARM64_INS_FDIV: - case ARM64_INS_SDIV: - case ARM64_INS_UDIV: + case CS_AARCH64(_INS_FDIV): + case CS_AARCH64(_INS_SDIV): + case CS_AARCH64(_INS_UDIV): op->cycles = 4; op->type = RZ_ANALYSIS_OP_TYPE_DIV; break; - case ARM64_INS_MUL: - case ARM64_INS_SMULL: - case ARM64_INS_FMUL: - case ARM64_INS_UMULL: + case CS_AARCH64(_INS_MUL): + case CS_AARCH64(_INS_SMULL): + case CS_AARCH64(_INS_FMUL): + case CS_AARCH64(_INS_UMULL): /* TODO: if next instruction is also a MUL, cycles are /=2 */ /* also known as Register Indexing Addressing */ op->cycles = 4; op->type = RZ_ANALYSIS_OP_TYPE_MUL; break; - case ARM64_INS_ADD: - if (ISREG64(0) && REGID64(0) == ARM64_REG_SP) { + case CS_AARCH64(_INS_ADD): + if (ISREG64(0) && REGID64(0) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; if (ISIMM64(1)) { // add sp, 0x54 op->stackptr = -(st64)IMM(1); - } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == ARM64_REG_SP) { + } else if (ISIMM64(2) && ISREG64(1) && REGID64(1) == 
CS_AARCH64(_REG_SP)) { // add sp, sp, 0x10 op->stackptr = -(st64)IMM64(2); } @@ -699,22 +699,22 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_ADC: - // case ARM64_INS_ADCS: - case ARM64_INS_UMADDL: - case ARM64_INS_SMADDL: - case ARM64_INS_FMADD: - case ARM64_INS_MADD: + case CS_AARCH64(_INS_ADC): + // case CS_AARCH64(_INS_ADCS): + case CS_AARCH64(_INS_UMADDL): + case CS_AARCH64(_INS_SMADDL): + case CS_AARCH64(_INS_FMADD): + case CS_AARCH64(_INS_MADD): op->type = RZ_ANALYSIS_OP_TYPE_ADD; break; - case ARM64_INS_CSEL: - case ARM64_INS_FCSEL: - case ARM64_INS_CSET: - case ARM64_INS_CINC: + case CS_AARCH64(_INS_CSEL): + case CS_AARCH64(_INS_FCSEL): + case CS_AARCH64(_INS_CSET): + case CS_AARCH64(_INS_CINC): op->type = RZ_ANALYSIS_OP_TYPE_CMOV; break; - case ARM64_INS_MOV: - if (REGID64(0) == ARM64_REG_SP) { + case CS_AARCH64(_INS_MOV): + if (REGID64(0) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_RESET; op->stackptr = 0; } @@ -723,169 +723,169 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } op->cycles = 1; /* fallthru */ - case ARM64_INS_MOVI: - case ARM64_INS_MOVK: - case ARM64_INS_MOVN: - case ARM64_INS_SMOV: - case ARM64_INS_UMOV: - case ARM64_INS_FMOV: - case ARM64_INS_SBFX: - case ARM64_INS_UBFX: - case ARM64_INS_UBFM: - case ARM64_INS_SBFIZ: - case ARM64_INS_UBFIZ: - case ARM64_INS_BIC: - case ARM64_INS_BFI: - case ARM64_INS_BFXIL: + case CS_AARCH64(_INS_MOVI): + case CS_AARCH64(_INS_MOVK): + case CS_AARCH64(_INS_MOVN): + case CS_AARCH64(_INS_SMOV): + case CS_AARCH64(_INS_UMOV): + case CS_AARCH64(_INS_FMOV): + case CS_AARCH64(_INS_SBFX): + case CS_AARCH64(_INS_UBFX): + case CS_AARCH64(_INS_UBFM): + case CS_AARCH64(_INS_SBFIZ): + case CS_AARCH64(_INS_UBFIZ): + case CS_AARCH64(_INS_BIC): + case CS_AARCH64(_INS_BFI): + case CS_AARCH64(_INS_BFXIL): op->type = RZ_ANALYSIS_OP_TYPE_MOV; break; - case ARM64_INS_MRS: - case ARM64_INS_MSR: + case 
CS_AARCH64(_INS_MRS): + case CS_AARCH64(_INS_MSR): op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->family = RZ_ANALYSIS_OP_FAMILY_PRIV; break; - case ARM64_INS_MOVZ: + case CS_AARCH64(_INS_MOVZ): op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 8; op->val = IMM64(1); break; - case ARM64_INS_UXTB: - case ARM64_INS_SXTB: + case CS_AARCH64(_INS_UXTB): + case CS_AARCH64(_INS_SXTB): op->type = RZ_ANALYSIS_OP_TYPE_CAST; op->ptr = 0LL; op->ptrsize = 1; break; - case ARM64_INS_UXTH: - case ARM64_INS_SXTH: + case CS_AARCH64(_INS_UXTH): + case CS_AARCH64(_INS_SXTH): op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 2; break; - case ARM64_INS_UXTW: - case ARM64_INS_SXTW: + case CS_AARCH64(_INS_UXTW): + case CS_AARCH64(_INS_SXTW): op->type = RZ_ANALYSIS_OP_TYPE_MOV; op->ptr = 0LL; op->ptrsize = 4; break; - case ARM64_INS_BRK: - case ARM64_INS_HLT: + case CS_AARCH64(_INS_BRK): + case CS_AARCH64(_INS_HLT): op->type = RZ_ANALYSIS_OP_TYPE_TRAP; // hlt stops the process, not skips some cycles like in x86 break; - case ARM64_INS_DMB: - case ARM64_INS_DSB: - case ARM64_INS_ISB: + case CS_AARCH64(_INS_DMB): + case CS_AARCH64(_INS_DSB): + case CS_AARCH64(_INS_ISB): op->family = RZ_ANALYSIS_OP_FAMILY_THREAD; // intentional fallthrough - case ARM64_INS_IC: // instruction cache invalidate - case ARM64_INS_DC: // data cache invalidate + case CS_AARCH64(_INS_IC): // instruction cache invalidate + case CS_AARCH64(_INS_DC): // data cache invalidate op->type = RZ_ANALYSIS_OP_TYPE_SYNC; // or cache break; // XXX unimplemented instructions - case ARM64_INS_DUP: - case ARM64_INS_XTN: - case ARM64_INS_XTN2: - case ARM64_INS_REV64: - case ARM64_INS_EXT: - case ARM64_INS_INS: + case CS_AARCH64(_INS_DUP): + case CS_AARCH64(_INS_XTN): + case CS_AARCH64(_INS_XTN2): + case CS_AARCH64(_INS_REV64): + case CS_AARCH64(_INS_EXT): + case CS_AARCH64(_INS_INS): op->type = RZ_ANALYSIS_OP_TYPE_MOV; break; - case ARM64_INS_LSL: + case CS_AARCH64(_INS_LSL): op->cycles = 1; /* fallthru */ - 
case ARM64_INS_SHL: - case ARM64_INS_USHLL: + case CS_AARCH64(_INS_SHL): + case CS_AARCH64(_INS_USHLL): op->type = RZ_ANALYSIS_OP_TYPE_SHL; break; - case ARM64_INS_LSR: + case CS_AARCH64(_INS_LSR): op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_SHR; break; - case ARM64_INS_ASR: + case CS_AARCH64(_INS_ASR): op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_SAR; break; - case ARM64_INS_NEG: + case CS_AARCH64(_INS_NEG): #if CS_API_MAJOR > 3 - case ARM64_INS_NEGS: + case CS_AARCH64(_INS_NEGS): #endif op->type = RZ_ANALYSIS_OP_TYPE_NOT; break; - case ARM64_INS_FCMP: - case ARM64_INS_CCMP: - case ARM64_INS_CCMN: - case ARM64_INS_CMP: - case ARM64_INS_CMN: - case ARM64_INS_TST: + case CS_AARCH64(_INS_FCMP): + case CS_AARCH64(_INS_CCMP): + case CS_AARCH64(_INS_CCMN): + case CS_AARCH64(_INS_CMP): + case CS_AARCH64(_INS_CMN): + case CS_AARCH64(_INS_TST): op->type = RZ_ANALYSIS_OP_TYPE_CMP; break; - case ARM64_INS_ROR: + case CS_AARCH64(_INS_ROR): op->cycles = 1; op->type = RZ_ANALYSIS_OP_TYPE_ROR; break; - case ARM64_INS_AND: + case CS_AARCH64(_INS_AND): op->type = RZ_ANALYSIS_OP_TYPE_AND; break; - case ARM64_INS_ORR: - case ARM64_INS_ORN: + case CS_AARCH64(_INS_ORR): + case CS_AARCH64(_INS_ORN): op->type = RZ_ANALYSIS_OP_TYPE_OR; if (ISIMM64(2)) { op->val = IMM64(2); } break; - case ARM64_INS_EOR: - case ARM64_INS_EON: + case CS_AARCH64(_INS_EOR): + case CS_AARCH64(_INS_EON): op->type = RZ_ANALYSIS_OP_TYPE_XOR; break; - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STUR: - case ARM64_INS_STR: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRH: - case ARM64_INS_STLXR: - case ARM64_INS_STLXRH: - case ARM64_INS_STXRB: + case CS_AARCH64(_INS_STRB): + case CS_AARCH64(_INS_STURB): + case CS_AARCH64(_INS_STUR): + case CS_AARCH64(_INS_STR): + case CS_AARCH64(_INS_STP): + case CS_AARCH64(_INS_STNP): + case CS_AARCH64(_INS_STXR): + case CS_AARCH64(_INS_STXRH): + case CS_AARCH64(_INS_STLXR): + case CS_AARCH64(_INS_STLXRH): + case 
CS_AARCH64(_INS_STXRB): op->type = RZ_ANALYSIS_OP_TYPE_STORE; - if (ISPREINDEX64() && REGBASE64(2) == ARM64_REG_SP) { + if (ISPREINDEX64() && REGBASE64(2) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(2); - } else if (ISPOSTINDEX64() && REGID64(2) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(2) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(3); - } else if (ISPREINDEX64() && REGBASE64(1) == ARM64_REG_SP) { + } else if (ISPREINDEX64() && REGBASE64(1) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(1); - } else if (ISPOSTINDEX64() && REGID64(1) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(1) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(2); } break; - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSH: - case ARM64_INS_LDR: - case ARM64_INS_LDURSW: - case ARM64_INS_LDP: - case ARM64_INS_LDNP: - case ARM64_INS_LDPSW: - case ARM64_INS_LDRH: - case ARM64_INS_LDRB: - if (ISPREINDEX64() && REGBASE64(2) == ARM64_REG_SP) { + case CS_AARCH64(_INS_LDUR): + case CS_AARCH64(_INS_LDURB): + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDR): + case CS_AARCH64(_INS_LDURSW): + case CS_AARCH64(_INS_LDP): + case CS_AARCH64(_INS_LDNP): + case CS_AARCH64(_INS_LDPSW): + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDRB): + if (ISPREINDEX64() && REGBASE64(2) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(2); - } else if (ISPOSTINDEX64() && REGID64(2) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(2) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(3); - } else if (ISPREINDEX64() && REGBASE64(1) == ARM64_REG_SP) { + } else if (ISPREINDEX64() && REGBASE64(1) == 
CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -MEMDISP64(1); - } else if (ISPOSTINDEX64() && REGID64(1) == ARM64_REG_SP) { + } else if (ISPOSTINDEX64() && REGID64(1) == CS_AARCH64(_REG_SP)) { op->stackop = RZ_ANALYSIS_STACK_INC; op->stackptr = -IMM64(2); } @@ -899,14 +899,14 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { op->type = RZ_ANALYSIS_OP_TYPE_LOAD; } switch (insn->id) { - case ARM64_INS_LDPSW: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSH: - case ARM64_INS_LDRSB: + case CS_AARCH64(_INS_LDPSW): + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDRSH): + case CS_AARCH64(_INS_LDRSB): op->sign = true; break; } - if (REGBASE64(1) == ARM64_REG_X29) { + if (REGBASE64(1) == CS_AARCH64(_REG_X29)) { op->stackop = RZ_ANALYSIS_STACK_GET; op->stackptr = 0; op->ptr = MEMDISP64(1); @@ -923,73 +923,73 @@ static void anop64(ArmCSContext *ctx, RzAnalysisOp *op, cs_insn *insn) { } break; #if CS_API_MAJOR > 4 - case ARM64_INS_BLRAA: - case ARM64_INS_BLRAAZ: - case ARM64_INS_BLRAB: - case ARM64_INS_BLRABZ: + case CS_AARCH64(_INS_BLRAA): + case CS_AARCH64(_INS_BLRAAZ): + case CS_AARCH64(_INS_BLRAB): + case CS_AARCH64(_INS_BLRABZ): op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RCALL; break; - case ARM64_INS_BRAA: - case ARM64_INS_BRAAZ: - case ARM64_INS_BRAB: - case ARM64_INS_BRABZ: + case CS_AARCH64(_INS_BRAA): + case CS_AARCH64(_INS_BRAAZ): + case CS_AARCH64(_INS_BRAB): + case CS_AARCH64(_INS_BRABZ): op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RJMP; break; - case ARM64_INS_LDRAA: - case ARM64_INS_LDRAB: + case CS_AARCH64(_INS_LDRAA): + case CS_AARCH64(_INS_LDRAB): op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_LOAD; break; - case ARM64_INS_RETAA: - case ARM64_INS_RETAB: - case ARM64_INS_ERETAA: - case ARM64_INS_ERETAB: + case CS_AARCH64(_INS_RETAA): + case CS_AARCH64(_INS_RETAB): + case CS_AARCH64(_INS_ERETAA): + case 
CS_AARCH64(_INS_ERETAB): op->family = RZ_ANALYSIS_OP_FAMILY_SECURITY; op->type = RZ_ANALYSIS_OP_TYPE_RET; break; #endif - case ARM64_INS_ERET: + case CS_AARCH64(_INS_ERET): op->family = RZ_ANALYSIS_OP_FAMILY_PRIV; op->type = RZ_ANALYSIS_OP_TYPE_RET; break; - case ARM64_INS_RET: + case CS_AARCH64(_INS_RET): op->type = RZ_ANALYSIS_OP_TYPE_RET; break; - case ARM64_INS_BL: // bl 0x89480 + case CS_AARCH64(_INS_BL): // bl 0x89480 op->type = RZ_ANALYSIS_OP_TYPE_CALL; op->jump = IMM64(0); op->fail = addr + 4; break; - case ARM64_INS_BLR: // blr x0 + case CS_AARCH64(_INS_BLR): // blr x0 op->type = RZ_ANALYSIS_OP_TYPE_RCALL; op->reg = cs_reg_name(handle, REGID64(0)); op->fail = addr + 4; // op->jump = IMM64(0); break; - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case CS_AARCH64(_INS_CBZ): + case CS_AARCH64(_INS_CBNZ): op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(1); op->fail = addr + op->size; break; - case ARM64_INS_TBZ: - case ARM64_INS_TBNZ: + case CS_AARCH64(_INS_TBZ): + case CS_AARCH64(_INS_TBNZ): op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(2); op->fail = addr + op->size; break; - case ARM64_INS_BR: + case CS_AARCH64(_INS_BR): op->type = RZ_ANALYSIS_OP_TYPE_RJMP; op->reg = cs_reg_name(handle, REGID64(0)); op->eob = true; break; - case ARM64_INS_B: + case CS_AARCH64(_INS_B): // BX LR == RET - if (insn->detail->arm64.operands[0].reg == ARM64_REG_LR) { + if (insn->detail->CS_aarch64().operands[0].reg == CS_AARCH64(_REG_LR)) { op->type = RZ_ANALYSIS_OP_TYPE_RET; - } else if (insn->detail->arm64.cc) { + } else if (insn->detail->CS_aarch64().cc) { op->type = RZ_ANALYSIS_OP_TYPE_CJMP; op->jump = IMM64(0); op->fail = addr + op->size; @@ -1514,8 +1514,8 @@ static int parse_reg_name(RzReg *reg, RzRegItem **reg_base, RzRegItem **reg_delt return 0; } -static bool is_valid64(arm64_reg reg) { - return reg != ARM64_REG_INVALID; +static bool is_valid64(CS_aarch64_reg() reg) { + return reg != CS_AARCH64(_REG_INVALID); } static char *reg_list[] = { @@ -1529,12 
+1529,12 @@ static char *reg_list[] = { }; static int parse_reg64_name(RzReg *reg, RzRegItem **reg_base, RzRegItem **reg_delta, csh handle, cs_insn *insn, int reg_num) { - cs_arm64_op armop = INSOP64(reg_num); + CS_aarch64_op() armop = INSOP64(reg_num); switch (armop.type) { - case ARM64_OP_REG: + case CS_AARCH64(_OP_REG): *reg_base = rz_reg_get(reg, cs_reg_name(handle, armop.reg), RZ_REG_TYPE_ANY); break; - case ARM64_OP_MEM: + case CS_AARCH64(_OP_MEM): if (is_valid64(armop.mem.base) && is_valid64(armop.mem.index)) { *reg_base = rz_reg_get(reg, cs_reg_name(handle, armop.mem.base), RZ_REG_TYPE_ANY); *reg_delta = rz_reg_get(reg, cs_reg_name(handle, armop.mem.index), RZ_REG_TYPE_ANY); @@ -1577,7 +1577,7 @@ static void set_opdir(RzAnalysisOp *op) { static void set_src_dst(RzAnalysisValue *val, RzReg *reg, csh *handle, cs_insn *insn, int x, int bits) { cs_arm_op armop = INSOP(x); - cs_arm64_op arm64op = INSOP64(x); + CS_aarch64_op() arm64op = INSOP64(x); if (bits == 64) { parse_reg64_name(reg, &val->reg, &val->regdelta, *handle, insn, x); } else { @@ -1585,14 +1585,14 @@ static void set_src_dst(RzAnalysisValue *val, RzReg *reg, csh *handle, cs_insn * } if (bits == 64) { switch (arm64op.type) { - case ARM64_OP_REG: + case CS_AARCH64(_OP_REG): val->type = RZ_ANALYSIS_VAL_REG; break; - case ARM64_OP_MEM: + case CS_AARCH64(_OP_MEM): val->type = RZ_ANALYSIS_VAL_MEM; val->delta = arm64op.mem.disp; break; - case ARM64_OP_IMM: + case CS_AARCH64(_OP_IMM): val->type = RZ_ANALYSIS_VAL_IMM; val->imm = arm64op.imm; break; @@ -1635,7 +1635,7 @@ static void create_src_dst(RzAnalysisOp *op) { static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_insn *insn, int bits) { create_src_dst(op); int i, j; - int count = bits == 64 ? insn->detail->arm64.op_count : insn->detail->arm.op_count; + int count = bits == 64 ? 
insn->detail->CS_aarch64().op_count : insn->detail->arm.op_count; switch (op->type & RZ_ANALYSIS_OP_TYPE_MASK) { case RZ_ANALYSIS_OP_TYPE_MOV: case RZ_ANALYSIS_OP_TYPE_CMP: @@ -1660,7 +1660,7 @@ static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_in for (i = 1; i < count; i++) { #if CS_API_MAJOR > 3 if (bits == 64) { - cs_arm64_op arm64op = INSOP64(i); + CS_aarch64_op() arm64op = INSOP64(i); if (arm64op.access == CS_AC_WRITE) { continue; } @@ -1682,8 +1682,8 @@ static void op_fillval(RzAnalysis *analysis, RzAnalysisOp *op, csh handle, cs_in case RZ_ANALYSIS_OP_TYPE_STORE: if (count > 2) { if (bits == 64) { - cs_arm64_op arm64op = INSOP64(count - 1); - if (arm64op.type == ARM64_OP_IMM) { + CS_aarch64_op() arm64op = INSOP64(count - 1); + if (arm64op.type == CS_AARCH64(_OP_IMM)) { count--; } } else { @@ -1748,7 +1748,7 @@ static int analysis_op(RzAnalysis *a, RzAnalysisOp *op, ut64 addr, const ut8 *bu op->size = (a->bits == 16) ? 2 : 4; op->addr = addr; if (ctx->handle == 0) { - ret = (a->bits == 64) ? cs_open(CS_ARCH_ARM64, mode, &ctx->handle) : cs_open(CS_ARCH_ARM, mode, &ctx->handle); + ret = (a->bits == 64) ? 
cs_open(CS_AARCH64pre(CS_ARCH_), mode, &ctx->handle) : cs_open(CS_ARCH_ARM, mode, &ctx->handle); cs_option(ctx->handle, CS_OPT_DETAIL, CS_OPT_ON); cs_option(ctx->handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_CS_REG_ALIAS); if (ret != CS_ERR_OK) { @@ -2259,47 +2259,47 @@ static ut8 *analysis_mask(RzAnalysis *analysis, int size, const ut8 *data, ut64 case 4: if (analysis->bits == 64) { switch (op->id) { - case ARM64_INS_LDP: - case ARM64_INS_LDXP: - case ARM64_INS_LDXR: - case ARM64_INS_LDXRB: - case ARM64_INS_LDXRH: - case ARM64_INS_LDPSW: - case ARM64_INS_LDNP: - case ARM64_INS_LDTR: - case ARM64_INS_LDTRB: - case ARM64_INS_LDTRH: - case ARM64_INS_LDTRSB: - case ARM64_INS_LDTRSH: - case ARM64_INS_LDTRSW: - case ARM64_INS_LDUR: - case ARM64_INS_LDURB: - case ARM64_INS_LDURH: - case ARM64_INS_LDURSB: - case ARM64_INS_LDURSH: - case ARM64_INS_LDURSW: - case ARM64_INS_STP: - case ARM64_INS_STNP: - case ARM64_INS_STXR: - case ARM64_INS_STXRB: - case ARM64_INS_STXRH: + case CS_AARCH64(_INS_LDP): + case CS_AARCH64(_INS_LDXP): + case CS_AARCH64(_INS_LDXR): + case CS_AARCH64(_INS_LDXRB): + case CS_AARCH64(_INS_LDXRH): + case CS_AARCH64(_INS_LDPSW): + case CS_AARCH64(_INS_LDNP): + case CS_AARCH64(_INS_LDTR): + case CS_AARCH64(_INS_LDTRB): + case CS_AARCH64(_INS_LDTRH): + case CS_AARCH64(_INS_LDTRSB): + case CS_AARCH64(_INS_LDTRSH): + case CS_AARCH64(_INS_LDTRSW): + case CS_AARCH64(_INS_LDUR): + case CS_AARCH64(_INS_LDURB): + case CS_AARCH64(_INS_LDURH): + case CS_AARCH64(_INS_LDURSB): + case CS_AARCH64(_INS_LDURSH): + case CS_AARCH64(_INS_LDURSW): + case CS_AARCH64(_INS_STP): + case CS_AARCH64(_INS_STNP): + case CS_AARCH64(_INS_STXR): + case CS_AARCH64(_INS_STXRB): + case CS_AARCH64(_INS_STXRH): rz_write_ble(ret + idx, 0xffffffff, analysis->big_endian, 32); break; - case ARM64_INS_STRB: - case ARM64_INS_STURB: - case ARM64_INS_STURH: - case ARM64_INS_STUR: - case ARM64_INS_STR: - case ARM64_INS_STTR: - case ARM64_INS_STTRB: - case ARM64_INS_STRH: - case ARM64_INS_STTRH: - case 
ARM64_INS_LDR: - case ARM64_INS_LDRB: - case ARM64_INS_LDRH: - case ARM64_INS_LDRSB: - case ARM64_INS_LDRSW: - case ARM64_INS_LDRSH: { + case CS_AARCH64(_INS_STRB): + case CS_AARCH64(_INS_STURB): + case CS_AARCH64(_INS_STURH): + case CS_AARCH64(_INS_STUR): + case CS_AARCH64(_INS_STR): + case CS_AARCH64(_INS_STTR): + case CS_AARCH64(_INS_STTRB): + case CS_AARCH64(_INS_STRH): + case CS_AARCH64(_INS_STTRH): + case CS_AARCH64(_INS_LDR): + case CS_AARCH64(_INS_LDRB): + case CS_AARCH64(_INS_LDRH): + case CS_AARCH64(_INS_LDRSB): + case CS_AARCH64(_INS_LDRSW): + case CS_AARCH64(_INS_LDRSH): { bool is_literal = (opcode & 0x38000000) == 0x18000000; if (is_literal) { rz_write_ble(ret + idx, 0xff000000, analysis->big_endian, 32); @@ -2308,22 +2308,22 @@ static ut8 *analysis_mask(RzAnalysis *analysis, int size, const ut8 *data, ut64 } break; } - case ARM64_INS_B: - case ARM64_INS_BL: - case ARM64_INS_CBZ: - case ARM64_INS_CBNZ: + case CS_AARCH64(_INS_B): + case CS_AARCH64(_INS_BL): + case CS_AARCH64(_INS_CBZ): + case CS_AARCH64(_INS_CBNZ): if (op->type == RZ_ANALYSIS_OP_TYPE_CJMP) { rz_write_ble(ret + idx, 0xff00001f, analysis->big_endian, 32); } else { rz_write_ble(ret + idx, 0xfc000000, analysis->big_endian, 32); } break; - case ARM64_INS_TBZ: - case ARM64_INS_TBNZ: + case CS_AARCH64(_INS_TBZ): + case CS_AARCH64(_INS_TBNZ): rz_write_ble(ret + idx, 0xfff8001f, analysis->big_endian, 32); break; - case ARM64_INS_ADR: - case ARM64_INS_ADRP: + case CS_AARCH64(_INS_ADR): + case CS_AARCH64(_INS_ADRP): rz_write_ble(ret + idx, 0xff00001f, analysis->big_endian, 32); break; default: