@@ -67,6 +67,8 @@ enum aarch64_insn_imm_type {
 	AARCH64_INSN_IMM_12,
 	AARCH64_INSN_IMM_9,
 	AARCH64_INSN_IMM_7,
+	AARCH64_INSN_IMM_S,
+	AARCH64_INSN_IMM_R,
 	AARCH64_INSN_IMM_MAX
 };
 
@@ -170,6 +172,12 @@ enum aarch64_insn_adsb_type {
 	AARCH64_INSN_ADSB_SUB_SETFLAGS
 };
 
+enum aarch64_insn_bitfield_type {
+	AARCH64_INSN_BITFIELD_MOVE,
+	AARCH64_INSN_BITFIELD_MOVE_UNSIGNED,
+	AARCH64_INSN_BITFIELD_MOVE_SIGNED
+};
+
 #define __AARCH64_INSN_FUNCS(abbr, mask, val)	\
 static __always_inline bool aarch64_insn_is_##abbr(u32 code) \
 { return (code & (mask)) == (val); } \
@@ -186,6 +194,9 @@ __AARCH64_INSN_FUNCS(add_imm,	0x7F000000, 0x11000000)
 __AARCH64_INSN_FUNCS(adds_imm,	0x7F000000, 0x31000000)
 __AARCH64_INSN_FUNCS(sub_imm,	0x7F000000, 0x51000000)
 __AARCH64_INSN_FUNCS(subs_imm,	0x7F000000, 0x71000000)
+__AARCH64_INSN_FUNCS(sbfm,	0x7F800000, 0x13000000)
+__AARCH64_INSN_FUNCS(bfm,	0x7F800000, 0x33000000)
+__AARCH64_INSN_FUNCS(ubfm,	0x7F800000, 0x53000000)
 __AARCH64_INSN_FUNCS(b,		0xFC000000, 0x14000000)
 __AARCH64_INSN_FUNCS(bl,	0xFC000000, 0x94000000)
 __AARCH64_INSN_FUNCS(cbz,	0xFE000000, 0x34000000)
@@ -236,6 +247,11 @@ u32 aarch64_insn_gen_add_sub_imm(enum aarch64_insn_register dst,
 				 enum aarch64_insn_register src,
 				 int imm, enum aarch64_insn_variant variant,
 				 enum aarch64_insn_adsb_type type);
+u32 aarch64_insn_gen_bitfield(enum aarch64_insn_register dst,
+			      enum aarch64_insn_register src,
+			      int immr, int imms,
+			      enum aarch64_insn_variant variant,
+			      enum aarch64_insn_bitfield_type type);
 
 bool aarch64_insn_hotpatch_safe(u32 old_insn, u32 new_insn);
 