use binaryninjacore_sys::{BNGetLowLevelILByIndex, BNLowLevelILInstruction};
use log::error;
use std::collections::BTreeMap;
use std::marker::PhantomData;
use std::mem;
use super::*;
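/// A typed view over a raw `BNLowLevelILInstruction`.
///
/// The zero-sized marker type `O` selects which operand accessors are
/// available; the instruction data itself is carried in `op`.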
pub struct Operation<'func, A, M, F, O>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
O: OperationArguments,
{
pub(crate) function: &'func Function<A, M, F>,
pub(crate) op: BNLowLevelILInstruction,
_args: PhantomData<O>,
}
impl<'func, A, M, F, O> Operation<'func, A, M, F, O>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
O: OperationArguments,
{
pub(crate) fn new(function: &'func Function<A, M, F>, op: BNLowLevelILInstruction) -> Self {
Self {
function,
op,
_args: PhantomData,
}
}
pub fn address(&self) -> u64 {
self.op.address
}
}
impl<'func, A, M, O> Operation<'func, A, M, NonSSA<LiftedNonSSA>, O>
where
A: 'func + Architecture,
M: FunctionMutability,
O: OperationArguments,
{
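/// Returns the flag write semantics attached to this lifted operation, if any.
/// A raw flag-write ID of 0 means no flags are written.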
pub fn flag_write(&self) -> Option<A::FlagWrite> {
match self.op.flags {
0 => None,
id => self.function.arch().flag_write_from_id(id),
}
}
}
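// Zero-sized marker types naming the LLIL operation (or operation family) an
// `Operation` wraps; they select which accessors are implemented below.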
pub struct NoArgs;
pub struct Pop;
impl<'func, A, M, F> Operation<'func, A, M, F, Pop>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
}
pub struct Syscall;
pub struct Intrinsic;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, Intrinsic>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
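/// The architecture intrinsic referenced by this operation, if the raw ID in
/// operand 2 is one the architecture recognizes.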
pub fn intrinsic(&self) -> Option<A::Intrinsic> {
let raw_id = self.op.operands[2] as u32;
self.function.arch().intrinsic_from_id(raw_id)
}
}
pub struct SetReg;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, SetReg>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
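/// The destination register of the assignment. IDs with the high bit
/// (0x8000_0000) set denote LLIL temporary registers; the low 31 bits are the
/// temp register index.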
pub fn dest_reg(&self) -> Register<A::Register> {
let raw_id = self.op.operands[0] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_SET_REG @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
pub fn source_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
}
pub struct SetRegSplit;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, SetRegSplit>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn dest_reg_high(&self) -> Register<A::Register> {
let raw_id = self.op.operands[0] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_SET_REG_SPLIT @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
pub fn dest_reg_low(&self) -> Register<A::Register> {
let raw_id = self.op.operands[1] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_SET_REG_SPLIT @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
pub fn source_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[2] as usize)
}
}
pub struct SetFlag;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, SetFlag>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn source_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
}
pub struct Load;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, Load>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn source_mem_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
}
pub struct Store;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, Store>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn dest_mem_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn source_expr(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
}
pub struct Reg;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, Reg>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn source_reg(&self) -> Register<A::Register> {
let raw_id = self.op.operands[0] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_REG @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
}
pub struct RegSplit;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, RegSplit>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn size(&self) -> usize {
self.op.size
}
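// Note: LLIL_REG_SPLIT stores the high register in operand 0 and the low
// register in operand 1, mirroring LLIL_SET_REG_SPLIT above.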
pub fn low_reg(&self) -> Register<A::Register> {
let raw_id = self.op.operands[1] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_REG @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
pub fn high_reg(&self) -> Register<A::Register> {
let raw_id = self.op.operands[0] as u32;
if raw_id >= 0x8000_0000 {
Register::Temp(raw_id & 0x7fff_ffff)
} else {
self.function
.arch()
.register_from_id(raw_id)
.map(Register::ArchReg)
.unwrap_or_else(|| {
error!(
"got garbage register from LLIL_REG @ 0x{:x}",
self.op.address
);
Register::Temp(0)
})
}
}
}
pub struct Flag;
pub struct FlagBit;
pub struct Jump;
impl<'func, A, M, F> Operation<'func, A, M, F, Jump>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn target(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
}
pub struct JumpTo;
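// Iterates the chained LLIL list nodes backing a jump target list: each node
// holds up to three payload operands, with operands[3] linking to the next node.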
struct TargetListIter<'func, A, M, F>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
function: &'func Function<A, M, F>,
cursor: BNLowLevelILInstruction,
cursor_operand: usize,
}
impl<'func, A, M, F> TargetListIter<'func, A, M, F>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
fn next(&mut self) -> u64 {
if self.cursor_operand >= 3 {
self.cursor = unsafe {
BNGetLowLevelILByIndex(self.function.handle, self.cursor.operands[3] as usize)
};
self.cursor_operand = 0;
}
let result = self.cursor.operands[self.cursor_operand];
self.cursor_operand += 1;
result
}
}
impl<'func, A, M, F> Operation<'func, A, M, F, JumpTo>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn target(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
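/// Returns a map from each possible jump value to the IL instruction index it
/// branches to, decoded from the operand list chained off operand 2.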
pub fn target_list(&self) -> BTreeMap<u64, usize> {
let mut result = BTreeMap::new();
let count = self.op.operands[1] as usize / 2;
let mut list = TargetListIter {
function: self.function,
cursor: unsafe {
BNGetLowLevelILByIndex(self.function.handle, self.op.operands[2] as usize)
},
cursor_operand: 0,
};
for _ in 0..count {
let value = list.next();
let target = list.next() as usize;
result.insert(value, target);
}
result
}
}
pub struct Call;
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, Call>
where
A: 'func + Architecture,
M: FunctionMutability,
V: NonSSAVariant,
{
pub fn target(&self) -> Expression<'func, A, M, NonSSA<V>, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
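/// The recorded stack adjustment for `LLIL_CALL_STACK_ADJUST` operations;
/// `None` for plain calls.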
pub fn stack_adjust(&self) -> Option<u64> {
use binaryninjacore_sys::BNLowLevelILOperation::LLIL_CALL_STACK_ADJUST;
if self.op.operation == LLIL_CALL_STACK_ADJUST {
Some(self.op.operands[1])
} else {
None
}
}
}
pub struct Ret;
impl<'func, A, M, F> Operation<'func, A, M, F, Ret>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn target(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
}
pub struct If;
impl<'func, A, M, F> Operation<'func, A, M, F, If>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn condition(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn true_target(&self) -> Instruction<'func, A, M, F> {
Instruction {
function: self.function,
instr_idx: self.op.operands[1] as usize,
}
}
pub fn true_target_idx(&self) -> usize {
self.op.operands[1] as usize
}
pub fn false_target(&self) -> Instruction<'func, A, M, F> {
Instruction {
function: self.function,
instr_idx: self.op.operands[2] as usize,
}
}
pub fn false_target_idx(&self) -> usize {
self.op.operands[2] as usize
}
}
pub struct Goto;
impl<'func, A, M, F> Operation<'func, A, M, F, Goto>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn target(&self) -> Instruction<'func, A, M, F> {
Instruction {
function: self.function,
instr_idx: self.op.operands[0] as usize,
}
}
pub fn target_idx(&self) -> usize {
self.op.operands[0] as usize
}
}
pub struct FlagCond;
pub struct FlagGroup;
impl<'func, A, M> Operation<'func, A, M, NonSSA<LiftedNonSSA>, FlagGroup>
where
A: 'func + Architecture,
M: FunctionMutability,
{
pub fn flag_group(&self) -> A::FlagGroup {
let id = self.op.operands[0] as u32;
self.function.arch().flag_group_from_id(id).unwrap()
}
}
pub struct Trap;
impl<'func, A, M, F> Operation<'func, A, M, F, Trap>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn vector(&self) -> u64 {
self.op.operands[0]
}
}
pub struct RegPhi;
pub struct FlagPhi;
pub struct MemPhi;
pub struct Const;
impl<'func, A, M, F> Operation<'func, A, M, F, Const>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
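/// The constant value, masked down to the operation's size in bytes. In debug
/// builds, an error is logged if the raw operand does not fit in that size.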
pub fn value(&self) -> u64 {
#[cfg(debug_assertions)]
{
let raw = self.op.operands[0] as i64;
let is_safe = match raw.overflowing_shr(self.op.size as u32 * 8) {
(_, true) => true,
(res, false) => [-1, 0].contains(&res),
};
if !is_safe {
error!(
"il expr @ {:x} contains constant 0x{:x} as {} byte value (doesn't fit!)",
self.op.address, self.op.operands[0], self.op.size
);
}
}
let mut mask = -1i64 as u64;
if self.op.size < mem::size_of::<u64>() {
mask <<= self.op.size * 8;
mask = !mask;
}
self.op.operands[0] & mask
}
}
pub struct Extern;
impl<'func, A, M, F> Operation<'func, A, M, F, Extern>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
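/// The extern value, masked down to `size` bytes exactly as in
/// `Const::value`.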
pub fn value(&self) -> u64 {
#[cfg(debug_assertions)]
{
let raw = self.op.operands[0] as i64;
let is_safe = match raw.overflowing_shr(self.op.size as u32 * 8) {
(_, true) => true,
(res, false) => [-1, 0].contains(&res),
};
if !is_safe {
error!(
"il expr @ {:x} contains extern 0x{:x} as {} byte value (doesn't fit!)",
self.op.address, self.op.operands[0], self.op.size
);
}
}
let mut mask = -1i64 as u64;
if self.op.size < mem::size_of::<u64>() {
mask <<= self.op.size * 8;
mask = !mask;
}
self.op.operands[0] & mask
}
}
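/// Two-operand arithmetic and logical operations; `left` and `right` are the
/// operand expressions.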
pub struct BinaryOp;
impl<'func, A, M, F> Operation<'func, A, M, F, BinaryOp>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn left(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn right(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
}
pub struct BinaryOpCarry;
impl<'func, A, M, F> Operation<'func, A, M, F, BinaryOpCarry>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn left(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn right(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
pub fn carry(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[2] as usize)
}
}
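/// Double-precision operations whose wide operand is provided as separate
/// `high` and `low` expressions alongside the `right` operand.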
pub struct DoublePrecDivOp;
impl<'func, A, M, F> Operation<'func, A, M, F, DoublePrecDivOp>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn high(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn low(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
pub fn right(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[2] as usize)
}
}
pub struct UnaryOp;
impl<'func, A, M, F> Operation<'func, A, M, F, UnaryOp>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn operand(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
}
pub struct Condition;
impl<'func, A, M, F> Operation<'func, A, M, F, Condition>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn left(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
pub fn right(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[1] as usize)
}
}
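/// An unimplemented operation that accesses memory; `mem_expr` gives the
/// address expression of the access.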
pub struct UnimplMem;
impl<'func, A, M, F> Operation<'func, A, M, F, UnimplMem>
where
A: 'func + Architecture,
M: FunctionMutability,
F: FunctionForm,
{
pub fn size(&self) -> usize {
self.op.size
}
pub fn mem_expr(&self) -> Expression<'func, A, M, F, ValueExpr> {
Expression::new(self.function, self.op.operands[0] as usize)
}
}
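/// Marker trait for the zero-sized types usable as the `O` parameter of
/// [`Operation`].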
pub trait OperationArguments: 'static {}
impl OperationArguments for NoArgs {}
impl OperationArguments for Pop {}
impl OperationArguments for Syscall {}
impl OperationArguments for Intrinsic {}
impl OperationArguments for SetReg {}
impl OperationArguments for SetRegSplit {}
impl OperationArguments for SetFlag {}
impl OperationArguments for Load {}
impl OperationArguments for Store {}
impl OperationArguments for Reg {}
impl OperationArguments for RegSplit {}
impl OperationArguments for Flag {}
impl OperationArguments for FlagBit {}
impl OperationArguments for Jump {}
impl OperationArguments for JumpTo {}
impl OperationArguments for Call {}
impl OperationArguments for Ret {}
impl OperationArguments for If {}
impl OperationArguments for Goto {}
impl OperationArguments for FlagCond {}
impl OperationArguments for FlagGroup {}
impl OperationArguments for Trap {}
impl OperationArguments for RegPhi {}
impl OperationArguments for FlagPhi {}
impl OperationArguments for MemPhi {}
impl OperationArguments for Const {}
impl OperationArguments for Extern {}
impl OperationArguments for BinaryOp {}
impl OperationArguments for BinaryOpCarry {}
impl OperationArguments for DoublePrecDivOp {}
impl OperationArguments for UnaryOp {}
impl OperationArguments for Condition {}
impl OperationArguments for UnimplMem {}