1use binaryninjacore_sys::*;
20use std::fmt::{Debug, Formatter};
21
22use crate::{
23 calling_convention::CoreCallingConvention,
24 data_buffer::DataBuffer,
25 disassembly::InstructionTextToken,
26 function::{ArchAndAddr, Function, NativeBlock},
27 platform::Platform,
28 rc::*,
29 relocation::CoreRelocationHandler,
30 string::{IntoCStr, *},
31 types::{NameAndType, Type},
32 BranchType, Endianness,
33};
34use std::ops::Deref;
35use std::{
36 borrow::{Borrow, Cow},
37 collections::HashMap,
38 ffi::{c_char, c_int, c_void, CStr, CString},
39 fmt::Display,
40 hash::Hash,
41 mem::MaybeUninit,
42};
43
44use crate::basic_block::BasicBlock;
45use crate::function_recognizer::FunctionRecognizer;
46use crate::relocation::{CustomRelocationHandlerHandle, RelocationHandler};
47use crate::variable::IndirectBranchInfo;
48
49use crate::confidence::Conf;
50use crate::low_level_il::expression::ValueExpr;
51use crate::low_level_il::lifting::{
52 get_default_flag_cond_llil, get_default_flag_write_llil, LowLevelILFlagWriteOp,
53};
54use crate::low_level_il::{LowLevelILMutableExpression, LowLevelILMutableFunction};
55pub use binaryninjacore_sys::BNFlagRole as FlagRole;
56pub use binaryninjacore_sys::BNImplicitRegisterExtend as ImplicitRegisterExtend;
57pub use binaryninjacore_sys::BNLowLevelILFlagCondition as FlagCondition;
58use std::collections::HashSet;
59
/// Declares a transparent newtype wrapper around a raw core id type.
///
/// The generated type derives the common value-semantics traits, gets
/// lossless `From` conversions in both directions, and a `Display` impl
/// that defers to the wrapped value.
macro_rules! newtype {
    ($name:ident, $inner_type:ty) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
        pub struct $name(pub $inner_type);

        impl From<$inner_type> for $name {
            fn from(value: $inner_type) -> Self {
                Self(value)
            }
        }

        impl From<$name> for $inner_type {
            fn from(value: $name) -> Self {
                value.0
            }
        }

        impl Display for $name {
            fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}", self.0)
            }
        }
    };
}
84
// Strongly-typed ids used when talking to the core; each wraps the raw `u32`
// the C API traffics in, so the different id spaces cannot be mixed up.
newtype!(RegisterId, u32);

impl RegisterId {
    /// Whether this id refers to a temporary register rather than an
    /// architecture-defined one; temporaries have the high bit set.
    pub fn is_temporary(&self) -> bool {
        self.0 & 0x8000_0000 != 0
    }
}

newtype!(RegisterStackId, u32);
newtype!(FlagId, u32);
newtype!(FlagWriteId, u32);
newtype!(FlagClassId, u32);
newtype!(FlagGroupId, u32);
newtype!(IntrinsicId, u32);
100
/// The kind of control-flow transfer an instruction performs; kinds with a
/// payload carry the branch's destination address.
#[derive(Default, Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum BranchKind {
    /// Branch whose target could not be determined.
    #[default]
    Unresolved,
    Unconditional(u64),
    /// Not-taken edge of a conditional branch.
    False(u64),
    /// Taken edge of a conditional branch.
    True(u64),
    Call(u64),
    FunctionReturn,
    SystemCall,
    /// Branch through a runtime-computed target (no static address).
    Indirect,
    Exception,
    UserDefined,
}
115
/// A single branch reported from instruction analysis: what kind of branch it
/// is, plus an optional architecture the target should be disassembled with
/// (used for inter-architecture transitions, e.g. mode switches).
#[derive(Default, Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct BranchInfo {
    /// Architecture of the branch target; `None` means the current one.
    pub arch: Option<CoreArchitecture>,
    pub kind: BranchKind,
}
122
123impl BranchInfo {
124 pub fn new(kind: BranchKind) -> Self {
126 Self { arch: None, kind }
127 }
128
129 pub fn new_with_arch(kind: BranchKind, arch: CoreArchitecture) -> Self {
133 Self {
134 arch: Some(arch),
135 kind,
136 }
137 }
138
139 pub fn target(&self) -> Option<u64> {
140 match self.kind {
141 BranchKind::Unconditional(target) => Some(target),
142 BranchKind::False(target) => Some(target),
143 BranchKind::True(target) => Some(target),
144 BranchKind::Call(target) => Some(target),
145 _ => None,
146 }
147 }
148}
149
impl From<BranchInfo> for BNBranchType {
    /// Maps the branch kind onto the raw core enum. The target address and
    /// architecture override are intentionally dropped here — they travel to
    /// the core through separate arrays in `BNInstructionInfo`.
    fn from(value: BranchInfo) -> Self {
        match value.kind {
            BranchKind::Unresolved => BNBranchType::UnresolvedBranch,
            BranchKind::Unconditional(_) => BNBranchType::UnconditionalBranch,
            BranchKind::False(_) => BNBranchType::FalseBranch,
            BranchKind::True(_) => BNBranchType::TrueBranch,
            BranchKind::Call(_) => BNBranchType::CallDestination,
            BranchKind::FunctionReturn => BNBranchType::FunctionReturn,
            BranchKind::SystemCall => BNBranchType::SystemCall,
            BranchKind::Indirect => BNBranchType::IndirectBranch,
            BranchKind::Exception => BNBranchType::ExceptionBranch,
            BranchKind::UserDefined => BNBranchType::UserDefinedBranch,
        }
    }
}
166
167impl From<BranchKind> for BranchInfo {
168 fn from(value: BranchKind) -> Self {
169 Self {
170 arch: None,
171 kind: value,
172 }
173 }
174}
175
impl From<BranchKind> for BranchType {
    /// Maps a branch kind onto the public `BranchType` enum, dropping any
    /// target address the kind carries.
    fn from(value: BranchKind) -> Self {
        match value {
            BranchKind::Unresolved => BranchType::UnresolvedBranch,
            BranchKind::Unconditional(_) => BranchType::UnconditionalBranch,
            BranchKind::True(_) => BranchType::TrueBranch,
            BranchKind::False(_) => BranchType::FalseBranch,
            BranchKind::Call(_) => BranchType::CallDestination,
            BranchKind::FunctionReturn => BranchType::FunctionReturn,
            BranchKind::SystemCall => BranchType::SystemCall,
            BranchKind::Indirect => BranchType::IndirectBranch,
            BranchKind::Exception => BranchType::ExceptionBranch,
            BranchKind::UserDefined => BranchType::UserDefinedBranch,
        }
    }
}
192
/// Maximum number of branches a single instruction can report; this mirrors
/// the fixed array sizes in the core's `BNInstructionInfo`.
pub const NUM_BRANCH_INFO: usize = 3;

/// Result of analyzing one instruction: its byte length, delay slots, and up
/// to [`NUM_BRANCH_INFO`] outgoing branches.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct InstructionInfo {
    /// Instruction length in bytes.
    pub length: usize,
    /// Whether the architecture of the target address (rather than the
    /// branch's own arch field) decides the disassembly architecture.
    pub arch_transition_by_target_addr: bool,
    pub delay_slots: u8,
    /// Branch slots; filled front-to-back by [`Self::add_branch`].
    pub branches: [Option<BranchInfo>; NUM_BRANCH_INFO],
}
204
205impl InstructionInfo {
206 pub fn new(length: usize, delay_slots: u8) -> Self {
208 Self {
209 length,
210 arch_transition_by_target_addr: false,
211 delay_slots,
212 branches: Default::default(),
213 }
214 }
215
216 pub fn add_branch(&mut self, branch_info: impl Into<BranchInfo>) {
217 for branch in &mut self.branches {
220 if branch.is_none() {
221 *branch = Some(branch_info.into());
222 return;
223 }
224 }
225 }
226}
227
impl From<BNInstructionInfo> for InstructionInfo {
    /// Converts the raw core struct, reassembling each populated branch slot
    /// from the three parallel arrays (`branchType`, `branchTarget`,
    /// `branchArch`).
    fn from(value: BNInstructionInfo) -> Self {
        let mut branch_info = [None; NUM_BRANCH_INFO];
        // Clamp to the array size in case the core reports a larger count.
        #[allow(clippy::needless_range_loop)]
        for i in 0..value.branchCount.min(NUM_BRANCH_INFO) {
            let branch_target = value.branchTarget[i];
            branch_info[i] = Some(BranchInfo {
                kind: match value.branchType[i] {
                    BNBranchType::UnconditionalBranch => BranchKind::Unconditional(branch_target),
                    BNBranchType::FalseBranch => BranchKind::False(branch_target),
                    BNBranchType::TrueBranch => BranchKind::True(branch_target),
                    BNBranchType::CallDestination => BranchKind::Call(branch_target),
                    BNBranchType::FunctionReturn => BranchKind::FunctionReturn,
                    BNBranchType::SystemCall => BranchKind::SystemCall,
                    BNBranchType::IndirectBranch => BranchKind::Indirect,
                    BNBranchType::ExceptionBranch => BranchKind::Exception,
                    BNBranchType::UnresolvedBranch => BranchKind::Unresolved,
                    BNBranchType::UserDefinedBranch => BranchKind::UserDefined,
                },
                // A null arch pointer means "no architecture override".
                arch: if value.branchArch[i].is_null() {
                    None
                } else {
                    Some(unsafe { CoreArchitecture::from_raw(value.branchArch[i]) })
                },
            });
        }
        Self {
            length: value.length,
            arch_transition_by_target_addr: value.archTransitionByTargetAddr,
            delay_slots: value.delaySlots,
            branches: branch_info,
        }
    }
}
263
264impl From<InstructionInfo> for BNInstructionInfo {
265 fn from(value: InstructionInfo) -> Self {
266 let branch_count = value.branches.into_iter().filter(Option::is_some).count();
267 let branch_info_0 = value.branches[0].unwrap_or_default();
269 let branch_info_1 = value.branches[1].unwrap_or_default();
270 let branch_info_2 = value.branches[2].unwrap_or_default();
271 Self {
272 length: value.length,
273 branchCount: branch_count,
274 archTransitionByTargetAddr: value.arch_transition_by_target_addr,
275 delaySlots: value.delay_slots,
276 branchType: [
277 branch_info_0.into(),
278 branch_info_1.into(),
279 branch_info_2.into(),
280 ],
281 branchTarget: [
282 branch_info_0.target().unwrap_or_default(),
283 branch_info_1.target().unwrap_or_default(),
284 branch_info_2.target().unwrap_or_default(),
285 ],
286 branchArch: [
287 branch_info_0
288 .arch
289 .map(|a| a.handle)
290 .unwrap_or(std::ptr::null_mut()),
291 branch_info_1
292 .arch
293 .map(|a| a.handle)
294 .unwrap_or(std::ptr::null_mut()),
295 branch_info_2
296 .arch
297 .map(|a| a.handle)
298 .unwrap_or(std::ptr::null_mut()),
299 ],
300 }
301 }
302}
303
/// Size and aliasing metadata for a register, obtained via [`Register::info`].
pub trait RegisterInfo: Sized {
    type RegType: Register<InfoType = Self>;

    /// The full-width register this one aliases, or `None` if it is itself
    /// full width.
    fn parent(&self) -> Option<Self::RegType>;
    /// Register size in bytes.
    fn size(&self) -> usize;
    /// Offset of this register within its parent, in bytes.
    fn offset(&self) -> usize;
    /// How writes to this sub-register affect the rest of the parent.
    fn implicit_extend(&self) -> ImplicitRegisterExtend;
}
312
/// A register of an [`Architecture`], identified by a [`RegisterId`].
pub trait Register: Debug + Sized + Clone + Copy + Hash + Eq {
    type InfoType: RegisterInfo<RegType = Self>;

    fn name(&self) -> Cow<'_, str>;
    /// Size/aliasing metadata for this register.
    fn info(&self) -> Self::InfoType;

    fn id(&self) -> RegisterId;
}
324
/// Layout metadata for a register stack (e.g. the x87 FPU stack), obtained
/// via [`RegisterStack::info`].
pub trait RegisterStackInfo: Sized {
    type RegStackType: RegisterStack<InfoType = Self>;
    type RegType: Register<InfoType = Self::RegInfoType>;
    type RegInfoType: RegisterInfo<RegType = Self::RegType>;

    /// First backing-storage register and how many registers follow it.
    fn storage_regs(&self) -> (Self::RegType, usize);
    /// First top-relative register and count, if the stack exposes any.
    fn top_relative_regs(&self) -> Option<(Self::RegType, usize)>;
    /// The register tracking the stack top.
    fn stack_top_reg(&self) -> Self::RegType;
}
334
/// A register stack of an [`Architecture`], identified by a
/// [`RegisterStackId`].
pub trait RegisterStack: Debug + Sized + Clone + Copy {
    type InfoType: RegisterStackInfo<
        RegType = Self::RegType,
        RegInfoType = Self::RegInfoType,
        RegStackType = Self,
    >;
    type RegType: Register<InfoType = Self::RegInfoType>;
    type RegInfoType: RegisterInfo<RegType = Self::RegType>;

    fn name(&self) -> Cow<'_, str>;
    /// Layout metadata for this register stack.
    fn info(&self) -> Self::InfoType;

    fn id(&self) -> RegisterStackId;
}
352
/// A status flag of an [`Architecture`], identified by a [`FlagId`].
pub trait Flag: Debug + Sized + Clone + Copy + Hash + Eq {
    type FlagClass: FlagClass;

    fn name(&self) -> Cow<'_, str>;
    /// The flag's semantic role (carry, zero, …), optionally specialized by
    /// a semantic flag class.
    fn role(&self, class: Option<Self::FlagClass>) -> FlagRole;

    fn id(&self) -> FlagId;
}
364
/// A flag-write type: a named set of flags an instruction updates together,
/// identified by a [`FlagWriteId`].
pub trait FlagWrite: Sized + Clone + Copy {
    type FlagType: Flag;
    type FlagClass: FlagClass;

    fn name(&self) -> Cow<'_, str>;
    /// Semantic flag class associated with this write type, if any.
    fn class(&self) -> Option<Self::FlagClass>;

    fn id(&self) -> FlagWriteId;

    /// All flags this write type sets.
    fn flags_written(&self) -> Vec<Self::FlagType>;
}
380
/// A semantic flag class, identified by a [`FlagClassId`]; used to give the
/// same flag different meanings in different contexts.
pub trait FlagClass: Sized + Clone + Copy + Hash + Eq {
    fn name(&self) -> Cow<'_, str>;

    fn id(&self) -> FlagClassId;
}
390
/// A semantic flag group, identified by a [`FlagGroupId`]; groups the flags
/// that together decide a condition.
pub trait FlagGroup: Debug + Sized + Clone + Copy {
    type FlagType: Flag;
    type FlagClass: FlagClass;

    fn name(&self) -> Cow<'_, str>;

    fn id(&self) -> FlagGroupId;

    /// Flags that must be valid for this group to be evaluated.
    fn flags_required(&self) -> Vec<Self::FlagType>;

    /// For each semantic flag class, the condition this group represents
    /// under that class.
    fn flag_conditions(&self) -> HashMap<Self::FlagClass, FlagCondition>;
}
428
/// An architecture intrinsic, identified by an [`IntrinsicId`].
pub trait Intrinsic: Debug + Sized + Clone + Copy {
    fn name(&self) -> Cow<'_, str>;

    fn id(&self) -> IntrinsicId;

    /// Classification of the intrinsic; defaults to the general class.
    fn class(&self) -> BNIntrinsicClass {
        BNIntrinsicClass::GeneralIntrinsicClass
    }

    /// Named, typed input parameters of the intrinsic.
    fn inputs(&self) -> Vec<NameAndType>;

    /// Output types produced by the intrinsic.
    fn outputs(&self) -> Vec<Conf<Ref<Type>>>;
}
447
/// The interface a custom architecture implements to plug into Binary Ninja:
/// disassembly, lifting to LLIL, register/flag/intrinsic description, and
/// optional patching support.
///
/// Every method with a default body is optional; the defaults either defer
/// to the core (e.g. [`Self::analyze_basic_blocks`]) or report the feature
/// as unsupported.
pub trait Architecture: 'static + Sized + AsRef<CoreArchitecture> {
    /// Cloneable handle through which callbacks reach the implementation.
    type Handle: Borrow<Self> + Clone;

    type RegisterInfo: RegisterInfo<RegType = Self::Register>;
    type Register: Register<InfoType = Self::RegisterInfo>;
    type RegisterStackInfo: RegisterStackInfo<
        RegType = Self::Register,
        RegInfoType = Self::RegisterInfo,
        RegStackType = Self::RegisterStack,
    >;
    type RegisterStack: RegisterStack<
        InfoType = Self::RegisterStackInfo,
        RegType = Self::Register,
        RegInfoType = Self::RegisterInfo,
    >;

    type Flag: Flag<FlagClass = Self::FlagClass>;
    type FlagWrite: FlagWrite<FlagType = Self::Flag, FlagClass = Self::FlagClass>;
    type FlagClass: FlagClass;
    type FlagGroup: FlagGroup<FlagType = Self::Flag, FlagClass = Self::FlagClass>;

    type Intrinsic: Intrinsic;

    fn endianness(&self) -> Endianness;
    /// Address width in bytes.
    fn address_size(&self) -> usize;
    /// Default size in bytes of an integer on this architecture.
    fn default_integer_size(&self) -> usize;
    /// Required alignment of instruction starts, in bytes.
    fn instruction_alignment(&self) -> usize;
    /// Maximum instruction length in bytes.
    fn max_instr_len(&self) -> usize;
    /// Number of opcode bytes shown in disassembly views.
    fn opcode_display_len(&self) -> usize;

    /// Architecture to use for code at `addr` (for architectures that switch
    /// modes by address).
    fn associated_arch_by_addr(&self, addr: u64) -> CoreArchitecture;

    /// Decodes the instruction at `addr`; `None` if `data` is not a valid
    /// instruction.
    fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo>;
    /// Disassembles the instruction at `addr`, returning its length and
    /// display tokens; `None` on failure.
    fn instruction_text(
        &self,
        data: &[u8],
        addr: u64,
    ) -> Option<(usize, Vec<InstructionTextToken>)>;
    /// Lifts the instruction at `addr` into `il`, returning its length and
    /// whether lifting succeeded; `None` on failure.
    fn instruction_llil(
        &self,
        data: &[u8],
        addr: u64,
        il: &LowLevelILMutableFunction,
    ) -> Option<(usize, bool)>;

    /// Basic-block discovery hook; the default defers to the core's
    /// implementation.
    fn analyze_basic_blocks(
        &self,
        function: &mut Function,
        context: &mut BasicBlockAnalysisContext,
    ) {
        unsafe {
            BNArchitectureDefaultAnalyzeBasicBlocks(function.handle, context.handle);
        }
    }

    /// Lifts the effect of `op` on `flag` for the given write type; the
    /// default derives an expression from the flag's role.
    fn flag_write_llil<'a>(
        &self,
        flag: Self::Flag,
        flag_write_type: Self::FlagWrite,
        op: LowLevelILFlagWriteOp<Self::Register>,
        il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        let role = flag.role(flag_write_type.class());
        Some(get_default_flag_write_llil(self, role, op, il))
    }

    /// Flags consumed when evaluating `condition` under `class`; the default
    /// reports none.
    fn flags_required_for_flag_condition(
        &self,
        _condition: FlagCondition,
        _class: Option<Self::FlagClass>,
    ) -> Vec<Self::Flag> {
        Vec::new()
    }

    /// Lifts a flag condition into an LLIL expression; the default builds
    /// the standard expression for the condition.
    fn flag_cond_llil<'a>(
        &self,
        cond: FlagCondition,
        class: Option<Self::FlagClass>,
        il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        Some(get_default_flag_cond_llil(self, cond, class, il))
    }

    /// Lifts a semantic flag group; the default reports no lifting
    /// available.
    fn flag_group_llil<'a>(
        &self,
        _group: Self::FlagGroup,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }

    fn registers_all(&self) -> Vec<Self::Register>;
    fn registers_full_width(&self) -> Vec<Self::Register>;
    fn registers_global(&self) -> Vec<Self::Register> {
        Vec::new()
    }
    fn registers_system(&self) -> Vec<Self::Register> {
        Vec::new()
    }

    fn register_stacks(&self) -> Vec<Self::RegisterStack> {
        Vec::new()
    }

    fn flags(&self) -> Vec<Self::Flag> {
        Vec::new()
    }
    fn flag_write_types(&self) -> Vec<Self::FlagWrite> {
        Vec::new()
    }
    fn flag_classes(&self) -> Vec<Self::FlagClass> {
        Vec::new()
    }
    fn flag_groups(&self) -> Vec<Self::FlagGroup> {
        Vec::new()
    }

    fn stack_pointer_reg(&self) -> Option<Self::Register>;
    fn link_reg(&self) -> Option<Self::Register> {
        None
    }

    // Id-to-object lookups; the defaults report "unknown id" for every
    // feature an architecture does not declare.
    fn register_from_id(&self, id: RegisterId) -> Option<Self::Register>;

    fn register_stack_from_id(&self, _id: RegisterStackId) -> Option<Self::RegisterStack> {
        None
    }

    fn flag_from_id(&self, _id: FlagId) -> Option<Self::Flag> {
        None
    }
    fn flag_write_from_id(&self, _id: FlagWriteId) -> Option<Self::FlagWrite> {
        None
    }
    fn flag_class_from_id(&self, _id: FlagClassId) -> Option<Self::FlagClass> {
        None
    }
    fn flag_group_from_id(&self, _id: FlagGroupId) -> Option<Self::FlagGroup> {
        None
    }

    fn intrinsics(&self) -> Vec<Self::Intrinsic> {
        Vec::new()
    }
    fn intrinsic_class(&self, _id: IntrinsicId) -> BNIntrinsicClass {
        BNIntrinsicClass::GeneralIntrinsicClass
    }
    fn intrinsic_from_id(&self, _id: IntrinsicId) -> Option<Self::Intrinsic> {
        None
    }

    /// Whether [`Self::assemble`] is implemented.
    fn can_assemble(&self) -> bool {
        false
    }
    fn assemble(&self, _code: &str, _addr: u64) -> Result<Vec<u8>, String> {
        Err("Assemble unsupported".into())
    }

    // Patch-availability queries; all default to unsupported.
    fn is_never_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }
    fn is_always_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }
    fn is_invert_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }
    fn is_skip_and_return_zero_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }
    fn is_skip_and_return_value_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }

    // In-place patching operations; each returns whether the patch was
    // applied. All default to unsupported.
    fn convert_to_nop(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    fn always_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    fn invert_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    fn skip_and_return_value(&self, _data: &mut [u8], _addr: u64, _value: u64) -> bool {
        false
    }

    fn handle(&self) -> Self::Handle;
}
674
// Placeholder register-stack types for architectures that have no register
// stacks. They can never be constructed by the core (the architecture
// reports no stacks), so every method is unreachable.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnusedRegisterStackInfo<R: Register> {
    _reg: std::marker::PhantomData<R>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnusedRegisterStack<R: Register> {
    _reg: std::marker::PhantomData<R>,
}

impl<R: Register> RegisterStackInfo for UnusedRegisterStackInfo<R> {
    type RegStackType = UnusedRegisterStack<R>;
    type RegType = R;
    type RegInfoType = R::InfoType;

    fn storage_regs(&self) -> (Self::RegType, usize) {
        unreachable!()
    }
    fn top_relative_regs(&self) -> Option<(Self::RegType, usize)> {
        unreachable!()
    }
    fn stack_top_reg(&self) -> Self::RegType {
        unreachable!()
    }
}

impl<R: Register> RegisterStack for UnusedRegisterStack<R> {
    type InfoType = UnusedRegisterStackInfo<R>;
    type RegType = R;
    type RegInfoType = R::InfoType;

    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn id(&self) -> RegisterStackId {
        unreachable!()
    }
    fn info(&self) -> Self::InfoType {
        unreachable!()
    }
}
717
/// Placeholder flag type for architectures that have no flags; it doubles as
/// the flag, flag-write, flag-class, and flag-group type. It can never be
/// constructed by the core, so every method is unreachable.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnusedFlag;

impl Flag for UnusedFlag {
    type FlagClass = Self;
    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn role(&self, _class: Option<Self::FlagClass>) -> FlagRole {
        unreachable!()
    }
    fn id(&self) -> FlagId {
        unreachable!()
    }
}

impl FlagWrite for UnusedFlag {
    type FlagType = Self;
    type FlagClass = Self;
    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn class(&self) -> Option<Self> {
        unreachable!()
    }
    fn id(&self) -> FlagWriteId {
        unreachable!()
    }
    fn flags_written(&self) -> Vec<Self::FlagType> {
        unreachable!()
    }
}

impl FlagClass for UnusedFlag {
    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn id(&self) -> FlagClassId {
        unreachable!()
    }
}

impl FlagGroup for UnusedFlag {
    type FlagType = Self;
    type FlagClass = Self;
    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn id(&self) -> FlagGroupId {
        unreachable!()
    }
    fn flags_required(&self) -> Vec<Self::FlagType> {
        unreachable!()
    }
    fn flag_conditions(&self) -> HashMap<Self, FlagCondition> {
        unreachable!()
    }
}
777
/// Placeholder intrinsic type for architectures that have no intrinsics; it
/// can never be constructed by the core, so every method is unreachable.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnusedIntrinsic;

impl Intrinsic for UnusedIntrinsic {
    fn name(&self) -> Cow<'_, str> {
        unreachable!()
    }
    fn id(&self) -> IntrinsicId {
        unreachable!()
    }
    fn inputs(&self) -> Vec<NameAndType> {
        unreachable!()
    }
    fn outputs(&self) -> Vec<Conf<Ref<Type>>> {
        unreachable!()
    }
}
796
/// [`RegisterInfo`] implementation backed by the core's raw
/// `BNRegisterInfo` for a register of a [`CoreArchitecture`].
#[derive(Debug, Copy, Clone)]
pub struct CoreRegisterInfo {
    arch: CoreArchitecture,
    // Id of the register this info describes.
    id: RegisterId,
    info: BNRegisterInfo,
}

impl CoreRegisterInfo {
    pub fn new(arch: CoreArchitecture, id: RegisterId, info: BNRegisterInfo) -> Self {
        Self { arch, id, info }
    }
}
809
810impl RegisterInfo for CoreRegisterInfo {
811 type RegType = CoreRegister;
812
813 fn parent(&self) -> Option<CoreRegister> {
814 if self.id != RegisterId::from(self.info.fullWidthRegister) {
815 Some(CoreRegister::new(
816 self.arch,
817 RegisterId::from(self.info.fullWidthRegister),
818 )?)
819 } else {
820 None
821 }
822 }
823
824 fn size(&self) -> usize {
825 self.info.size
826 }
827
828 fn offset(&self) -> usize {
829 self.info.offset
830 }
831
832 fn implicit_extend(&self) -> ImplicitRegisterExtend {
833 self.info.extend
834 }
835}
836
/// A register of a [`CoreArchitecture`]; a validated (arch, id) pair.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreRegister {
    arch: CoreArchitecture,
    id: RegisterId,
}

impl CoreRegister {
    /// Creates a register handle, returning `None` if `id` is not a valid
    /// register of `arch`.
    pub fn new(arch: CoreArchitecture, id: RegisterId) -> Option<Self> {
        let register = Self { arch, id };
        register.is_valid().then_some(register)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name = unsafe { BNGetArchitectureRegisterName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
861
862impl Register for CoreRegister {
863 type InfoType = CoreRegisterInfo;
864
865 fn name(&self) -> Cow<'_, str> {
866 unsafe {
867 let name = BNGetArchitectureRegisterName(self.arch.handle, self.id.into());
868
869 let res = CStr::from_ptr(name);
873 let res = res.to_string_lossy().into_owned().into();
874
875 BNFreeString(name);
876
877 res
878 }
879 }
880
881 fn info(&self) -> CoreRegisterInfo {
882 CoreRegisterInfo::new(self.arch, self.id, unsafe {
883 BNGetArchitectureRegisterInfo(self.arch.handle, self.id.into())
884 })
885 }
886
887 fn id(&self) -> RegisterId {
888 self.id
889 }
890}
891
impl Debug for CoreRegister {
    // Shows the resolved name alongside the id; the arch handle is omitted
    // as it is just a raw pointer.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoreRegister")
            .field("id", &self.id)
            .field("name", &self.name())
            .finish()
    }
}
900
// Allows core-allocated `u32` register-id lists to be exposed as arrays of
// `CoreRegister` (the architecture is carried as the array context).
impl CoreArrayProvider for CoreRegister {
    type Raw = u32;
    type Context = CoreArchitecture;
    type Wrapped<'a> = Self;
}

unsafe impl CoreArrayProviderInner for CoreRegister {
    unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
        BNFreeRegisterList(raw)
    }

    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
        Self::new(*context, RegisterId::from(*raw)).expect("Register list contains valid registers")
    }
}
916
/// [`RegisterStackInfo`] implementation backed by the core's raw
/// `BNRegisterStackInfo`.
#[derive(Debug, Copy, Clone)]
pub struct CoreRegisterStackInfo {
    arch: CoreArchitecture,
    info: BNRegisterStackInfo,
}

impl CoreRegisterStackInfo {
    pub fn new(arch: CoreArchitecture, info: BNRegisterStackInfo) -> Self {
        Self { arch, info }
    }
}
929
930impl RegisterStackInfo for CoreRegisterStackInfo {
931 type RegStackType = CoreRegisterStack;
932 type RegType = CoreRegister;
933 type RegInfoType = CoreRegisterInfo;
934
935 fn storage_regs(&self) -> (Self::RegType, usize) {
936 (
937 CoreRegister::new(self.arch, RegisterId::from(self.info.firstStorageReg))
938 .expect("Storage register is valid"),
939 self.info.storageCount as usize,
940 )
941 }
942
943 fn top_relative_regs(&self) -> Option<(Self::RegType, usize)> {
944 if self.info.topRelativeCount == 0 {
945 None
946 } else {
947 Some((
948 CoreRegister::new(self.arch, RegisterId::from(self.info.firstTopRelativeReg))
949 .expect("Top relative register is valid"),
950 self.info.topRelativeCount as usize,
951 ))
952 }
953 }
954
955 fn stack_top_reg(&self) -> Self::RegType {
956 CoreRegister::new(self.arch, RegisterId::from(self.info.stackTopReg))
957 .expect("Stack top register is valid")
958 }
959}
960
/// A register stack of a [`CoreArchitecture`]; a validated (arch, id) pair.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreRegisterStack {
    arch: CoreArchitecture,
    id: RegisterStackId,
}

impl CoreRegisterStack {
    /// Creates a register-stack handle, returning `None` if `id` is not a
    /// valid register stack of `arch`.
    pub fn new(arch: CoreArchitecture, id: RegisterStackId) -> Option<Self> {
        let register_stack = Self { arch, id };
        register_stack.is_valid().then_some(register_stack)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name = unsafe { BNGetArchitectureRegisterStackName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
985
impl RegisterStack for CoreRegisterStack {
    type InfoType = CoreRegisterStackInfo;
    type RegType = CoreRegister;
    type RegInfoType = CoreRegisterInfo;

    /// The register stack's name as reported by the core.
    fn name(&self) -> Cow<'_, str> {
        unsafe {
            let name = BNGetArchitectureRegisterStackName(self.arch.handle, self.id.into());

            // Copy the string before releasing it; the core transfers
            // ownership to us.
            let res = CStr::from_ptr(name);
            let res = res.to_string_lossy().into_owned().into();

            BNFreeString(name);

            res
        }
    }

    /// Layout metadata for this register stack.
    fn info(&self) -> CoreRegisterStackInfo {
        CoreRegisterStackInfo::new(self.arch, unsafe {
            BNGetArchitectureRegisterStackInfo(self.arch.handle, self.id.into())
        })
    }

    fn id(&self) -> RegisterStackId {
        self.id
    }
}
1017
/// A status flag of a [`CoreArchitecture`]; a validated (arch, id) pair.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreFlag {
    arch: CoreArchitecture,
    id: FlagId,
}

impl CoreFlag {
    /// Creates a flag handle, returning `None` if `id` is not a valid flag
    /// of `arch`.
    pub fn new(arch: CoreArchitecture, id: FlagId) -> Option<Self> {
        let flag = Self { arch, id };
        flag.is_valid().then_some(flag)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name = unsafe { BNGetArchitectureFlagName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
1042
impl Flag for CoreFlag {
    type FlagClass = CoreFlagClass;

    /// The flag's name as reported by the core.
    fn name(&self) -> Cow<'_, str> {
        unsafe {
            let name = BNGetArchitectureFlagName(self.arch.handle, self.id.into());

            // Copy the string before releasing it; the core transfers
            // ownership to us.
            let res = CStr::from_ptr(name);
            let res = res.to_string_lossy().into_owned().into();

            BNFreeString(name);

            res
        }
    }

    /// The flag's role, optionally specialized by a semantic class (class
    /// id 0 is passed for "no class").
    fn role(&self, class: Option<CoreFlagClass>) -> FlagRole {
        unsafe {
            BNGetArchitectureFlagRole(
                self.arch.handle,
                self.id.into(),
                class.map(|c| c.id.0).unwrap_or(0),
            )
        }
    }

    fn id(&self) -> FlagId {
        self.id
    }
}
1076
/// A flag-write type of a [`CoreArchitecture`]; a validated (arch, id) pair.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreFlagWrite {
    arch: CoreArchitecture,
    id: FlagWriteId,
}

impl CoreFlagWrite {
    /// Creates a flag-write handle, returning `None` if `id` is not a valid
    /// flag-write type of `arch`.
    pub fn new(arch: CoreArchitecture, id: FlagWriteId) -> Option<Self> {
        let flag_write = Self { arch, id };
        flag_write.is_valid().then_some(flag_write)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name = unsafe { BNGetArchitectureFlagWriteTypeName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
1101
1102impl FlagWrite for CoreFlagWrite {
1103 type FlagType = CoreFlag;
1104 type FlagClass = CoreFlagClass;
1105
1106 fn name(&self) -> Cow<'_, str> {
1107 unsafe {
1108 let name = BNGetArchitectureFlagWriteTypeName(self.arch.handle, self.id.into());
1109
1110 let res = CStr::from_ptr(name);
1114 let res = res.to_string_lossy().into_owned().into();
1115
1116 BNFreeString(name);
1117
1118 res
1119 }
1120 }
1121
1122 fn class(&self) -> Option<CoreFlagClass> {
1123 let class = unsafe {
1124 BNGetArchitectureSemanticClassForFlagWriteType(self.arch.handle, self.id.into())
1125 };
1126
1127 match class {
1128 0 => None,
1129 class_id => Some(CoreFlagClass::new(self.arch, class_id.into())?),
1130 }
1131 }
1132
1133 fn id(&self) -> FlagWriteId {
1134 self.id
1135 }
1136
1137 fn flags_written(&self) -> Vec<CoreFlag> {
1138 let mut count: usize = 0;
1139 let regs: *mut u32 = unsafe {
1140 BNGetArchitectureFlagsWrittenByFlagWriteType(
1141 self.arch.handle,
1142 self.id.into(),
1143 &mut count,
1144 )
1145 };
1146
1147 let ret = unsafe {
1148 std::slice::from_raw_parts(regs, count)
1149 .iter()
1150 .map(|id| FlagId::from(*id))
1151 .filter_map(|reg| CoreFlag::new(self.arch, reg))
1152 .collect()
1153 };
1154
1155 unsafe {
1156 BNFreeRegisterList(regs);
1157 }
1158
1159 ret
1160 }
1161}
1162
/// A semantic flag class of a [`CoreArchitecture`]; a validated (arch, id)
/// pair.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreFlagClass {
    arch: CoreArchitecture,
    id: FlagClassId,
}

impl CoreFlagClass {
    /// Creates a flag-class handle, returning `None` if `id` is not a valid
    /// semantic flag class of `arch`.
    pub fn new(arch: CoreArchitecture, id: FlagClassId) -> Option<Self> {
        let flag = Self { arch, id };
        flag.is_valid().then_some(flag)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name =
            unsafe { BNGetArchitectureSemanticFlagClassName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
1188
impl FlagClass for CoreFlagClass {
    /// The flag class's name as reported by the core.
    fn name(&self) -> Cow<'_, str> {
        unsafe {
            let name = BNGetArchitectureSemanticFlagClassName(self.arch.handle, self.id.into());

            // Copy the string before releasing it; the core transfers
            // ownership to us.
            let res = CStr::from_ptr(name);
            let res = res.to_string_lossy().into_owned().into();

            BNFreeString(name);

            res
        }
    }

    fn id(&self) -> FlagClassId {
        self.id
    }
}
1210
/// A semantic flag group of a [`CoreArchitecture`]; a validated (arch, id)
/// pair.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct CoreFlagGroup {
    arch: CoreArchitecture,
    id: FlagGroupId,
}

impl CoreFlagGroup {
    /// Creates a flag-group handle, returning `None` if `id` is not a valid
    /// semantic flag group of `arch`.
    pub fn new(arch: CoreArchitecture, id: FlagGroupId) -> Option<Self> {
        let flag_group = Self { arch, id };
        flag_group.is_valid().then_some(flag_group)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name =
            unsafe { BNGetArchitectureSemanticFlagGroupName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
1236
impl FlagGroup for CoreFlagGroup {
    type FlagType = CoreFlag;
    type FlagClass = CoreFlagClass;

    /// The flag group's name as reported by the core.
    fn name(&self) -> Cow<'_, str> {
        unsafe {
            let name = BNGetArchitectureSemanticFlagGroupName(self.arch.handle, self.id.into());

            // Copy the string before releasing it; the core transfers
            // ownership to us.
            let res = CStr::from_ptr(name);
            let res = res.to_string_lossy().into_owned().into();

            BNFreeString(name);

            res
        }
    }

    fn id(&self) -> FlagGroupId {
        self.id
    }

    /// Flags that must be valid for this group; ids that do not resolve to
    /// valid flags are skipped.
    fn flags_required(&self) -> Vec<CoreFlag> {
        let mut count: usize = 0;
        let regs: *mut u32 = unsafe {
            BNGetArchitectureFlagsRequiredForSemanticFlagGroup(
                self.arch.handle,
                self.id.into(),
                &mut count,
            )
        };

        // Collect before freeing the core-allocated id list.
        let ret = unsafe {
            std::slice::from_raw_parts(regs, count)
                .iter()
                .map(|id| FlagId::from(*id))
                .filter_map(|reg| CoreFlag::new(self.arch, reg))
                .collect()
        };

        unsafe {
            BNFreeRegisterList(regs);
        }

        ret
    }

    /// Per-class conditions for this group; entries whose class id does not
    /// resolve are skipped.
    fn flag_conditions(&self) -> HashMap<CoreFlagClass, FlagCondition> {
        let mut count: usize = 0;

        unsafe {
            let flag_conds = BNGetArchitectureFlagConditionsForSemanticFlagGroup(
                self.arch.handle,
                self.id.into(),
                &mut count,
            );

            // Collect before freeing the core-allocated condition list.
            let ret = std::slice::from_raw_parts_mut(flag_conds, count)
                .iter()
                .filter_map(|class_cond| {
                    Some((
                        CoreFlagClass::new(self.arch, class_cond.semanticClass.into())?,
                        class_cond.condition,
                    ))
                })
                .collect();

            BNFreeFlagConditionsForSemanticFlagGroup(flag_conds);

            ret
        }
    }
}
1312
/// An intrinsic of a [`CoreArchitecture`]; a validated (arch, id) pair.
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct CoreIntrinsic {
    pub arch: CoreArchitecture,
    pub id: IntrinsicId,
}

impl CoreIntrinsic {
    /// Creates an intrinsic handle, returning `None` if `id` is not a valid
    /// intrinsic of `arch`.
    pub fn new(arch: CoreArchitecture, id: IntrinsicId) -> Option<Self> {
        let intrinsic = Self { arch, id };
        intrinsic.is_valid().then_some(intrinsic)
    }

    // Validity probe: the core returns a null name for unknown ids. The
    // returned string is owned by us and must be freed.
    fn is_valid(&self) -> bool {
        let name = unsafe { BNGetArchitectureIntrinsicName(self.arch.handle, self.id.into()) };
        match name.is_null() {
            true => false,
            false => {
                unsafe { BNFreeString(name) };
                true
            }
        }
    }
}
1337
1338impl Intrinsic for CoreIntrinsic {
1339 fn name(&self) -> Cow<'_, str> {
1340 unsafe {
1341 let name = BNGetArchitectureIntrinsicName(self.arch.handle, self.id.into());
1342
1343 let res = CStr::from_ptr(name);
1348 let res = res.to_string_lossy().into_owned().into();
1349
1350 BNFreeString(name);
1351
1352 res
1353 }
1354 }
1355
1356 fn id(&self) -> IntrinsicId {
1357 self.id
1358 }
1359
1360 fn class(&self) -> BNIntrinsicClass {
1361 unsafe { BNGetArchitectureIntrinsicClass(self.arch.handle, self.id.into()) }
1362 }
1363
1364 fn inputs(&self) -> Vec<NameAndType> {
1365 let mut count: usize = 0;
1366 unsafe {
1367 let inputs =
1368 BNGetArchitectureIntrinsicInputs(self.arch.handle, self.id.into(), &mut count);
1369
1370 let ret = std::slice::from_raw_parts_mut(inputs, count)
1371 .iter()
1372 .map(NameAndType::from_raw)
1373 .collect();
1374
1375 BNFreeNameAndTypeList(inputs, count);
1376
1377 ret
1378 }
1379 }
1380
1381 fn outputs(&self) -> Vec<Conf<Ref<Type>>> {
1382 let mut count: usize = 0;
1383 unsafe {
1384 let inputs =
1385 BNGetArchitectureIntrinsicOutputs(self.arch.handle, self.id.into(), &mut count);
1386
1387 let ret = std::slice::from_raw_parts_mut(inputs, count)
1388 .iter()
1389 .map(Conf::<Ref<Type>>::from_raw)
1390 .collect();
1391
1392 BNFreeOutputTypeList(inputs, count);
1393
1394 ret
1395 }
1396 }
1397}
1398
// Debug output resolves the name/class/inputs/outputs through the core on
// every call; each field getter performs FFI round-trips.
impl Debug for CoreIntrinsic {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoreIntrinsic")
            .field("id", &self.id)
            .field("name", &self.name())
            .field("class", &self.class())
            .field("inputs", &self.inputs())
            .field("outputs", &self.outputs())
            .finish()
    }
}
1410
1411pub struct CoreArchitectureList(*mut *mut BNArchitecture, usize);
1413
1414impl Deref for CoreArchitectureList {
1415 type Target = [CoreArchitecture];
1416
1417 fn deref(&self) -> &Self::Target {
1418 unsafe { std::slice::from_raw_parts_mut(self.0 as *mut CoreArchitecture, self.1) }
1419 }
1420}
1421
impl Drop for CoreArchitectureList {
    fn drop(&mut self) {
        // SAFETY: `self.0` came from `BNGetArchitectureList` (see
        // `CoreArchitecture::list_all`) and is freed exactly once, here.
        unsafe {
            BNFreeArchitectureList(self.0);
        }
    }
}
1429
/// A handle to an architecture registered with the Binary Ninja core.
///
/// This is a thin `Copy` wrapper over the raw core pointer; the core owns the
/// underlying architecture object.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreArchitecture {
    pub(crate) handle: *mut BNArchitecture,
}
1434
1435impl CoreArchitecture {
1436 pub unsafe fn from_raw(handle: *mut BNArchitecture) -> Self {
1438 debug_assert!(!handle.is_null());
1439 CoreArchitecture { handle }
1440 }
1441
1442 pub fn list_all() -> CoreArchitectureList {
1443 let mut count: usize = 0;
1444 let archs = unsafe { BNGetArchitectureList(&mut count) };
1445
1446 CoreArchitectureList(archs, count)
1447 }
1448
1449 pub fn by_name(name: &str) -> Option<Self> {
1450 let name = name.to_cstr();
1451 let handle = unsafe { BNGetArchitectureByName(name.as_ptr()) };
1452 match handle.is_null() {
1453 false => Some(CoreArchitecture { handle }),
1454 true => None,
1455 }
1456 }
1457
1458 pub fn name(&self) -> String {
1459 unsafe { BnString::into_string(BNGetArchitectureName(self.handle)) }
1460 }
1461}
1462
// SAFETY(review): `CoreArchitecture` is a copyable wrapper around a core-owned
// pointer; these impls assert that the core treats architecture handles as
// usable from any thread. This matches the original code's assertion — confirm
// against the core API's threading guarantees.
unsafe impl Send for CoreArchitecture {}
unsafe impl Sync for CoreArchitecture {}
1465
// Identity `AsRef`, so generic code taking `impl AsRef<CoreArchitecture>` also
// accepts a `CoreArchitecture` directly.
impl AsRef<CoreArchitecture> for CoreArchitecture {
    fn as_ref(&self) -> &Self {
        self
    }
}
1471
1472impl Architecture for CoreArchitecture {
1473 type Handle = Self;
1474
1475 type RegisterInfo = CoreRegisterInfo;
1476 type Register = CoreRegister;
1477 type RegisterStackInfo = CoreRegisterStackInfo;
1478 type RegisterStack = CoreRegisterStack;
1479 type Flag = CoreFlag;
1480 type FlagWrite = CoreFlagWrite;
1481 type FlagClass = CoreFlagClass;
1482 type FlagGroup = CoreFlagGroup;
1483 type Intrinsic = CoreIntrinsic;
1484
1485 fn endianness(&self) -> Endianness {
1486 unsafe { BNGetArchitectureEndianness(self.handle) }
1487 }
1488
1489 fn address_size(&self) -> usize {
1490 unsafe { BNGetArchitectureAddressSize(self.handle) }
1491 }
1492
1493 fn default_integer_size(&self) -> usize {
1494 unsafe { BNGetArchitectureDefaultIntegerSize(self.handle) }
1495 }
1496
1497 fn instruction_alignment(&self) -> usize {
1498 unsafe { BNGetArchitectureInstructionAlignment(self.handle) }
1499 }
1500
1501 fn max_instr_len(&self) -> usize {
1502 unsafe { BNGetArchitectureMaxInstructionLength(self.handle) }
1503 }
1504
1505 fn opcode_display_len(&self) -> usize {
1506 unsafe { BNGetArchitectureOpcodeDisplayLength(self.handle) }
1507 }
1508
1509 fn associated_arch_by_addr(&self, addr: u64) -> CoreArchitecture {
1510 let handle = unsafe { BNGetAssociatedArchitectureByAddress(self.handle, addr as *mut _) };
1511 CoreArchitecture { handle }
1512 }
1513
1514 fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo> {
1515 let mut info = BNInstructionInfo::default();
1516 if unsafe { BNGetInstructionInfo(self.handle, data.as_ptr(), addr, data.len(), &mut info) }
1517 {
1518 Some(info.into())
1519 } else {
1520 None
1521 }
1522 }
1523
1524 fn instruction_text(
1525 &self,
1526 data: &[u8],
1527 addr: u64,
1528 ) -> Option<(usize, Vec<InstructionTextToken>)> {
1529 let mut consumed = data.len();
1530 let mut count: usize = 0;
1531 let mut result: *mut BNInstructionTextToken = std::ptr::null_mut();
1532
1533 unsafe {
1534 if BNGetInstructionText(
1535 self.handle,
1536 data.as_ptr(),
1537 addr,
1538 &mut consumed,
1539 &mut result,
1540 &mut count,
1541 ) {
1542 let instr_text_tokens = std::slice::from_raw_parts(result, count)
1543 .iter()
1544 .map(InstructionTextToken::from_raw)
1545 .collect();
1546 BNFreeInstructionText(result, count);
1547 Some((consumed, instr_text_tokens))
1548 } else {
1549 None
1550 }
1551 }
1552 }
1553
1554 fn instruction_llil(
1555 &self,
1556 data: &[u8],
1557 addr: u64,
1558 il: &LowLevelILMutableFunction,
1559 ) -> Option<(usize, bool)> {
1560 let mut size = data.len();
1561 let success = unsafe {
1562 BNGetInstructionLowLevelIL(
1563 self.handle,
1564 data.as_ptr(),
1565 addr,
1566 &mut size as *mut _,
1567 il.handle,
1568 )
1569 };
1570
1571 if !success {
1572 None
1573 } else {
1574 Some((size, true))
1575 }
1576 }
1577
1578 fn analyze_basic_blocks(
1579 &self,
1580 function: &mut Function,
1581 context: &mut BasicBlockAnalysisContext,
1582 ) {
1583 unsafe {
1584 BNArchitectureAnalyzeBasicBlocks(self.handle, function.handle, context.handle);
1585 }
1586 }
1587
1588 fn flag_write_llil<'a>(
1589 &self,
1590 _flag: Self::Flag,
1591 _flag_write: Self::FlagWrite,
1592 _op: LowLevelILFlagWriteOp<Self::Register>,
1593 _il: &'a LowLevelILMutableFunction,
1594 ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
1595 None
1596 }
1597
1598 fn flags_required_for_flag_condition(
1599 &self,
1600 condition: FlagCondition,
1601 class: Option<Self::FlagClass>,
1602 ) -> Vec<Self::Flag> {
1603 let class_id_raw = class.map(|c| c.id().0).unwrap_or(0);
1604
1605 unsafe {
1606 let mut count: usize = 0;
1607 let flags = BNGetArchitectureFlagsRequiredForFlagCondition(
1608 self.handle,
1609 condition,
1610 class_id_raw,
1611 &mut count,
1612 );
1613
1614 let ret = std::slice::from_raw_parts(flags, count)
1615 .iter()
1616 .map(|&id| FlagId::from(id))
1617 .filter_map(|flag| CoreFlag::new(*self, flag))
1618 .collect();
1619
1620 BNFreeRegisterList(flags);
1621
1622 ret
1623 }
1624 }
1625
1626 fn flag_cond_llil<'a>(
1627 &self,
1628 _cond: FlagCondition,
1629 _class: Option<Self::FlagClass>,
1630 _il: &'a LowLevelILMutableFunction,
1631 ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
1632 None
1633 }
1634
1635 fn flag_group_llil<'a>(
1636 &self,
1637 _group: Self::FlagGroup,
1638 _il: &'a LowLevelILMutableFunction,
1639 ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
1640 None
1641 }
1642
1643 fn registers_all(&self) -> Vec<CoreRegister> {
1644 unsafe {
1645 let mut count: usize = 0;
1646 let registers_raw = BNGetAllArchitectureRegisters(self.handle, &mut count);
1647
1648 let ret = std::slice::from_raw_parts(registers_raw, count)
1649 .iter()
1650 .map(|&id| RegisterId::from(id))
1651 .filter_map(|reg| CoreRegister::new(*self, reg))
1652 .collect();
1653
1654 BNFreeRegisterList(registers_raw);
1655
1656 ret
1657 }
1658 }
1659
1660 fn registers_full_width(&self) -> Vec<CoreRegister> {
1661 unsafe {
1662 let mut count: usize = 0;
1663 let registers_raw = BNGetFullWidthArchitectureRegisters(self.handle, &mut count);
1664
1665 let ret = std::slice::from_raw_parts(registers_raw, count)
1666 .iter()
1667 .map(|&id| RegisterId::from(id))
1668 .filter_map(|reg| CoreRegister::new(*self, reg))
1669 .collect();
1670
1671 BNFreeRegisterList(registers_raw);
1672
1673 ret
1674 }
1675 }
1676
1677 fn registers_global(&self) -> Vec<CoreRegister> {
1678 unsafe {
1679 let mut count: usize = 0;
1680 let registers_raw = BNGetArchitectureGlobalRegisters(self.handle, &mut count);
1681
1682 let ret = std::slice::from_raw_parts(registers_raw, count)
1683 .iter()
1684 .map(|&id| RegisterId::from(id))
1685 .filter_map(|reg| CoreRegister::new(*self, reg))
1686 .collect();
1687
1688 BNFreeRegisterList(registers_raw);
1689
1690 ret
1691 }
1692 }
1693
1694 fn registers_system(&self) -> Vec<CoreRegister> {
1695 unsafe {
1696 let mut count: usize = 0;
1697 let registers_raw = BNGetArchitectureSystemRegisters(self.handle, &mut count);
1698
1699 let ret = std::slice::from_raw_parts(registers_raw, count)
1700 .iter()
1701 .map(|&id| RegisterId::from(id))
1702 .filter_map(|reg| CoreRegister::new(*self, reg))
1703 .collect();
1704
1705 BNFreeRegisterList(registers_raw);
1706
1707 ret
1708 }
1709 }
1710
1711 fn register_stacks(&self) -> Vec<CoreRegisterStack> {
1712 unsafe {
1713 let mut count: usize = 0;
1714 let reg_stacks_raw = BNGetAllArchitectureRegisterStacks(self.handle, &mut count);
1715
1716 let ret = std::slice::from_raw_parts(reg_stacks_raw, count)
1717 .iter()
1718 .map(|&id| RegisterStackId::from(id))
1719 .filter_map(|reg_stack| CoreRegisterStack::new(*self, reg_stack))
1720 .collect();
1721
1722 BNFreeRegisterList(reg_stacks_raw);
1723
1724 ret
1725 }
1726 }
1727
1728 fn flags(&self) -> Vec<CoreFlag> {
1729 unsafe {
1730 let mut count: usize = 0;
1731 let flags_raw = BNGetAllArchitectureFlags(self.handle, &mut count);
1732
1733 let ret = std::slice::from_raw_parts(flags_raw, count)
1734 .iter()
1735 .map(|&id| FlagId::from(id))
1736 .filter_map(|flag| CoreFlag::new(*self, flag))
1737 .collect();
1738
1739 BNFreeRegisterList(flags_raw);
1740
1741 ret
1742 }
1743 }
1744
1745 fn flag_write_types(&self) -> Vec<CoreFlagWrite> {
1746 unsafe {
1747 let mut count: usize = 0;
1748 let flag_writes_raw = BNGetAllArchitectureFlagWriteTypes(self.handle, &mut count);
1749
1750 let ret = std::slice::from_raw_parts(flag_writes_raw, count)
1751 .iter()
1752 .map(|&id| FlagWriteId::from(id))
1753 .filter_map(|flag_write| CoreFlagWrite::new(*self, flag_write))
1754 .collect();
1755
1756 BNFreeRegisterList(flag_writes_raw);
1757
1758 ret
1759 }
1760 }
1761
1762 fn flag_classes(&self) -> Vec<CoreFlagClass> {
1763 unsafe {
1764 let mut count: usize = 0;
1765 let flag_classes_raw = BNGetAllArchitectureSemanticFlagClasses(self.handle, &mut count);
1766
1767 let ret = std::slice::from_raw_parts(flag_classes_raw, count)
1768 .iter()
1769 .map(|&id| FlagClassId::from(id))
1770 .filter_map(|flag_class| CoreFlagClass::new(*self, flag_class))
1771 .collect();
1772
1773 BNFreeRegisterList(flag_classes_raw);
1774
1775 ret
1776 }
1777 }
1778
1779 fn flag_groups(&self) -> Vec<CoreFlagGroup> {
1780 unsafe {
1781 let mut count: usize = 0;
1782 let flag_groups_raw = BNGetAllArchitectureSemanticFlagGroups(self.handle, &mut count);
1783
1784 let ret = std::slice::from_raw_parts(flag_groups_raw, count)
1785 .iter()
1786 .map(|&id| FlagGroupId::from(id))
1787 .filter_map(|flag_group| CoreFlagGroup::new(*self, flag_group))
1788 .collect();
1789
1790 BNFreeRegisterList(flag_groups_raw);
1791
1792 ret
1793 }
1794 }
1795
1796 fn stack_pointer_reg(&self) -> Option<CoreRegister> {
1797 match unsafe { BNGetArchitectureStackPointerRegister(self.handle) } {
1798 0xffff_ffff => None,
1799 reg => Some(CoreRegister::new(*self, reg.into())?),
1800 }
1801 }
1802
1803 fn link_reg(&self) -> Option<CoreRegister> {
1804 match unsafe { BNGetArchitectureLinkRegister(self.handle) } {
1805 0xffff_ffff => None,
1806 reg => Some(CoreRegister::new(*self, reg.into())?),
1807 }
1808 }
1809
1810 fn register_from_id(&self, id: RegisterId) -> Option<CoreRegister> {
1811 CoreRegister::new(*self, id)
1812 }
1813
1814 fn register_stack_from_id(&self, id: RegisterStackId) -> Option<CoreRegisterStack> {
1815 CoreRegisterStack::new(*self, id)
1816 }
1817
1818 fn flag_from_id(&self, id: FlagId) -> Option<CoreFlag> {
1819 CoreFlag::new(*self, id)
1820 }
1821
1822 fn flag_write_from_id(&self, id: FlagWriteId) -> Option<CoreFlagWrite> {
1823 CoreFlagWrite::new(*self, id)
1824 }
1825
1826 fn flag_class_from_id(&self, id: FlagClassId) -> Option<CoreFlagClass> {
1827 CoreFlagClass::new(*self, id)
1828 }
1829
1830 fn flag_group_from_id(&self, id: FlagGroupId) -> Option<CoreFlagGroup> {
1831 CoreFlagGroup::new(*self, id)
1832 }
1833
1834 fn intrinsics(&self) -> Vec<CoreIntrinsic> {
1835 unsafe {
1836 let mut count: usize = 0;
1837 let intrinsics_raw = BNGetAllArchitectureIntrinsics(self.handle, &mut count);
1838
1839 let intrinsics = std::slice::from_raw_parts_mut(intrinsics_raw, count)
1840 .iter()
1841 .map(|&id| IntrinsicId::from(id))
1842 .filter_map(|intrinsic| CoreIntrinsic::new(*self, intrinsic))
1843 .collect();
1844
1845 BNFreeRegisterList(intrinsics_raw);
1846
1847 intrinsics
1848 }
1849 }
1850
1851 fn intrinsic_from_id(&self, id: IntrinsicId) -> Option<CoreIntrinsic> {
1852 CoreIntrinsic::new(*self, id)
1853 }
1854
1855 fn can_assemble(&self) -> bool {
1856 unsafe { BNCanArchitectureAssemble(self.handle) }
1857 }
1858
1859 fn assemble(&self, code: &str, addr: u64) -> Result<Vec<u8>, String> {
1860 let code = CString::new(code).map_err(|_| "Invalid encoding in code string".to_string())?;
1861
1862 let result = DataBuffer::new(&[]);
1863 let mut error_raw: *mut c_char = std::ptr::null_mut();
1865 let res = unsafe {
1866 BNAssemble(
1867 self.handle,
1868 code.as_ptr(),
1869 addr,
1870 result.as_raw(),
1871 &mut error_raw as *mut *mut c_char,
1872 )
1873 };
1874
1875 let error = raw_to_string(error_raw);
1876 unsafe {
1877 BNFreeString(error_raw);
1878 }
1879
1880 if res {
1881 Ok(result.get_data().to_vec())
1882 } else {
1883 Err(error.unwrap_or_else(|| "Assemble failed".into()))
1884 }
1885 }
1886
1887 fn is_never_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
1888 unsafe {
1889 BNIsArchitectureNeverBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
1890 }
1891 }
1892
1893 fn is_always_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
1894 unsafe {
1895 BNIsArchitectureAlwaysBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
1896 }
1897 }
1898
1899 fn is_invert_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
1900 unsafe {
1901 BNIsArchitectureInvertBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
1902 }
1903 }
1904
1905 fn is_skip_and_return_zero_patch_available(&self, data: &[u8], addr: u64) -> bool {
1906 unsafe {
1907 BNIsArchitectureSkipAndReturnZeroPatchAvailable(
1908 self.handle,
1909 data.as_ptr(),
1910 addr,
1911 data.len(),
1912 )
1913 }
1914 }
1915
1916 fn is_skip_and_return_value_patch_available(&self, data: &[u8], addr: u64) -> bool {
1917 unsafe {
1918 BNIsArchitectureSkipAndReturnValuePatchAvailable(
1919 self.handle,
1920 data.as_ptr(),
1921 addr,
1922 data.len(),
1923 )
1924 }
1925 }
1926
1927 fn convert_to_nop(&self, data: &mut [u8], addr: u64) -> bool {
1928 unsafe { BNArchitectureConvertToNop(self.handle, data.as_mut_ptr(), addr, data.len()) }
1929 }
1930
1931 fn always_branch(&self, data: &mut [u8], addr: u64) -> bool {
1932 unsafe { BNArchitectureAlwaysBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
1933 }
1934
1935 fn invert_branch(&self, data: &mut [u8], addr: u64) -> bool {
1936 unsafe { BNArchitectureInvertBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
1937 }
1938
1939 fn skip_and_return_value(&self, data: &mut [u8], addr: u64, value: u64) -> bool {
1940 unsafe {
1941 BNArchitectureSkipAndReturnValue(
1942 self.handle,
1943 data.as_mut_ptr(),
1944 addr,
1945 data.len(),
1946 value,
1947 )
1948 }
1949 }
1950
1951 fn handle(&self) -> CoreArchitecture {
1952 *self
1953 }
1954}
1955
/// Rust-side view of a core `BNBasicBlockAnalysisContext`.
///
/// The public fields are copied out of the raw context on construction
/// (see `from_raw`); the private collections are accumulated through the
/// `add_*` methods and written back to the core in `finalize`.
pub struct BasicBlockAnalysisContext {
    pub(crate) handle: *mut BNBasicBlockAnalysisContext,
    // Set when `add_contextual_return` inserts a new location, so `finalize`
    // only writes the contextual-return table back when it actually changed.
    contextual_returns_dirty: bool,

    pub indirect_branches: Vec<IndirectBranchInfo>,
    pub indirect_no_return_calls: HashSet<ArchAndAddr>,
    pub analysis_skip_override: BNFunctionAnalysisSkipOverride,
    pub guided_analysis_mode: bool,
    pub trigger_guided_on_invalid_instruction: bool,
    pub translate_tail_calls: bool,
    pub disallow_branch_to_string: bool,
    pub max_function_size: u64,

    // Written back to the raw context unconditionally in `finalize`.
    pub max_size_reached: bool,
    contextual_returns: HashMap<ArchAndAddr, bool>,

    // Keyed by target address; maps to the (arch, addr) of the referencing
    // instruction. Only the first source per target is kept.
    direct_code_references: HashMap<u64, ArchAndAddr>,
    direct_no_return_calls: HashSet<ArchAndAddr>,
    halted_disassembly_addresses: HashSet<ArchAndAddr>,
    inlined_unresolved_indirect_branches: HashSet<ArchAndAddr>,
}
1980
impl BasicBlockAnalysisContext {
    /// Builds the Rust-side context by copying every input table out of the
    /// raw core context.
    ///
    /// # Safety
    /// `handle` must be a valid, non-null `BNBasicBlockAnalysisContext`
    /// pointer whose arrays match their advertised counts, and it must remain
    /// valid for the lifetime of the returned value.
    pub unsafe fn from_raw(handle: *mut BNBasicBlockAnalysisContext) -> Self {
        debug_assert!(!handle.is_null());

        let ctx_ref = &*handle;

        // Each loop below reads `count` raw elements with `ptr::read` and
        // converts them into the owned Rust representation.
        let indirect_branches = (0..ctx_ref.indirectBranchesCount)
            .map(|i| {
                let raw: BNIndirectBranchInfo =
                    unsafe { std::ptr::read(ctx_ref.indirectBranches.add(i)) };
                IndirectBranchInfo::from(raw)
            })
            .collect::<Vec<_>>();

        let indirect_no_return_calls = (0..ctx_ref.indirectNoReturnCallsCount)
            .map(|i| {
                let raw = unsafe { std::ptr::read(ctx_ref.indirectNoReturnCalls.add(i)) };
                ArchAndAddr::from(raw)
            })
            .collect::<HashSet<_>>();

        // Locations and values are parallel arrays sharing one count.
        let contextual_returns = (0..ctx_ref.contextualFunctionReturnCount)
            .map(|i| {
                let loc = unsafe {
                    let raw = std::ptr::read(ctx_ref.contextualFunctionReturnLocations.add(i));
                    ArchAndAddr::from(raw)
                };
                let val = unsafe { *ctx_ref.contextualFunctionReturnValues.add(i) };
                (loc, val)
            })
            .collect::<HashMap<_, _>>();

        // Keyed by target: sources and targets are parallel arrays.
        let direct_code_references = (0..ctx_ref.directRefCount)
            .map(|i| {
                let src = unsafe {
                    let raw = std::ptr::read(ctx_ref.directRefSources.add(i));
                    ArchAndAddr::from(raw)
                };
                let tgt = unsafe { *ctx_ref.directRefTargets.add(i) };
                (tgt, src)
            })
            .collect::<HashMap<_, _>>();

        let direct_no_return_calls = (0..ctx_ref.directNoReturnCallsCount)
            .map(|i| {
                let raw = unsafe { std::ptr::read(ctx_ref.directNoReturnCalls.add(i)) };
                ArchAndAddr::from(raw)
            })
            .collect::<HashSet<_>>();

        let halted_disassembly_addresses = (0..ctx_ref.haltedDisassemblyAddressesCount)
            .map(|i| {
                let raw = unsafe { std::ptr::read(ctx_ref.haltedDisassemblyAddresses.add(i)) };
                ArchAndAddr::from(raw)
            })
            .collect::<HashSet<_>>();

        let inlined_unresolved_indirect_branches = (0..ctx_ref
            .inlinedUnresolvedIndirectBranchCount)
            .map(|i| {
                let raw =
                    unsafe { std::ptr::read(ctx_ref.inlinedUnresolvedIndirectBranches.add(i)) };
                ArchAndAddr::from(raw)
            })
            .collect::<HashSet<_>>();

        BasicBlockAnalysisContext {
            handle,
            contextual_returns_dirty: false,
            indirect_branches,
            indirect_no_return_calls,
            analysis_skip_override: ctx_ref.analysisSkipOverride,
            guided_analysis_mode: ctx_ref.guidedAnalysisMode,
            trigger_guided_on_invalid_instruction: ctx_ref.triggerGuidedOnInvalidInstruction,
            translate_tail_calls: ctx_ref.translateTailCalls,
            disallow_branch_to_string: ctx_ref.disallowBranchToString,
            max_function_size: ctx_ref.maxFunctionSize,
            max_size_reached: ctx_ref.maxSizeReached,
            contextual_returns,
            direct_code_references,
            direct_no_return_calls,
            halted_disassembly_addresses,
            inlined_unresolved_indirect_branches,
        }
    }

    /// Records (or overwrites) a contextual return decision; marks the table
    /// dirty only when `loc` is new. NOTE(review): overwriting an existing
    /// location with a *different* value does not set the dirty flag — confirm
    /// this is intended.
    pub fn add_contextual_return(&mut self, loc: ArchAndAddr, value: bool) {
        if !self.contextual_returns.contains_key(&loc) {
            self.contextual_returns_dirty = true;
        }

        self.contextual_returns.insert(loc, value);
    }

    /// Records a direct code reference; the first source seen for a given
    /// target wins.
    pub fn add_direct_code_reference(&mut self, target: u64, src: ArchAndAddr) {
        self.direct_code_references.entry(target).or_insert(src);
    }

    /// Marks `loc` as a direct call to a function that does not return.
    pub fn add_direct_no_return_call(&mut self, loc: ArchAndAddr) {
        self.direct_no_return_calls.insert(loc);
    }

    /// Marks `loc` as an address where disassembly was halted.
    pub fn add_halted_disassembly_address(&mut self, loc: ArchAndAddr) {
        self.halted_disassembly_addresses.insert(loc);
    }

    /// Marks `loc` as an inlined, unresolved indirect branch.
    pub fn add_inlined_unresolved_indirect_branch(&mut self, loc: ArchAndAddr) {
        self.inlined_unresolved_indirect_branches.insert(loc);
    }

    /// Asks the core to create a basic block at `start` for `arch`; returns
    /// `None` when the core declines.
    pub fn create_basic_block(
        &self,
        arch: CoreArchitecture,
        start: u64,
    ) -> Option<Ref<BasicBlock<NativeBlock>>> {
        let raw_block =
            unsafe { BNAnalyzeBasicBlocksContextCreateBasicBlock(self.handle, arch.handle, start) };

        if raw_block.is_null() {
            return None;
        }

        unsafe { Some(BasicBlock::ref_from_raw(raw_block, NativeBlock::new())) }
    }

    /// Attaches a previously created block to the function under analysis.
    pub fn add_basic_block(&self, block: Ref<BasicBlock<NativeBlock>>) {
        unsafe {
            BNAnalyzeBasicBlocksContextAddBasicBlockToFunction(self.handle, block.handle);
        }
    }

    /// Registers a temporary outgoing reference to `target`.
    pub fn add_temp_outgoing_reference(&self, target: &Function) {
        unsafe {
            BNAnalyzeBasicBlocksContextAddTempReference(self.handle, target.handle);
        }
    }

    /// Writes every accumulated table back into the raw context and hands it
    /// to the core for finalization. Each table is flattened into temporary
    /// arrays whose pointers are only used for the duration of the FFI call.
    pub fn finalize(&mut self) {
        if !self.direct_code_references.is_empty() {
            // Split the map back into the parallel source/target arrays the
            // core expects.
            let total = self.direct_code_references.len();
            let mut sources: Vec<BNArchitectureAndAddress> = Vec::with_capacity(total);
            let mut targets: Vec<u64> = Vec::with_capacity(total);
            for (target, src) in &self.direct_code_references {
                sources.push(src.into_raw());
                targets.push(*target);
            }
            unsafe {
                BNAnalyzeBasicBlocksContextSetDirectCodeReferences(
                    self.handle,
                    sources.as_mut_ptr(),
                    targets.as_mut_ptr(),
                    total,
                );
            }
        }

        if !self.direct_no_return_calls.is_empty() {
            let total = self.direct_no_return_calls.len();
            let mut locations: Vec<BNArchitectureAndAddress> = Vec::with_capacity(total);
            for loc in &self.direct_no_return_calls {
                locations.push(loc.into_raw());
            }
            unsafe {
                BNAnalyzeBasicBlocksContextSetDirectNoReturnCalls(
                    self.handle,
                    locations.as_mut_ptr(),
                    total,
                );
            }
        }

        if !self.halted_disassembly_addresses.is_empty() {
            let total = self.halted_disassembly_addresses.len();
            let mut locations: Vec<BNArchitectureAndAddress> = Vec::with_capacity(total);
            for loc in &self.halted_disassembly_addresses {
                locations.push(loc.into_raw());
            }
            unsafe {
                BNAnalyzeBasicBlocksContextSetHaltedDisassemblyAddresses(
                    self.handle,
                    locations.as_mut_ptr(),
                    total,
                );
            }
        }

        if !self.inlined_unresolved_indirect_branches.is_empty() {
            let total = self.inlined_unresolved_indirect_branches.len();
            let mut locations: Vec<BNArchitectureAndAddress> = Vec::with_capacity(total);
            for loc in &self.inlined_unresolved_indirect_branches {
                locations.push(loc.into_raw());
            }
            unsafe {
                BNAnalyzeBasicBlocksContextSetInlinedUnresolvedIndirectBranches(
                    self.handle,
                    locations.as_mut_ptr(),
                    total,
                );
            }
        }

        // Propagate the possibly-updated flag directly into the raw struct.
        unsafe {
            (*self.handle).maxSizeReached = self.max_size_reached;
        }

        // Only rewrite the contextual-return table if something was added.
        if self.contextual_returns_dirty {
            let total = self.contextual_returns.len();
            let mut locations: Vec<BNArchitectureAndAddress> = Vec::with_capacity(total);
            let mut values: Vec<bool> = Vec::with_capacity(total);
            for (loc, value) in &self.contextual_returns {
                locations.push(loc.into_raw());
                values.push(*value);
            }
            unsafe {
                BNAnalyzeBasicBlocksContextSetContextualFunctionReturns(
                    self.handle,
                    locations.as_mut_ptr(),
                    values.as_mut_ptr(),
                    total,
                );
            }
        }

        unsafe { BNAnalyzeBasicBlocksContextFinalize(self.handle) };
    }
}
2207
// Debug output queries the core for each field; every field getter is an FFI
// round-trip against the architecture handle.
impl Debug for CoreArchitecture {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoreArchitecture")
            .field("name", &self.name())
            .field("endianness", &self.endianness())
            .field("address_size", &self.address_size())
            .field("default_integer_size", &self.default_integer_size())
            .field("instruction_alignment", &self.instruction_alignment())
            .field("max_instr_len", &self.max_instr_len())
            .field("opcode_display_len", &self.opcode_display_len())
            .finish()
    }
}
2221
// Generates a matched getter/setter pair for one of the architecture's
// well-known calling-convention slots. The getter wraps the raw handle (or
// returns `None` if the slot is unset); the setter asserts that the calling
// convention belongs to this architecture before storing it.
macro_rules! cc_func {
    ($get_name:ident, $get_api:ident, $set_name:ident, $set_api:ident) => {
        fn $get_name(&self) -> Option<Ref<CoreCallingConvention>> {
            let arch = self.as_ref();

            unsafe {
                let cc = $get_api(arch.handle);

                if cc.is_null() {
                    None
                } else {
                    Some(CoreCallingConvention::ref_from_raw(
                        cc,
                        self.as_ref().handle(),
                    ))
                }
            }
        }

        fn $set_name(&self, cc: &CoreCallingConvention) {
            let arch = self.as_ref();

            // A calling convention carries its owning architecture; storing
            // one from a different architecture would be a logic error.
            assert!(
                cc.arch_handle.borrow().as_ref().handle == arch.handle,
                "use of calling convention with non-matching architecture!"
            );

            unsafe {
                $set_api(arch.handle, cc.handle);
            }
        }
    };
}
2255
/// Extension methods available on every [`Architecture`] (see the blanket
/// impl below); mostly calling-convention, platform and relocation helpers
/// backed by the core.
pub trait ArchitectureExt: Architecture {
    /// Looks up a register by name; the core returns 0xffff_ffff when the
    /// name is unknown.
    fn register_by_name(&self, name: &str) -> Option<Self::Register> {
        let name = name.to_cstr();

        match unsafe { BNGetArchitectureRegisterByName(self.as_ref().handle, name.as_ptr()) } {
            0xffff_ffff => None,
            reg => self.register_from_id(reg.into()),
        }
    }

    /// All calling conventions registered for this architecture.
    fn calling_conventions(&self) -> Array<CoreCallingConvention> {
        unsafe {
            let mut count = 0;
            let calling_convs =
                BNGetArchitectureCallingConventions(self.as_ref().handle, &mut count);
            Array::new(calling_convs, count, self.as_ref().handle())
        }
    }

    // Getter/setter pairs for the architecture's well-known calling
    // convention slots (see the `cc_func!` macro).
    cc_func!(
        get_default_calling_convention,
        BNGetArchitectureDefaultCallingConvention,
        set_default_calling_convention,
        BNSetArchitectureDefaultCallingConvention
    );

    cc_func!(
        get_cdecl_calling_convention,
        BNGetArchitectureCdeclCallingConvention,
        set_cdecl_calling_convention,
        BNSetArchitectureCdeclCallingConvention
    );

    cc_func!(
        get_stdcall_calling_convention,
        BNGetArchitectureStdcallCallingConvention,
        set_stdcall_calling_convention,
        BNSetArchitectureStdcallCallingConvention
    );

    cc_func!(
        get_fastcall_calling_convention,
        BNGetArchitectureFastcallCallingConvention,
        set_fastcall_calling_convention,
        BNSetArchitectureFastcallCallingConvention
    );

    /// The architecture's standalone platform, if the core provides one.
    fn standalone_platform(&self) -> Option<Ref<Platform>> {
        unsafe {
            let handle = BNGetArchitectureStandalonePlatform(self.as_ref().handle);

            if handle.is_null() {
                return None;
            }

            Some(Platform::ref_from_raw(handle))
        }
    }

    /// The relocation handler registered for `view_name`, if any. Returns
    /// `None` both for an unknown view name and for a name containing an
    /// interior NUL byte.
    fn relocation_handler(&self, view_name: &str) -> Option<Ref<CoreRelocationHandler>> {
        let view_name = match CString::new(view_name) {
            Ok(view_name) => view_name,
            Err(_) => return None,
        };

        unsafe {
            let handle =
                BNArchitectureGetRelocationHandler(self.as_ref().handle, view_name.as_ptr());

            if handle.is_null() {
                return None;
            }

            Some(CoreRelocationHandler::ref_from_raw(handle))
        }
    }

    /// Registers a custom relocation handler for this architecture under
    /// `name`; `func` constructs the handler from its handles.
    fn register_relocation_handler<R, F>(&self, name: &str, func: F)
    where
        R: 'static
            + RelocationHandler<Handle = CustomRelocationHandlerHandle<R>>
            + Send
            + Sync
            + Sized,
        F: FnOnce(CustomRelocationHandlerHandle<R>, CoreRelocationHandler) -> R,
    {
        crate::relocation::register_relocation_handler(self.as_ref(), name, func);
    }

    /// Registers a function recognizer scoped to this architecture.
    fn register_function_recognizer<R>(&self, recognizer: R)
    where
        R: 'static + FunctionRecognizer + Send + Sync + Sized,
    {
        crate::function_recognizer::register_arch_function_recognizer(self.as_ref(), recognizer);
    }
}
2353
2354impl<T: Architecture> ArchitectureExt for T {}
2355
2356pub fn register_architecture<A, F>(name: &str, func: F) -> &'static A
2357where
2358 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync + Sized,
2359 F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
2360{
2361 #[repr(C)]
2362 struct ArchitectureBuilder<A, F>
2363 where
2364 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2365 F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
2366 {
2367 arch: MaybeUninit<A>,
2368 func: Option<F>,
2369 }
2370
2371 extern "C" fn cb_init<A, F>(ctxt: *mut c_void, obj: *mut BNArchitecture)
2372 where
2373 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2374 F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
2375 {
2376 unsafe {
2377 let custom_arch = &mut *(ctxt as *mut ArchitectureBuilder<A, F>);
2378 let custom_arch_handle = CustomArchitectureHandle {
2379 handle: ctxt as *mut A,
2380 };
2381
2382 let create = custom_arch.func.take().unwrap();
2383 custom_arch
2384 .arch
2385 .write(create(custom_arch_handle, CoreArchitecture::from_raw(obj)));
2386 }
2387 }
2388
2389 extern "C" fn cb_endianness<A>(ctxt: *mut c_void) -> BNEndianness
2390 where
2391 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2392 {
2393 let custom_arch = unsafe { &*(ctxt as *mut A) };
2394 custom_arch.endianness()
2395 }
2396
2397 extern "C" fn cb_address_size<A>(ctxt: *mut c_void) -> usize
2398 where
2399 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2400 {
2401 let custom_arch = unsafe { &*(ctxt as *mut A) };
2402 custom_arch.address_size()
2403 }
2404
2405 extern "C" fn cb_default_integer_size<A>(ctxt: *mut c_void) -> usize
2406 where
2407 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2408 {
2409 let custom_arch = unsafe { &*(ctxt as *mut A) };
2410 custom_arch.default_integer_size()
2411 }
2412
2413 extern "C" fn cb_instruction_alignment<A>(ctxt: *mut c_void) -> usize
2414 where
2415 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2416 {
2417 let custom_arch = unsafe { &*(ctxt as *mut A) };
2418 custom_arch.instruction_alignment()
2419 }
2420
2421 extern "C" fn cb_max_instr_len<A>(ctxt: *mut c_void) -> usize
2422 where
2423 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2424 {
2425 let custom_arch = unsafe { &*(ctxt as *mut A) };
2426 custom_arch.max_instr_len()
2427 }
2428
2429 extern "C" fn cb_opcode_display_len<A>(ctxt: *mut c_void) -> usize
2430 where
2431 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2432 {
2433 let custom_arch = unsafe { &*(ctxt as *mut A) };
2434 custom_arch.opcode_display_len()
2435 }
2436
2437 extern "C" fn cb_associated_arch_by_addr<A>(
2438 ctxt: *mut c_void,
2439 addr: *mut u64,
2440 ) -> *mut BNArchitecture
2441 where
2442 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2443 {
2444 let custom_arch = unsafe { &*(ctxt as *mut A) };
2445 let addr = unsafe { *(addr) };
2446
2447 custom_arch.associated_arch_by_addr(addr).handle
2448 }
2449
2450 extern "C" fn cb_instruction_info<A>(
2451 ctxt: *mut c_void,
2452 data: *const u8,
2453 addr: u64,
2454 len: usize,
2455 result: *mut BNInstructionInfo,
2456 ) -> bool
2457 where
2458 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2459 {
2460 let custom_arch = unsafe { &*(ctxt as *mut A) };
2461 let data = unsafe { std::slice::from_raw_parts(data, len) };
2462
2463 match custom_arch.instruction_info(data, addr) {
2464 Some(info) => {
2465 unsafe { *result = info.into() };
2467 true
2468 }
2469 None => false,
2470 }
2471 }
2472
    /// FFI shim: disassembles one instruction into display tokens.
    ///
    /// `len` is in/out: in = available bytes, out = consumed bytes. The token
    /// array is leaked to the core and reclaimed in `cb_free_instruction_text`.
    extern "C" fn cb_get_instruction_text<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: *mut usize,
        result: *mut *mut BNInstructionTextToken,
        count: *mut usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `*len`
        // readable bytes; `result`/`count` are valid out-pointers from the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, *len) };
        let result = unsafe { &mut *result };

        let Some((res_size, res_tokens)) = custom_arch.instruction_text(data, addr) else {
            return false;
        };

        // Convert tokens into their raw FFI form; ownership of the allocation
        // transfers to the core below.
        let res_tokens: Box<[BNInstructionTextToken]> = res_tokens
            .into_iter()
            .map(InstructionTextToken::into_raw)
            .collect();
        unsafe {
            // Leak the boxed slice; cb_free_instruction_text re-boxes and frees it.
            let res_tokens = Box::leak(res_tokens);
            *result = res_tokens.as_mut_ptr();
            *count = res_tokens.len();
            *len = res_size;
        }
        true
    }
2505
    /// FFI shim: frees a token array previously leaked by `cb_get_instruction_text`.
    extern "C" fn cb_free_instruction_text(tokens: *mut BNInstructionTextToken, count: usize) {
        // SAFETY: `tokens`/`count` describe exactly the boxed slice leaked in
        // cb_get_instruction_text, so re-boxing it here is sound.
        unsafe {
            let raw_tokens = std::slice::from_raw_parts_mut(tokens, count);
            let boxed_tokens = Box::from_raw(raw_tokens);
            // Release the per-token owned data (strings etc.) before the slice drops.
            for token in boxed_tokens {
                InstructionTextToken::free_raw(token);
            }
        }
    }
2515
    /// FFI shim: lifts one instruction to Low Level IL.
    ///
    /// `len` is in/out: in = available bytes, out = bytes consumed when lifting
    /// succeeds. Returns false when the instruction cannot be lifted.
    extern "C" fn cb_instruction_llil<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: *mut usize,
        il: *mut BNLowLevelILFunction,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `*len`
        // readable bytes; `il` is a live IL function owned by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, *len) };
        // NOTE(review): the lifter presumably borrows `il` without taking
        // ownership (the core frees it) — confirm from_raw_with_arch's contract.
        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };

        match custom_arch.instruction_llil(data, addr, &lifter) {
            Some((res_len, res_value)) => {
                unsafe { *len = res_len };
                res_value
            }
            None => false,
        }
    }
2540
    /// FFI shim: lets the architecture drive basic-block analysis for a function.
    extern "C" fn cb_analyze_basic_blocks<A>(
        ctxt: *mut c_void,
        function: *mut BNFunction,
        context: *mut BNBasicBlockAnalysisContext,
    ) where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `function` and `context`
        // are live core objects for the duration of this callback.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        // NOTE(review): from_raw presumably assumes ownership of a reference the
        // core transferred for this call — verify against Function::from_raw's docs.
        let mut function = unsafe { Function::from_raw(function) };
        let mut context: BasicBlockAnalysisContext =
            unsafe { BasicBlockAnalysisContext::from_raw(context) };
        custom_arch.analyze_basic_blocks(&mut function, &mut context);
    }
2554
2555 extern "C" fn cb_reg_name<A>(ctxt: *mut c_void, reg: u32) -> *mut c_char
2556 where
2557 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2558 {
2559 let custom_arch = unsafe { &*(ctxt as *mut A) };
2560
2561 match custom_arch.register_from_id(reg.into()) {
2562 Some(reg) => BnString::into_raw(BnString::new(reg.name().as_ref())),
2563 None => BnString::into_raw(BnString::new("invalid_reg")),
2564 }
2565 }
2566
2567 extern "C" fn cb_flag_name<A>(ctxt: *mut c_void, flag: u32) -> *mut c_char
2568 where
2569 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2570 {
2571 let custom_arch = unsafe { &*(ctxt as *mut A) };
2572
2573 match custom_arch.flag_from_id(flag.into()) {
2574 Some(flag) => BnString::into_raw(BnString::new(flag.name().as_ref())),
2575 None => BnString::into_raw(BnString::new("invalid_flag")),
2576 }
2577 }
2578
2579 extern "C" fn cb_flag_write_name<A>(ctxt: *mut c_void, flag_write: u32) -> *mut c_char
2580 where
2581 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2582 {
2583 let custom_arch = unsafe { &*(ctxt as *mut A) };
2584
2585 match custom_arch.flag_write_from_id(flag_write.into()) {
2586 Some(flag_write) => BnString::into_raw(BnString::new(flag_write.name().as_ref())),
2587 None => BnString::into_raw(BnString::new("invalid_flag_write")),
2588 }
2589 }
2590
2591 extern "C" fn cb_semantic_flag_class_name<A>(ctxt: *mut c_void, class: u32) -> *mut c_char
2592 where
2593 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2594 {
2595 let custom_arch = unsafe { &*(ctxt as *mut A) };
2596
2597 match custom_arch.flag_class_from_id(class.into()) {
2598 Some(class) => BnString::into_raw(BnString::new(class.name().as_ref())),
2599 None => BnString::into_raw(BnString::new("invalid_flag_class")),
2600 }
2601 }
2602
2603 extern "C" fn cb_semantic_flag_group_name<A>(ctxt: *mut c_void, group: u32) -> *mut c_char
2604 where
2605 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2606 {
2607 let custom_arch = unsafe { &*(ctxt as *mut A) };
2608
2609 match custom_arch.flag_group_from_id(group.into()) {
2610 Some(group) => BnString::into_raw(BnString::new(group.name().as_ref())),
2611 None => BnString::into_raw(BnString::new("invalid_flag_group")),
2612 }
2613 }
2614
2615 extern "C" fn cb_registers_full_width<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2616 where
2617 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2618 {
2619 let custom_arch = unsafe { &*(ctxt as *mut A) };
2620 let mut regs: Box<[_]> = custom_arch
2621 .registers_full_width()
2622 .iter()
2623 .map(|r| r.id().0)
2624 .collect();
2625
2626 unsafe { *count = regs.len() };
2628 let regs_ptr = regs.as_mut_ptr();
2629 std::mem::forget(regs);
2630 regs_ptr
2631 }
2632
2633 extern "C" fn cb_registers_all<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2634 where
2635 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2636 {
2637 let custom_arch = unsafe { &*(ctxt as *mut A) };
2638 let mut regs: Box<[_]> = custom_arch
2639 .registers_all()
2640 .iter()
2641 .map(|r| r.id().0)
2642 .collect();
2643
2644 unsafe { *count = regs.len() };
2646 let regs_ptr = regs.as_mut_ptr();
2647 std::mem::forget(regs);
2648 regs_ptr
2649 }
2650
2651 extern "C" fn cb_registers_global<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2652 where
2653 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2654 {
2655 let custom_arch = unsafe { &*(ctxt as *mut A) };
2656 let mut regs: Box<[_]> = custom_arch
2657 .registers_global()
2658 .iter()
2659 .map(|r| r.id().0)
2660 .collect();
2661
2662 unsafe { *count = regs.len() };
2664 let regs_ptr = regs.as_mut_ptr();
2665 std::mem::forget(regs);
2666 regs_ptr
2667 }
2668
2669 extern "C" fn cb_registers_system<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2670 where
2671 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2672 {
2673 let custom_arch = unsafe { &*(ctxt as *mut A) };
2674 let mut regs: Box<[_]> = custom_arch
2675 .registers_system()
2676 .iter()
2677 .map(|r| r.id().0)
2678 .collect();
2679
2680 unsafe { *count = regs.len() };
2682 let regs_ptr = regs.as_mut_ptr();
2683 std::mem::forget(regs);
2684 regs_ptr
2685 }
2686
2687 extern "C" fn cb_flags<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2688 where
2689 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2690 {
2691 let custom_arch = unsafe { &*(ctxt as *mut A) };
2692 let mut flags: Box<[_]> = custom_arch.flags().iter().map(|f| f.id().0).collect();
2693
2694 unsafe { *count = flags.len() };
2696 let flags_ptr = flags.as_mut_ptr();
2697 std::mem::forget(flags);
2698 flags_ptr
2699 }
2700
2701 extern "C" fn cb_flag_write_types<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2702 where
2703 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2704 {
2705 let custom_arch = unsafe { &*(ctxt as *mut A) };
2706 let mut flag_writes: Box<[_]> = custom_arch
2707 .flag_write_types()
2708 .iter()
2709 .map(|f| f.id().0)
2710 .collect();
2711
2712 unsafe { *count = flag_writes.len() };
2714 let flags_ptr = flag_writes.as_mut_ptr();
2715 std::mem::forget(flag_writes);
2716 flags_ptr
2717 }
2718
2719 extern "C" fn cb_semantic_flag_classes<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2720 where
2721 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2722 {
2723 let custom_arch = unsafe { &*(ctxt as *mut A) };
2724 let mut flag_classes: Box<[_]> = custom_arch
2725 .flag_classes()
2726 .iter()
2727 .map(|f| f.id().0)
2728 .collect();
2729
2730 unsafe { *count = flag_classes.len() };
2732 let flags_ptr = flag_classes.as_mut_ptr();
2733 std::mem::forget(flag_classes);
2734 flags_ptr
2735 }
2736
2737 extern "C" fn cb_semantic_flag_groups<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2738 where
2739 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2740 {
2741 let custom_arch = unsafe { &*(ctxt as *mut A) };
2742 let mut flag_groups: Box<[_]> =
2743 custom_arch.flag_groups().iter().map(|f| f.id().0).collect();
2744
2745 unsafe { *count = flag_groups.len() };
2747 let flags_ptr = flag_groups.as_mut_ptr();
2748 std::mem::forget(flag_groups);
2749 flags_ptr
2750 }
2751
2752 extern "C" fn cb_flag_role<A>(ctxt: *mut c_void, flag: u32, class: u32) -> BNFlagRole
2753 where
2754 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2755 {
2756 let custom_arch = unsafe { &*(ctxt as *mut A) };
2757
2758 if let (Some(flag), class) = (
2759 custom_arch.flag_from_id(FlagId(flag)),
2760 custom_arch.flag_class_from_id(FlagClassId(class)),
2761 ) {
2762 flag.role(class)
2763 } else {
2764 FlagRole::SpecialFlagRole
2765 }
2766 }
2767
2768 extern "C" fn cb_flags_required_for_flag_cond<A>(
2769 ctxt: *mut c_void,
2770 cond: BNLowLevelILFlagCondition,
2771 class: u32,
2772 count: *mut usize,
2773 ) -> *mut u32
2774 where
2775 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2776 {
2777 let custom_arch = unsafe { &*(ctxt as *mut A) };
2778 let class = custom_arch.flag_class_from_id(FlagClassId(class));
2779 let mut flags: Box<[_]> = custom_arch
2780 .flags_required_for_flag_condition(cond, class)
2781 .iter()
2782 .map(|f| f.id().0)
2783 .collect();
2784
2785 unsafe { *count = flags.len() };
2787 let flags_ptr = flags.as_mut_ptr();
2788 std::mem::forget(flags);
2789 flags_ptr
2790 }
2791
2792 extern "C" fn cb_flags_required_for_semantic_flag_group<A>(
2793 ctxt: *mut c_void,
2794 group: u32,
2795 count: *mut usize,
2796 ) -> *mut u32
2797 where
2798 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2799 {
2800 let custom_arch = unsafe { &*(ctxt as *mut A) };
2801
2802 if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
2803 let mut flags: Box<[_]> = group.flags_required().iter().map(|f| f.id().0).collect();
2804
2805 unsafe { *count = flags.len() };
2807 let flags_ptr = flags.as_mut_ptr();
2808 std::mem::forget(flags);
2809 flags_ptr
2810 } else {
2811 unsafe {
2812 *count = 0;
2813 }
2814 std::ptr::null_mut()
2815 }
2816 }
2817
2818 extern "C" fn cb_flag_conditions_for_semantic_flag_group<A>(
2819 ctxt: *mut c_void,
2820 group: u32,
2821 count: *mut usize,
2822 ) -> *mut BNFlagConditionForSemanticClass
2823 where
2824 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2825 {
2826 let custom_arch = unsafe { &*(ctxt as *mut A) };
2827
2828 if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
2829 let flag_conditions = group.flag_conditions();
2830 let mut flags: Box<[_]> = flag_conditions
2831 .iter()
2832 .map(|(&class, &condition)| BNFlagConditionForSemanticClass {
2833 semanticClass: class.id().0,
2834 condition,
2835 })
2836 .collect();
2837
2838 unsafe { *count = flags.len() };
2840 let flags_ptr = flags.as_mut_ptr();
2841 std::mem::forget(flags);
2842 flags_ptr
2843 } else {
2844 unsafe {
2845 *count = 0;
2846 }
2847 std::ptr::null_mut()
2848 }
2849 }
2850
    /// FFI shim: frees the condition array leaked by
    /// `cb_flag_conditions_for_semantic_flag_group`. Null (empty list) is a no-op.
    extern "C" fn cb_free_flag_conditions_for_semantic_flag_group<A>(
        _ctxt: *mut c_void,
        conds: *mut BNFlagConditionForSemanticClass,
        count: usize,
    ) where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        if conds.is_null() {
            return;
        }

        // SAFETY: `conds`/`count` describe exactly the boxed slice leaked by the
        // matching allocation callback, so re-boxing it here frees it once.
        unsafe {
            let flags_ptr = std::ptr::slice_from_raw_parts_mut(conds, count);
            let _flags = Box::from_raw(flags_ptr);
        }
    }
2867
2868 extern "C" fn cb_flags_written_by_write_type<A>(
2869 ctxt: *mut c_void,
2870 write_type: u32,
2871 count: *mut usize,
2872 ) -> *mut u32
2873 where
2874 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2875 {
2876 let custom_arch = unsafe { &*(ctxt as *mut A) };
2877
2878 if let Some(write_type) = custom_arch.flag_write_from_id(FlagWriteId(write_type)) {
2879 let mut flags_written: Box<[_]> = write_type
2880 .flags_written()
2881 .iter()
2882 .map(|f| f.id().0)
2883 .collect();
2884
2885 unsafe { *count = flags_written.len() };
2887 let flags_ptr = flags_written.as_mut_ptr();
2888 std::mem::forget(flags_written);
2889 flags_ptr
2890 } else {
2891 unsafe {
2892 *count = 0;
2893 }
2894 std::ptr::null_mut()
2895 }
2896 }
2897
2898 extern "C" fn cb_semantic_class_for_flag_write_type<A>(
2899 ctxt: *mut c_void,
2900 write_type: u32,
2901 ) -> u32
2902 where
2903 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2904 {
2905 let custom_arch = unsafe { &*(ctxt as *mut A) };
2906 custom_arch
2907 .flag_write_from_id(FlagWriteId(write_type))
2908 .map(|w| w.class())
2909 .and_then(|c| c.map(|c| c.id().0))
2910 .unwrap_or(0)
2911 }
2912
    /// FFI shim: lifts the IL that writes `flag` for the given flag-write type.
    ///
    /// Resolution order: (1) try the architecture's custom lifting; (2) if the
    /// op can't be unpacked or the arch declines, fall back to the core's
    /// default flag-write IL; (3) if the ids are invalid, emit `unimplemented`.
    /// Returns the resulting IL expression index.
    extern "C" fn cb_flag_write_llil<A>(
        ctxt: *mut c_void,
        op: BNLowLevelILOperation,
        size: usize,
        flag_write: u32,
        flag: u32,
        operands_raw: *mut BNRegisterOrConstant,
        operand_count: usize,
        il: *mut BNLowLevelILFunction,
    ) -> usize
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `operands_raw` holds
        // `operand_count` elements and `il` is a live IL function, both supplied
        // by the core for the duration of this callback.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let flag_write = custom_arch.flag_write_from_id(FlagWriteId(flag_write));
        let flag = custom_arch.flag_from_id(FlagId(flag));
        let operands = unsafe { std::slice::from_raw_parts(operands_raw, operand_count) };
        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };

        if let (Some(flag_write), Some(flag)) = (flag_write, flag) {
            if let Some(op) = LowLevelILFlagWriteOp::from_op(custom_arch, size, op, operands) {
                if let Some(expr) = custom_arch.flag_write_llil(flag, flag_write, op, &lifter) {
                    // Custom lifting succeeded; hand its expression index back.
                    return expr.index.0;
                }
            } else {
                // Unexpected operand shape for this op; fall through to the default.
                log::warn!(
                    "unable to unpack flag write op: {:?} with {} operands",
                    op,
                    operands.len()
                );
            }

            let role = flag.role(flag_write.class());

            // SAFETY: all raw pointers are still the core-supplied ones above.
            unsafe {
                BNGetDefaultArchitectureFlagWriteLowLevelIL(
                    custom_arch.as_ref().handle,
                    op,
                    size,
                    role,
                    operands_raw,
                    operand_count,
                    il,
                )
            }
        } else {
            // Unknown flag or write-type id: nothing sensible to lift.
            lifter.unimplemented().index.0
        }
    }
2967
    /// FFI shim: lifts the IL computing a flag condition, falling back to an
    /// `unimplemented` expression when the architecture declines.
    extern "C" fn cb_flag_cond_llil<A>(
        ctxt: *mut c_void,
        cond: FlagCondition,
        class: u32,
        il: *mut BNLowLevelILFunction,
    ) -> usize
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `il` is a live IL
        // function owned by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        // A failed class lookup (None) means "no semantic class" and is passed through.
        let class = custom_arch.flag_class_from_id(FlagClassId(class));

        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };
        if let Some(expr) = custom_arch.flag_cond_llil(cond, class, &lifter) {
            return expr.index.0;
        }

        lifter.unimplemented().index.0
    }
2990
    /// FFI shim: lifts the IL computing a semantic flag group, falling back to
    /// an `unimplemented` expression for unknown groups or declined lifts.
    extern "C" fn cb_flag_group_llil<A>(
        ctxt: *mut c_void,
        group: u32,
        il: *mut BNLowLevelILFunction,
    ) -> usize
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `il` is a live IL
        // function owned by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };

        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
            if let Some(expr) = custom_arch.flag_group_llil(group, &lifter) {
                return expr.index.0;
            }
        }

        lifter.unimplemented().index.0
    }
3013
    /// FFI shim: frees an id list previously leaked by one of the register/flag
    /// enumeration callbacks above. Null (empty list) is a no-op.
    extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32, count: usize) {
        if regs.is_null() {
            return;
        }

        // SAFETY: `regs`/`count` describe exactly the boxed `[u32]` slice leaked
        // by the matching enumeration callback, so re-boxing it frees it once.
        unsafe {
            let regs_ptr = std::ptr::slice_from_raw_parts_mut(regs, count);
            let _regs = Box::from_raw(regs_ptr);
        }
    }
3024
    /// FFI shim: fills in size/offset/extension info for a register id.
    /// Unknown ids leave `result` untouched (as zero-initialized by the core).
    extern "C" fn cb_register_info<A>(ctxt: *mut c_void, reg: u32, result: *mut BNRegisterInfo)
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `result` is a valid
        // out-pointer supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let result = unsafe { &mut *result };

        if let Some(reg) = custom_arch.register_from_id(RegisterId(reg)) {
            let info = reg.info();

            // A register without a parent is its own full-width register.
            result.fullWidthRegister = match info.parent() {
                Some(p) => p.id().0,
                None => reg.id().0,
            };

            result.offset = info.offset();
            result.size = info.size();
            result.extend = info.implicit_extend();
        }
    }
3045
3046 extern "C" fn cb_stack_pointer<A>(ctxt: *mut c_void) -> u32
3047 where
3048 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3049 {
3050 let custom_arch = unsafe { &*(ctxt as *mut A) };
3051
3052 if let Some(reg) = custom_arch.stack_pointer_reg() {
3053 reg.id().0
3054 } else {
3055 0xffff_ffff
3056 }
3057 }
3058
3059 extern "C" fn cb_link_reg<A>(ctxt: *mut c_void) -> u32
3060 where
3061 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3062 {
3063 let custom_arch = unsafe { &*(ctxt as *mut A) };
3064
3065 if let Some(reg) = custom_arch.link_reg() {
3066 reg.id().0
3067 } else {
3068 0xffff_ffff
3069 }
3070 }
3071
3072 extern "C" fn cb_reg_stack_name<A>(ctxt: *mut c_void, stack: u32) -> *mut c_char
3073 where
3074 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3075 {
3076 let custom_arch = unsafe { &*(ctxt as *mut A) };
3077
3078 match custom_arch.register_stack_from_id(RegisterStackId(stack)) {
3079 Some(stack) => BnString::into_raw(BnString::new(stack.name().as_ref())),
3080 None => BnString::into_raw(BnString::new("invalid_reg_stack")),
3081 }
3082 }
3083
3084 extern "C" fn cb_reg_stacks<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
3085 where
3086 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3087 {
3088 let custom_arch = unsafe { &*(ctxt as *mut A) };
3089 let mut regs: Box<[_]> = custom_arch
3090 .register_stacks()
3091 .iter()
3092 .map(|r| r.id().0)
3093 .collect();
3094
3095 unsafe { *count = regs.len() };
3097 let regs_ptr = regs.as_mut_ptr();
3098 std::mem::forget(regs);
3099 regs_ptr
3100 }
3101
    /// FFI shim: fills in storage/top-relative register layout for a register
    /// stack id. Unknown ids leave `result` untouched.
    extern "C" fn cb_reg_stack_info<A>(
        ctxt: *mut c_void,
        stack: u32,
        result: *mut BNRegisterStackInfo,
    ) where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `result` is a valid
        // out-pointer supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let result = unsafe { &mut *result };

        if let Some(stack) = custom_arch.register_stack_from_id(RegisterStackId(stack)) {
            let info = stack.info();

            let (reg, count) = info.storage_regs();
            result.firstStorageReg = reg.id().0;
            result.storageCount = count as u32;

            if let Some((reg, count)) = info.top_relative_regs() {
                result.firstTopRelativeReg = reg.id().0;
                result.topRelativeCount = count as u32;
            } else {
                // No top-relative aliases: use the invalid-register sentinel.
                result.firstTopRelativeReg = 0xffff_ffff;
                result.topRelativeCount = 0;
            }

            result.stackTopReg = info.stack_top_reg().id().0;
        }
    }
3130
3131 extern "C" fn cb_intrinsic_class<A>(ctxt: *mut c_void, intrinsic: u32) -> BNIntrinsicClass
3132 where
3133 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3134 {
3135 let custom_arch = unsafe { &*(ctxt as *mut A) };
3136 match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
3137 Some(intrinsic) => intrinsic.class(),
3138 None => BNIntrinsicClass::GeneralIntrinsicClass,
3140 }
3141 }
3142
3143 extern "C" fn cb_intrinsic_name<A>(ctxt: *mut c_void, intrinsic: u32) -> *mut c_char
3144 where
3145 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3146 {
3147 let custom_arch = unsafe { &*(ctxt as *mut A) };
3148 match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
3149 Some(intrinsic) => BnString::into_raw(BnString::new(intrinsic.name())),
3150 None => BnString::into_raw(BnString::new("invalid_intrinsic")),
3151 }
3152 }
3153
3154 extern "C" fn cb_intrinsics<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
3155 where
3156 A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
3157 {
3158 let custom_arch = unsafe { &*(ctxt as *mut A) };
3159 let mut intrinsics: Box<[_]> = custom_arch.intrinsics().iter().map(|i| i.id().0).collect();
3160
3161 unsafe { *count = intrinsics.len() };
3163 let intrinsics_ptr = intrinsics.as_mut_ptr();
3164 std::mem::forget(intrinsics);
3165 intrinsics_ptr
3166 }
3167
    /// FFI shim: returns an intrinsic's input parameters as a core-owned array,
    /// released via `cb_free_name_and_types`. Unknown ids or empty input lists
    /// yield a null pointer with `*count == 0`.
    extern "C" fn cb_intrinsic_inputs<A>(
        ctxt: *mut c_void,
        intrinsic: u32,
        count: *mut usize,
    ) -> *mut BNNameAndType
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `count` is a valid
        // out-pointer supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };

        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
            unsafe {
                *count = 0;
            }
            return std::ptr::null_mut();
        };

        let inputs = intrinsic.inputs();
        let raw_inputs: Box<[_]> = inputs.into_iter().map(NameAndType::into_raw).collect();

        unsafe {
            *count = raw_inputs.len();
        }

        if raw_inputs.is_empty() {
            // Dropping the empty box here is fine: there is nothing to free later.
            std::ptr::null_mut()
        } else {
            // Leak to the core; cb_free_name_and_types re-boxes and frees it.
            Box::leak(raw_inputs).as_mut_ptr()
        }
    }
3202
    /// FFI shim: frees a name-and-type array leaked by `cb_intrinsic_inputs`.
    /// Null (empty list) is a no-op.
    extern "C" fn cb_free_name_and_types<A>(
        _ctxt: *mut c_void,
        nt: *mut BNNameAndType,
        count: usize,
    ) where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        if nt.is_null() {
            return;
        }

        let nt_ptr = std::ptr::slice_from_raw_parts_mut(nt, count);
        // SAFETY: `nt`/`count` describe exactly the boxed slice leaked by
        // cb_intrinsic_inputs, so re-boxing it here frees it once.
        let boxed_name_and_types = unsafe { Box::from_raw(nt_ptr) };
        // Release each element's owned name/type before the slice drops.
        for nt in boxed_name_and_types {
            NameAndType::free_raw(nt);
        }
    }
3222
    /// FFI shim: returns an intrinsic's output types as a core-owned array,
    /// released via `cb_free_type_list`. Unknown ids or empty output lists
    /// yield a null pointer with `*count == 0`.
    extern "C" fn cb_intrinsic_outputs<A>(
        ctxt: *mut c_void,
        intrinsic: u32,
        count: *mut usize,
    ) -> *mut BNTypeWithConfidence
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `count` is a valid
        // out-pointer supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };

        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
            unsafe {
                *count = 0;
            }
            return std::ptr::null_mut();
        };

        let outputs = intrinsic.outputs();
        let raw_outputs: Box<[BNTypeWithConfidence]> = outputs
            .into_iter()
            .map(Conf::<Ref<Type>>::into_raw)
            .collect();

        unsafe {
            *count = raw_outputs.len();
        }

        if raw_outputs.is_empty() {
            // Dropping the empty box here is fine: there is nothing to free later.
            std::ptr::null_mut()
        } else {
            // Leak to the core; cb_free_type_list re-boxes and frees it.
            Box::leak(raw_outputs).as_mut_ptr()
        }
    }
3260
    /// FFI shim: frees a type list leaked by `cb_intrinsic_outputs`.
    /// Null (empty list) is a no-op.
    extern "C" fn cb_free_type_list<A>(
        ctxt: *mut c_void,
        tl: *mut BNTypeWithConfidence,
        count: usize,
    ) where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        let _custom_arch = unsafe { &*(ctxt as *mut A) };
        if !tl.is_null() {
            // SAFETY: `tl`/`count` describe exactly the boxed slice leaked by
            // cb_intrinsic_outputs, so re-boxing it here frees it once.
            let boxed_types =
                unsafe { Box::from_raw(std::ptr::slice_from_raw_parts_mut(tl, count)) };
            // Release each element's type reference before the slice drops.
            for ty in boxed_types {
                Conf::<Ref<Type>>::free_raw(ty);
            }
        }
    }
3277
    /// FFI shim: reports whether this architecture implements assembly.
    extern "C" fn cb_can_assemble<A>(ctxt: *mut c_void) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the `A` instance handed to the core at registration.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        custom_arch.can_assemble()
    }
3285
    /// FFI shim: assembles `code` at `addr` into the caller-owned `buffer`.
    ///
    /// On success the machine code is written into `buffer` and `*errors` is an
    /// empty string; on failure `*errors` carries the message. Either way the
    /// error string's ownership transfers to the core.
    extern "C" fn cb_assemble<A>(
        ctxt: *mut c_void,
        code: *const c_char,
        addr: u64,
        buffer: *mut BNDataBuffer,
        errors: *mut *mut c_char,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `code`, `buffer` and
        // `errors` are valid pointers supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        // Non-UTF-8 / null source degrades to an empty string rather than failing.
        let code = raw_to_string(code).unwrap_or("".into());
        let mut buffer = DataBuffer::from_raw(buffer);

        let result = match custom_arch.assemble(&code, addr) {
            Ok(result) => {
                buffer.set_data(&result);
                unsafe {
                    *errors = BnString::into_raw(BnString::new(""));
                }
                true
            }
            Err(result) => {
                unsafe {
                    *errors = BnString::into_raw(BnString::new(result));
                }
                false
            }
        };

        // The core owns `buffer`; forget our wrapper so its Drop does not free
        // the caller's handle.
        std::mem::forget(buffer);

        result
    }
3321
    /// FFI shim: asks whether the instruction at `addr` can be patched to never branch.
    extern "C" fn cb_is_never_branch_patch_available<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // readable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, len) };
        custom_arch.is_never_branch_patch_available(data, addr)
    }
3335
    /// FFI shim: asks whether the instruction at `addr` can be patched to always branch.
    extern "C" fn cb_is_always_branch_patch_available<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // readable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, len) };
        custom_arch.is_always_branch_patch_available(data, addr)
    }
3349
    /// FFI shim: asks whether the branch at `addr` can be patched to its inverse.
    extern "C" fn cb_is_invert_branch_patch_available<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // readable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, len) };
        custom_arch.is_invert_branch_patch_available(data, addr)
    }
3363
    /// FFI shim: asks whether the call at `addr` can be patched to return zero.
    extern "C" fn cb_is_skip_and_return_zero_patch_available<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // readable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, len) };
        custom_arch.is_skip_and_return_zero_patch_available(data, addr)
    }
3377
    /// FFI shim: asks whether the call at `addr` can be patched to return an
    /// arbitrary value.
    extern "C" fn cb_is_skip_and_return_value_patch_available<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // readable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, len) };
        custom_arch.is_skip_and_return_value_patch_available(data, addr)
    }
3391
    /// FFI shim: rewrites the bytes at `addr` into no-ops in place.
    extern "C" fn cb_convert_to_nop<A>(
        ctxt: *mut c_void,
        data: *mut u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // writable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
        custom_arch.convert_to_nop(data, addr)
    }
3405
    /// FFI shim: rewrites the conditional branch at `addr` into an unconditional one.
    extern "C" fn cb_always_branch<A>(
        ctxt: *mut c_void,
        data: *mut u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // writable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
        custom_arch.always_branch(data, addr)
    }
3419
    /// FFI shim: rewrites the branch at `addr` into its inverted condition.
    extern "C" fn cb_invert_branch<A>(
        ctxt: *mut c_void,
        data: *mut u8,
        addr: u64,
        len: usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // writable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
        custom_arch.invert_branch(data, addr)
    }
3433
    /// FFI shim: rewrites the call at `addr` so execution continues with `val`
    /// as the return value.
    extern "C" fn cb_skip_and_return_value<A>(
        ctxt: *mut c_void,
        data: *mut u8,
        addr: u64,
        len: usize,
        val: u64,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the registered `A` instance; `data` holds `len`
        // writable bytes supplied by the core.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
        custom_arch.skip_and_return_value(data, addr, val)
    }
3448
3449 let name = name.to_cstr();
3450
3451 let uninit_arch = ArchitectureBuilder {
3452 arch: MaybeUninit::zeroed(),
3453 func: Some(func),
3454 };
3455
3456 let raw = Box::into_raw(Box::new(uninit_arch));
3457 let mut custom_arch = BNCustomArchitecture {
3458 context: raw as *mut _,
3459 init: Some(cb_init::<A, F>),
3460 getEndianness: Some(cb_endianness::<A>),
3461 getAddressSize: Some(cb_address_size::<A>),
3462 getDefaultIntegerSize: Some(cb_default_integer_size::<A>),
3463 getInstructionAlignment: Some(cb_instruction_alignment::<A>),
3464 getMaxInstructionLength: Some(cb_max_instr_len::<A>),
3466 getOpcodeDisplayLength: Some(cb_opcode_display_len::<A>),
3468 getAssociatedArchitectureByAddress: Some(cb_associated_arch_by_addr::<A>),
3469 getInstructionInfo: Some(cb_instruction_info::<A>),
3470 getInstructionText: Some(cb_get_instruction_text::<A>),
3471 freeInstructionText: Some(cb_free_instruction_text),
3472 getInstructionLowLevelIL: Some(cb_instruction_llil::<A>),
3473 analyzeBasicBlocks: Some(cb_analyze_basic_blocks::<A>),
3474
3475 getRegisterName: Some(cb_reg_name::<A>),
3476 getFlagName: Some(cb_flag_name::<A>),
3477 getFlagWriteTypeName: Some(cb_flag_write_name::<A>),
3478 getSemanticFlagClassName: Some(cb_semantic_flag_class_name::<A>),
3479 getSemanticFlagGroupName: Some(cb_semantic_flag_group_name::<A>),
3480
3481 getFullWidthRegisters: Some(cb_registers_full_width::<A>),
3482 getAllRegisters: Some(cb_registers_all::<A>),
3483 getAllFlags: Some(cb_flags::<A>),
3484 getAllFlagWriteTypes: Some(cb_flag_write_types::<A>),
3485 getAllSemanticFlagClasses: Some(cb_semantic_flag_classes::<A>),
3486 getAllSemanticFlagGroups: Some(cb_semantic_flag_groups::<A>),
3487
3488 getFlagRole: Some(cb_flag_role::<A>),
3489 getFlagsRequiredForFlagCondition: Some(cb_flags_required_for_flag_cond::<A>),
3490
3491 getFlagsRequiredForSemanticFlagGroup: Some(cb_flags_required_for_semantic_flag_group::<A>),
3492 getFlagConditionsForSemanticFlagGroup: Some(
3493 cb_flag_conditions_for_semantic_flag_group::<A>,
3494 ),
3495 freeFlagConditionsForSemanticFlagGroup: Some(
3496 cb_free_flag_conditions_for_semantic_flag_group::<A>,
3497 ),
3498
3499 getFlagsWrittenByFlagWriteType: Some(cb_flags_written_by_write_type::<A>),
3500 getSemanticClassForFlagWriteType: Some(cb_semantic_class_for_flag_write_type::<A>),
3501
3502 getFlagWriteLowLevelIL: Some(cb_flag_write_llil::<A>),
3503 getFlagConditionLowLevelIL: Some(cb_flag_cond_llil::<A>),
3504 getSemanticFlagGroupLowLevelIL: Some(cb_flag_group_llil::<A>),
3505
3506 freeRegisterList: Some(cb_free_register_list),
3507 getRegisterInfo: Some(cb_register_info::<A>),
3508 getStackPointerRegister: Some(cb_stack_pointer::<A>),
3509 getLinkRegister: Some(cb_link_reg::<A>),
3510 getGlobalRegisters: Some(cb_registers_global::<A>),
3511 getSystemRegisters: Some(cb_registers_system::<A>),
3512
3513 getRegisterStackName: Some(cb_reg_stack_name::<A>),
3514 getAllRegisterStacks: Some(cb_reg_stacks::<A>),
3515 getRegisterStackInfo: Some(cb_reg_stack_info::<A>),
3516
3517 getIntrinsicClass: Some(cb_intrinsic_class::<A>),
3518 getIntrinsicName: Some(cb_intrinsic_name::<A>),
3519 getAllIntrinsics: Some(cb_intrinsics::<A>),
3520 getIntrinsicInputs: Some(cb_intrinsic_inputs::<A>),
3521 freeNameAndTypeList: Some(cb_free_name_and_types::<A>),
3522 getIntrinsicOutputs: Some(cb_intrinsic_outputs::<A>),
3523 freeTypeList: Some(cb_free_type_list::<A>),
3524
3525 canAssemble: Some(cb_can_assemble::<A>),
3526 assemble: Some(cb_assemble::<A>),
3527
3528 isNeverBranchPatchAvailable: Some(cb_is_never_branch_patch_available::<A>),
3529 isAlwaysBranchPatchAvailable: Some(cb_is_always_branch_patch_available::<A>),
3530 isInvertBranchPatchAvailable: Some(cb_is_invert_branch_patch_available::<A>),
3531 isSkipAndReturnZeroPatchAvailable: Some(cb_is_skip_and_return_zero_patch_available::<A>),
3532 isSkipAndReturnValuePatchAvailable: Some(cb_is_skip_and_return_value_patch_available::<A>),
3533
3534 convertToNop: Some(cb_convert_to_nop::<A>),
3535 alwaysBranch: Some(cb_always_branch::<A>),
3536 invertBranch: Some(cb_invert_branch::<A>),
3537 skipAndReturnValue: Some(cb_skip_and_return_value::<A>),
3538 };
3539
3540 unsafe {
3541 let res = BNRegisterArchitecture(name.as_ptr(), &mut custom_arch as *mut _);
3542
3543 assert!(!res.is_null());
3544
3545 (*raw).arch.assume_init_mut()
3546 }
3547}
3548
/// Handle to a custom [`Architecture`] implementation registered with the core.
///
/// Holds a raw pointer to the architecture instance; the `'static` bound and
/// the raw-pointer storage suggest the instance is handed to the core at
/// registration time and never deallocated for the life of the process —
/// NOTE(review): confirm against the registration function, which finishes via
/// `assume_init_mut()` on core-owned storage.
#[derive(Debug)]
pub struct CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
{
    // Raw pointer to the registered architecture; dereferenced in `Borrow`.
    handle: *mut A,
}
3556
// SAFETY: the handle is only a pointer-sized copy of a stable address; the
// pointee is required to be `Send + Sync` by the trait bound, so moving the
// handle to another thread cannot create unsynchronized access to `A`.
unsafe impl<A> Send for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}
3561
// SAFETY: sharing the handle across threads only shares `&A` access (via
// `Borrow`), and the trait bound requires `A: Sync`, so concurrent shared
// access to the pointee is sound.
unsafe impl<A> Sync for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}
3566
// Manual impl: `#[derive(Clone)]` would incorrectly add an `A: Clone` bound,
// but only the pointer is copied, never the pointed-to architecture.
impl<A> Clone for CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = Self> + Send + Sync,
{
    fn clone(&self) -> Self {
        // Delegate to the `Copy` impl below (canonical form for Copy types).
        *self
    }
}
3575
// Manual impl for the same reason as `Clone`: a derive would demand
// `A: Copy`, while the handle itself is just a raw pointer and always copyable.
impl<A> Copy for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = Self> + Send + Sync
{
}
3580
impl<A> Borrow<A> for CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = Self> + Send + Sync,
{
    /// Borrows the architecture instance this handle points to.
    fn borrow(&self) -> &A {
        // SAFETY: `handle` points to a registered architecture that is
        // presumably never freed for the life of the process (hence the
        // `'static` bound) — TODO confirm the registration path leaks/pins it.
        unsafe { &*self.handle }
    }
}
3589
/// Assembly syntax dialect forwarded to the LLVM services assembler.
///
/// Discriminants mirror the core's C enum; `llvm_assemble` passes values
/// through with `as i32`, so they must not be renumbered.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(i32)]
pub enum LlvmServicesDialect {
    /// Let LLVM choose its default dialect for the target.
    Unspecified = 0,
    /// AT&T syntax (e.g. `mov %eax, %ebx`).
    Att = 1,
    /// Intel syntax (e.g. `mov ebx, eax`).
    Intel = 2,
}
3596
/// Code model forwarded to the LLVM services assembler.
///
/// Discriminants mirror the core's C enum; `llvm_assemble` passes values
/// through with `as i32`, so they must not be renumbered.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(i32)]
pub enum LlvmServicesCodeModel {
    /// Target-default code model.
    Default = 0,
    Small = 1,
    Kernel = 2,
    Medium = 3,
    Large = 4,
}
3605
/// Relocation mode forwarded to the LLVM services assembler.
///
/// Discriminants mirror the core's C enum; `llvm_assemble` passes values
/// through with `as i32`, so they must not be renumbered.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(i32)]
pub enum LlvmServicesRelocMode {
    /// Non-relocatable (static) code.
    Static = 0,
    /// Position-independent code.
    PIC = 1,
    /// Dynamic code without position independence.
    DynamicNoPIC = 2,
}
3612
3613pub fn llvm_assemble(
3614 code: &str,
3615 dialect: LlvmServicesDialect,
3616 arch_triple: &str,
3617 code_model: LlvmServicesCodeModel,
3618 reloc_mode: LlvmServicesRelocMode,
3619) -> Result<Vec<u8>, String> {
3620 let code = CString::new(code).map_err(|_| "Invalid encoding in code string".to_string())?;
3621 let arch_triple = CString::new(arch_triple)
3622 .map_err(|_| "Invalid encoding in architecture triple string".to_string())?;
3623 let mut out_bytes: *mut c_char = std::ptr::null_mut();
3624 let mut out_bytes_len: c_int = 0;
3625 let mut err_bytes: *mut c_char = std::ptr::null_mut();
3626 let mut err_len: c_int = 0;
3627
3628 unsafe {
3629 BNLlvmServicesInit();
3630 }
3631
3632 let result = unsafe {
3633 BNLlvmServicesAssemble(
3634 code.as_ptr(),
3635 dialect as i32,
3636 arch_triple.as_ptr(),
3637 code_model as i32,
3638 reloc_mode as i32,
3639 &mut out_bytes as *mut *mut c_char,
3640 &mut out_bytes_len as *mut c_int,
3641 &mut err_bytes as *mut *mut c_char,
3642 &mut err_len as *mut c_int,
3643 )
3644 };
3645
3646 let out = if out_bytes_len == 0 {
3647 Vec::new()
3648 } else {
3649 unsafe {
3650 std::slice::from_raw_parts(
3651 out_bytes as *const c_char as *const u8,
3652 out_bytes_len as usize,
3653 )
3654 }
3655 .to_vec()
3656 };
3657
3658 let errors = if err_len == 0 {
3659 "".into()
3660 } else {
3661 String::from_utf8_lossy(unsafe {
3662 std::slice::from_raw_parts(err_bytes as *const c_char as *const u8, err_len as usize)
3663 })
3664 .into_owned()
3665 };
3666
3667 unsafe {
3668 BNLlvmServicesAssembleFree(out_bytes, err_bytes);
3669 }
3670
3671 if result == 0 {
3672 Ok(out)
3673 } else {
3674 Err(errors)
3675 }
3676}