1use std::borrow::Borrow;
18use std::ffi::c_void;
19use std::fmt::{Debug, Formatter};
20use std::hash::{Hash, Hasher};
21use std::marker::PhantomData;
22
23use binaryninjacore_sys::*;
24
25use crate::architecture::{
26 Architecture, ArchitectureExt, CoreArchitecture, CoreRegister, Register, RegisterId,
27};
28use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Guard, Ref, RefCountable};
29use crate::string::*;
30use crate::types::FunctionParameter;
31use crate::variable::Variable;
/// A calling convention implementable in Rust; register one with the core via
/// [`register_calling_convention`]. All registers are referred to by their
/// architecture [`RegisterId`].
pub trait CallingConvention: Sync {
    /// Registers not preserved across a call (caller-saved).
    fn caller_saved_registers(&self) -> Vec<RegisterId>;
    /// Registers preserved across a call (callee-saved).
    fn callee_saved_registers(&self) -> Vec<RegisterId>;
    /// Registers used for integer arguments, in argument order.
    fn int_arg_registers(&self) -> Vec<RegisterId>;
    /// Registers used for floating-point arguments, in argument order.
    fn float_arg_registers(&self) -> Vec<RegisterId>;

    /// Whether integer and float argument registers share one index sequence.
    fn arg_registers_shared_index(&self) -> bool;
    /// Whether stack space is reserved for arguments passed in registers.
    fn reserved_stack_space_for_arg_registers(&self) -> bool;
    /// Whether the stack is adjusted on return.
    fn stack_adjusted_on_return(&self) -> bool;
    /// Whether this convention is eligible for the core's heuristics.
    fn is_eligible_for_heuristics(&self) -> bool;

    /// Register holding the integer return value, if any.
    fn return_int_reg(&self) -> Option<RegisterId>;
    /// Register holding the high half of a wide integer return value, if any.
    fn return_hi_int_reg(&self) -> Option<RegisterId>;
    /// Register holding the floating-point return value, if any.
    fn return_float_reg(&self) -> Option<RegisterId>;

    /// Global pointer register, if the convention defines one.
    fn global_pointer_reg(&self) -> Option<RegisterId>;

    /// Registers treated as implicitly defined at function entry.
    fn implicitly_defined_registers(&self) -> Vec<RegisterId>;
    /// Whether argument registers are used when calling var-args functions.
    fn are_argument_registers_used_for_var_args(&self) -> bool;
}
57
58pub fn register_calling_convention<A, C>(arch: &A, name: &str, cc: C) -> Ref<CoreCallingConvention>
59where
60 A: Architecture,
61 C: 'static + CallingConvention,
62{
63 struct CustomCallingConventionContext<C>
64 where
65 C: CallingConvention,
66 {
67 raw_handle: *mut BNCallingConvention,
68 cc: C,
69 }
70
71 extern "C" fn cb_free<C>(ctxt: *mut c_void)
73 where
74 C: CallingConvention,
75 {
76 ffi_wrap!("CallingConvention::free", unsafe {
77 let _ctxt = Box::from_raw(ctxt as *mut CustomCallingConventionContext<C>);
78 })
79 }
80
81 extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32, count: usize) {
82 ffi_wrap!("CallingConvention::free_register_list", unsafe {
83 if regs.is_null() {
84 return;
85 }
86
87 let regs_ptr = std::ptr::slice_from_raw_parts_mut(regs, count);
88 let _regs = Box::from_raw(regs_ptr);
89 })
90 }
91
92 extern "C" fn cb_caller_saved<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
93 where
94 C: CallingConvention,
95 {
96 ffi_wrap!("CallingConvention::caller_saved_registers", unsafe {
97 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
98 let mut regs: Vec<_> = ctxt
99 .cc
100 .caller_saved_registers()
101 .iter()
102 .map(|r| r.0)
103 .collect();
104
105 *count = regs.len();
107 let regs_ptr = regs.as_mut_ptr();
108 std::mem::forget(regs);
109 regs_ptr
110 })
111 }
112
113 extern "C" fn cb_callee_saved<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
114 where
115 C: CallingConvention,
116 {
117 ffi_wrap!("CallingConvention::callee_saved_registers", unsafe {
118 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
119 let mut regs: Vec<_> = ctxt
120 .cc
121 .callee_saved_registers()
122 .iter()
123 .map(|r| r.0)
124 .collect();
125
126 *count = regs.len();
128 let regs_ptr = regs.as_mut_ptr();
129 std::mem::forget(regs);
130 regs_ptr
131 })
132 }
133
134 extern "C" fn cb_int_args<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
135 where
136 C: CallingConvention,
137 {
138 ffi_wrap!("CallingConvention::int_arg_registers", unsafe {
139 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
140 let mut regs: Vec<_> = ctxt.cc.int_arg_registers().iter().map(|r| r.0).collect();
141
142 *count = regs.len();
144 let regs_ptr = regs.as_mut_ptr();
145 std::mem::forget(regs);
146 regs_ptr
147 })
148 }
149
150 extern "C" fn cb_float_args<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
151 where
152 C: CallingConvention,
153 {
154 ffi_wrap!("CallingConvention::float_arg_registers", unsafe {
155 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
156 let mut regs: Vec<_> = ctxt.cc.float_arg_registers().iter().map(|r| r.0).collect();
157
158 *count = regs.len();
160 let regs_ptr = regs.as_mut_ptr();
161 std::mem::forget(regs);
162 regs_ptr
163 })
164 }
165
166 extern "C" fn cb_arg_shared_index<C>(ctxt: *mut c_void) -> bool
167 where
168 C: CallingConvention,
169 {
170 ffi_wrap!("CallingConvention::arg_registers_shared_index", unsafe {
171 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
172
173 ctxt.cc.arg_registers_shared_index()
174 })
175 }
176
177 extern "C" fn cb_stack_reserved_arg_regs<C>(ctxt: *mut c_void) -> bool
178 where
179 C: CallingConvention,
180 {
181 ffi_wrap!(
182 "CallingConvention::reserved_stack_space_for_arg_registers",
183 unsafe {
184 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
185
186 ctxt.cc.reserved_stack_space_for_arg_registers()
187 }
188 )
189 }
190
191 extern "C" fn cb_stack_adjusted_on_return<C>(ctxt: *mut c_void) -> bool
192 where
193 C: CallingConvention,
194 {
195 ffi_wrap!("CallingConvention::stack_adjusted_on_return", unsafe {
196 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
197
198 ctxt.cc.stack_adjusted_on_return()
199 })
200 }
201
202 extern "C" fn cb_is_eligible_for_heuristics<C>(ctxt: *mut c_void) -> bool
203 where
204 C: CallingConvention,
205 {
206 ffi_wrap!("CallingConvention::is_eligible_for_heuristics", unsafe {
207 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
208
209 ctxt.cc.is_eligible_for_heuristics()
210 })
211 }
212
213 extern "C" fn cb_return_int_reg<C>(ctxt: *mut c_void) -> u32
214 where
215 C: CallingConvention,
216 {
217 ffi_wrap!("CallingConvention::return_int_reg", unsafe {
218 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
219
220 match ctxt.cc.return_int_reg() {
221 Some(r) => r.0,
222 _ => 0xffff_ffff,
223 }
224 })
225 }
226
227 extern "C" fn cb_return_hi_int_reg<C>(ctxt: *mut c_void) -> u32
228 where
229 C: CallingConvention,
230 {
231 ffi_wrap!("CallingConvention::return_hi_int_reg", unsafe {
232 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
233
234 match ctxt.cc.return_hi_int_reg() {
235 Some(r) => r.0,
236 _ => 0xffff_ffff,
237 }
238 })
239 }
240
241 extern "C" fn cb_return_float_reg<C>(ctxt: *mut c_void) -> u32
242 where
243 C: CallingConvention,
244 {
245 ffi_wrap!("CallingConvention::return_float_reg", unsafe {
246 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
247
248 match ctxt.cc.return_float_reg() {
249 Some(r) => r.0,
250 _ => 0xffff_ffff,
251 }
252 })
253 }
254
255 extern "C" fn cb_global_pointer_reg<C>(ctxt: *mut c_void) -> u32
256 where
257 C: CallingConvention,
258 {
259 ffi_wrap!("CallingConvention::global_pointer_reg", unsafe {
260 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
261
262 match ctxt.cc.global_pointer_reg() {
263 Some(r) => r.0,
264 _ => 0xffff_ffff,
265 }
266 })
267 }
268
269 extern "C" fn cb_implicitly_defined_registers<C>(
270 ctxt: *mut c_void,
271 count: *mut usize,
272 ) -> *mut u32
273 where
274 C: CallingConvention,
275 {
276 ffi_wrap!("CallingConvention::implicitly_defined_registers", unsafe {
277 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
278 let mut regs: Vec<_> = ctxt
279 .cc
280 .implicitly_defined_registers()
281 .iter()
282 .map(|r| r.0)
283 .collect();
284
285 *count = regs.len();
287 let regs_ptr = regs.as_mut_ptr();
288 std::mem::forget(regs);
289 regs_ptr
290 })
291 }
292
293 #[allow(clippy::extra_unused_type_parameters)]
294 extern "C" fn cb_incoming_reg_value<C>(
295 _ctxt: *mut c_void,
296 _reg: u32,
297 _func: *mut BNFunction,
298 val: *mut BNRegisterValue,
299 ) where
300 C: CallingConvention,
301 {
302 ffi_wrap!("CallingConvention::incoming_reg_value", unsafe {
304 let val = &mut *val;
306
307 val.state = BNRegisterValueType::EntryValue;
308 val.value = _reg as i64;
309 })
310 }
311
312 #[allow(clippy::extra_unused_type_parameters)]
313 extern "C" fn cb_incoming_flag_value<C>(
314 _ctxt: *mut c_void,
315 _flag: u32,
316 _func: *mut BNFunction,
317 val: *mut BNRegisterValue,
318 ) where
319 C: CallingConvention,
320 {
321 ffi_wrap!("CallingConvention::incoming_flag_value", unsafe {
323 let val = &mut *val;
325
326 val.state = BNRegisterValueType::EntryValue;
327 val.value = _flag as i64;
328 })
329 }
330
331 extern "C" fn cb_incoming_var_for_param<C>(
332 ctxt: *mut c_void,
333 var: *const BNVariable,
334 _func: *mut BNFunction,
335 param: *mut BNVariable,
336 ) where
337 C: CallingConvention,
338 {
339 ffi_wrap!("CallingConvention::incoming_var_for_param", unsafe {
340 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
341 std::ptr::write(
342 param,
343 BNGetDefaultIncomingVariableForParameterVariable(ctxt.raw_handle, var),
344 );
345 })
346 }
347
348 extern "C" fn cb_incoming_param_for_var<C>(
349 ctxt: *mut c_void,
350 var: *const BNVariable,
351 _func: *mut BNFunction,
352 param: *mut BNVariable,
353 ) where
354 C: CallingConvention,
355 {
356 ffi_wrap!("CallingConvention::incoming_param_for_var", unsafe {
357 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
358 std::ptr::write(
359 param,
360 BNGetDefaultParameterVariableForIncomingVariable(ctxt.raw_handle, var),
361 );
362 })
363 }
364
365 extern "C" fn cb_are_argument_registers_used_for_var_args<C>(ctxt: *mut c_void) -> bool
366 where
367 C: CallingConvention,
368 {
369 ffi_wrap!(
370 "CallingConvention::are_argument_registers_used_for_var_args",
371 unsafe {
372 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
373
374 ctxt.cc.are_argument_registers_used_for_var_args()
375 }
376 )
377 }
378
379 let name = name.to_cstr();
380 let raw = Box::into_raw(Box::new(CustomCallingConventionContext {
381 raw_handle: std::ptr::null_mut(),
382 cc,
383 }));
384 let mut cc = BNCustomCallingConvention {
385 context: raw as *mut _,
386
387 freeObject: Some(cb_free::<C>),
388
389 getCallerSavedRegisters: Some(cb_caller_saved::<C>),
390 getCalleeSavedRegisters: Some(cb_callee_saved::<C>),
391 getIntegerArgumentRegisters: Some(cb_int_args::<C>),
392 getFloatArgumentRegisters: Some(cb_float_args::<C>),
393 freeRegisterList: Some(cb_free_register_list),
394
395 areArgumentRegistersSharedIndex: Some(cb_arg_shared_index::<C>),
396 isStackReservedForArgumentRegisters: Some(cb_stack_reserved_arg_regs::<C>),
397 isStackAdjustedOnReturn: Some(cb_stack_adjusted_on_return::<C>),
398 isEligibleForHeuristics: Some(cb_is_eligible_for_heuristics::<C>),
399
400 getIntegerReturnValueRegister: Some(cb_return_int_reg::<C>),
401 getHighIntegerReturnValueRegister: Some(cb_return_hi_int_reg::<C>),
402 getFloatReturnValueRegister: Some(cb_return_float_reg::<C>),
403 getGlobalPointerRegister: Some(cb_global_pointer_reg::<C>),
404
405 getImplicitlyDefinedRegisters: Some(cb_implicitly_defined_registers::<C>),
406 getIncomingRegisterValue: Some(cb_incoming_reg_value::<C>),
407 getIncomingFlagValue: Some(cb_incoming_flag_value::<C>),
408 getIncomingVariableForParameterVariable: Some(cb_incoming_var_for_param::<C>),
409 getParameterVariableForIncomingVariable: Some(cb_incoming_param_for_var::<C>),
410
411 areArgumentRegistersUsedForVarArgs: Some(cb_are_argument_registers_used_for_var_args::<C>),
412 };
413
414 unsafe {
415 let cc_name = name.as_ptr();
416 let result = BNCreateCallingConvention(arch.as_ref().handle, cc_name, &mut cc);
417
418 assert!(!result.is_null());
419
420 (*raw).raw_handle = result;
421
422 BNRegisterCallingConvention(arch.as_ref().handle, result);
423
424 Ref::new(CoreCallingConvention {
425 handle: result,
426 arch_handle: arch.as_ref().handle(),
427 })
428 }
429}
430
/// Wrapper around a core `BNCallingConvention` handle together with the
/// architecture it belongs to.
pub struct CoreCallingConvention {
    pub(crate) handle: *mut BNCallingConvention,
    pub(crate) arch_handle: CoreArchitecture,
}
435
impl CoreCallingConvention {
    /// Wraps a borrowed core handle without taking a new core reference.
    pub(crate) unsafe fn from_raw(
        handle: *mut BNCallingConvention,
        arch: CoreArchitecture,
    ) -> Self {
        CoreCallingConvention {
            handle,
            arch_handle: arch,
        }
    }

    /// Wraps an owned core handle; the returned [`Ref`] releases it on drop.
    pub(crate) unsafe fn ref_from_raw(
        handle: *mut BNCallingConvention,
        arch: CoreArchitecture,
    ) -> Ref<Self> {
        Ref::new(CoreCallingConvention {
            handle,
            arch_handle: arch,
        })
    }

    /// The name this calling convention is registered under.
    pub fn name(&self) -> String {
        unsafe { BnString::into_string(BNGetCallingConventionName(self.handle)) }
    }

    /// Computes the variable locations assigned to each of `params`.
    ///
    /// When `permitted_registers` is `Some`, argument registers are restricted
    /// to that set; otherwise the convention's default permitted arguments are
    /// used.
    pub fn variables_for_parameters(
        &self,
        params: &[FunctionParameter],
        permitted_registers: Option<&[CoreRegister]>,
    ) -> Vec<Variable> {
        let mut count: usize = 0;
        // Convert to raw FFI parameters; every one is freed again below.
        let raw_params: Vec<BNFunctionParameter> = params
            .iter()
            .cloned()
            .map(FunctionParameter::into_raw)
            .collect();
        let raw_vars_ptr: *mut BNVariable = if let Some(permitted_args) = permitted_registers {
            let permitted_regs = permitted_args.iter().map(|r| r.id().0).collect::<Vec<_>>();

            unsafe {
                BNGetVariablesForParameters(
                    self.handle,
                    raw_params.as_ptr(),
                    raw_params.len(),
                    permitted_regs.as_ptr(),
                    permitted_regs.len(),
                    &mut count,
                )
            }
        } else {
            unsafe {
                BNGetVariablesForParametersDefaultPermittedArgs(
                    self.handle,
                    raw_params.as_ptr(),
                    raw_params.len(),
                    &mut count,
                )
            }
        };

        // Release the temporary raw parameters created above.
        for raw_param in raw_params {
            FunctionParameter::free_raw(raw_param);
        }

        // The Array wrapper takes ownership of the core-allocated buffer and
        // frees it after conversion.
        unsafe { Array::<Variable>::new(raw_vars_ptr, count, ()) }.to_vec()
    }
}
503
// SAFETY(review): these assert the raw core handle may be moved to and shared
// across threads — presumably the core synchronizes internally; confirm
// against the core API's threading guarantees.
unsafe impl Send for CoreCallingConvention {}
unsafe impl Sync for CoreCallingConvention {}
506
507impl Eq for CoreCallingConvention {}
508impl PartialEq for CoreCallingConvention {
509 fn eq(&self, rhs: &Self) -> bool {
510 self.handle == rhs.handle
511 }
512}
513
impl Debug for CoreCallingConvention {
    // Formatting queries the core for every property, so each `Debug` render
    // performs a series of FFI calls; field order mirrors the trait definition.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoreCallingConvention")
            .field("name", &self.name())
            .field("caller_saved_registers", &self.caller_saved_registers())
            .field("callee_saved_registers", &self.callee_saved_registers())
            .field("int_arg_registers", &self.int_arg_registers())
            .field("float_arg_registers", &self.float_arg_registers())
            .field(
                "arg_registers_shared_index",
                &self.arg_registers_shared_index(),
            )
            .field(
                "reserved_stack_space_for_arg_registers",
                &self.reserved_stack_space_for_arg_registers(),
            )
            .field("stack_adjusted_on_return", &self.stack_adjusted_on_return())
            .field(
                "is_eligible_for_heuristics",
                &self.is_eligible_for_heuristics(),
            )
            .field("return_int_reg", &self.return_int_reg())
            .field("return_hi_int_reg", &self.return_hi_int_reg())
            .field("return_float_reg", &self.return_float_reg())
            .field("global_pointer_reg", &self.global_pointer_reg())
            .field(
                "implicitly_defined_registers",
                &self.implicitly_defined_registers(),
            )
            .field(
                "are_argument_registers_used_for_var_args",
                &self.are_argument_registers_used_for_var_args(),
            )
            .finish()
    }
}
550
impl Hash for CoreCallingConvention {
    // Hash by core-handle identity, consistent with `PartialEq` comparing
    // handles.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.handle.hash(state);
    }
}
556
impl CallingConvention for CoreCallingConvention {
    // For each register-list accessor: copy the core-owned list into a Vec,
    // then return the list buffer to the core via BNFreeRegisterList.
    fn caller_saved_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetCallerSavedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn callee_saved_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetCalleeSavedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn int_arg_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetIntegerArgumentRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn float_arg_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetFloatArgumentRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn arg_registers_shared_index(&self) -> bool {
        unsafe { BNAreArgumentRegistersSharedIndex(self.handle) }
    }

    fn reserved_stack_space_for_arg_registers(&self) -> bool {
        unsafe { BNIsStackReservedForArgumentRegisters(self.handle) }
    }

    fn stack_adjusted_on_return(&self) -> bool {
        unsafe { BNIsStackAdjustedOnReturn(self.handle) }
    }

    fn is_eligible_for_heuristics(&self) -> bool {
        unsafe { BNIsEligibleForHeuristics(self.handle) }
    }

    // For the single-register accessors, ids at or above 0x8000_0000 are
    // treated as "no register" sentinels (the registration path reports
    // 0xffff_ffff for `None`); valid-looking ids are additionally checked
    // against the architecture before being returned.
    fn return_int_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetIntegerReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn return_hi_int_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetHighIntegerReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn return_float_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetFloatReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn global_pointer_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetGlobalPointerRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn implicitly_defined_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetImplicitlyDefinedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn are_argument_registers_used_for_var_args(&self) -> bool {
        unsafe { BNAreArgumentRegistersUsedForVarArgs(self.handle) }
    }
}
692
impl ToOwned for CoreCallingConvention {
    type Owned = Ref<Self>;

    /// Takes an additional reference to the underlying core object.
    fn to_owned(&self) -> Self::Owned {
        unsafe { RefCountable::inc_ref(self) }
    }
}
700
unsafe impl RefCountable for CoreCallingConvention {
    unsafe fn inc_ref(handle: &Self) -> Ref<Self> {
        Ref::new(Self {
            // Bump the core-side refcount and wrap the returned handle.
            handle: BNNewCallingConventionReference(handle.handle),
            arch_handle: handle.arch_handle,
        })
    }

    unsafe fn dec_ref(handle: &Self) {
        BNFreeCallingConvention(handle.handle);
    }
}
713
// Lets `CoreCallingConvention` be yielded from core-allocated arrays; the
// architecture travels along as the array's context.
impl CoreArrayProvider for CoreCallingConvention {
    type Raw = *mut BNCallingConvention;
    type Context = CoreArchitecture;
    type Wrapped<'a> = Guard<'a, CoreCallingConvention>;
}
719
unsafe impl CoreArrayProviderInner for CoreCallingConvention {
    unsafe fn free(raw: *mut *mut BNCallingConvention, count: usize, _content: &Self::Context) {
        BNFreeCallingConventionList(raw, count);
    }

    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
        // Borrow the element without taking a new core reference; the Guard
        // ties the wrapper's lifetime to the owning array.
        Guard::new(
            CoreCallingConvention {
                handle: *raw,
                arch_handle: *context,
            },
            context,
        )
    }
}
735
/// Builder for defining a calling convention from register-name lists and
/// flags; finalize and register it with [`ConventionBuilder::register`].
pub struct ConventionBuilder<A: Architecture> {
    // Register ids resolved against `arch_handle` by the generated setters.
    caller_saved_registers: Vec<RegisterId>,
    callee_saved_registers: Vec<RegisterId>,
    int_arg_registers: Vec<RegisterId>,
    float_arg_registers: Vec<RegisterId>,

    arg_registers_shared_index: bool,
    reserved_stack_space_for_arg_registers: bool,
    stack_adjusted_on_return: bool,
    is_eligible_for_heuristics: bool,

    return_int_reg: Option<RegisterId>,
    return_hi_int_reg: Option<RegisterId>,
    return_float_reg: Option<RegisterId>,

    global_pointer_reg: Option<RegisterId>,

    implicitly_defined_registers: Vec<RegisterId>,

    are_argument_registers_used_for_var_args: bool,

    // Architecture used to resolve register names in the setters.
    arch_handle: A::Handle,
    _arch: PhantomData<*const A>,
}
760
// Generates a chainable builder setter for the like-named boolean field.
macro_rules! bool_arg {
    ($name:ident) => {
        pub fn $name(mut self, val: bool) -> Self {
            self.$name = val;
            self
        }
    };
}
769
// Generates a chainable builder setter that resolves register names against
// the architecture and stores the resulting ids. Note: names the architecture
// does not recognize are silently dropped (filter_map).
macro_rules! reg_list {
    ($name:ident) => {
        pub fn $name(mut self, regs: &[&str]) -> Self {
            {
                let arch = self.arch_handle.borrow();
                let arch_regs = regs
                    .iter()
                    .filter_map(|&r| arch.register_by_name(r))
                    .map(|r| r.id());

                self.$name = arch_regs.collect();
            }

            self
        }
    };
}
788
// Generates a chainable builder setter for a single optional register field.
// An unrecognized register name resolves to `None`.
macro_rules! reg {
    ($name:ident) => {
        pub fn $name(mut self, reg: &str) -> Self {
            {
                let arch = self.arch_handle.borrow();
                self.$name = arch.register_by_name(reg).map(|r| r.id());
            }

            self
        }
    };
}
802
impl<A: Architecture> ConventionBuilder<A> {
    /// Creates a builder with empty register lists, all flags cleared, and no
    /// return/global-pointer registers set.
    pub fn new(arch: &A) -> Self {
        Self {
            caller_saved_registers: Vec::new(),
            callee_saved_registers: Vec::new(),
            int_arg_registers: Vec::new(),
            float_arg_registers: Vec::new(),

            arg_registers_shared_index: false,
            reserved_stack_space_for_arg_registers: false,
            stack_adjusted_on_return: false,
            is_eligible_for_heuristics: false,

            return_int_reg: None,
            return_hi_int_reg: None,
            return_float_reg: None,

            global_pointer_reg: None,

            implicitly_defined_registers: Vec::new(),

            are_argument_registers_used_for_var_args: false,

            arch_handle: arch.handle(),
            _arch: PhantomData,
        }
    }

    // Chainable setters generated by the macros above; each takes register
    // names (resolved via the architecture) or a boolean and returns `self`.
    reg_list!(caller_saved_registers);
    reg_list!(callee_saved_registers);
    reg_list!(int_arg_registers);
    reg_list!(float_arg_registers);

    bool_arg!(arg_registers_shared_index);
    bool_arg!(reserved_stack_space_for_arg_registers);
    bool_arg!(stack_adjusted_on_return);
    bool_arg!(is_eligible_for_heuristics);

    reg!(return_int_reg);
    reg!(return_hi_int_reg);
    reg!(return_float_reg);

    reg!(global_pointer_reg);

    reg_list!(implicitly_defined_registers);

    bool_arg!(are_argument_registers_used_for_var_args);

    /// Registers the built convention with the core under `name` and returns
    /// the resulting core wrapper.
    pub fn register(self, name: &str) -> Ref<CoreCallingConvention> {
        let arch = self.arch_handle.clone();
        register_calling_convention(arch.borrow(), name, self)
    }
}
856
857impl<A: Architecture> CallingConvention for ConventionBuilder<A> {
858 fn caller_saved_registers(&self) -> Vec<RegisterId> {
859 self.caller_saved_registers.clone()
860 }
861
862 fn callee_saved_registers(&self) -> Vec<RegisterId> {
863 self.callee_saved_registers.clone()
864 }
865
866 fn int_arg_registers(&self) -> Vec<RegisterId> {
867 self.int_arg_registers.clone()
868 }
869
870 fn float_arg_registers(&self) -> Vec<RegisterId> {
871 self.float_arg_registers.clone()
872 }
873
874 fn arg_registers_shared_index(&self) -> bool {
875 self.arg_registers_shared_index
876 }
877
878 fn reserved_stack_space_for_arg_registers(&self) -> bool {
879 self.reserved_stack_space_for_arg_registers
880 }
881
882 fn stack_adjusted_on_return(&self) -> bool {
883 self.stack_adjusted_on_return
884 }
885
886 fn is_eligible_for_heuristics(&self) -> bool {
887 self.is_eligible_for_heuristics
888 }
889
890 fn return_int_reg(&self) -> Option<RegisterId> {
891 self.return_int_reg
892 }
893
894 fn return_hi_int_reg(&self) -> Option<RegisterId> {
895 self.return_hi_int_reg
896 }
897
898 fn return_float_reg(&self) -> Option<RegisterId> {
899 self.return_float_reg
900 }
901
902 fn global_pointer_reg(&self) -> Option<RegisterId> {
903 self.global_pointer_reg
904 }
905
906 fn implicitly_defined_registers(&self) -> Vec<RegisterId> {
907 self.implicitly_defined_registers.clone()
908 }
909
910 fn are_argument_registers_used_for_var_args(&self) -> bool {
911 self.are_argument_registers_used_for_var_args
912 }
913}
914
// SAFETY(review): these blanket impls assert the builder may cross and be
// shared across threads for any `A`; that is only sound if `A::Handle` is
// itself safe to move/share — TODO confirm against the `Architecture::Handle`
// contract.
unsafe impl<A: Architecture> Send for ConventionBuilder<A> {}
unsafe impl<A: Architecture> Sync for ConventionBuilder<A> {}