1use binaryninjacore_sys::*;
18use std::fmt::{Debug, Formatter};
19
20use std::ops::Range;
21
22use crate::binary_view::BinaryView;
23use crate::rc::*;
24
/// Builder used to configure and add a new [`Segment`] to a [`BinaryView`].
///
/// Configure the segment via the chained setter methods, then pass the
/// builder to the crate-internal `create` to register it with the view.
#[must_use]
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct SegmentBuilder {
    // Virtual address range the segment will occupy.
    ea: Range<u64>,
    // Optional range in the parent backing (e.g. the raw file data);
    // `None` means the segment has no file backing.
    parent_backing: Option<Range<u64>>,
    // Permission/content flags applied to the segment.
    flags: SegmentFlags,
    // When true the segment is added as auto-defined; otherwise user-defined.
    is_auto: bool,
}
33
34impl SegmentBuilder {
35 pub fn new(ea: Range<u64>) -> Self {
36 SegmentBuilder {
37 ea,
38 parent_backing: None,
39 flags: Default::default(),
40 is_auto: false,
41 }
42 }
43
44 pub fn parent_backing(mut self, parent_backing: Range<u64>) -> Self {
45 self.parent_backing = Some(parent_backing);
46 self
47 }
48
49 pub fn flags(mut self, flags: SegmentFlags) -> Self {
50 self.flags = flags;
51 self
52 }
53
54 pub fn is_auto(mut self, is_auto: bool) -> Self {
55 self.is_auto = is_auto;
56 self
57 }
58
59 pub(crate) fn create(self, view: &BinaryView) {
60 let ea_start = self.ea.start;
61 let ea_len = self.ea.end.wrapping_sub(ea_start);
62 let (b_start, b_len) = self
63 .parent_backing
64 .map_or((0, 0), |s| (s.start, s.end.wrapping_sub(s.start)));
65
66 unsafe {
67 if self.is_auto {
68 BNAddAutoSegment(
69 view.handle,
70 ea_start,
71 ea_len,
72 b_start,
73 b_len,
74 self.flags.into_raw(),
75 );
76 } else {
77 BNAddUserSegment(
78 view.handle,
79 ea_start,
80 ea_len,
81 b_start,
82 b_len,
83 self.flags.into_raw(),
84 );
85 }
86 }
87 }
88}
89
/// A segment in a [`BinaryView`]'s address space, wrapping a core
/// `BNSegment` handle.
///
/// NOTE: the derived `PartialEq`/`Eq`/`Hash` compare the raw handle
/// pointer, i.e. identity of the underlying core object — not the
/// segment's contents.
#[derive(PartialEq, Eq, Hash)]
pub struct Segment {
    handle: *mut BNSegment,
}
94
95impl Segment {
96 pub(crate) unsafe fn from_raw(handle: *mut BNSegment) -> Self {
97 assert!(!handle.is_null());
98 Self { handle }
99 }
100
101 pub(crate) unsafe fn ref_from_raw(handle: *mut BNSegment) -> Ref<Self> {
102 assert!(!handle.is_null());
103 Ref::new(Self { handle })
104 }
105
106 pub fn builder(ea_range: Range<u64>) -> SegmentBuilder {
116 SegmentBuilder::new(ea_range)
117 }
118
119 pub fn address_range(&self) -> Range<u64> {
120 let start = unsafe { BNSegmentGetStart(self.handle) };
121 let end = unsafe { BNSegmentGetEnd(self.handle) };
122 start..end
123 }
124
125 pub fn parent_backing(&self) -> Option<Range<u64>> {
126 let start = unsafe { BNSegmentGetDataOffset(self.handle) };
127 let end = unsafe { BNSegmentGetDataEnd(self.handle) };
128
129 if start != end {
130 Some(start..end)
131 } else {
132 None
133 }
134 }
135
136 pub fn flags(&self) -> SegmentFlags {
137 let raw_flags = unsafe { BNSegmentGetFlags(self.handle) };
138 SegmentFlags::from_raw(raw_flags)
139 }
140
141 pub fn executable(&self) -> bool {
142 self.flags().executable
143 }
144
145 pub fn writable(&self) -> bool {
146 self.flags().writable
147 }
148
149 pub fn readable(&self) -> bool {
150 self.flags().readable
151 }
152
153 pub fn contains_data(&self) -> bool {
154 self.flags().contains_data
155 }
156
157 pub fn contains_code(&self) -> bool {
158 self.flags().contains_code
159 }
160
161 pub fn deny_write(&self) -> bool {
162 self.flags().deny_write
163 }
164
165 pub fn deny_execute(&self) -> bool {
166 self.flags().deny_execute
167 }
168
169 pub fn auto_defined(&self) -> bool {
170 unsafe { BNSegmentIsAutoDefined(self.handle) }
171 }
172}
173
174impl Debug for Segment {
175 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
176 f.debug_struct("Segment")
177 .field("address_range", &self.address_range())
178 .field("parent_backing", &self.parent_backing())
179 .field("auto_defined", &self.auto_defined())
180 .field("flags", &self.flags())
181 .finish()
182 }
183}
184
impl ToOwned for Segment {
    type Owned = Ref<Self>;

    // Taking ownership of a borrowed segment acquires a new core reference
    // via RefCountable::inc_ref.
    fn to_owned(&self) -> Self::Owned {
        unsafe { RefCountable::inc_ref(self) }
    }
}
192
// Reference counting for the core-owned BNSegment: inc_ref obtains a new
// handle via BNNewSegmentReference; dec_ref releases one via BNFreeSegment.
unsafe impl RefCountable for Segment {
    unsafe fn inc_ref(handle: &Self) -> Ref<Self> {
        // The returned handle is paired with a later dec_ref/BNFreeSegment.
        Ref::new(Self {
            handle: BNNewSegmentReference(handle.handle),
        })
    }

    unsafe fn dec_ref(handle: &Self) {
        BNFreeSegment(handle.handle);
    }
}
204
// Describes how Segment values appear in core-allocated arrays: each raw
// element is a *mut BNSegment, no extra context is needed, and items are
// exposed through a lifetime-bound Guard.
impl CoreArrayProvider for Segment {
    type Raw = *mut BNSegment;
    type Context = ();
    type Wrapped<'a> = Guard<'a, Segment>;
}
210
unsafe impl CoreArrayProviderInner for Segment {
    unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
        // The array is core-allocated; release it with the matching core
        // free function.
        BNFreeSegmentList(raw, count);
    }

    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
        // Guard borrows the array element ('a), keeping the wrapped Segment
        // from outliving the backing array.
        Guard::new(Segment::from_raw(*raw), context)
    }
}
220
/// Permission and content flags for a [`Segment`].
///
/// Each boolean mirrors one bit of the core's raw `u32` flag value; the
/// exact bit layout is fixed by `from_raw`/`into_raw` below (bit 0 =
/// executable through bit 6 = deny_execute).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct SegmentFlags {
    pub executable: bool,
    pub writable: bool,
    pub readable: bool,
    pub contains_data: bool,
    pub contains_code: bool,
    pub deny_write: bool,
    pub deny_execute: bool,
}

impl SegmentFlags {
    /// Creates a flag set with every flag cleared.
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets or clears the executable flag (bit 0).
    pub fn executable(self, executable: bool) -> Self {
        Self { executable, ..self }
    }

    /// Sets or clears the writable flag (bit 1).
    pub fn writable(self, writable: bool) -> Self {
        Self { writable, ..self }
    }

    /// Sets or clears the readable flag (bit 2).
    pub fn readable(self, readable: bool) -> Self {
        Self { readable, ..self }
    }

    /// Sets or clears the contains-data flag (bit 3).
    pub fn contains_data(self, contains_data: bool) -> Self {
        Self {
            contains_data,
            ..self
        }
    }

    /// Sets or clears the contains-code flag (bit 4).
    pub fn contains_code(self, contains_code: bool) -> Self {
        Self {
            contains_code,
            ..self
        }
    }

    /// Sets or clears the deny-write flag (bit 5).
    pub fn deny_write(self, deny_write: bool) -> Self {
        Self { deny_write, ..self }
    }

    /// Sets or clears the deny-execute flag (bit 6).
    pub fn deny_execute(self, deny_execute: bool) -> Self {
        Self {
            deny_execute,
            ..self
        }
    }

    /// Decodes the low seven bits of a raw core flag value; any higher bits
    /// are ignored.
    pub(crate) fn from_raw(flags: u32) -> Self {
        let bit = |n: u32| flags & (1 << n) != 0;
        Self {
            executable: bit(0),
            writable: bit(1),
            readable: bit(2),
            contains_data: bit(3),
            contains_code: bit(4),
            deny_write: bit(5),
            deny_execute: bit(6),
        }
    }

    /// Encodes the flags back into the raw core `u32` representation
    /// (inverse of `from_raw` over the low seven bits).
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn into_raw(&self) -> u32 {
        let mut raw = 0u32;
        for (set, shift) in [
            (self.executable, 0),
            (self.writable, 1),
            (self.readable, 2),
            (self.contains_data, 3),
            (self.contains_code, 4),
            (self.deny_write, 5),
            (self.deny_execute, 6),
        ] {
            if set {
                raw |= 1 << shift;
            }
        }
        raw
    }
}
294}