1use alloc::collections::BTreeMap;
2#[allow(unused_imports)] use alloc::vec::Vec;
4use core::fmt;
5
6use memory_addr::{AddrRange, MemoryAddr};
7
8use crate::{MappingBackend, MappingError, MappingResult, MemoryArea};
9
/// A container of contiguous, non-overlapping virtual memory areas, each
/// backed by a [`MappingBackend`].
pub struct MemorySet<B: MappingBackend> {
    // Areas keyed by start address; kept sorted and disjoint (see `map`,
    // which rejects or unmaps overlaps before inserting).
    areas: BTreeMap<B::Addr, MemoryArea<B>>,
}
14
15impl<B: MappingBackend> MemorySet<B> {
16 pub const fn new() -> Self {
18 Self {
19 areas: BTreeMap::new(),
20 }
21 }
22
23 pub fn len(&self) -> usize {
25 self.areas.len()
26 }
27
28 pub fn is_empty(&self) -> bool {
30 self.areas.is_empty()
31 }
32
33 pub fn iter(&self) -> impl Iterator<Item = &MemoryArea<B>> {
35 self.areas.values()
36 }
37
38 pub fn overlaps(&self, range: AddrRange<B::Addr>) -> bool {
40 if let Some((_, before)) = self.areas.range(..range.start).last() {
41 if before.va_range().overlaps(range) {
42 return true;
43 }
44 }
45 if let Some((_, after)) = self.areas.range(range.start..).next() {
46 if after.va_range().overlaps(range) {
47 return true;
48 }
49 }
50 false
51 }
52
53 pub fn find(&self, addr: B::Addr) -> Option<&MemoryArea<B>> {
55 let candidate = self.areas.range(..=addr).last().map(|(_, a)| a);
56 candidate.filter(|a| a.va_range().contains(addr))
57 }
58
59 pub fn find_free_area(
74 &self,
75 hint: B::Addr,
76 size: usize,
77 limit: AddrRange<B::Addr>,
78 align: usize,
79 ) -> Option<B::Addr> {
80 if size % align != 0 {
81 return None;
83 }
84 let mut last_end: <B as MappingBackend>::Addr = hint.max(limit.start).align_up(align);
86 if let Some((_, area)) = self.areas.range(..last_end).last() {
87 last_end = last_end.max(area.end()).align_up(align);
88 }
89 for (&addr, area) in self.areas.range(last_end..) {
90 if last_end.checked_add(size).is_some_and(|end| end <= addr) {
91 return Some(last_end);
92 }
93 last_end = area.end().align_up(align);
94 }
95 if last_end
96 .checked_add(size)
97 .is_some_and(|end| end <= limit.end)
98 {
99 Some(last_end)
100 } else {
101 None
102 }
103 }
104
105 pub fn map(
114 &mut self,
115 area: MemoryArea<B>,
116 page_table: &mut B::PageTable,
117 unmap_overlap: bool,
118 ) -> MappingResult {
119 if area.va_range().is_empty() {
120 return Err(MappingError::InvalidParam);
121 }
122
123 if self.overlaps(area.va_range()) {
124 if unmap_overlap {
125 self.unmap(area.start(), area.size(), page_table)?;
126 } else {
127 return Err(MappingError::AlreadyExists);
128 }
129 }
130
131 area.map_area(page_table)?;
132 assert!(self.areas.insert(area.start(), area).is_none());
133 Ok(())
134 }
135
136 pub fn unmap(
143 &mut self,
144 start: B::Addr,
145 size: usize,
146 page_table: &mut B::PageTable,
147 ) -> MappingResult {
148 let range =
149 AddrRange::try_from_start_size(start, size).ok_or(MappingError::InvalidParam)?;
150 if range.is_empty() {
151 return Ok(());
152 }
153
154 let end = range.end;
155
156 self.areas.retain(|_, area| {
158 if area.va_range().contained_in(range) {
159 area.unmap_area(page_table).unwrap();
160 false
161 } else {
162 true
163 }
164 });
165
166 if let Some((&before_start, before)) = self.areas.range_mut(..start).last() {
168 let before_end = before.end();
169 if before_end > start {
170 if before_end <= end {
171 before.shrink_right(start.sub_addr(before_start), page_table)?;
173 } else {
174 let right_part = before.split(end).unwrap();
176 before.shrink_right(start.sub_addr(before_start), page_table)?;
177 assert_eq!(right_part.start().into(), Into::<usize>::into(end));
178 self.areas.insert(end, right_part);
179 }
180 }
181 }
182
183 if let Some((&after_start, after)) = self.areas.range_mut(start..).next() {
185 let after_end = after.end();
186 if after_start < end {
187 let mut new_area = self.areas.remove(&after_start).unwrap();
189 new_area.shrink_left(after_end.sub_addr(end), page_table)?;
190 assert_eq!(new_area.start().into(), Into::<usize>::into(end));
191 self.areas.insert(end, new_area);
192 }
193 }
194
195 Ok(())
196 }
197
198 pub fn clear(&mut self, page_table: &mut B::PageTable) -> MappingResult {
200 for (_, area) in self.areas.iter() {
201 area.unmap_area(page_table)?;
202 }
203 self.areas.clear();
204 Ok(())
205 }
206
207 pub fn protect(
217 &mut self,
218 start: B::Addr,
219 size: usize,
220 update_flags: impl Fn(B::Flags) -> Option<B::Flags>,
221 page_table: &mut B::PageTable,
222 ) -> MappingResult {
223 let end = start.checked_add(size).ok_or(MappingError::InvalidParam)?;
224 let mut to_insert = Vec::new();
225 for (&area_start, area) in self.areas.iter_mut() {
226 let area_end = area.end();
227
228 if let Some(new_flags) = update_flags(area.flags()) {
229 if area_start >= end {
230 break;
233 } else if area_end <= start {
234 } else if area_start >= start && area_end <= end {
238 area.protect_area(new_flags, page_table)?;
241 area.set_flags(new_flags);
242 } else if area_start < start && area_end > end {
243 let right_part = area.split(end).unwrap();
246 area.set_end(start);
247
248 let mut middle_part =
249 MemoryArea::new(start, size, area.flags(), area.backend().clone());
250 middle_part.protect_area(new_flags, page_table)?;
251 middle_part.set_flags(new_flags);
252
253 to_insert.push((right_part.start(), right_part));
254 to_insert.push((middle_part.start(), middle_part));
255 } else if area_end > end {
256 let right_part = area.split(end).unwrap();
259 area.protect_area(new_flags, page_table)?;
260 area.set_flags(new_flags);
261
262 to_insert.push((right_part.start(), right_part));
263 } else {
264 let mut right_part = area.split(start).unwrap();
267 right_part.protect_area(new_flags, page_table)?;
268 right_part.set_flags(new_flags);
269
270 to_insert.push((right_part.start(), right_part));
271 }
272 }
273 }
274 self.areas.extend(to_insert);
275 Ok(())
276 }
277}
278
279impl<B: MappingBackend> fmt::Debug for MemorySet<B>
280where
281 B::Addr: fmt::Debug,
282 B::Flags: fmt::Debug,
283{
284 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
285 f.debug_list().entries(self.areas.values()).finish()
286 }
287}