The `AddressRangeExt` trait needed some improvement and cleanup, in preparation for making the address types safer, saner, and compatible with strict provenance.
···59596060 for region in self.regions.iter().rev() {
6161 // only consider regions that we haven't already exhausted
6262- if offset < region.size() {
6262+ if offset < region.len() {
6363 // Allocating a contiguous range has different requirements than "regular" allocation
6464 // contiguous are rare and often happen in very critical paths where e.g. virtual
6565 // memory is not available yet. So we rather waste some memory than outright crash.
6666- if region.size() - offset < requested_size {
6666+ if region.len() - offset < requested_size {
6767 tracing::warn!(
6868 "Skipped memory region {region:?} since it was too small to fulfill request for {requested_size} bytes. Wasted {} bytes in the process...",
6969- region.size() - offset
6969+ region.len() - offset
7070 );
7171
7272-                    self.offset += region.size() - offset;
7272+                    self.offset += region.len() - offset;
7373 offset = 0;
7474 continue;
7575 }
···8080 return Some(frame);
8181 }
8282
8383-            offset -= region.size();
8383+            offset -= region.len();
8484 }
85858686 None
···122122 loop {
123123 let mut region = self.inner.next()?;
124124 // keep advancing past already fully used memory regions
125125- if self.offset >= region.size() {
126126- self.offset -= region.size();
125125+ if self.offset >= region.len() {
126126+ self.offset -= region.len();
127127 continue;
128128 } else if self.offset > 0 {
129129 region.end = region.end.checked_sub(self.offset).unwrap();
+6-6
kernel/src/mem/frame_alloc/arena.rs
···57575858impl Arena {
5959 pub fn from_selection(selection: ArenaSelection) -> Self {
6060- debug_assert!(selection.bookkeeping.size() >= bookkeeping_size(selection.arena.size()));
6060+ debug_assert!(selection.bookkeeping.len() >= bookkeeping_size(selection.arena.len()));
61616262 // Safety: arena selection has ensured the region is valid
6363 let slots: &mut [MaybeUninit<FrameInfo>] = unsafe {
···68686969 slice::from_raw_parts_mut(
7070 ptr,
7171- selection.bookkeeping.size() / ARENA_PAGE_BOOKKEEPING_SIZE,
7171+ selection.bookkeeping.len() / ARENA_PAGE_BOOKKEEPING_SIZE,
7272 )
7373 };
7474
7575-        let mut remaining_bytes = selection.arena.size();
7575+        let mut remaining_bytes = selection.arena.len();
7676 let mut addr = selection.arena.start;
7777 let mut total_frames = 0;
7878 let mut max_order = 0;
···106106 }
107107108108 // Make sure we've accounted for all frames
109109- debug_assert_eq!(total_frames, selection.arena.size() / arch::PAGE_SIZE);
109109+ debug_assert_eq!(total_frames, selection.arena.len() / arch::PAGE_SIZE);
110110111111 Self {
112112 range: selection.arena,
···251251 while let Some(region) = self.free_regions.pop() {
252252 tracing::debug!(arena.end=?arena.end,region=?region, "Attempting to add free region");
253253254254- debug_assert!(!arena.is_overlapping(®ion));
254254+ debug_assert!(!arena.overlaps(®ion));
255255256256 let pages_in_hole = if arena.end <= region.start {
257257 // the region is higher than the current arena
···280280 }
281281282282 let mut aligned = arena.checked_align_in(arch::PAGE_SIZE).unwrap();
283283- let bookkeeping_size = bookkeeping_size(aligned.size());
283283+ let bookkeeping_size = bookkeeping_size(aligned.len());
284284285285 // We can't use empty arenas anyway
286286 if aligned.is_empty() {
+5-5
kernel/src/mem/mmap.rs
···119119 Permissions::READ | Permissions::WRITE,
120120 |range_virt, perms, _batch| {
121121 Ok(AddressSpaceRegion::new_phys(
122122- range_virt.clone(),
122122+ range_virt,
123123 perms,
124124 range_phys.clone(),
125125 name,
···175175176176 // Safety: checked by caller
177177 unsafe {
178178- let slice = slice::from_raw_parts(self.range.start.as_ptr(), self.range().size());
178178+ let slice = slice::from_raw_parts(self.range.start.as_ptr(), self.range().len());
179179180180 f(&slice[range]);
181181 }
···201201 // Safety: checked by caller
202202 unsafe {
203203 let slice =
204204- slice::from_raw_parts_mut(self.range.start.as_mut_ptr(), self.range().size());
204204+ slice::from_raw_parts_mut(self.range.start.as_mut_ptr(), self.range().len());
205205 f(&mut slice[range]);
206206 }
207207···236236 #[inline]
237237 pub fn len(&self) -> usize {
238238 // Safety: the constructor ensures that the NonNull is valid.
239239- self.range.size()
239239+ self.range.len()
240240 }
241241242242 /// Whether this is a mapping of zero bytes
···277277 unsafe {
278278 aspace.arch.update_flags(
279279 self.range.start,
280280- NonZeroUsize::new(self.range.size()).unwrap(),
280280+ NonZeroUsize::new(self.range.len()).unwrap(),
281281 new_permissions.into(),
282282 &mut flush,
283283 )?;
···6767 .and_then(|addr| usize::from_str(addr).ok())
6868 {
6969 // if the node is a CPU check its availability and populate the hart_mask
7070-7170 let available = find_cstr_property(node.properties(), "status")? == Some(c"okay");
72717372 if available {
···8685 memories.push({
8786 let start = PhysicalAddress::new(reg.starting_address);
88878989- start..start.checked_add(reg.size.unwrap_or(0)).unwrap()
8888+ Range::from_start_len(start, reg.size.unwrap_or(0))
9089 });
9190 }
9291 } else if stack[depth - 1].is_some_and(|(s, _)| s == "reserved-memory") {
···9998 reserved_memory.push({
10099 let start = PhysicalAddress::new(reg.starting_address);
101100102102- start..start.checked_add(reg.size.unwrap_or(0)).unwrap()
101101+ Range::from_start_len(start, reg.size.unwrap_or(0))
103102 });
104103 }
105104 } else if name.name == "chosen" {
···139138140139 // Apply reserved_entries
141140 while let Some(entry) = reservations.next()? {
142142- let region = {
143143- let start = PhysicalAddress::new(usize::try_from(entry.address)?);
141141+ let region = Range::from_start_len(
142142+ PhysicalAddress::try_from(entry.address)?,
143143+ usize::try_from(entry.size)?,
144144+ );
144145145145- start..start.checked_add(usize::try_from(entry.size)?).unwrap()
146146- };
147146 log::trace!("applying reservation {region:#x?}");
148147149148 exclude_region(region);
···157156 }
158157159158 // remove memory regions that are left as zero-sized from the previous step
160160- memories.retain(|region| region.size() > 0);
159159+ memories.retain(|region| !region.is_empty());
161160162161 // page-align all memory regions, this will waste some physical memory in the process,
163162 // but we can't make use of it either way