fix: allocator: freelist: reserve does not handle overlapping regions

This commit is contained in:
Paul Pan 2024-04-07 00:21:13 +08:00
parent 0bb447ca28
commit e9751585fe
2 changed files with 50 additions and 22 deletions

View File

@ -13,10 +13,10 @@ pub fn align_down(addr: usize, align: usize) -> usize {
addr & !(align - 1)
}
#[derive(Copy, Clone, Default, PartialOrd, PartialEq)]
#[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct PhysAddr(pub usize);
#[derive(Copy, Clone, Default, PartialOrd, PartialEq)]
#[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct VirtAddr(pub usize);
pub trait AddressOps {
@ -227,6 +227,12 @@ impl<T> From<*mut T> for PhysAddr {
}
}
impl<T> From<*const T> for PhysAddr {
fn from(addr: *const T) -> Self {
PhysAddr(addr as usize)
}
}
impl From<PhysAddr> for usize {
fn from(addr: PhysAddr) -> Self {
addr.0
@ -263,6 +269,12 @@ impl<T> From<*mut T> for VirtAddr {
}
}
impl<T> From<*const T> for VirtAddr {
fn from(addr: *const T) -> Self {
VirtAddr(addr as usize)
}
}
impl From<VirtAddr> for usize {
fn from(addr: VirtAddr) -> Self {
addr.0

View File

@ -3,6 +3,8 @@
use crate::utils::then::Then;
use crate::vspace::addr::{AddressOps, PhysAddr};
use core::alloc::{GlobalAlloc, Layout};
use core::cmp::{max, min};
use core::fmt::Debug;
use spin::Mutex;
struct ListNode {
@ -42,12 +44,21 @@ impl ListNode {
}
}
struct FreeList {
/// Render a free-list node as `ListNode { begin: .., end: .. }` so allocator
/// state can be inspected while debugging.
impl Debug for ListNode {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Statement form of the debug_struct builder; output is identical
        // to the chained version (including `{:#?}` alternate mode).
        let mut builder = f.debug_struct("ListNode");
        builder.field("begin", &self.start_addr());
        builder.field("end", &self.end_addr());
        builder.finish()
    }
}
pub struct FreeList {
head: ListNode,
}
impl FreeList {
const fn new() -> Self {
pub const fn new() -> Self {
Self {
head: ListNode::new(0),
}
@ -78,7 +89,7 @@ impl FreeList {
(size, layout.align())
}
unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 {
pub unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 {
let (size, align) = Self::align_layout(layout);
if let Some((region, alloc_start)) = self.alloc_node(|region| region.fit(size, align)) {
@ -93,7 +104,7 @@ impl FreeList {
}
}
unsafe fn dealloc(&mut self, start: PhysAddr, size: usize) {
pub unsafe fn dealloc(&mut self, start: PhysAddr, size: usize) {
assert_eq!(start.align_up(core::mem::align_of::<ListNode>()), start);
assert!(size >= core::mem::size_of::<ListNode>());
@ -105,32 +116,37 @@ impl FreeList {
}
pub fn reserve(&mut self, start: PhysAddr, size: usize) {
if let Some((region, _)) =
self.alloc_node(|region| (region.start_addr() <= start).and(|| region.fit(size, 1), ()))
{
/* layout
* region: | before | [start +: size] | after |
* ^ ^ ^ ^ region.end_addr()
* | | alloc_start |
* | | alloc_end
* | region.start_addr()
*/
if let Some((region, _)) = self.alloc_node(|region| {
let left = max(region.start_addr(), start);
let right = min(region.end_addr(), start + size);
(left < right).some(|| (), ())
}) {
let region_start = region.start_addr();
let region_end = region.end_addr();
let before_size = (start - region_start).as_usize();
if before_size > 0 {
unsafe { self.dealloc(region_start, before_size) }
}
let after_size = (region_end - (start + size)).as_usize();
if after_size > 0 {
unsafe { self.dealloc(start + size, after_size) }
}
let before_size = (start - region_start).as_usize();
if before_size > 0 {
unsafe { self.dealloc(region_start, before_size) }
}
}
}
}
/// Print the whole free list as a chain of regions, e.g.
/// `ListNode { .. } -> ListNode { .. } -> None`.
impl Debug for FreeList {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Walk the list starting from the sentinel head; the head itself
        // is never printed, only the regions linked after it.
        let mut cursor = &self.head;
        while let Some(ref node) = cursor.next {
            write!(f, "{:?} -> ", node)?;
            cursor = node;
        }
        write!(f, "None")
    }
}
pub struct FreeListAllocator {
list: Mutex<FreeList>,