Compare commits

...

5 Commits

9 changed files with 291 additions and 23 deletions

View File

@@ -1,4 +1,5 @@
 use crate::utils::extern_addr::ExternSymbol;
+use crate::utils::size::KIB;

 extern "C" {
     static __kernel_start: ExternSymbol;
@@ -26,3 +27,5 @@ extern "C" {
     pub static __tbss_start: ExternSymbol;
     pub static __tbss_end: ExternSymbol;
 }
+
+pub const PAGE_SIZE: usize = 4 * KIB;
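For orientation, a minimal sketch of how the new constant might be used, assuming KIB is 1024 (defined in kernel/src/utils/size.rs, which is not shown in this diff):

    // Illustrative helper only, not from this change: round a byte count
    // up to whole pages (PAGE_SIZE == 4 * KIB == 4096 under the assumption above).
    const fn pages_needed(bytes: usize) -> usize {
        (bytes + PAGE_SIZE - 1) / PAGE_SIZE
    }
    // pages_needed(1) == 1, pages_needed(4096) == 1, pages_needed(4097) == 2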

View File

@@ -5,6 +5,7 @@
 #![feature(asm_const)]
 #![feature(const_mut_refs)]
 #![feature(extern_types)]
+#![feature(let_chains)]
 #![feature(naked_functions)]
 #![feature(panic_info_message)]
 #![feature(stmt_expr_attributes)]
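For reference, #![feature(let_chains)] lets a pattern match and a boolean test share a single if; a generic sketch of the syntax, not taken from any kernel code path in this change:

    // Generic let_chains illustration (hypothetical function):
    fn below_limit(opt: Option<usize>, limit: usize) -> bool {
        // the `let` binding and the extra condition guard the same block
        if let Some(v) = opt && v < limit {
            return true;
        }
        false
    }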

View File

@@ -1,3 +1,4 @@
 pub mod extern_addr;
 pub mod function_name;
 pub mod size;
+pub mod then;

kernel/src/utils/then.rs (new file, 23 lines)
View File

@@ -0,0 +1,23 @@
+pub trait Then {
+    fn and<T, E, F: FnOnce() -> Result<T, E>>(self, f: F, err: E) -> Result<T, E>;
+    fn some<T, E, F: FnOnce() -> T>(self, f: F, err: E) -> Result<T, E>;
+}
+
+impl Then for bool {
+    #[inline]
+    fn and<T, E, F: FnOnce() -> Result<T, E>>(self, f: F, err: E) -> Result<T, E> {
+        if self {
+            f()
+        } else {
+            Err(err)
+        }
+    }
+
+    fn some<T, E, F: FnOnce() -> T>(self, f: F, err: E) -> Result<T, E> {
+        if self {
+            Ok(f())
+        } else {
+            Err(err)
+        }
+    }
+}
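A minimal sketch of how the Then helper might be used at a call site (the function below is hypothetical, not part of this change):

    // `some` wraps the closure's value in Ok(..) when the bool is true,
    // otherwise it returns Err(err); `and` does the same for a closure
    // that already yields a Result.
    fn checked_offset(addr: usize, aligned: bool) -> Result<usize, &'static str> {
        aligned.some(|| addr + 0x10, "address is not aligned")
    }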

View File

@@ -13,10 +13,10 @@ pub fn align_down(addr: usize, align: usize) -> usize {
     addr & !(align - 1)
 }

-#[derive(Copy, Clone, Default, PartialOrd, PartialEq)]
+#[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)]
 pub struct PhysAddr(pub usize);

-#[derive(Copy, Clone, Default, PartialOrd, PartialEq)]
+#[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)]
 pub struct VirtAddr(pub usize);

 pub trait AddressOps {
@@ -227,6 +227,12 @@ impl<T> From<*mut T> for PhysAddr {
     }
 }

+impl<T> From<*const T> for PhysAddr {
+    fn from(addr: *const T) -> Self {
+        PhysAddr(addr as usize)
+    }
+}
+
 impl From<PhysAddr> for usize {
     fn from(addr: PhysAddr) -> Self {
         addr.0
@@ -263,6 +269,12 @@ impl<T> From<*mut T> for VirtAddr {
     }
 }

+impl<T> From<*const T> for VirtAddr {
+    fn from(addr: *const T) -> Self {
+        VirtAddr(addr as usize)
+    }
+}
+
 impl From<VirtAddr> for usize {
     fn from(addr: VirtAddr) -> Self {
         addr.0
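A quick sketch of what the added From<*const T> conversions allow at a call site (the local variable is invented for illustration):

    // A *const pointer now converts directly, mirroring the existing *mut impls.
    let value: u64 = 0;
    let va: VirtAddr = (&value as *const u64).into();
    assert!(va == VirtAddr(&value as *const u64 as usize));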

View File

@@ -0,0 +1,211 @@
+use crate::vspace::addr::{AddressOps, PhysAddr};
+use core::alloc::Layout;
+use core::cmp::min;
+
+#[derive(Copy, Clone, Debug)]
+struct Block {
+    start: PhysAddr,
+    size: usize,
+}
+
+impl Block {
+    fn start_addr(&self) -> PhysAddr {
+        self.start
+    }
+
+    fn end_addr(&self) -> PhysAddr {
+        self.start + self.size
+    }
+
+    fn could_merge(&self, start: PhysAddr, size: usize) -> bool {
+        self.start_addr() == start + size || self.end_addr() == start
+    }
+
+    fn merge(&mut self, start: PhysAddr, size: usize) -> bool {
+        if self.could_merge(start, size) {
+            self.start = min(self.start, start);
+            self.size += size;
+            true
+        } else {
+            false
+        }
+    }
+
+    fn could_fit(&self, layout: Layout) -> bool {
+        self.start_addr().align_up(layout.align()) + layout.size() <= self.end_addr()
+    }
+}
+
+#[derive(Debug)]
+pub struct RamBlock<const N: usize> {
+    blocks: [Option<Block>; N],
+}
+
+impl<const N: usize> RamBlock<N> {
+    pub const fn new() -> Self {
+        Self { blocks: [None; N] }
+    }
+
+    fn insert(&mut self, start: PhysAddr, size: usize) -> Result<&mut Option<Block>, ()> {
+        if size == 0 {
+            return Err(());
+        }
+        for block in self.blocks.iter_mut() {
+            if block.is_none() {
+                *block = Some(Block { start, size });
+                return Ok(block);
+            }
+        }
+        Err(())
+    }
+
+    pub fn dealloc(&mut self, start: PhysAddr, size: usize) {
+        /* NOTE: blocks will not be fully merged, for example:
+         * we have [(0-10), (15-20)], if we dealloc (10, 15),
+         * we will have [(0-15), (15-20)] instead of [(0-20)]
+         */
+        // check whether we could combine with the previous block
+        for block in self.blocks.iter_mut().flatten() {
+            if block.merge(start, size) {
+                return;
+            }
+        }
+        // insert into a new slot
+        self.insert(start, size).expect("No free slot");
+    }
+
+    pub fn alloc(&mut self, layout: Layout) -> Option<PhysAddr> {
+        let victim = self
+            .blocks
+            .iter_mut()
+            .find(|block| block.is_some_and(|b| b.could_fit(layout)));
+        victim.as_ref()?;
+
+        let victim = victim.unwrap();
+        if let Some(block) = victim.take() {
+            let start = block.start_addr().align_up(layout.align());
+            let end = start + layout.size();
+
+            if block.end_addr() > end {
+                *victim = Some(Block {
+                    start: end,
+                    size: (block.end_addr() - end).as_usize(),
+                })
+            }
+
+            if block.start_addr() < start {
+                let _ = self.insert(block.start_addr(), (start - block.start_addr()).as_usize());
+            }
+
+            return Some(start);
+        }
+        None
+    }
+
+    pub fn reserve(&mut self, start: PhysAddr, size: usize) {
+        // NOTE: only support inclusive range
+        let victim = self
+            .blocks
+            .iter_mut()
+            .find(|block| block.is_some_and(|b| b.start_addr() <= start && start < b.end_addr()));
+        if victim.is_none() {
+            return;
+        }
+
+        let victim = victim.unwrap();
+        if let Some(block) = victim.take() {
+            let region_start = block.start_addr();
+            let region_end = block.end_addr();
+            let before_size = (start - region_start).as_usize();
+            let after_size = (region_end - (start + size)).as_usize();
+
+            // insert larger block first, then before block, then after block
+            if after_size > before_size {
+                // we could safely assert that before_size > 0 here
+                let _ = self.insert(start + size, after_size);
+                let _ = self.insert(region_start, before_size);
+            } else {
+                let _ = self.insert(region_start, before_size);
+                let _ = self.insert(start + size, after_size);
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test_case]
+    fn test_block() {
+        let mut blk = RamBlock::<4>::new();
+        blk.dealloc(PhysAddr(0), 100);
+
+        let ptr = blk.alloc(Layout::from_size_align(5, 8).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0)));
+        let ptr = blk.alloc(Layout::from_size_align(5, 8).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(8)));
+        let ptr = blk.alloc(Layout::from_size_align(5, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0xd)));
+        let ptr = blk.alloc(Layout::from_size_align(5, 0x20).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x20)));
+        let ptr = blk.alloc(Layout::from_size_align(5, 0x40).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x40)));
+        let ptr = blk.alloc(Layout::from_size_align(5, 0x80).unwrap());
+        assert_eq!(ptr, None);
+
+        blk.dealloc(PhysAddr(0), 5);
+        blk.dealloc(PhysAddr(32), 4);
+
+        let ptr = blk.alloc(Layout::from_size_align(31, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x45)));
+        let ptr = blk.alloc(Layout::from_size_align(8, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0)));
+        let ptr = blk.alloc(Layout::from_size_align(18, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x12)));
+        let ptr = blk.alloc(Layout::from_size_align(27, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x25)));
+        let ptr = blk.alloc(Layout::from_size_align(1, 1).unwrap());
+        assert_eq!(ptr, None);
+    }
+
+    #[test_case]
+    fn test_block_reserve() {
+        let mut blk = RamBlock::<4>::new();
+        blk.dealloc(PhysAddr(0), 100);
+
+        blk.reserve(PhysAddr(0), 10);
+        let ptr = blk.alloc(Layout::from_size_align(10, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(10)));
+
+        blk.reserve(PhysAddr(90), 10);
+        let ptr = blk.alloc(Layout::from_size_align(70, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0x14)));
+
+        blk.dealloc(PhysAddr(0), 30);
+        blk.reserve(PhysAddr(10), 10);
+        let ptr = blk.alloc(Layout::from_size_align(10, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(0)));
+        let ptr = blk.alloc(Layout::from_size_align(10, 1).unwrap());
+        assert_eq!(ptr, Some(PhysAddr(20)));
+        let ptr = blk.alloc(Layout::from_size_align(1, 1).unwrap());
+        assert_eq!(ptr, None);
+    }
+}
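A rough sketch of how RamBlock might be seeded from a boot memory map; the region addresses and sizes below are invented for illustration, and only the types shown above are assumed:

    // Hypothetical early-boot setup, not code from this change.
    let mut ram = RamBlock::<16>::new();
    // make one physical region available
    ram.dealloc(PhysAddr(0x8000_0000), 128 * 1024 * 1024);
    // carve out a (made-up) kernel image range so it is never handed out
    ram.reserve(PhysAddr(0x8020_0000), 2 * 1024 * 1024);
    // request one 4 KiB-aligned page
    let page = ram.alloc(Layout::from_size_align(4096, 4096).unwrap());
    assert!(page.is_some());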

View File

@@ -3,6 +3,8 @@
 use crate::utils::then::Then;
 use crate::vspace::addr::{AddressOps, PhysAddr};
 use core::alloc::{GlobalAlloc, Layout};
+use core::cmp::{max, min};
+use core::fmt::Debug;
 use spin::Mutex;

 struct ListNode {
@@ -42,12 +44,21 @@ impl ListNode {
     }
 }

-struct FreeList {
+impl Debug for ListNode {
+    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
+        f.debug_struct("ListNode")
+            .field("begin", &self.start_addr())
+            .field("end", &self.end_addr())
+            .finish()
+    }
+}
+
+pub struct FreeList {
     head: ListNode,
 }

 impl FreeList {
-    const fn new() -> Self {
+    pub const fn new() -> Self {
         Self {
             head: ListNode::new(0),
         }
@@ -78,7 +89,7 @@ impl FreeList {
         (size, layout.align())
     }

-    unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 {
+    pub unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 {
         let (size, align) = Self::align_layout(layout);

         if let Some((region, alloc_start)) = self.alloc_node(|region| region.fit(size, align)) {
@@ -93,7 +104,7 @@ impl FreeList {
         }
     }

-    unsafe fn dealloc(&mut self, start: PhysAddr, size: usize) {
+    pub unsafe fn dealloc(&mut self, start: PhysAddr, size: usize) {
         assert_eq!(start.align_up(core::mem::align_of::<ListNode>()), start);
         assert!(size >= core::mem::size_of::<ListNode>());

@@ -105,33 +116,37 @@ impl FreeList {
     }

     pub fn reserve(&mut self, start: PhysAddr, size: usize) {
-        if let Some((region, _)) = self
-            .alloc_node(|region| (region.start_addr() <= start).chain(|| region.fit(size, 1), ()))
-        {
-            /* layout
-             * region: | before | [start +: size] | after |
-             *         ^        ^                 ^       ^ region.end_addr()
-             *         |        | alloc_start     |
-             *         |                          | alloc_end
-             *         | region.start_addr()
-             */
+        // NOTE: only support inclusive range
+        if let Some((region, _)) = self.alloc_node(|region| {
+            (region.start_addr() <= start && start < region.end_addr()).some(|| (), ())
+        }) {
             let region_start = region.start_addr();
             let region_end = region.end_addr();

-            let before_size = (start - region_start).as_usize();
-            if before_size > 0 {
-                unsafe { self.dealloc(region_start, before_size) }
-            }
-
             let after_size = (region_end - (start + size)).as_usize();
             if after_size > 0 {
                 unsafe { self.dealloc(start + size, after_size) }
             }
+
+            let before_size = (start - region_start).as_usize();
+            if before_size > 0 {
+                unsafe { self.dealloc(region_start, before_size) }
+            }
         }
     }
 }
+
+impl Debug for FreeList {
+    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
+        let mut current = &self.head;
+        while let Some(ref region) = current.next {
+            write!(f, "{:?} -> ", region)?;
+            current = region;
+        }
+        write!(f, "None")
+    }
+}

 pub struct FreeListAllocator {
     list: Mutex<FreeList>,
 }
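For context, a sketch of how the now-public FreeList methods and the new Debug impl might be exercised; the address and sizes are invented, and placing list nodes there is only sound under assumptions this sketch does not establish:

    // Hypothetical driver code, not from this change.
    let mut list = FreeList::new();
    unsafe { list.dealloc(PhysAddr(0x8100_0000), 64 * 1024) }; // donate a region
    list.reserve(PhysAddr(0x8100_1000), 4096);                 // punch a hole
    let ptr = unsafe { list.alloc(Layout::from_size_align(4096, 4096).unwrap()) };
    // The Debug impl walks the list, printing something like:
    // ListNode { begin: .., end: .. } -> ListNode { begin: .., end: .. } -> None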

View File

@@ -1,5 +1,7 @@
 mod bitmap;
+mod block;
 mod freelist;

 pub use bitmap::*;
+pub use block::*;
 pub use freelist::*;

View File

@@ -1,7 +1,7 @@
 mod entry;
 mod table;

-pub use crate::arch::vspace::*;
+pub use crate::arch::vspace::{Entry, Table};

 pub use entry::*;
 pub use table::*;