axalloc/page.rs

use allocator::AllocError;
use axerrno::{AxError, AxResult};
use memory_addr::{PhysAddr, VirtAddr};

use crate::{PAGE_SIZE, global_allocator};

/// An RAII wrapper of contiguous 4K-sized pages.
///
/// It will automatically deallocate the pages when dropped.
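///
/// # Examples
///
/// A minimal usage sketch (it requires an initialized global allocator, so it
/// is not run as a doc test):
///
/// ```rust,ignore
/// let mut page = GlobalPage::alloc_zero()?;
/// page.as_slice_mut()[0] = 0xff;
/// assert_eq!(page.as_slice()[0], 0xff);
/// // The page is deallocated automatically when `page` goes out of scope.
/// ```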
#[derive(Debug)]
pub struct GlobalPage {
    start_vaddr: VirtAddr,
    num_pages: usize,
}

impl GlobalPage {
    /// Allocate one 4K-sized page.
    pub fn alloc() -> AxResult<Self> {
        global_allocator()
            .alloc_pages(1, PAGE_SIZE)
            .map(|vaddr| Self {
                start_vaddr: vaddr.into(),
                num_pages: 1,
            })
            .map_err(alloc_err_to_ax_err)
    }

    /// Allocate one 4K-sized page filled with zeros.
    pub fn alloc_zero() -> AxResult<Self> {
        let mut p = Self::alloc()?;
        p.zero();
        Ok(p)
    }

    /// Allocate `num_pages` contiguous 4K-sized pages, aligned to `align_pow2`
    /// bytes (expected to be a power of two).
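    ///
    /// # Examples
    ///
    /// A minimal sketch (assumes an initialized global allocator, so it is
    /// not run as a doc test):
    ///
    /// ```rust,ignore
    /// // Allocate 4 contiguous pages (16 KiB), aligned on a 16 KiB boundary.
    /// let pages = GlobalPage::alloc_contiguous(4, 4 * PAGE_SIZE)?;
    /// assert_eq!(pages.size(), 4 * PAGE_SIZE);
    /// ```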
    pub fn alloc_contiguous(num_pages: usize, align_pow2: usize) -> AxResult<Self> {
        global_allocator()
            .alloc_pages(num_pages, align_pow2)
            .map(|vaddr| Self {
                start_vaddr: vaddr.into(),
                num_pages,
            })
            .map_err(alloc_err_to_ax_err)
    }

    /// Get the start virtual address of this page range.
    pub fn start_vaddr(&self) -> VirtAddr {
        self.start_vaddr
    }

    /// Get the start physical address of this page range, computed by the
    /// given `virt_to_phys` conversion function.
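    ///
    /// # Examples
    ///
    /// A minimal sketch; the conversion function is assumed to be provided by
    /// the platform layer (e.g. `axhal::mem::virt_to_phys`):
    ///
    /// ```rust,ignore
    /// let paddr = page.start_paddr(axhal::mem::virt_to_phys);
    /// ```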
    pub fn start_paddr<F>(&self, virt_to_phys: F) -> PhysAddr
    where
        F: FnOnce(VirtAddr) -> PhysAddr,
    {
        virt_to_phys(self.start_vaddr)
    }

    /// Get the total size (in bytes) of the allocated pages.
    pub fn size(&self) -> usize {
        self.num_pages * PAGE_SIZE
    }

    /// Convert to a raw pointer.
    pub fn as_ptr(&self) -> *const u8 {
        self.start_vaddr.as_ptr()
    }

    /// Convert to a mutable raw pointer.
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.start_vaddr.as_mut_ptr()
    }

    /// Fill `self` with `byte`.
    pub fn fill(&mut self, byte: u8) {
        // SAFETY: `self` owns `size()` bytes starting at `as_mut_ptr()`.
        unsafe { core::ptr::write_bytes(self.as_mut_ptr(), byte, self.size()) }
    }

    /// Fill `self` with zeros.
    pub fn zero(&mut self) {
        self.fill(0)
    }

    /// Forms an immutable byte slice covering the allocated pages.
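    ///
    /// # Examples
    ///
    /// A sketch of reading the pages through the slice (assumes an
    /// initialized global allocator):
    ///
    /// ```rust,ignore
    /// let page = GlobalPage::alloc_zero()?;
    /// assert!(page.as_slice().iter().all(|&b| b == 0));
    /// ```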
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: the pages are owned by `self` and valid for `size()` bytes.
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.size()) }
    }

    /// Forms a mutable byte slice covering the allocated pages.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: the pages are owned by `self` and valid for `size()` bytes,
        // and the exclusive borrow of `self` prevents aliasing.
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.size()) }
    }
}

impl Drop for GlobalPage {
    fn drop(&mut self) {
        global_allocator().dealloc_pages(self.start_vaddr.into(), self.num_pages);
    }
}

const fn alloc_err_to_ax_err(e: AllocError) -> AxError {
    match e {
        AllocError::InvalidParam | AllocError::MemoryOverlap | AllocError::NotAllocated => {
            AxError::InvalidInput
        }
        AllocError::NoMemory => AxError::NoMemory,
    }
}