use allocator::AllocError;
use axerrno::{AxError, AxResult};
use memory_addr::{PhysAddr, VirtAddr};

use crate::{PAGE_SIZE, global_allocator};

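/// A RAII wrapper of contiguous pages allocated from the global allocator.
///
/// The pages are automatically deallocated when this wrapper is dropped.
///
/// A minimal usage sketch (assuming the global allocator has already been
/// initialized and the caller returns [`AxResult`], so `?` is available);
/// marked `ignore` because it cannot run as a standalone doctest:
///
/// ```ignore
/// let mut page = GlobalPage::alloc_zero()?;
/// assert_eq!(page.size(), PAGE_SIZE);
/// page.as_slice_mut()[0] = 0xff;
/// assert_eq!(page.as_slice()[0], 0xff);
/// ```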
#[derive(Debug)]
pub struct GlobalPage {
    start_vaddr: VirtAddr,
    num_pages: usize,
}

impl GlobalPage {
    /// Allocate a single page (`PAGE_SIZE` bytes).
    pub fn alloc() -> AxResult<Self> {
        global_allocator()
            .alloc_pages(1, PAGE_SIZE)
            .map(|vaddr| Self {
                start_vaddr: vaddr.into(),
                num_pages: 1,
            })
            .map_err(alloc_err_to_ax_err)
    }

    /// Allocate a single page and fill it with zeros.
    pub fn alloc_zero() -> AxResult<Self> {
        let mut p = Self::alloc()?;
        p.zero();
        Ok(p)
    }

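    /// Allocate `num_pages` contiguous pages, aligned to `align_pow2` bytes.
    ///
    /// A sketch of requesting a 4-page block aligned to `PAGE_SIZE` (an
    /// illustrative choice; any supported power-of-two alignment works):
    ///
    /// ```ignore
    /// let pages = GlobalPage::alloc_contiguous(4, PAGE_SIZE)?;
    /// assert_eq!(pages.size(), 4 * PAGE_SIZE);
    /// ```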
    pub fn alloc_contiguous(num_pages: usize, align_pow2: usize) -> AxResult<Self> {
        global_allocator()
            .alloc_pages(num_pages, align_pow2)
            .map(|vaddr| Self {
                start_vaddr: vaddr.into(),
                num_pages,
            })
            .map_err(alloc_err_to_ax_err)
    }

    /// Get the start virtual address of the allocated pages.
    pub fn start_vaddr(&self) -> VirtAddr {
        self.start_vaddr
    }

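    /// Get the start physical address, computed by the caller-supplied
    /// `virt_to_phys` conversion function.
    ///
    /// A sketch using a hypothetical linear-mapping offset `PHYS_VIRT_OFFSET`
    /// (the real conversion is platform-specific):
    ///
    /// ```ignore
    /// let paddr = page.start_paddr(|va| PhysAddr::from(va.as_usize() - PHYS_VIRT_OFFSET));
    /// ```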
    pub fn start_paddr<F>(&self, virt_to_phys: F) -> PhysAddr
    where
        F: FnOnce(VirtAddr) -> PhysAddr,
    {
        virt_to_phys(self.start_vaddr)
    }

    /// Get the total size in bytes (`num_pages * PAGE_SIZE`).
    pub fn size(&self) -> usize {
        self.num_pages * PAGE_SIZE
    }

    /// Get a raw pointer to the start of the pages.
    pub fn as_ptr(&self) -> *const u8 {
        self.start_vaddr.as_ptr()
    }

    /// Get a mutable raw pointer to the start of the pages.
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.start_vaddr.as_mut_ptr()
    }

    /// Fill the whole memory block with `byte`.
    pub fn fill(&mut self, byte: u8) {
        unsafe { core::ptr::write_bytes(self.as_mut_ptr(), byte, self.size()) }
    }

    /// Fill the whole memory block with zeros.
    pub fn zero(&mut self) {
        self.fill(0)
    }

    /// View the pages as a byte slice.
    pub fn as_slice(&self) -> &[u8] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.size()) }
    }

    /// View the pages as a mutable byte slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.size()) }
    }
}

impl Drop for GlobalPage {
    fn drop(&mut self) {
        global_allocator().dealloc_pages(self.start_vaddr.into(), self.num_pages);
    }
}

/// Converts an [`AllocError`] into the corresponding [`AxError`].
const fn alloc_err_to_ax_err(e: AllocError) -> AxError {
    match e {
        AllocError::InvalidParam | AllocError::MemoryOverlap | AllocError::NotAllocated => {
            AxError::InvalidInput
        }
        AllocError::NoMemory => AxError::NoMemory,
    }
}