page_table_entry/arch/aarch64.rs

use aarch64_cpu::registers::MAIR_EL1;
use core::fmt;
use memory_addr::PhysAddr;

use crate::{GenericPTE, MappingFlags};
bitflags::bitflags! {
    /// Memory attribute fields in the VMSAv8-64 translation table format descriptors.
    #[derive(Debug)]
    pub struct DescriptorAttr: u64 {
        // Attribute fields in stage 1 VMSAv8-64 block and page descriptors:
        /// Whether the descriptor is valid.
        const VALID =       1 << 0;
        /// The descriptor gives the address of the next level of translation table
        /// or 4KB page (not a 2MB or 1GB block).
        const NON_BLOCK =   1 << 1;
        /// Memory attributes index field.
        const ATTR_INDX =   0b111 << 2;
        /// Non-secure bit.
        const NS =          1 << 5;
        /// Access permission: accessible at EL0.
        const AP_EL0 =      1 << 6;
        /// Access permission: read-only.
        const AP_RO =       1 << 7;
        /// Shareability: Inner Shareable (otherwise Outer Shareable).
        const INNER =       1 << 8;
        /// Shareability: Inner or Outer Shareable (otherwise Non-shareable).
        const SHAREABLE =   1 << 9;
        /// The Access flag.
        const AF =          1 << 10;
        /// The not-global bit.
        const NG =          1 << 11;
        /// Hints that 16 adjacent entries point to contiguous memory regions.
        const CONTIGUOUS =  1 << 52;
        /// The Privileged execute-never field.
        const PXN =         1 << 53;
        /// The Execute-never or Unprivileged execute-never field.
        const UXN =         1 << 54;

        // Next-level attributes in stage 1 VMSAv8-64 table descriptors:
        /// PXN limit for subsequent levels of lookup.
        const PXN_TABLE =           1 << 59;
        /// XN limit for subsequent levels of lookup.
        const XN_TABLE =            1 << 60;
        /// Access permission limit for subsequent levels: no access at EL0.
        const AP_NO_EL0_TABLE =     1 << 61;
        /// Access permission limit for subsequent levels: no write access.
        const AP_NO_WRITE_TABLE =   1 << 62;
        /// Security state for subsequent levels of lookup (for accesses from Secure state).
        const NS_TABLE =            1 << 63;
    }
}

/// The memory attribute index field in the descriptors, which selects an entry
/// in the MAIR (Memory Attribute Indirection Register).
#[repr(u64)]
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum MemAttr {
    /// Device-nGnRE memory.
    Device = 0,
    /// Normal (write-back cacheable) memory.
    Normal = 1,
    /// Normal non-cacheable memory.
    NormalNonCacheable = 2,
}

impl DescriptorAttr {
    #[allow(clippy::unusual_byte_groupings)]
    const ATTR_INDEX_MASK: u64 = 0b111_00;

    /// Constructs a descriptor from the memory attribute index; normal memory
    /// additionally gets the Inner Shareable attributes.
    pub const fn from_mem_attr(idx: MemAttr) -> Self {
        let mut bits = (idx as u64) << 2;
        if matches!(idx, MemAttr::Normal | MemAttr::NormalNonCacheable) {
            bits |= Self::INNER.bits() | Self::SHAREABLE.bits();
        }
        Self::from_bits_retain(bits)
    }

    /// Returns the memory attribute index field, or `None` if the index is unknown.
    pub const fn mem_attr(&self) -> Option<MemAttr> {
        let idx = (self.bits() & Self::ATTR_INDEX_MASK) >> 2;
        Some(match idx {
            0 => MemAttr::Device,
            1 => MemAttr::Normal,
            2 => MemAttr::NormalNonCacheable,
            _ => return None,
        })
    }
}

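// Illustrative sketch (not part of the upstream file): `from_mem_attr` and
// `mem_attr` are inverses on the attribute-index bits, and normal memory also
// picks up the Inner Shareable attributes:
//
//     let attr = DescriptorAttr::from_mem_attr(MemAttr::Normal);
//     assert_eq!(attr.mem_attr(), Some(MemAttr::Normal));
//     assert!(attr.contains(DescriptorAttr::INNER | DescriptorAttr::SHAREABLE));
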
impl MemAttr {
    /// The `MAIR_ELx` register should be programmed with this value so that the
    /// attribute indices used in the descriptors resolve to the intended memory types.
    pub const MAIR_VALUE: u64 = {
        // Attr0: Device-nGnRE memory.
        let attr0 = MAIR_EL1::Attr0_Device::nonGathering_nonReordering_EarlyWriteAck.value;
        // Attr1: Normal memory, inner/outer write-back non-transient, read/write-allocate.
        let attr1 = MAIR_EL1::Attr1_Normal_Inner::WriteBack_NonTransient_ReadWriteAlloc.value
            | MAIR_EL1::Attr1_Normal_Outer::WriteBack_NonTransient_ReadWriteAlloc.value;
        // Attr2: Normal non-cacheable memory.
        let attr2 = MAIR_EL1::Attr2_Normal_Inner::NonCacheable.value
            | MAIR_EL1::Attr2_Normal_Outer::NonCacheable.value;
        attr0 | attr1 | attr2
    };
}

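// Hedged usage sketch: early MMU setup is expected to program `MAIR_EL1` with
// this value before enabling translation, e.g. (assuming the `Writeable` trait
// from `tock_registers` is in scope):
//
//     MAIR_EL1.set(MemAttr::MAIR_VALUE);
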
impl From<DescriptorAttr> for MappingFlags {
    fn from(attr: DescriptorAttr) -> Self {
        if !attr.contains(DescriptorAttr::VALID) {
            return Self::empty();
        }
        let mut flags = Self::READ;
        if !attr.contains(DescriptorAttr::AP_RO) {
            flags |= Self::WRITE;
        }
        #[cfg(not(feature = "arm-el2"))]
        {
            if attr.contains(DescriptorAttr::AP_EL0) {
                flags |= Self::USER;
                if !attr.contains(DescriptorAttr::UXN) {
                    flags |= Self::EXECUTE;
                }
            } else if !attr.contains(DescriptorAttr::PXN) {
                flags |= Self::EXECUTE;
            }
        }
        #[cfg(feature = "arm-el2")]
        {
            if !attr.contains(DescriptorAttr::UXN) {
                flags |= Self::EXECUTE;
            }
        }
        match attr.mem_attr() {
            Some(MemAttr::Device) => flags |= Self::DEVICE,
            Some(MemAttr::NormalNonCacheable) => flags |= Self::UNCACHED,
            _ => {}
        }
        flags
    }
}

impl From<MappingFlags> for DescriptorAttr {
    fn from(flags: MappingFlags) -> Self {
        if flags.is_empty() {
            return Self::empty();
        }
        let mut attr = if flags.contains(MappingFlags::DEVICE) {
            Self::from_mem_attr(MemAttr::Device)
        } else if flags.contains(MappingFlags::UNCACHED) {
            Self::from_mem_attr(MemAttr::NormalNonCacheable)
        } else {
            Self::from_mem_attr(MemAttr::Normal)
        };
        if flags.contains(MappingFlags::READ) {
            attr |= Self::VALID;
        }
        if !flags.contains(MappingFlags::WRITE) {
            attr |= Self::AP_RO;
        }
        #[cfg(not(feature = "arm-el2"))]
        {
            if flags.contains(MappingFlags::USER) {
                attr |= Self::AP_EL0 | Self::PXN;
                if !flags.contains(MappingFlags::EXECUTE) {
                    attr |= Self::UXN;
                }
            } else {
                attr |= Self::UXN;
                if !flags.contains(MappingFlags::EXECUTE) {
                    attr |= Self::PXN;
                }
            }
        }
        #[cfg(feature = "arm-el2")]
        {
            if !flags.contains(MappingFlags::EXECUTE) {
                attr |= Self::UXN;
            }
        }
        attr
    }
}

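// Illustrative sketch (not in the upstream file): the two `From` impls above
// round-trip generic flags through descriptor attributes. A read-only mapping,
// for example, becomes a valid descriptor with `AP_RO` set:
//
//     let attr = DescriptorAttr::from(MappingFlags::READ);
//     assert!(attr.contains(DescriptorAttr::VALID | DescriptorAttr::AP_RO));
//     assert_eq!(MappingFlags::from(attr), MappingFlags::READ);
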
/// A VMSAv8-64 translation table descriptor.
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct A64PTE(u64);

impl A64PTE {
    /// Physical address bits [47:12] within the descriptor.
    const PHYS_ADDR_MASK: u64 = 0x0000_ffff_ffff_f000;

    /// Creates an empty descriptor with all bits set to zero.
    pub const fn empty() -> Self {
        Self(0)
    }
}

impl GenericPTE for A64PTE {
    fn new_page(paddr: PhysAddr, flags: MappingFlags, is_huge: bool) -> Self {
        // Set the Access flag up front to avoid an Access flag fault on first use.
        let mut attr = DescriptorAttr::from(flags) | DescriptorAttr::AF;
        if !is_huge {
            attr |= DescriptorAttr::NON_BLOCK;
        }
        Self(attr.bits() | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK))
    }

    fn new_table(paddr: PhysAddr) -> Self {
        let attr = DescriptorAttr::NON_BLOCK | DescriptorAttr::VALID;
        Self(attr.bits() | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK))
    }

    fn paddr(&self) -> PhysAddr {
        PhysAddr::from((self.0 & Self::PHYS_ADDR_MASK) as usize)
    }

    fn flags(&self) -> MappingFlags {
        DescriptorAttr::from_bits_truncate(self.0).into()
    }

    fn set_paddr(&mut self, paddr: PhysAddr) {
        self.0 = (self.0 & !Self::PHYS_ADDR_MASK) | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK)
    }

    fn set_flags(&mut self, flags: MappingFlags, is_huge: bool) {
        let mut attr = DescriptorAttr::from(flags) | DescriptorAttr::AF;
        if !is_huge {
            attr |= DescriptorAttr::NON_BLOCK;
        }
        // Keep the address bits, replace all attribute bits.
        self.0 = (self.0 & Self::PHYS_ADDR_MASK) | attr.bits();
    }

    fn bits(self) -> usize {
        self.0 as usize
    }

    fn is_unused(&self) -> bool {
        self.0 == 0
    }

    fn is_present(&self) -> bool {
        DescriptorAttr::from_bits_truncate(self.0).contains(DescriptorAttr::VALID)
    }

    fn is_huge(&self) -> bool {
        !DescriptorAttr::from_bits_truncate(self.0).contains(DescriptorAttr::NON_BLOCK)
    }

    fn clear(&mut self) {
        self.0 = 0
    }
}

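// Hedged usage sketch: building a 4K page descriptor and reading it back; the
// physical address must be 4K-aligned, since the low bits are masked off:
//
//     let paddr = PhysAddr::from(0x4000_0000);
//     let pte = A64PTE::new_page(paddr, MappingFlags::READ | MappingFlags::WRITE, false);
//     assert!(pte.is_present() && !pte.is_huge());
//     assert_eq!(pte.paddr(), paddr);
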
impl fmt::Debug for A64PTE {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("A64PTE")
            .field("raw", &self.0)
            .field("paddr", &self.paddr())
            .field("attr", &DescriptorAttr::from_bits_truncate(self.0))
            .field("flags", &self.flags())
            .finish()
    }
}
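
// A minimal test sketch (not part of the upstream file), assuming tests are
// built for a target where this module is compiled and `MappingFlags`
// implements `Debug` and `PartialEq`:
#[cfg(test)]
mod tests {
    use super::*;
    use memory_addr::PhysAddr;

    #[test]
    fn flags_round_trip() {
        let flags = MappingFlags::READ | MappingFlags::WRITE;
        assert_eq!(MappingFlags::from(DescriptorAttr::from(flags)), flags);
    }

    #[test]
    fn pte_preserves_paddr_and_validity() {
        let paddr = PhysAddr::from(0x8000_1000);
        let pte = A64PTE::new_page(paddr, MappingFlags::READ, false);
        assert!(pte.is_present() && !pte.is_huge());
        assert_eq!(pte.paddr(), paddr);
    }
}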