// page_table_entry/arch/aarch64.rs

use core::fmt;

use aarch64_cpu::registers::MAIR_EL1;
use memory_addr::PhysAddr;

use crate::{GenericPTE, MappingFlags};

bitflags::bitflags! {
    /// Memory attribute fields in the VMSAv8-64 translation table format descriptors.
    #[derive(Debug)]
    pub struct DescriptorAttr: u64 {
        // Attribute fields in stage 1 VMSAv8-64 Block and Page descriptors:

        /// Whether the descriptor is valid.
        const VALID =       1 << 0;
        /// The descriptor gives the address of the next level of translation
        /// table or a 4K page (not a block).
        const NON_BLOCK =   1 << 1;
        /// Memory attributes index field.
        const ATTR_INDX =   0b111 << 2;
        /// Non-secure bit.
        const NS =          1 << 5;
        /// Access permission: accessible from EL0.
        const AP_EL0 =      1 << 6;
        /// Access permission: read-only.
        const AP_RO =       1 << 7;
        /// Shareability: Inner Shareable (otherwise Outer Shareable).
        const INNER =       1 << 8;
        /// Shareability: Inner or Outer Shareable (otherwise Non-shareable).
        const SHAREABLE =   1 << 9;
        /// The Access flag.
        const AF =          1 << 10;
        /// The not global bit.
        const NG =          1 << 11;
        /// Part of a contiguous run of entries (TLB hint).
        const CONTIGUOUS =  1 << 52;
        /// The Privileged execute-never field.
        const PXN =         1 << 53;
        /// The Execute-never or Unprivileged execute-never field.
        const UXN =         1 << 54;

        // Next-level attributes in stage 1 VMSAv8-64 Table descriptors:

        /// PXN limit for subsequent levels of lookup.
        const PXN_TABLE =           1 << 59;
        /// XN limit for subsequent levels of lookup.
        const XN_TABLE =            1 << 60;
        /// Access permissions limit: no EL0 access at subsequent levels.
        const AP_NO_EL0_TABLE =     1 << 61;
        /// Access permissions limit: no write access at subsequent levels.
        const AP_NO_WRITE_TABLE =   1 << 62;
        /// Security state for subsequent levels of lookup.
        const NS_TABLE =            1 << 63;
    }
}

/// The memory attribute index field in the descriptors, used to index into
/// the MAIR_ELx register.
#[repr(u64)]
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum MemAttr {
    /// Device-nGnRE memory.
    Device = 0,
    /// Normal memory (write-back cacheable).
    Normal = 1,
    /// Normal memory (non-cacheable).
    NormalNonCacheable = 2,
}

impl DescriptorAttr {
    #[allow(clippy::unusual_byte_groupings)]
    const ATTR_INDEX_MASK: u64 = 0b111_00;

    /// Constructs a descriptor attribute from the memory attribute index,
    /// marking normal memory as inner shareable.
    pub const fn from_mem_attr(idx: MemAttr) -> Self {
        let mut bits = (idx as u64) << 2;
        if matches!(idx, MemAttr::Normal | MemAttr::NormalNonCacheable) {
            bits |= Self::INNER.bits() | Self::SHAREABLE.bits();
        }
        Self::from_bits_retain(bits)
    }

    /// Returns the memory attribute index of the descriptor, or `None` if the
    /// index is unrecognized.
    pub const fn mem_attr(&self) -> Option<MemAttr> {
        let idx = (self.bits() & Self::ATTR_INDEX_MASK) >> 2;
        Some(match idx {
            0 => MemAttr::Device,
            1 => MemAttr::Normal,
            2 => MemAttr::NormalNonCacheable,
            _ => return None,
        })
    }
}

impl MemAttr {
    /// The MAIR_ELx register should be set to this value to match the memory
    /// attribute indices used in the descriptors.
    pub const MAIR_VALUE: u64 = {
        // Attr0: Device-nGnRE memory.
        let attr0 = MAIR_EL1::Attr0_Device::nonGathering_nonReordering_EarlyWriteAck.value;
        // Attr1: Normal memory, inner/outer write-back, read/write-allocate.
        let attr1 = MAIR_EL1::Attr1_Normal_Inner::WriteBack_NonTransient_ReadWriteAlloc.value
            | MAIR_EL1::Attr1_Normal_Outer::WriteBack_NonTransient_ReadWriteAlloc.value;
        // Attr2: Normal memory, inner/outer non-cacheable.
        let attr2 = MAIR_EL1::Attr2_Normal_Inner::NonCacheable.value
            | MAIR_EL1::Attr2_Normal_Outer::NonCacheable.value;
        attr0 | attr1 | attr2
    };
}

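// A minimal sketch of how `MAIR_VALUE` might be installed during early boot
// (hypothetical, not part of this crate; assumes the `Writeable` trait from
// the `tock-registers` crate, which `aarch64_cpu` builds on, is available):
//
//     use aarch64_cpu::registers::MAIR_EL1;
//     use tock_registers::interfaces::Writeable;
//
//     fn init_mair() {
//         // Make AttrIndx 0/1/2 in the descriptors resolve to Device-nGnRE,
//         // write-back Normal, and non-cacheable Normal memory.
//         MAIR_EL1.set(MemAttr::MAIR_VALUE);
//     }
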
impl From<DescriptorAttr> for MappingFlags {
    fn from(attr: DescriptorAttr) -> Self {
        if !attr.contains(DescriptorAttr::VALID) {
            return Self::empty();
        }
        let mut flags = Self::READ;
        if !attr.contains(DescriptorAttr::AP_RO) {
            flags |= Self::WRITE;
        }
        #[cfg(not(feature = "arm-el2"))]
        {
            if attr.contains(DescriptorAttr::AP_EL0) {
                flags |= Self::USER;
                if !attr.contains(DescriptorAttr::UXN) {
                    flags |= Self::EXECUTE;
                }
            } else if !attr.intersects(DescriptorAttr::PXN) {
                flags |= Self::EXECUTE;
            }
        }
        #[cfg(feature = "arm-el2")]
        {
            // The EL2 stage-1 translation regime has a single privilege
            // level, so only the XN bit (bit 54) controls execution.
            if !attr.intersects(DescriptorAttr::UXN) {
                flags |= Self::EXECUTE;
            }
        }
        match attr.mem_attr() {
            Some(MemAttr::Device) => flags |= Self::DEVICE,
            Some(MemAttr::NormalNonCacheable) => flags |= Self::UNCACHED,
            _ => {}
        }
        flags
    }
}

impl From<MappingFlags> for DescriptorAttr {
    fn from(flags: MappingFlags) -> Self {
        if flags.is_empty() {
            return Self::empty();
        }
        let mut attr = if flags.contains(MappingFlags::DEVICE) {
            Self::from_mem_attr(MemAttr::Device)
        } else if flags.contains(MappingFlags::UNCACHED) {
            Self::from_mem_attr(MemAttr::NormalNonCacheable)
        } else {
            Self::from_mem_attr(MemAttr::Normal)
        };
        if flags.contains(MappingFlags::READ) {
            attr |= Self::VALID;
        }
        if !flags.contains(MappingFlags::WRITE) {
            attr |= Self::AP_RO;
        }
        #[cfg(not(feature = "arm-el2"))]
        {
            if flags.contains(MappingFlags::USER) {
                // User pages are never executable in privileged mode (PXN).
                attr |= Self::AP_EL0 | Self::PXN;
                if !flags.contains(MappingFlags::EXECUTE) {
                    attr |= Self::UXN;
                }
            } else {
                // Kernel pages are never executable in user mode (UXN).
                attr |= Self::UXN;
                if !flags.contains(MappingFlags::EXECUTE) {
                    attr |= Self::PXN;
                }
            }
        }
        #[cfg(feature = "arm-el2")]
        {
            if !flags.contains(MappingFlags::EXECUTE) {
                attr |= Self::UXN;
            }
        }
        attr
    }
}

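// For example (a sketch, not code from this crate): under the default
// (non-`arm-el2`) configuration, a read/write kernel mapping of normal
// memory becomes a valid, writable descriptor with both execute-never bits:
//
//     let attr = DescriptorAttr::from(MappingFlags::READ | MappingFlags::WRITE);
//     assert!(attr.contains(DescriptorAttr::VALID | DescriptorAttr::UXN | DescriptorAttr::PXN));
//     assert!(!attr.contains(DescriptorAttr::AP_RO));
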
/// A VMSAv8-64 translation table descriptor.
///
/// The `AttrIndx[2:0]` field (bits [4:2]) selects an entry in the MAIR_ELx
/// register, so the system must program MAIR_ELx to match [`MemAttr`]
/// (e.g., with [`MemAttr::MAIR_VALUE`]).
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct A64PTE(u64);

impl A64PTE {
    /// Bits [47:12] hold the output address (4 KiB granule).
    const PHYS_ADDR_MASK: u64 = 0x0000_ffff_ffff_f000;

    /// Creates an empty descriptor with all bits set to zero.
    pub const fn empty() -> Self {
        Self(0)
    }
}

impl GenericPTE for A64PTE {
    fn new_page(paddr: PhysAddr, flags: MappingFlags, is_huge: bool) -> Self {
        // Set the Access flag up front to avoid access faults on first use.
        let mut attr = DescriptorAttr::from(flags) | DescriptorAttr::AF;
        if !is_huge {
            attr |= DescriptorAttr::NON_BLOCK;
        }
        Self(attr.bits() | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK))
    }

    fn new_table(paddr: PhysAddr) -> Self {
        let attr = DescriptorAttr::NON_BLOCK | DescriptorAttr::VALID;
        Self(attr.bits() | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK))
    }

    fn paddr(&self) -> PhysAddr {
        PhysAddr::from((self.0 & Self::PHYS_ADDR_MASK) as usize)
    }

    fn flags(&self) -> MappingFlags {
        DescriptorAttr::from_bits_truncate(self.0).into()
    }

    fn set_paddr(&mut self, paddr: PhysAddr) {
        self.0 = (self.0 & !Self::PHYS_ADDR_MASK) | (paddr.as_usize() as u64 & Self::PHYS_ADDR_MASK)
    }

    fn set_flags(&mut self, flags: MappingFlags, is_huge: bool) {
        // Keep the output address, replace all attribute bits.
        let mut attr = DescriptorAttr::from(flags) | DescriptorAttr::AF;
        if !is_huge {
            attr |= DescriptorAttr::NON_BLOCK;
        }
        self.0 = (self.0 & Self::PHYS_ADDR_MASK) | attr.bits();
    }

    fn bits(self) -> usize {
        self.0 as usize
    }

    fn is_unused(&self) -> bool {
        self.0 == 0
    }

    fn is_present(&self) -> bool {
        DescriptorAttr::from_bits_truncate(self.0).contains(DescriptorAttr::VALID)
    }

    fn is_huge(&self) -> bool {
        !DescriptorAttr::from_bits_truncate(self.0).contains(DescriptorAttr::NON_BLOCK)
    }

    fn clear(&mut self) {
        self.0 = 0
    }
}

impl fmt::Debug for A64PTE {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_struct("A64PTE");
        f.field("raw", &self.0)
            .field("paddr", &self.paddr())
            .field("attr", &DescriptorAttr::from_bits_truncate(self.0))
            .field("flags", &self.flags())
            .finish()
    }
}
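
// A hypothetical smoke test (not part of the original crate) exercising the
// feature-independent parts of the conversions and the descriptor layout.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn mem_attr_round_trip() {
        // The AttrIndx field written by `from_mem_attr` must be read back
        // unchanged by `mem_attr`.
        for idx in [MemAttr::Device, MemAttr::Normal, MemAttr::NormalNonCacheable] {
            assert_eq!(DescriptorAttr::from_mem_attr(idx).mem_attr(), Some(idx));
        }
    }

    #[test]
    fn page_descriptor_layout() {
        let paddr = PhysAddr::from(0x1234_5000);
        let pte = A64PTE::new_page(paddr, MappingFlags::READ | MappingFlags::WRITE, false);
        assert!(pte.is_present());
        assert!(!pte.is_huge()); // NON_BLOCK is set for 4K pages
        assert_eq!(pte.paddr(), paddr);

        // A read-only mapping must set AP_RO.
        let ro = A64PTE::new_page(paddr, MappingFlags::READ, false);
        assert!(DescriptorAttr::from_bits_truncate(ro.bits() as u64)
            .contains(DescriptorAttr::AP_RO));
    }
}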