// kernel_api/memory/virtual.rs

1#![unstable(feature = "kernel_virtual_memory", issue = "none")]
2
3use core::mem::ManuallyDrop;
4use core::num::NonZero;
5use core::ops::DerefMut;
6use core::ptr;
7use auto_impl::auto_impl;
8use log::debug;
9use crate::bridge::paging::MapPageError;
10use crate::memory::{Frame, Page};
11use crate::memory::mapping::{Location, Protection};
12use crate::memory::r#virtual::address_space::{AddressSpace, AddressSpaceInner, Weak};
13use crate::sync::RwSpinlock;
14use super::AllocError;
15
/// An allocator of virtual address space, handing out ranges of [`Page`]s.
///
/// `auto_impl` also derives this for `&T`, `Box<T>` and `Arc<T>`, so the
/// global allocator can be stored as a `&'static dyn VirtualAllocator`.
#[auto_impl(&, Box, Arc)]
pub trait VirtualAllocator: Send + Sync {
	/// Allocates `len` contiguous pages anywhere, returning the base page.
	fn allocate_contiguous(&self, len: usize) -> Result<Page, AllocError>;
	/// Allocates `len` contiguous pages starting exactly at `at`.
	fn allocate_contiguous_at(&self, at: Page, len: usize) -> Result<Page, AllocError>;
	/// Returns `len` pages starting at `base` back to the allocator.
	fn deallocate_contiguous(&self, base: Page, len: usize);
}
22
extern "Rust" {
	/// The kernel-wide virtual allocator, provided by the kernel proper under
	/// the linker symbol below. Read-locked on each use by the `Kernel`
	/// [`AddressSpaceTy`] impl; the write side presumably swaps the allocator
	/// at boot — TODO confirm against the defining crate.
	#[link_name = "__popcorn_memory_virtual_kernel_global"]
	static GLOBAL_VIRTUAL_ALLOCATOR: RwSpinlock<&'static dyn VirtualAllocator>;
}
27
mod private {
	/// Seals [`AddressSpaceTy`](super::AddressSpaceTy): only the two marker
	/// types below can implement it, since `Sealed` is not nameable outside
	/// this module.
	pub trait Sealed {}

	impl Sealed for super::Kernel {}
	impl Sealed for super::Userspace {}
}
34
/// Marker for the kernel's own (global) address space.
pub struct Kernel;
/// Handle to a userspace address space, held weakly so owned pages do not
/// keep the address space itself alive.
pub struct Userspace(pub(super) Weak);
37
/// Operations common to kernel and userspace address spaces: page-table
/// access/mapping plus contiguous virtual allocation.
#[unstable(feature = "kernel_mmap_config", issue = "24")]
pub trait AddressSpaceTy: private::Sealed {
	/// Handle to the page table backing this address space.
	type PageTable<'a>;
	/// Acquires the page-table handle for this address space.
	fn get_page_table(&self) -> Self::PageTable<'_>;
	/// Looks up the physical [`Frame`] currently mapped at `page`, if any.
	fn translate_page(table: &mut Self::PageTable<'_>, page: Page) -> Option<Frame>;
	/// Maps `page` to `frame` with the given `protection`; `reason` tags the
	/// mapping — semantics defined by the paging bridge, TODO confirm.
	fn map_page(table: &mut Self::PageTable<'_>, page: Page, frame: Frame, reason: u16, protection: Protection) -> Result<(), MapPageError>;
	/// Removes the mapping at `page`.
	fn unmap_page(table: &mut Self::PageTable<'_>, page: Page) -> Result<(), ()>;

	/// Allocates `len` contiguous pages anywhere in this address space.
	fn allocate_contiguous(&self, len: usize) -> Result<Page, AllocError>;
	/// Allocates `len` contiguous pages starting exactly at `at`.
	fn allocate_contiguous_at(&self, at: Page, len: usize) -> Result<Page, AllocError>;
	/// Returns `len` pages starting at `base` to this address space.
	fn deallocate_contiguous(&self, base: Page, len: usize);
}
50
#[unstable(feature = "kernel_mmap_config", issue = "24")]
impl AddressSpaceTy for Kernel {
	// Opaque guard type over the kernel page table, defined by the bridge
	// call in `get_page_table` (type_alias_impl_trait).
	type PageTable<'a> = impl DerefMut<Target = crate::bridge::paging::KTable>;
	
	fn get_page_table(&self) -> Self::PageTable<'_> {
		// SAFETY: delegated to the paging bridge; relies on its contract.
		unsafe { crate::bridge::paging::__popcorn_paging_get_ktable() }
	}

	// NOTE(review): the empty `#[define_opaque()]` list presumably marks that
	// this fn does NOT define the opaque `PageTable` type (it only derefs
	// through it) — confirm against the TAIT rules of the toolchain in use.
	#[define_opaque()]
	fn translate_page(table: &mut Self::PageTable<'_>, page: Page) -> Option<Frame> {
		// `&mut **table` derefs the opaque guard down to the `KTable` itself.
		unsafe { crate::bridge::paging::__popcorn_paging_ktable_translate_page(&mut **table, page) }
	}

	#[define_opaque()]
	fn map_page(table: &mut Self::PageTable<'_>, page: Page, frame: Frame, reason: u16, protection: Protection) -> Result<(), MapPageError> {
		unsafe { crate::bridge::paging::__popcorn_paging_ktable_map_page(&mut **table, page, frame, reason, protection) }
	}

	#[define_opaque()]
	fn unmap_page(table: &mut Self::PageTable<'_>, page: Page) -> Result<(), ()> {
		unsafe { crate::bridge::paging::__popcorn_paging_ktable_unmap_page(&mut **table, page) }
	}

	fn allocate_contiguous(&self, len: usize) -> Result<Page, AllocError> {
		// Read-lock the global allocator only for the duration of the call.
		let at = unsafe { &GLOBAL_VIRTUAL_ALLOCATOR }.read().allocate_contiguous(len)?;
		debug!("Global VMA allocated at {at:x?}+{len}");
		Ok(at)
	}

	fn allocate_contiguous_at(&self, at: Page, len: usize) -> Result<Page, AllocError> {
		debug!("Global VMA allocate contiguous {at:x?}+{len}");
		unsafe { &GLOBAL_VIRTUAL_ALLOCATOR }.read().allocate_contiguous_at(at, len)
	}
	
	fn deallocate_contiguous(&self, base: Page, len: usize) {
		unsafe { &GLOBAL_VIRTUAL_ALLOCATOR }.read().deallocate_contiguous(base, len)
	}
}
89
90#[unstable(feature = "kernel_mmap_config", issue = "24")]
91impl AddressSpaceTy for Userspace {
92	type PageTable<'a> = Option<AddressSpace>;
93
94	fn get_page_table(&self) -> Self::PageTable<'_> {
95		Weak::upgrade(&self.0)
96	}
97
98	fn translate_page(table: &mut Self::PageTable<'_>, page: Page) -> Option<Frame> {
99		let Some(table) = table else { return None };
100		unsafe { crate::bridge::paging::__popcorn_paging_ttable_translate_page(table.as_ref(), page) }
101	}
102
103	fn map_page(table: &mut Self::PageTable<'_>, page: Page, frame: Frame, reason: u16, protection: Protection) -> Result<(), MapPageError> {
104		let Some(table) = table else { return Err(MapPageError::AllocError) };
105		unsafe { crate::bridge::paging::__popcorn_paging_ttable_map_page(table.as_ref(), page, frame, reason, protection) }
106	}
107
108	fn unmap_page(table: &mut Self::PageTable<'_>, page: Page) -> Result<(), ()> {
109		let Some(table) = table else { return Ok(()) };
110		unsafe { crate::bridge::paging::__popcorn_paging_ttable_unmap_page(table.as_ref(), page) }
111	}
112
113	fn allocate_contiguous(&self, len: usize) -> Result<Page, AllocError> {
114		let address_space = Weak::upgrade(&self.0).ok_or(AllocError)?;
115		let at = unsafe { crate::bridge::memory::__popcorn_address_space_allocate(address_space.as_ref(), len)? };
116		debug!("Userspace VMA allocated at {at:x?}+{len}");
117		Ok(at)
118	}
119
120	fn allocate_contiguous_at(&self, at: Page, len: usize) -> Result<Page, AllocError> {
121		let address_space = Weak::upgrade(&self.0).ok_or(AllocError)?;
122		debug!("Userapce VMA allocate contiguous {at:x?}+{len}");
123		unsafe { crate::bridge::memory::__popcorn_address_space_allocate_at(address_space.as_ref(), at, len) }
124	}
125
126	fn deallocate_contiguous(&self, base: Page, len: usize) {
127		let Some(address_space) = Weak::upgrade(&self.0) else { return; };
128		unsafe { crate::bridge::memory::__popcorn_address_space_deallocate(address_space.as_ref(), base, len) }
129	}
130}
131
/// An owned, contiguous range of virtual pages in address space `A`,
/// deallocated on drop (RAII via the [`Drop`] impl below).
pub struct OwnedPages<A: AddressSpaceTy> {
	/// First page of the range.
	base: Page,
	/// Number of pages; `NonZero` so an empty range is unrepresentable.
	len: NonZero<usize>,
	/// The address space the range was allocated from and returns to.
	address_space: A,
}
137
138impl OwnedPages<Kernel> {
139	pub fn new(len: NonZero<usize>) -> Result<Self, AllocError> {
140		let base = Kernel.allocate_contiguous(len.get())?;
141		Ok(Self {
142			base,
143			len,
144			address_space: Kernel {}
145		})
146	}
147}
148
149impl OwnedPages<Userspace> {
150	pub fn new_in(len: NonZero<usize>, address_space: &AddressSpace) -> Result<Self, AllocError> {
151		let address_space = Userspace(AddressSpace::downgrade(address_space));
152		let base = address_space.allocate_contiguous(len.get())?;
153		Ok(Self {
154			base,
155			len,
156			address_space,
157		})
158	}
159	
160	pub fn xnew(count: NonZero<usize>, address_space: &AddressSpace, location: Location<Page>) -> Result<Self, AllocError> {
161		match location {
162			Location::Any => Self::new_in(count, address_space),
163			Location::At(f) => {
164				let address_space = Userspace(AddressSpace::downgrade(address_space));
165				let base = address_space.allocate_contiguous_at(f, count.get())?;
166				Ok(OwnedPages {
167					base,
168					len: count,
169					address_space
170				})
171			},
172			_ => todo!(),
173		}
174	}
175}
176
impl<A: AddressSpaceTy> OwnedPages<A> {
	/// Decomposes into `(base, len, address_space)` without deallocating the
	/// pages; pair with [`Self::from_raw_parts`] to reassume ownership.
	pub fn into_raw_parts(self) -> (Page, NonZero<usize>, A) {
		// Suppress `Drop` so the pages are not freed while being handed out.
		let this = ManuallyDrop::new(self);
		(
			this.base,
			this.len,
			// SAFETY: `this` is in a ManuallyDrop and is never used again, so
			// moving `address_space` out by bitwise read cannot double-use it.
			unsafe { ptr::read(&this.address_space) }
		)
	}

	/// Reassembles an `OwnedPages` from parts produced by
	/// [`Self::into_raw_parts`].
	///
	/// # Safety
	///
	/// `base`/`len` must denote a live allocation in `address_space` that is
	/// not owned elsewhere, or the `Drop` impl will free pages it does not own.
	pub unsafe fn from_raw_parts(base: Page, len: NonZero<usize>, address_space: A) -> Self {
		Self {
			base, len, address_space
		}
	}
}
193
impl<A: AddressSpaceTy> Drop for OwnedPages<A> {
	/// Returns the whole page range to its originating address space.
	fn drop(&mut self) {
		self.address_space.deallocate_contiguous(self.base, self.len.get());
	}
}
199
pub mod address_space {
	use alloc::sync;
	use alloc::sync::Arc;
	use core::fmt::{Debug, Formatter};
	use core::marker::PhantomData;
	use core::ptr::NonNull;

	extern "Rust" {
		// Opaque extern type: the real definition lives in the kernel proper;
		// this crate only passes references to it across the bridge.
		type _AddressSpaceInner;
	}

	/// FFI-opaque stand-in for the kernel's address-space object. Zero-sized
	/// on this side; the align(8) presumably matches the real type's
	/// alignment — TODO confirm against the defining crate.
	#[repr(align(8))]
	pub struct AddressSpaceInner((), PhantomData<_AddressSpaceInner>);

	/// Strong, reference-counted handle to an address space.
	#[repr(transparent)]
	pub struct AddressSpace(#[unstable(feature = "kernel_internals", issue = "none")] pub Arc<AddressSpaceInner>);
	/// Non-owning counterpart of [`AddressSpace`]; see [`Weak::upgrade`].
	pub struct Weak(sync::Weak<AddressSpaceInner>);

	impl AddressSpace {
		/// Raw pointer to the inner object, for FFI use.
		pub fn as_ptr(&self) -> NonNull<AddressSpaceInner> {
			NonNull::from(Self::as_ref(self))
		}

		/// Borrows the inner object.
		pub fn as_ref(&self) -> &AddressSpaceInner {
			Arc::as_ref(&self.0)
		}

		/// # Safety
		///
		/// The AddressSpace must stay alive until a new AddressSpace gets loaded
		pub unsafe fn load(&self) {
			unsafe { crate::bridge::paging::__popcorn_address_space_load(&self.0); }
		}

		/// Clones the strong handle (refcount bump only).
		pub fn clone_ref(this: &Self) -> AddressSpace {
			AddressSpace(Arc::clone(&this.0))
		}
		
		/// Creates a non-owning [`Weak`] handle to the same address space.
		pub fn downgrade(this: &Self) -> Weak {
			Weak(Arc::downgrade(&this.0))
		}
	}
	
	impl Weak {
		/// Attempts to recover a strong handle; `None` if the address space
		/// has already been dropped.
		pub fn upgrade(this: &Self) -> Option<AddressSpace> {
			sync::Weak::upgrade(&this.0).map(AddressSpace)
		}
	}

	impl Debug for AddressSpace {
		// The inner object is opaque here, so only the type name is printed.
		fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
			f.debug_tuple("AddressSpace")
			 .finish_non_exhaustive()
		}
	}
}