#![stable(feature = "kernel_mmap", since = "1.1.0")]

use core::fmt::{Debug, Formatter};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::num::NonZero;
use core::{mem, ptr};
use log::debug;
use crate::memory::allocator::{PhysicalAllocator, SpecificLocation};
use crate::memory::{AllocError, Frame, Page};
use crate::memory::physical::{OwnedFrames, highmem};
use crate::memory::r#virtual::{AddressSpaceTy, Kernel, OwnedPages, Userspace, VirtualAllocator};
use crate::memory::r#virtual::address_space::{Weak, AddressSpace};

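/// Describes how a physical allocation is laid out within its virtual allocation.
///
/// Implementors specify how many virtual pages back a given number of physical
/// frames (allowing extra, unbacked pages such as guard pages) and where the
/// first backed page sits relative to the virtual base. A minimal sketch,
/// assuming a hypothetical `GuardedBuffer` with one guard page below its data,
/// much like [`RawStack`]:
///
/// ```ignore
/// struct GuardedBuffer;
///
/// impl Mappable for GuardedBuffer {
///     // One extra, never-mapped page is reserved below the data.
///     fn physical_length_to_virtual_length(&self, physical_length: NonZero<usize>) -> NonZero<usize> {
///         physical_length.checked_add(1).expect("virtual length overflow")
///     }
///     // The backed region starts one page after the virtual base.
///     fn physical_start_offset_from_virtual(&self) -> isize { 1 }
/// }
/// ```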
#[unstable(feature = "kernel_mmap_trait", issue = "24")]
pub trait Mappable {
	fn physical_length_to_virtual_length(&self, physical_length: NonZero<usize>) -> NonZero<usize>;

	fn physical_start_offset_from_virtual(&self) -> isize;
}

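/// Page protections for a mapping.
///
/// Protections are deliberately coarse for now: `RWX` maps memory readable,
/// writable and executable, while `RWXU` also allows userspace access (the
/// trailing `U`).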
#[unstable(feature = "kernel_mmap_config", issue = "24")]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Protection {
	RWX,
	RWXU,
}

#[unstable(feature = "kernel_mmap_config", issue = "24")]
impl Default for Protection {
	fn default() -> Self {
		Self::RWX
	}
}

mod private {
	use crate::memory::{Frame, Page};

	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	pub trait Sealed {}

	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	impl Sealed for Page {}
	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	impl Sealed for Frame {}
}

#[unstable(feature = "kernel_mmap_config", issue = "24")]
pub trait Address: private::Sealed {}
#[unstable(feature = "kernel_mmap_config", issue = "24")]
impl Address for Page {}
#[unstable(feature = "kernel_mmap_config", issue = "24")]
impl Address for Frame {}

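/// A placement constraint for an allocation, generic over physical ([`Frame`])
/// and virtual ([`Page`]) addresses.
///
/// A sketch of constrained placement, assuming a device needing physically
/// contiguous frames below some `boundary` frame (the alignment unit is an
/// assumption here):
///
/// ```ignore
/// let config = Config::new(NonZero::new(4).unwrap())
///     .physical_location(Location::Below {
///         location: boundary,
///         with_alignment: NonZero::new(16).unwrap(),
///     });
/// ```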
#[stable(feature = "kernel_mmap", since = "1.1.0")]
#[derive(Copy, Clone)]
pub enum Location<A: Address> {
	#[stable(feature = "kernel_mmap", since = "1.1.0")] Any,
	#[unstable(feature = "kernel_mmap_config", issue = "24")] Aligned(NonZero<u32>),
	#[stable(feature = "kernel_mmap", since = "1.1.0")] At(#[stable(feature = "kernel_mmap", since = "1.1.0")] A),
	#[unstable(feature = "kernel_mmap_config", issue = "24")] Below { location: A, with_alignment: NonZero<u32> },
}

#[doc(hidden)]
#[unstable(feature = "kernel_mmap_config", issue = "24")]
impl From<Location<Frame>> for super::allocator::Location {
	fn from(value: Location<Frame>) -> Self {
		use super::allocator::{Location as XLocation, SpecificLocation};
		match value {
			Location::Any => XLocation::Any,
			Location::Aligned(a) => XLocation::Specific(SpecificLocation::Aligned(a)),
			Location::At(f) => XLocation::Specific(SpecificLocation::At(f)),
			Location::Below { location, with_alignment } => XLocation::Specific(SpecificLocation::Below { location, with_alignment }),
		}
	}
}

#[unstable(feature = "kernel_mmap_config", issue = "24")]
pub enum Laziness { Lazy, Prefault }

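/// Builder-style configuration describing a mapping to be created.
///
/// A minimal sketch of the builder flow for a kernel mapping (the reason code
/// `0` is an arbitrary placeholder):
///
/// ```ignore
/// let config = Config::new(NonZero::new(8).unwrap())
///     .protection(Protection::RWX)
///     .physical_location(Location::Any);
/// let mapping = new_mapping(config, 0)?;
/// ```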
#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub struct Config<'physical_allocator, A: AddressSpaceTy = Kernel> {
	physical_location: Location<Frame>,
	virtual_location: Location<Page>,
	_laziness: Laziness,
	length: NonZero<usize>,
	physical_allocator: &'physical_allocator dyn PhysicalAllocator,
	address_space: A,
	protection: Protection,
}

impl Config<'static, Kernel> {
	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn new(length: NonZero<usize>) -> Self {
		Config {
			physical_location: Location::Any,
			virtual_location: Location::Any,
			_laziness: Laziness::Lazy,
			length,
			physical_allocator: highmem(),
			address_space: Kernel {},
			protection: Protection::RWX,
		}
	}
}

impl Config<'static, Userspace> {
	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn new_in(length: NonZero<usize>, address_space: &AddressSpace) -> Self {
		Config {
			physical_location: Location::Any,
			virtual_location: Location::Any,
			_laziness: Laziness::Lazy,
			length,
			physical_allocator: highmem(),
			address_space: Userspace(AddressSpace::downgrade(address_space)),
			protection: Protection::RWX,
		}
	}
}

impl<'physical_allocator, A: AddressSpaceTy> Config<'physical_allocator, A> {
	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	pub fn physical_allocator<'a>(self, allocator: &'a dyn PhysicalAllocator) -> Config<'a, A> {
		Config {
			physical_allocator: allocator,
			..self
		}
	}

	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	pub fn protection(self, protection: Protection) -> Self {
		Config {
			protection,
			..self
		}
	}

	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn physical_location(self, location: Location<Frame>) -> Self {
		Config {
			physical_location: location,
			..self
		}
	}

	#[unstable(feature = "kernel_mmap_config", issue = "24")]
	pub fn virtual_location(self, location: Location<Page>) -> Self {
		Config {
			virtual_location: location,
			..self
		}
	}
}

pub(super) enum RawMappingContiguity {
	Contiguous(Frame),

	Discontiguous,
}

#[derive(Debug)]
#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
pub struct DiscontiguityError(());

#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub struct RawMapping<'phys_allocator, R: Mappable, A: AddressSpaceTy = Kernel> {
	raw: R,

	address_space: ManuallyDrop<A>,

	contiguity: RawMappingContiguity,

	virtual_valid_start: Page,

	physical_len: NonZero<usize>,

	allocator: &'phys_allocator dyn PhysicalAllocator,

	protection: Protection,
}

#[stable(feature = "kernel_mmap", since = "1.1.0")]
impl<R: Mappable, A: AddressSpaceTy> Debug for RawMapping<'_, R, A> {
	fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
		f.debug_struct("RawMapping")
			.field(
				"physical_base",
				match self.physical_start() {
					Ok(ref frame) => frame,
					Err(_) => &"<discontiguous>",
				}
			)
			.field("physical_length", &self.physical_len().get())
			.field("virtual_base", &self.virtual_start())
			.field("virtual_valid_start", &self.virtual_valid_start())
			.field("address_space", &"<address space>")
			.field("protection", &self.protection)
			.finish()
	}
}
impl<'phys_alloc, R: Mappable> RawMapping<'phys_alloc, R, Kernel> {
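	/// Allocates physical frames and kernel virtual pages according to `config`
	/// and maps one onto the other.
	///
	/// A sketch of direct use ([`new_mapping`] and [`new_stack`] are the usual
	/// entry points; `0` is an arbitrary reason code):
	///
	/// ```ignore
	/// let mapping = RawMapping::new(Config::new(NonZero::new(4).unwrap()), 0, RawMmap {})?;
	/// ```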
	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn new(config: Config<'phys_alloc, Kernel>, reason: u16, raw: R) -> Result<Self, AllocError> {
		let Location::Any = config.virtual_location else { todo!() };

		let virtual_len = raw.physical_length_to_virtual_length(config.length);
		let virtual_mem = OwnedPages::new(virtual_len)?;

		Self::new_at(config, reason, virtual_mem, raw)
	}
}

impl<'phys_alloc, R: Mappable> RawMapping<'phys_alloc, R, Userspace> {
	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn new_in(config: Config<'phys_alloc, Userspace>, reason: u16, raw: R) -> Result<Self, AllocError> {
		let Some(address_space) = Weak::upgrade(&config.address_space.0) else { return Err(AllocError) };
		let virtual_len = raw.physical_length_to_virtual_length(config.length);
		let virtual_mem = OwnedPages::xnew(virtual_len, &address_space, config.virtual_location)?;

		Self::new_at(config, reason, virtual_mem, raw)
	}
}

impl<'phys_alloc, R: Mappable, A: AddressSpaceTy> RawMapping<'phys_alloc, R, A> {
	fn new_at(config: Config<'phys_alloc, A>, reason: u16, virtual_mem: OwnedPages<A>, raw: R) -> Result<Self, AllocError> {
		let Config {
			length: physical_len,
			physical_allocator,
			physical_location,
			protection,
			..
		} = config;

		let physical_mem = OwnedFrames::xnew(physical_len, physical_allocator, physical_location.into())?;

		let (physical_base, _, _) = physical_mem.into_raw_parts();
		let (virtual_base, _, address_space) = virtual_mem.into_raw_parts();
		let offset_base = virtual_base + raw.physical_start_offset_from_virtual();

		let mut page_table = address_space.get_page_table();
		for (frame, page) in (0..physical_len.get()).map(|i| (physical_base + i, offset_base + i)) {
			A::map_page(&mut page_table, page, frame, reason, protection)
				.expect("Virtual memory uniquely owned by the allocation so should not be mapped in this address space");
		}
		drop(page_table);

		Ok(Self {
			raw,
			address_space: ManuallyDrop::new(address_space),
			contiguity: RawMappingContiguity::Contiguous(physical_base),
			virtual_valid_start: offset_base,
			physical_len,
			allocator: physical_allocator,
			protection,
		})
	}

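	/// Decomposes the mapping into its owned physical and virtual allocations.
	///
	/// Fails with [`DiscontiguityError`] if the mapping has become physically
	/// discontiguous (for example after [`Self::resize_in_place`] could not
	/// extend it contiguously). A sketch of a round trip through the raw parts:
	///
	/// ```ignore
	/// let (frames, pages, protection, raw) = mapping.into_contiguous_raw_parts()?;
	/// let mapping = unsafe { RawMapping::from_contiguous_raw_parts(frames, pages, protection, raw) };
	/// ```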
	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn into_contiguous_raw_parts(mut self) -> Result<(OwnedFrames<'phys_alloc>, OwnedPages<A>, Protection, R), DiscontiguityError> {
		let frames = unsafe {
			let RawMappingContiguity::Contiguous(base_frame) = self.contiguity else {
				return Err(DiscontiguityError(()));
			};

			OwnedFrames::from_raw_parts(
				base_frame,
				self.physical_len(),
				self.allocator,
			)
		};

		let address_space = unsafe { ManuallyDrop::take(&mut self.address_space) };
		let pages = unsafe {
			OwnedPages::from_raw_parts(
				self.virtual_start(),
				self.virtual_len(),
				address_space,
			)
		};

		let this = ManuallyDrop::new(self);
		Ok((frames, pages, this.protection, unsafe { ptr::read(&this.raw) }))
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub unsafe fn from_contiguous_raw_parts(frames: OwnedFrames<'phys_alloc>, pages: OwnedPages<A>, protection: Protection, raw: R) -> Self {
		let (virtual_base, actual_vlen, address_space) = pages.into_raw_parts();
		let (physical_base, physical_len, physical_allocator) = frames.into_raw_parts();
		let correct_vlen = raw.physical_length_to_virtual_length(physical_len);
		debug_assert_eq!(actual_vlen, correct_vlen);

		Self {
			virtual_valid_start: virtual_base + raw.physical_start_offset_from_virtual(),
			raw,
			address_space: ManuallyDrop::new(address_space),
			contiguity: RawMappingContiguity::Contiguous(physical_base),
			physical_len,
			allocator: physical_allocator,
			protection,
		}
	}

	fn virtual_len(&self) -> NonZero<usize> {
		self.raw.physical_length_to_virtual_length(self.physical_len())
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn virtual_start(&self) -> Page {
		self.virtual_valid_start() - self.raw.physical_start_offset_from_virtual()
	}

	fn virtual_valid_start(&self) -> Page {
		self.virtual_valid_start
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn virtual_end(&self) -> Page {
		self.virtual_start() + self.virtual_len().get()
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn physical_len(&self) -> NonZero<usize> {
		self.physical_len
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn physical_start(&self) -> Result<Frame, DiscontiguityError> {
		match self.contiguity {
			RawMappingContiguity::Contiguous(base_frame) => Ok(base_frame),
			RawMappingContiguity::Discontiguous => Err(DiscontiguityError(())),
		}
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	pub fn physical_end(&self) -> Result<Frame, DiscontiguityError> {
		match self.contiguity {
			RawMappingContiguity::Contiguous(base_frame) => Ok(base_frame + self.physical_len().get()),
			RawMappingContiguity::Discontiguous => Err(DiscontiguityError(())),
		}
	}

	#[unstable(feature = "kernel_mmap_to_parts", issue = "24")]
	#[inline]
	pub fn protection(&self) -> Protection {
		self.protection
	}

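	/// Grows the mapping in place (shrinking is not implemented yet), returning
	/// the first page of the newly mapped region.
	///
	/// The virtual range always extends contiguously; if the extra physical
	/// frames cannot be placed directly after the existing ones, the mapping
	/// becomes physically discontiguous. A sketch, doubling a mapping's length:
	///
	/// ```ignore
	/// let doubled = mapping.physical_len().checked_add(mapping.physical_len().get()).unwrap();
	/// let first_new_page = mapping.resize_in_place(doubled)?;
	/// ```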
	#[stable(feature = "kernel_mmap", since = "1.1.0")]
	pub fn resize_in_place(&mut self, new_len: NonZero<usize>) -> Result<Page, AllocError> {
		if new_len == self.physical_len() { return Ok(self.virtual_end()); }

		let original_physical_allocator = self.allocator;

		if new_len < self.physical_len() {
			todo!("actually free and unmap the extra memory")
		} else {
			let extra_len: NonZero<usize> = new_len.get().checked_sub(self.physical_len().get())
				.expect("`new_len` is checked to be greater than `physical_len`")
				.try_into()
				.expect("`new_len` is checked to be not equal to `physical_len`");
			let extra_virtual_mem = self.address_space.allocate_contiguous_at(self.virtual_end(), extra_len.get())?;
			debug_assert_eq!(self.virtual_end(), extra_virtual_mem);

			let mut page_table = self.address_space.get_page_table();

			let extra_physical_mem = OwnedFrames::xnew(extra_len, original_physical_allocator, super::allocator::Location::Any)?;
			let (new_start_frame, _, _) = extra_physical_mem.into_raw_parts();
			// The mapping stays physically contiguous only if the new frames start
			// exactly where the old allocation ends.
			if let RawMappingContiguity::Contiguous(base_frame) = self.contiguity {
				if base_frame + self.physical_len.get() != new_start_frame { self.contiguity = RawMappingContiguity::Discontiguous; }
			}

			for (frame, page) in (0..extra_len.get()).map(|i| (new_start_frame + i, extra_virtual_mem + i)) {
				A::map_page(&mut page_table, page, frame, 25, self.protection)
					.expect("todo");
			}

			self.physical_len = new_len;
			Ok(extra_virtual_mem)
		}
	}
}

#[stable(feature = "kernel_mmap", since = "1.1.0")]
impl<R: Mappable, A: AddressSpaceTy> Drop for RawMapping<'_, R, A> {
	fn drop(&mut self) {
		debug!("mmap dropped: {self:x?}");

		let mut page_table = self.address_space.get_page_table();

		match self.contiguity {
			RawMappingContiguity::Contiguous(base_frame) => {
				let _frames = unsafe { OwnedFrames::from_raw_parts(
					base_frame,
					self.physical_len(),
					self.allocator,
				) };
			},
			RawMappingContiguity::Discontiguous => {
				let page_iter = (0..self.physical_len().get()).map(|i| self.virtual_valid_start() + i);
				let frame_iter = page_iter.map(|page| {
					A::translate_page(&mut page_table, page)
						.expect("Virtual memory uniquely owned by this mmap so shouldn't be unmapped")
				});

				let drop_frame_range = |(frame, len)| {
					let _frames = unsafe { OwnedFrames::from_raw_parts(
						frame,
						len,
						self.allocator,
					) };
				};

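				// Walk the translated frames in virtual order, coalescing adjacent
				// runs of physically consecutive frames and freeing each completed
				// run as a single `OwnedFrames`; the final run is freed after the fold.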
				let last_chunk = frame_iter.map(|frame| (frame, NonZero::<usize>::new(1).unwrap()))
					.reduce(|prev_group, new_group| {
						if prev_group.0 + prev_group.1.get() == new_group.0
							&& let Some(combined_len) = prev_group.1.checked_add(new_group.1.get()) {
							(prev_group.0, combined_len)
						} else {
							drop_frame_range(prev_group);
							new_group
						}
					});
				if let Some(last_chunk) = last_chunk {
					drop_frame_range(last_chunk);
				}
			},
		}

		for page in (0..self.physical_len().get()).map(|i| self.virtual_valid_start() + i) {
			debug!("unmapping page {page:x?}");
			A::unmap_page(&mut page_table, page)
				.expect("Virtual memory uniquely owned by this mmap so shouldn't be unmapped");
		}

		drop(page_table);
		let address_space = unsafe { ManuallyDrop::take(&mut self.address_space) };
		let _pages = unsafe {
			OwnedPages::from_raw_parts(
				self.virtual_start(),
				self.virtual_len(),
				address_space,
			)
		};
	}
}

#[doc(hidden)]
#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub struct RawMmap;

#[stable(feature = "kernel_mmap", since = "1.1.0")]
impl Mappable for RawMmap {
	fn physical_length_to_virtual_length(&self, physical_length: NonZero<usize>) -> NonZero<usize> { physical_length }
	fn physical_start_offset_from_virtual(&self) -> isize { 0 }
}

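/// Layout for stacks: the virtual allocation is one page longer than the
/// physical one and the backed pages start one page in, leaving an unmapped
/// guard page below the stack so that an overflow faults instead of silently
/// corrupting adjacent memory.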
#[doc(hidden)]
#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub struct RawStack;

#[stable(feature = "kernel_mmap", since = "1.1.0")]
impl Mappable for RawStack {
	fn physical_length_to_virtual_length(&self, physical_length: NonZero<usize>) -> NonZero<usize> {
		physical_length.checked_add(1).expect("Stack size overflow")
	}
	fn physical_start_offset_from_virtual(&self) -> isize { 1 }
}

#[allow(type_alias_bounds)] #[stable(feature = "kernel_mmap", since = "1.1.0")]
pub type Mapping<'phys_alloc, A: AddressSpaceTy = Kernel> = RawMapping<'phys_alloc, RawMmap, A>;

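/// Creates an anonymous memory mapping in the kernel address space.
///
/// A sketch of typical use; `reason` is an opaque tag passed through to the
/// page mapper, with `0` as an arbitrary placeholder:
///
/// ```ignore
/// let mapping = new_mapping(Config::new(NonZero::new(4).unwrap()), 0)?;
/// let base = mapping.virtual_start();
/// ```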
#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub fn new_mapping(config: Config<'_, Kernel>, reason: u16) -> Result<Mapping<'_, Kernel>, AllocError> {
	Mapping::new(config, reason, RawMmap {})
}

#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub fn new_mapping_in(config: Config<'_, Userspace>, reason: u16) -> Result<Mapping<'_, Userspace>, AllocError> {
	Mapping::new_in(config, reason, RawMmap {})
}

#[allow(type_alias_bounds)] #[stable(feature = "kernel_mmap", since = "1.1.0")]
pub type Stack<'phys_alloc, A: AddressSpaceTy = Kernel> = RawMapping<'phys_alloc, RawStack, A>;

#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub fn new_stack(config: Config<'_, Kernel>, reason: u16) -> Result<Stack<'_, Kernel>, AllocError> {
	Stack::new(config, reason, RawStack {})
}

#[stable(feature = "kernel_mmap", since = "1.1.0")]
pub fn new_stack_in(config: Config<'_, Userspace>, reason: u16) -> Result<Stack<'_, Userspace>, AllocError> {
	Stack::new_in(config, reason, RawStack {})
}

#[unstable(feature = "kernel_internals", issue = "none")]
pub struct DynMapping {
	physical_length_to_virtual_length: fn(*mut u8, physical_length: NonZero<usize>) -> NonZero<usize>,
	physical_start_offset_from_virtual: fn(*mut u8) -> isize,
}

impl DynMapping {
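	/// Type-erases the `Mappable` parameter of a [`RawMapping`].
	///
	/// The `where [(); 1 / ((size_of::<R>() == 0) as usize)]:` bound is a
	/// const-eval trick that fails compilation (division by zero) unless `R` is
	/// zero-sized, which is what makes the `&R` to `*mut u8` function-pointer
	/// transmutes below sound: the receiver is never actually read through.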
	#[unstable(feature = "kernel_internals", issue = "none")]
	pub fn coerce<R: Mappable, A: AddressSpaceTy>(from: RawMapping<R, A>) -> RawMapping<DynMapping, A>
	where [(); 1 / ((size_of::<R>() == 0) as usize)]: {
		let from = ManuallyDrop::new(from);

		RawMapping {
			raw: DynMapping {
				physical_length_to_virtual_length: unsafe { mem::transmute(R::physical_length_to_virtual_length as fn(&R, physical_length: NonZero<usize>) -> NonZero<usize>) },
				physical_start_offset_from_virtual: unsafe { mem::transmute(R::physical_start_offset_from_virtual as fn(&R) -> isize) },
			},
			address_space: unsafe { ptr::read(&from.address_space) },
			contiguity: unsafe { ptr::read(&from.contiguity) },
			virtual_valid_start: from.virtual_valid_start,
			physical_len: from.physical_len,
			allocator: from.allocator,
			protection: from.protection,
		}
	}
}

#[unstable(feature = "kernel_internals", issue = "none")]
impl Mappable for DynMapping {
	fn physical_length_to_virtual_length(&self, physical_length: NonZero<usize>) -> NonZero<usize> {
		(self.physical_length_to_virtual_length)(ptr::dangling_mut(), physical_length)
	}

	fn physical_start_offset_from_virtual(&self) -> isize {
		(self.physical_start_offset_from_virtual)(ptr::dangling_mut())
	}
}