// kernel_api/ptr/user_ptr.rs

use core::mem::MaybeUninit;
use crate::ptr::impls;
use alloc::boxed::Box;
use core::cmp;
use core::ptr::NonNull;
use log::debug;
use crate::memory::r#virtual::address_space::{AddressSpace, AddressSpaceInner};

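/// Error type for faulting accesses through a [`User`] pointer.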
pub enum PointerError {
    InvalidAddress,
}

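/// A raw pointer into userspace memory, tagged with the address space it was created in.
///
/// Every access first asserts that the tagged address space is the one currently active,
/// then goes through the fault-checked primitives in [`impls`], so an invalid user address
/// is reported as [`PointerError::InvalidAddress`].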
pub struct User<T>(T, NonNull<AddressSpaceInner>);

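// A minimal usage sketch (the caller-side names `raw` and `address_space` are
// assumptions for illustration, not part of this module):
//
//     let ptr: User<*const usize> = User::new_in(raw, &address_space);
//     let value = unsafe { ptr.read() }?;
//
// The method bodies below are generated by macros so that identical implementations
// can be stamped out for both `*const T` and `*mut T` pointees.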
macro_rules! user_ptr_impl_unsized {
    ($ty: ident) => {
        pub fn new_in(from: * $ty T, address_space: &AddressSpace) -> Self {
            Self(from, address_space.as_ptr())
        }

        pub fn is_null(self) -> bool {
            self.0.is_null()
        }

        pub fn cast<U>(self) -> User<* $ty U> {
            User(self.0.cast(), self.1)
        }
    };
}

macro_rules! user_ptr_impl_sized {
    ($ty: ident) => {
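        /// Reads a `T` from the user pointer.
        ///
        /// Reads of 1, 2, 4 and (on x86_64) 8 bytes go through a single checked access;
        /// any other size falls back to a checked memcpy into an uninitialised buffer.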
        pub unsafe fn read(self) -> Result<T, PointerError> {
            unsafe {
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(self.1),
                    "Address space of User<*> should match current address space",
                );
            }
            match size_of::<T>() {
                1 => unsafe {
                    impls::checked_read_1(self.0.cast())
                        .map(|val| (&val as *const MaybeUninit<u8>).cast::<T>().read())
                },
                2 => unsafe {
                    impls::checked_read_2(self.0.cast())
                        .map(|val| (&val as *const MaybeUninit<u16>).cast::<T>().read())
                },
                4 => unsafe {
                    impls::checked_read_4(self.0.cast())
                        .map(|val| (&val as *const MaybeUninit<u32>).cast::<T>().read())
                },
                #[cfg(target_arch = "x86_64")] 8 => unsafe {
                    impls::checked_read_8(self.0.cast())
                        .map(|val| (&val as *const MaybeUninit<u64>).cast::<T>().read())
                },
                size => {
                    let mut buf = MaybeUninit::<T>::uninit();
                    impls::checked_memcpy(self.0.cast(), buf.as_mut_ptr().cast(), size)
                        .map(|_| unsafe { buf.assume_init() })
                }
            }.ok_or(PointerError::InvalidAddress)
        }

        pub unsafe fn read_unaligned(self) -> Result<T, PointerError> {
            unsafe {
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(self.1),
                    "Address space of User<*> should match current address space",
                );
            }
            todo!()
        }

        pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize) -> Result<(), PointerError> {
            unsafe {
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(self.1),
                    "Address space of User<*> should match current address space",
                );
            }
            impls::checked_memcpy(self.0.cast(), dest.cast(), size_of::<T>() * count).ok_or(PointerError::InvalidAddress)
        }

        pub fn copy_to_user(self, dest: User<*mut T>, _count: usize) -> Result<(), PointerError> {
            unsafe {
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(self.1),
                    "Address space of User<*> should match current address space",
                );
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(dest.1),
                    "Address space of User<*> should match current address space",
                );
            }
            todo!()
        }
    };
}

macro_rules! user_ptr_impl_slice {
    ($ty: ident) => {
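        /// Copies the user slice into a freshly allocated kernel buffer.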
        pub unsafe fn read_to_buffer(self) -> Result<Box<[T]>, PointerError> {
            unsafe {
                assert!(
                    crate::bridge::memory::__popcorn_check_address_space(self.1),
                    "Address space of User<*> should match current address space",
                );
            }
            let len = self.0.len();
            let mut buf = Box::new_uninit_slice(len);
            self.cast::<T>().copy_to_nonoverlapping(buf.as_mut_ptr().cast(), len)
                .map(|_| buf.assume_init())
        }

        pub fn is_empty(self) -> bool {
            self.len() == 0
        }

        pub fn len(self) -> usize {
            self.0.len()
        }
    };
}

impl<T: ?Sized> User<*const T> {
    user_ptr_impl_unsized!(const);

    pub fn cast_mut(self) -> User<*mut T> {
        User(self.0.cast_mut(), self.1)
    }
}

impl<T: ?Sized> User<*mut T> {
    user_ptr_impl_unsized!(mut);

    pub fn cast_const(self) -> User<*const T> {
        User(self.0.cast_const(), self.1)
    }
}

impl<T> User<*const T> {
    user_ptr_impl_sized!(const);
}

impl<T> User<*mut T> {
    user_ptr_impl_sized!(mut);

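    /// Writes `val` through the user pointer, mirroring the size dispatch used by `read`.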
    pub fn write(self, val: T) -> Result<(), PointerError> {
        unsafe {
            assert!(
                crate::bridge::memory::__popcorn_check_address_space(self.1),
                "Address space of User<*> should match current address space",
            );
        }
        match size_of::<T>() {
            1 => unsafe {
                impls::checked_write_1(self.0.cast(), (&val as *const T).cast::<MaybeUninit<u8>>().read())
            },
            2 => unsafe {
                impls::checked_write_2(self.0.cast(), (&val as *const T).cast::<MaybeUninit<u16>>().read())
            },
            4 => unsafe {
                impls::checked_write_4(self.0.cast(), (&val as *const T).cast::<MaybeUninit<u32>>().read())
            },
            #[cfg(target_arch = "x86_64")] 8 => unsafe {
                impls::checked_write_8(self.0.cast(), (&val as *const T).cast::<MaybeUninit<u64>>().read())
            },
            size => {
                impls::checked_memcpy((&val as *const T).cast(), self.0.cast(), size)
            }
        }.ok_or(PointerError::InvalidAddress)
    }

    pub fn write_unaligned(self, _val: T) -> Result<(), PointerError> {
        unsafe {
            assert!(
                crate::bridge::memory::__popcorn_check_address_space(self.1),
                "Address space of User<*> should match current address space",
            );
        }
        todo!()
    }

    pub fn copy_from_nonoverlapping(self, src: *const T, count: usize) -> Result<(), PointerError> {
        unsafe {
            assert!(
                crate::bridge::memory::__popcorn_check_address_space(self.1),
                "Address space of User<*> should match current address space",
            );
        }
        impls::checked_memcpy(src.cast(), self.0.cast(), size_of::<T>() * count).ok_or(PointerError::InvalidAddress)
    }

    pub fn copy_from_user(self, src: User<*const T>, count: usize) -> Result<(), PointerError> {
        src.copy_to_user(self, count)
    }
}

impl<T> User<*const [T]> {
    user_ptr_impl_slice!(const);
}

impl<T> User<*mut [T]> {
    user_ptr_impl_slice!(mut);

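    /// Copies from `val` into the user slice, truncating to the shorter of the two lengths,
    /// and returns the number of elements written.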
    pub fn write_from_buffer(self, val: &[T]) -> Result<usize, PointerError> {
        unsafe {
            assert!(
                crate::bridge::memory::__popcorn_check_address_space(self.1),
                "Address space of User<*> should match current address space",
            );
        }
        let len = cmp::min(self.0.len(), val.len());

        debug!("copy {} bytes from {:p} to {:p}", len * size_of::<T>(), val.as_ptr(), self.0);

        self.cast::<u8>().copy_from_nonoverlapping(val.as_ptr().cast(), len * size_of::<T>())?;
        Ok(len)
    }
}

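/// Builds a `User` slice pointer from a user data pointer and a length, analogous to
/// [`core::ptr::slice_from_raw_parts`].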
pub fn slice_from_raw_parts<T>(data: User<*const T>, len: usize) -> User<*const [T]> {
    User(core::ptr::slice_from_raw_parts(data.0, len), data.1)
}

/// Mutable counterpart of [`slice_from_raw_parts`].
pub fn slice_from_raw_parts_mut<T>(data: User<*mut T>, len: usize) -> User<*mut [T]> {
    User(core::ptr::slice_from_raw_parts_mut(data.0, len), data.1)
}