path: root/rust/kernel/alloc/kbox.rs
author    Miguel Ojeda <ojeda@kernel.org>  2025-09-08 00:09:41 +0200
committer Miguel Ojeda <ojeda@kernel.org>  2025-09-08 00:09:41 +0200
commit  f1d3703fa371332bded4ff174eddb732cef2b1a0 (patch)
tree    a167b4dc0dc3abed9d8b690f83dd7c70300cee6e /rust/kernel/alloc/kbox.rs
parent  Linux 6.17-rc5 (diff)
parent  rust: alloc: remove `allocator_test` (diff)
Merge tag 'alloc-next-v6.18-2025-09-04' of https://github.com/Rust-for-Linux/linux into rust-next
Pull alloc and DMA updates from Danilo Krummrich:

 Allocator:
  - Provide information about the minimum alignment guarantees of
    'Kmalloc', 'Vmalloc' and 'KVmalloc'.
  - Take minimum alignment guarantees of allocators for 'ForeignOwnable'
    into account.
  - Remove the 'allocator_test' incl. 'Cmalloc'.

 Box:
  - Implement 'Box::pin_slice()', which constructs a pinned slice of
    elements.

 Vec:
  - Simplify KUnit test module name to 'rust_kvec'.
  - Add doc-test for 'Vec::as_slice()'.
  - Constify various methods.

 DMA:
  - Update 'ARef' and 'AlwaysRefCounted' imports.

 MISC:
  - Remove support for unused host '#[test]'s.
  - Constify 'ArrayLayout::new_unchecked()'.

* tag 'alloc-next-v6.18-2025-09-04' of https://github.com/Rust-for-Linux/linux:
  rust: alloc: remove `allocator_test`
  rust: kernel: remove support for unused host `#[test]`s
  rust: alloc: implement Box::pin_slice()
  rust: alloc: add ARCH_KMALLOC_MINALIGN to bindgen blocklist
  rust: dma: Update ARef and AlwaysRefCounted imports from sync::aref
  rust: alloc: take the allocator into account for FOREIGN_ALIGN
  rust: alloc: specify the minimum alignment of each allocator
  rust: make `kvec::Vec` functions `const fn`
  rust: make `ArrayLayout::new_unchecked` a `const fn`
  rust: alloc: kvec: simplify KUnit test module name to "rust_kvec"
  rust: alloc: kvec: add doc example for as_slice method
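The central rule in the alignment changes above (and in the `FOREIGN_ALIGN` hunks of the diff below) is that the alignment promised for a foreign pointer is the larger of the pointee's own alignment and the allocator's minimum alignment guarantee. A minimal, standalone sketch of that rule in plain Rust, not kernel code; the `MinAlign` trait, `FakeKmalloc`, and its `MIN_ALIGN` value of 8 are illustrative assumptions, not the kernel API (the real guarantee for `Kmalloc` comes from ARCH_KMALLOC_MINALIGN):

// Standalone illustration of the rule: effective alignment is
// max(align_of::<T>(), allocator minimum alignment).

/// Hypothetical stand-in for an allocator's minimum alignment guarantee.
trait MinAlign {
    const MIN_ALIGN: usize;
}

/// Illustrative allocator; the value 8 is an assumption for this example only.
struct FakeKmalloc;

impl MinAlign for FakeKmalloc {
    const MIN_ALIGN: usize = 8;
}

/// Mirrors the `FOREIGN_ALIGN` computation in the diff below.
const fn foreign_align<T, A: MinAlign>() -> usize {
    if core::mem::align_of::<T>() < A::MIN_ALIGN {
        A::MIN_ALIGN
    } else {
        core::mem::align_of::<T>()
    }
}

fn main() {
    // `u8` has alignment 1, so the allocator's minimum alignment wins.
    assert_eq!(foreign_align::<u8, FakeKmalloc>(), 8);

    // A 16-byte-aligned type keeps its own, larger alignment.
    #[repr(align(16))]
    struct Aligned16;
    assert_eq!(foreign_align::<Aligned16, FakeKmalloc>(), 16);
}

The diff below expresses the same computation directly in the `ForeignOwnable` impls for `Box<T, A>` and `Pin<Box<T, A>>`, using the allocator's `MIN_ALIGN`.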
Diffstat (limited to 'rust/kernel/alloc/kbox.rs')
 -rw-r--r--  rust/kernel/alloc/kbox.rs | 90
 1 file changed, 86 insertions(+), 4 deletions(-)
diff --git a/rust/kernel/alloc/kbox.rs b/rust/kernel/alloc/kbox.rs
index 856d05aa60f1..1aa83d751f10 100644
--- a/rust/kernel/alloc/kbox.rs
+++ b/rust/kernel/alloc/kbox.rs
@@ -290,6 +290,83 @@ where
Ok(Self::new(x, flags)?.into())
}
+ /// Construct a pinned slice of elements `Pin<Box<[T], A>>`.
+ ///
+ /// This is a convenient way to create e.g. slices of structures containing spinlocks or
+ /// mutexes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use kernel::sync::{new_spinlock, SpinLock};
+ ///
+ /// struct Inner {
+ /// a: u32,
+ /// b: u32,
+ /// }
+ ///
+ /// #[pin_data]
+ /// struct Example {
+ /// c: u32,
+ /// #[pin]
+ /// d: SpinLock<Inner>,
+ /// }
+ ///
+ /// impl Example {
+ /// fn new() -> impl PinInit<Self, Error> {
+ /// try_pin_init!(Self {
+ /// c: 10,
+ /// d <- new_spinlock!(Inner { a: 20, b: 30 }),
+ /// })
+ /// }
+ /// }
+ ///
+ /// // Allocate a boxed slice of 10 `Example`s.
+ /// let s = KBox::pin_slice(
+ /// |_i| Example::new(),
+ /// 10,
+ /// GFP_KERNEL
+ /// )?;
+ ///
+ /// assert_eq!(s[5].c, 10);
+ /// assert_eq!(s[3].d.lock().a, 20);
+ /// # Ok::<(), Error>(())
+ /// ```
+ pub fn pin_slice<Func, Item, E>(
+ mut init: Func,
+ len: usize,
+ flags: Flags,
+ ) -> Result<Pin<Box<[T], A>>, E>
+ where
+ Func: FnMut(usize) -> Item,
+ Item: PinInit<T, E>,
+ E: From<AllocError>,
+ {
+ let mut buffer = super::Vec::<T, A>::with_capacity(len, flags)?;
+ for i in 0..len {
+ let ptr = buffer.spare_capacity_mut().as_mut_ptr().cast();
+ // SAFETY:
+ // - `ptr` is a valid pointer to uninitialized memory.
+ // - `ptr` is not used if an error is returned.
+ // - `ptr` won't be moved until it is dropped, i.e. it is pinned.
+ unsafe { init(i).__pinned_init(ptr)? };
+
+ // SAFETY:
+ // - `i + 1 <= len`, hence we don't exceed the capacity, due to the call to
+ // `with_capacity()` above.
+ // - The new value at index `buffer.len()` is the only element being added here, and
+ // it has been initialized above by `init(i).__pinned_init(ptr)`.
+ unsafe { buffer.inc_len(1) };
+ }
+
+ let (ptr, _, _) = buffer.into_raw_parts();
+ let slice = core::ptr::slice_from_raw_parts_mut(ptr, len);
+
+ // SAFETY: `slice` points to an allocation allocated with `A` (`buffer`) and holds a valid
+ // `[T]`.
+ Ok(Pin::from(unsafe { Box::from_raw(slice) }))
+ }
+
/// Convert a [`Box<T,A>`] to a [`Pin<Box<T,A>>`]. If `T` does not implement
/// [`Unpin`], then `x` will be pinned in memory and can't be moved.
pub fn into_pin(this: Self) -> Pin<Self> {
@@ -401,12 +478,17 @@ where
}
// SAFETY: The pointer returned by `into_foreign` comes from a well aligned
-// pointer to `T`.
+// pointer to `T` allocated by `A`.
unsafe impl<T: 'static, A> ForeignOwnable for Box<T, A>
where
A: Allocator,
{
- const FOREIGN_ALIGN: usize = core::mem::align_of::<T>();
+ const FOREIGN_ALIGN: usize = if core::mem::align_of::<T>() < A::MIN_ALIGN {
+ A::MIN_ALIGN
+ } else {
+ core::mem::align_of::<T>()
+ };
+
type Borrowed<'a> = &'a T;
type BorrowedMut<'a> = &'a mut T;
@@ -435,12 +517,12 @@ where
}
// SAFETY: The pointer returned by `into_foreign` comes from a well aligned
-// pointer to `T`.
+// pointer to `T` allocated by `A`.
unsafe impl<T: 'static, A> ForeignOwnable for Pin<Box<T, A>>
where
A: Allocator,
{
- const FOREIGN_ALIGN: usize = core::mem::align_of::<T>();
+ const FOREIGN_ALIGN: usize = <Box<T, A> as ForeignOwnable>::FOREIGN_ALIGN;
type Borrowed<'a> = Pin<&'a T>;
type BorrowedMut<'a> = Pin<&'a mut T>;
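Why a guaranteed `FOREIGN_ALIGN` matters to callers of `into_foreign()`: consumers of `ForeignOwnable` may rely on a minimum alignment of the foreign pointer, for example pointer-tagging containers (the kernel's XArray reserves the low bits of its entries for tags). A standalone sketch of that pattern in plain Rust, under the assumption of at least 4-byte alignment; `tag_ptr` and `untag_ptr` are hypothetical helpers for illustration, not kernel API:

// Standalone sketch: a consumer that relies on a minimum pointer alignment to
// stash a 2-bit tag in the low bits of an address, then recover the pointer.

fn tag_ptr<T>(ptr: *mut T, tag: usize) -> usize {
    // With alignment >= 4 the two low address bits are always zero,
    // so a tag < 4 cannot corrupt the pointer value.
    assert!(core::mem::align_of::<T>() >= 4 && tag < 4);
    (ptr as usize) | tag
}

fn untag_ptr<T>(tagged: usize) -> (*mut T, usize) {
    // Mask off the two tag bits to recover the original, aligned pointer.
    ((tagged & !0b11) as *mut T, tagged & 0b11)
}

fn main() {
    let raw = Box::into_raw(Box::new(42u32)); // `u32` is 4-byte aligned.

    let tagged = tag_ptr(raw, 0b10);
    let (restored, tag) = untag_ptr::<u32>(tagged);
    assert_eq!(tag, 0b10);
    assert_eq!(restored, raw);

    // SAFETY: `restored` is exactly the pointer produced by `Box::into_raw` above.
    let boxed = unsafe { Box::from_raw(restored) };
    assert_eq!(*boxed, 42);
}

Taking the allocator's `MIN_ALIGN` into account, as the hunks above do, lets such consumers rely on a stricter guarantee than `align_of::<T>()` alone would provide for small or weakly aligned `T`.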