From: Danilo Krummrich <dakr@kernel.org>
To: ojeda@kernel.org, alex.gaynor@gmail.com, wedsonaf@gmail.com, boqun.feng@gmail.com,
    gary@garyguo.net, bjorn3_gh@protonmail.com, benno.lossin@proton.me,
    a.hindborg@samsung.com, aliceryhl@google.com, akpm@linux-foundation.org
Cc: daniel.almeida@collabora.com, faith.ekstrand@collabora.com, boris.brezillon@collabora.com,
    lina@asahilina.net, mcanal@igalia.com, zhiw@nvidia.com, cjia@nvidia.com,
    jhubbard@nvidia.com, airlied@redhat.com, ajanulgu@redhat.com, lyude@redhat.com,
    linux-kernel@vger.kernel.org, rust-for-linux@vger.kernel.org, linux-mm@kvack.org,
    Danilo Krummrich <dakr@kernel.org>
Subject: [PATCH v6 22/26] rust: alloc: implement `Cmalloc` in module allocator_test
Date: Fri, 16 Aug 2024 02:11:04 +0200
Message-ID: <20240816001216.26575-23-dakr@kernel.org>
In-Reply-To: <20240816001216.26575-1-dakr@kernel.org>
References: <20240816001216.26575-1-dakr@kernel.org>

So far the kernel's `Box` and `Vec` types can't be used by userspace test
cases, since all users of those types (e.g. `CString`) use kernel allocators
for instantiation.
In order to allow userspace test cases to make use of such types as well,
implement the `Cmalloc` allocator within the allocator_test module and type
alias all kernel allocators to `Cmalloc`.

The `Cmalloc` allocator uses libc's aligned_alloc() and free() functions as
allocator backend.

Signed-off-by: Danilo Krummrich <dakr@kernel.org>
---
 rust/kernel/alloc/allocator_test.rs | 178 ++++++++++++++++++++++++++--
 1 file changed, 171 insertions(+), 7 deletions(-)

diff --git a/rust/kernel/alloc/allocator_test.rs b/rust/kernel/alloc/allocator_test.rs
index 1b2642c547ec..7fff308d02dc 100644
--- a/rust/kernel/alloc/allocator_test.rs
+++ b/rust/kernel/alloc/allocator_test.rs
@@ -2,20 +2,184 @@
 
 #![allow(missing_docs)]
 
-use super::{AllocError, Allocator, Flags};
+use super::{flags::*, AllocError, Allocator, Flags};
 use core::alloc::Layout;
+use core::cmp;
+use core::mem;
+use core::ptr;
 use core::ptr::NonNull;
 
-pub struct Kmalloc;
+pub struct Cmalloc;
+pub type Kmalloc = Cmalloc;
 pub type Vmalloc = Kmalloc;
 pub type KVmalloc = Kmalloc;
 
-unsafe impl Allocator for Kmalloc {
+extern "C" {
+    #[link_name = "aligned_alloc"]
+    fn libc_aligned_alloc(align: usize, size: usize) -> *mut core::ffi::c_void;
+
+    #[link_name = "free"]
+    fn libc_free(ptr: *mut core::ffi::c_void);
+}
+
+struct CmallocData {
+    // The actual size as requested through `Cmalloc::alloc` or `Cmalloc::realloc`.
+    size: usize,
+    // The offset from the pointer returned to the caller of `Cmalloc::alloc` or `Cmalloc::realloc`
+    // to the actual base address of the allocation.
+    offset: usize,
+}
+
+impl Cmalloc {
+    /// Adjust the size and alignment such that we can additionally store `CmallocData` right
+    /// before the actual data described by `layout`.
+    ///
+    /// Example:
+    ///
+    /// For `CmallocData` assume an alignment of 8 and a size of 16.
+    /// For `layout` assume an alignment of 16 and a size of 64.
+    ///
+    /// 0                16               32                                               96
+    /// |----------------|----------------|------------------------------------------------|
+    ///        empty      CmallocData                           data
+    ///
+    /// For this example the returned `Layout` has an alignment of 32 and a size of 96.
+    fn layout_adjust(layout: Layout) -> Result<Layout, AllocError> {
+        let layout = layout.pad_to_align();
+
+        // Ensure that `CmallocData` fits into half the alignment. Additionally, this guarantees
+        // that a pointer aligned to `align`, advanced by `align / 2`, still satisfies or exceeds
+        // the alignment requested through `layout`.
+        let align = cmp::max(
+            layout.align(),
+            mem::size_of::<CmallocData>().next_power_of_two(),
+        ) * 2;
+
+        // Add the additional space required for `CmallocData`.
+        let size = layout.size() + mem::size_of::<CmallocData>();
+
+        Ok(Layout::from_size_align(size, align)
+            .map_err(|_| AllocError)?
+            .pad_to_align())
+    }
+
+    fn alloc_store_data(layout: Layout) -> Result<NonNull<u8>, AllocError> {
+        let requested_size = layout.size();
+
+        let layout = Self::layout_adjust(layout)?;
+        let min_align = layout.align() / 2;
+
+        // SAFETY: Returns either NULL or a pointer to a memory allocation that satisfies or
+        // exceeds the given size and alignment requirements.
+        let raw_ptr = unsafe { libc_aligned_alloc(layout.align(), layout.size()) } as *mut u8;
+
+        let priv_ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+
+        // SAFETY: Advance the pointer by `min_align`. The adjustments from `Self::layout_adjust`
+        // ensure that after this operation the original size and alignment requirements are still
+        // satisfied or exceeded.
+        let ptr = unsafe { priv_ptr.as_ptr().add(min_align) };
+
+        // SAFETY: `min_align` is greater than or equal to the size of `CmallocData`, hence we
+        // don't exceed the allocation boundaries.
+        let data_ptr: *mut CmallocData = unsafe { ptr.sub(mem::size_of::<CmallocData>()) }.cast();
+
+        let data = CmallocData {
+            size: requested_size,
+            offset: min_align,
+        };
+
+        // SAFETY: `data_ptr` is properly aligned and within the allocation boundaries reserved for
+        // `CmallocData`.
+        unsafe { data_ptr.write(data) };
+
+        NonNull::new(ptr).ok_or(AllocError)
+    }
+
+    /// # Safety
+    ///
+    /// `ptr` must have been previously allocated with `Self::alloc_store_data`.
+    unsafe fn data<'a>(ptr: NonNull<u8>) -> &'a CmallocData {
+        // SAFETY: `Self::alloc_store_data` stores the `CmallocData` right before the address
+        // returned to callers of `Self::alloc_store_data`.
+        let data_ptr: *mut CmallocData =
+            unsafe { ptr.as_ptr().sub(mem::size_of::<CmallocData>()) }.cast();
+
+        // SAFETY: The `CmallocData` has been previously stored at this offset with
+        // `Self::alloc_store_data`.
+        unsafe { &*data_ptr }
+    }
+
+    /// # Safety
+    ///
+    /// This function must not be called more than once for the same allocation.
+    ///
+    /// `ptr` must have been previously allocated with `Self::alloc_store_data`.
+    unsafe fn free_read_data(ptr: NonNull<u8>) {
+        // SAFETY: `ptr` has been created by `Self::alloc_store_data`.
+        let data = unsafe { Self::data(ptr) };
+
+        // SAFETY: `ptr` has been created by `Self::alloc_store_data`.
+        let priv_ptr = unsafe { ptr.as_ptr().sub(data.offset) };
+
+        // SAFETY: `priv_ptr` has previously been allocated with this `Allocator`.
+        unsafe { libc_free(priv_ptr.cast()) };
+    }
+}
+
+unsafe impl Allocator for Cmalloc {
+    fn alloc(layout: Layout, flags: Flags) -> Result<NonNull<[u8]>, AllocError> {
+        if layout.size() == 0 {
+            return Ok(NonNull::slice_from_raw_parts(NonNull::dangling(), 0));
+        }
+
+        let ptr = Self::alloc_store_data(layout)?;
+
+        if flags.contains(__GFP_ZERO) {
+            // SAFETY: `Self::alloc_store_data` guarantees that `ptr` points to memory of at least
+            // `layout.size()` bytes.
+            unsafe { ptr.as_ptr().write_bytes(0, layout.size()) };
+        }
+
+        Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+    }
+
     unsafe fn realloc(
-        _ptr: Option<NonNull<u8>>,
-        _layout: Layout,
-        _flags: Flags,
+        ptr: Option<NonNull<u8>>,
+        layout: Layout,
+        flags: Flags,
     ) -> Result<NonNull<[u8]>, AllocError> {
-        panic!();
+        let src: NonNull<u8> = if let Some(src) = ptr {
+            src.cast()
+        } else {
+            return Self::alloc(layout, flags);
+        };
+
+        if layout.size() == 0 {
+            // SAFETY: `src` has been created by `Self::alloc_store_data`.
+            unsafe { Self::free_read_data(src) };
+
+            return Ok(NonNull::slice_from_raw_parts(NonNull::dangling(), 0));
+        }
+
+        let dst = Self::alloc(layout, flags)?;
+
+        // SAFETY: `src` has been created by `Self::alloc_store_data`.
+        let data = unsafe { Self::data(src) };
+
+        // SAFETY: `src` has previously been allocated with this `Allocator`; `dst` has just been
+        // newly allocated. Copy up to the smaller of both sizes.
+        unsafe {
+            ptr::copy_nonoverlapping(
+                src.as_ptr(),
+                dst.as_ptr().cast(),
+                cmp::min(layout.size(), data.size),
+            )
+        };
+
+        // SAFETY: `src` has been created by `Self::alloc_store_data`.
+        unsafe { Self::free_read_data(src) };
+
+        Ok(dst)
     }
 }
-- 
2.46.0
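
As a quick sanity check of the layout_adjust() arithmetic, the doc-comment example
from the patch can be reproduced with a small host-side sketch. This is only an
illustration: layout_adjust_model() is a hypothetical stand-in built on std rather
than the kernel crate, and it assumes a 64-bit target where `CmallocData` occupies
16 bytes with an alignment of 8.

    use std::alloc::Layout;
    use std::{cmp, mem};

    // Stand-in for the patch's metadata struct: two `usize` fields, i.e. size 16,
    // alignment 8 on a 64-bit target.
    #[allow(dead_code)]
    struct CmallocData {
        size: usize,
        offset: usize,
    }

    // Host-side model of `Cmalloc::layout_adjust()` (hypothetical name); it panics
    // instead of returning `AllocError`, but mirrors the same arithmetic.
    fn layout_adjust_model(layout: Layout) -> Layout {
        let layout = layout.pad_to_align();

        // `CmallocData` must fit into half the final alignment, so that a pointer
        // advanced by `align / 2` still satisfies the requested alignment.
        let align = cmp::max(
            layout.align(),
            mem::size_of::<CmallocData>().next_power_of_two(),
        ) * 2;

        // Reserve the extra space for `CmallocData` in front of the data.
        let size = layout.size() + mem::size_of::<CmallocData>();

        Layout::from_size_align(size, align).unwrap().pad_to_align()
    }

    fn main() {
        // The doc-comment example: requested size 64, alignment 16.
        let requested = Layout::from_size_align(64, 16).unwrap();
        let adjusted = layout_adjust_model(requested);

        // max(16, 16) * 2 = 32 for the alignment; 64 + 16 = 80, padded up to the
        // next multiple of 32, gives 96 -- the values stated in the doc comment.
        assert_eq!(adjusted.align(), 32);
        assert_eq!(adjusted.size(), 96);
    }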
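
With Kmalloc, Vmalloc and KVmalloc all aliased to Cmalloc, types that allocate
through those allocators become usable from host-side test code. The following is
a hypothetical sketch of such a test, not part of this patch; it assumes the
`KBox`/`KVec` aliases and the flag-taking constructors introduced earlier in this
series (e.g. via `use kernel::prelude::*;`).

    // Hypothetical host-side test case; names and imports are assumptions based on
    // earlier patches in this series, not something this patch adds.
    #[test]
    fn kernel_types_on_the_host() -> Result<(), AllocError> {
        // `KBox::new()` now ends up in `Cmalloc::alloc()`, i.e. libc aligned_alloc().
        let b = KBox::new(42u32, GFP_KERNEL)?;
        assert_eq!(*b, 42);

        // `KVec::push()` grows through `Cmalloc::realloc()`, which allocates a new
        // buffer, copies min(old size, new size) bytes and frees the old buffer.
        let mut v = KVec::new();
        v.push(1u8, GFP_KERNEL)?;
        v.push(2u8, GFP_KERNEL)?;
        assert_eq!(v.len(), 2);
        assert_eq!(v[0], 1);

        Ok(())
    }

The point of the aliasing is that such test code does not need to change between
kernel and host builds; only the allocator backend behind `Kmalloc` differs.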